From b9addc7ebf66e4a3b62bc5fb8f2a914800234c39 Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Thu, 5 Jun 2025 09:53:01 +0000
Subject: [PATCH 1/3] Update SDK to latest OpenAPI spec

---
 .codegen.json | 2 -
 .codegen/_openapi_sha | 2 +-
 .gitattributes | 3 +
 .github/workflows/tagging.yml | 3 +-
 NEXT_CHANGELOG.md | 74 +
 databricks/sdk/__init__.py | 738 +-
 databricks/sdk/errors/overrides.py | 36 +-
 databricks/sdk/errors/platform.py | 114 +-
 databricks/sdk/service/aibuilder.py | 368 +
 databricks/sdk/service/apps.py | 1679 +-
 databricks/sdk/service/billing.py | 2132 ++-
 databricks/sdk/service/catalog.py | 15053 +++++++---------
 databricks/sdk/service/cleanrooms.py | 1430 +-
 databricks/sdk/service/compute.py | 10997 +++++------
 databricks/sdk/service/dashboards.py | 3005 ++-
 databricks/sdk/service/database.py | 1276 ++
 databricks/sdk/service/files.py | 1131 +-
 databricks/sdk/service/iam.py | 4527 +++--
 databricks/sdk/service/jobs.py | 8584 ++++-----
 databricks/sdk/service/marketplace.py | 5040 +++---
 databricks/sdk/service/ml.py | 8511 ++++-----
 databricks/sdk/service/oauth2.py | 2031 +--
 databricks/sdk/service/pipelines.py | 3848 ++--
 databricks/sdk/service/provisioning.py | 3233 ++--
 databricks/sdk/service/qualitymonitorv2.py | 322 +
 databricks/sdk/service/serving.py | 4583 ++---
 databricks/sdk/service/settings.py | 9084 +++++----
 databricks/sdk/service/sharing.py | 3751 ++--
 databricks/sdk/service/sql.py | 10675 +++++------
 databricks/sdk/service/vectorsearch.py | 1970 +-
 databricks/sdk/service/workspace.py | 2969 ++-
 docs/account/billing/billable_usage.rst | 10 +-
 docs/account/billing/budget_policy.rst | 34 +-
 docs/account/billing/budgets.rst | 32 +-
 docs/account/billing/log_delivery.rst | 108 +-
 docs/account/billing/usage_dashboards.rst | 12 +-
 .../account/catalog/metastore_assignments.rst | 36 +-
 docs/account/catalog/metastores.rst | 30 +-
 docs/account/catalog/storage_credentials.rst | 36 +-
 docs/account/iam/access_control.rst | 24 +-
 docs/account/iam/groups.rst | 50 +-
 docs/account/iam/service_principals.rst | 55 +-
 docs/account/iam/users.rst | 56 +-
 docs/account/iam/workspace_assignment.rst | 30 +-
 .../account/oauth2/custom_app_integration.rst | 36 +-
 docs/account/oauth2/federation_policy.rst | 40 +-
 docs/account/oauth2/o_auth_published_apps.rst | 6 +-
 .../oauth2/published_app_integration.rst | 36 +-
 .../service_principal_federation_policy.rst | 38 +-
 .../oauth2/service_principal_secrets.rst | 27 +-
 docs/account/provisioning/credentials.rst | 36 +-
 docs/account/provisioning/encryption_keys.rst | 40 +-
 docs/account/provisioning/networks.rst | 30 +-
 docs/account/provisioning/private_access.rst | 66 +-
 docs/account/provisioning/storage.rst | 35 +-
 docs/account/provisioning/vpc_endpoints.rst | 40 +-
 docs/account/provisioning/workspaces.rst | 88 +-
 .../settings/csp_enablement_account.rst | 16 +-
 .../settings/disable_legacy_features.rst | 22 +-
 .../settings/enable_ip_access_lists.rst | 20 +-
 .../settings/esm_enablement_account.rst | 14 +-
 docs/account/settings/ip_access_lists.rst | 74 +-
 .../llm_proxy_partner_powered_account.rst | 14 +-
 .../llm_proxy_partner_powered_enforce.rst | 14 +-
 .../account/settings/network_connectivity.rst | 90 +-
 docs/account/settings/network_policies.rst | 32 +-
 docs/account/settings/personal_compute.rst | 22 +-
 docs/account/settings/settings.rst | 6 +-
 .../workspace_network_configuration.rst | 39 +-
 docs/dbdataclasses/aibuilder.rst | 59 +
 docs/dbdataclasses/apps.rst | 4 +
 docs/dbdataclasses/billing.rst | 22 +-
 docs/dbdataclasses/catalog.rst | 176 +-
 docs/dbdataclasses/cleanrooms.rst | 4 +
 docs/dbdataclasses/compute.rst | 10 +
 docs/dbdataclasses/dashboards.rst | 48 +-
 docs/dbdataclasses/database.rst | 173 +
 docs/dbdataclasses/files.rst | 4 +
 docs/dbdataclasses/iam.rst | 4 +
 docs/dbdataclasses/index.rst | 3 +
 docs/dbdataclasses/jobs.rst | 46 +
 docs/dbdataclasses/marketplace.rst | 4 +
 docs/dbdataclasses/ml.rst | 40 +-
 docs/dbdataclasses/oauth2.rst | 4 +
 docs/dbdataclasses/pipelines.rst | 7 +
 docs/dbdataclasses/provisioning.rst | 4 +
 docs/dbdataclasses/qualitymonitorv2.rst | 53 +
 docs/dbdataclasses/serving.rst | 4 +
 docs/dbdataclasses/settings.rst | 78 +-
 docs/dbdataclasses/sharing.rst | 7 +
 docs/dbdataclasses/sql.rst | 24 +
 docs/dbdataclasses/vectorsearch.rst | 4 +
 docs/dbdataclasses/workspace.rst | 4 +
 docs/gen-client-docs.py | 19 +-
 docs/workspace/aibuilder/custom_llms.rst | 61 +
 docs/workspace/aibuilder/index.rst | 10 +
 docs/workspace/apps/apps.rst | 84 +-
 .../workspace/catalog/artifact_allowlists.rst | 12 +-
 docs/workspace/catalog/catalogs.rst | 40 +-
 docs/workspace/catalog/connections.rst | 38 +-
 docs/workspace/catalog/credentials.rst | 58 +-
 docs/workspace/catalog/external_locations.rst | 60 +-
 docs/workspace/catalog/functions.rst | 38 +-
 docs/workspace/catalog/grants.rst | 66 +-
 docs/workspace/catalog/index.rst | 1 -
 docs/workspace/catalog/metastores.rst | 91 +-
 docs/workspace/catalog/model_versions.rst | 48 +-
 docs/workspace/catalog/online_tables.rst | 20 +-
 docs/workspace/catalog/quality_monitors.rst | 100 +-
 docs/workspace/catalog/registered_models.rst | 78 +-
 docs/workspace/catalog/resource_quotas.rst | 15 +-
 docs/workspace/catalog/schemas.rst | 33 +-
 .../workspace/catalog/storage_credentials.rst | 59 +-
 docs/workspace/catalog/system_schemas.rst | 22 +-
 docs/workspace/catalog/table_constraints.rst | 22 +-
 docs/workspace/catalog/tables.rst | 48 +-
 .../catalog/temporary_table_credentials.rst | 6 +-
 docs/workspace/catalog/volumes.rst | 56 +-
 docs/workspace/catalog/workspace_bindings.rst | 32 +-
 .../cleanrooms/clean_room_assets.rst | 42 +-
 .../cleanrooms/clean_room_task_runs.rst | 6 +-
 docs/workspace/cleanrooms/clean_rooms.rst | 42 +-
 docs/workspace/compute/cluster_policies.rst | 80 +-
 docs/workspace/compute/clusters.rst | 237 +-
 docs/workspace/compute/command_execution.rst | 46 +-
 .../workspace/compute/global_init_scripts.rst | 40 +-
 docs/workspace/compute/instance_pools.rst | 66 +-
 docs/workspace/compute/instance_profiles.rst | 53 +-
 docs/workspace/compute/libraries.rst | 32 +-
 .../policy_compliance_for_clusters.rst | 28 +-
 docs/workspace/compute/policy_families.rst | 16 +-
 docs/workspace/dashboards/genie.rst | 80 +-
 docs/workspace/dashboards/index.rst | 3 +-
 docs/workspace/dashboards/lakeview.rst | 96 +-
 .../dashboards/lakeview_embedded.rst | 20 +-
 docs/workspace/database/database.rst | 175 +
 docs/workspace/database/index.rst | 10 +
 docs/workspace/files/dbfs.rst | 64 +-
 docs/workspace/files/files.rst | 78 +-
 docs/workspace/iam/access_control.rst | 4 +-
 .../iam/account_access_control_proxy.rst | 24 +-
 docs/workspace/iam/current_user.rst | 4 +-
 docs/workspace/iam/groups.rst | 53 +-
 docs/workspace/iam/permission_migration.rst | 4 +-
 docs/workspace/iam/permissions.rst | 26 +-
 docs/workspace/iam/service_principals.rst | 60 +-
 docs/workspace/iam/users.rst | 83 +-
 docs/workspace/index.rst | 3 +
 docs/workspace/jobs/jobs.rst | 232 +-
 .../jobs/policy_compliance_for_jobs.rst | 22 +-
 .../marketplace/consumer_fulfillments.rst | 12 +-
 .../marketplace/consumer_installations.rst | 32 +-
 .../marketplace/consumer_listings.rst | 24 +-
 .../consumer_personalization_requests.rst | 18 +-
 .../marketplace/consumer_providers.rst | 18 +-
 .../marketplace/provider_exchange_filters.rst | 26 +-
 .../marketplace/provider_exchanges.rst | 58 +-
 docs/workspace/marketplace/provider_files.rst | 26 +-
 .../marketplace/provider_listings.rst | 32 +-
 .../provider_personalization_requests.rst | 12 +-
 ...provider_provider_analytics_dashboards.rst | 18 +-
 .../marketplace/provider_providers.rst | 32 +-
 docs/workspace/ml/experiments.rst | 358 +-
 docs/workspace/ml/forecasting.rst | 12 +-
 docs/workspace/ml/model_registry.rst | 360 +-
 docs/workspace/pipelines/pipelines.rst | 113 +-
 docs/workspace/qualitymonitorv2/index.rst | 10 +
 .../qualitymonitorv2/quality_monitor_v2.rst | 73 +
 docs/workspace/serving/serving_endpoints.rst | 106 +-
 .../serving/serving_endpoints_data_plane.rst | 4 +-
 ...aibi_dashboard_embedding_access_policy.rst | 20 +-
 ...i_dashboard_embedding_approved_domains.rst | 20 +-
 .../settings/automatic_cluster_update.rst | 14 +-
 .../settings/compliance_security_profile.rst | 16 +-
 .../settings/credentials_manager.rst | 6 +-
 .../dashboard_email_subscriptions.rst | 64 +
 docs/workspace/settings/default_namespace.rst | 24 +-
 .../settings/disable_legacy_access.rst | 22 +-
 .../settings/disable_legacy_dbfs.rst | 24 +-
 .../settings/enable_export_notebook.rst | 12 +-
 .../enable_notebook_table_clipboard.rst | 12 +-
 .../settings/enable_results_downloading.rst | 12 +-
 .../settings/enhanced_security_monitoring.rst | 16 +-
 docs/workspace/settings/index.rst | 2 +
 docs/workspace/settings/ip_access_lists.rst | 74 +-
 .../llm_proxy_partner_powered_workspace.rst | 20 +-
 .../settings/notification_destinations.rst | 32 +-
 .../settings/restrict_workspace_admins.rst | 20 +-
 docs/workspace/settings/settings.rst | 29 +-
 .../settings/sql_results_download.rst | 63 +
 docs/workspace/settings/token_management.rst | 46 +-
 docs/workspace/settings/tokens.rst | 22 +-
 docs/workspace/settings/workspace_conf.rst | 14 +-
 docs/workspace/sharing/providers.rst | 63 +-
 .../sharing/recipient_activation.rst | 16 +-
 .../sharing/recipient_federation_policies.rst | 40 +-
 docs/workspace/sharing/recipients.rst | 52 +-
 docs/workspace/sharing/shares.rst | 58 +-
 docs/workspace/sql/alerts.rst | 34 +-
 docs/workspace/sql/alerts_legacy.rst | 56 +-
 docs/workspace/sql/alerts_v2.rst | 36 +-
 docs/workspace/sql/dashboard_widgets.rst | 14 +-
 docs/workspace/sql/dashboards.rst | 42 +-
 docs/workspace/sql/data_sources.rst | 14 +-
 docs/workspace/sql/dbsql_permissions.rst | 42 +-
 docs/workspace/sql/queries.rst | 40 +-
 docs/workspace/sql/queries_legacy.rst | 90 +-
 docs/workspace/sql/query_history.rst | 8 +-
 docs/workspace/sql/query_visualizations.rst | 22 +-
 .../sql/query_visualizations_legacy.rst | 36 +-
 docs/workspace/sql/redash_config.rst | 2 +-
 docs/workspace/sql/statement_execution.rst | 105 +-
 docs/workspace/sql/warehouses.rst | 120 +-
 .../vectorsearch/vector_search_endpoints.rst | 36 +-
 .../vectorsearch/vector_search_indexes.rst | 70 +-
 docs/workspace/workspace/git_credentials.rst | 40 +-
 docs/workspace/workspace/repos.rst | 62 +-
 docs/workspace/workspace/secrets.rst | 118 +-
 docs/workspace/workspace/workspace.rst | 92 +-
 219 files changed, 53594 insertions(+), 61839 deletions(-)
 create mode 100755 databricks/sdk/service/aibuilder.py
 create mode 100755 databricks/sdk/service/database.py
 create mode 100755 databricks/sdk/service/qualitymonitorv2.py
 create mode 100644 docs/dbdataclasses/aibuilder.rst
 create mode 100644 docs/dbdataclasses/database.rst
 create mode 100644 docs/dbdataclasses/qualitymonitorv2.rst
 create mode 100644 docs/workspace/aibuilder/custom_llms.rst
 create mode 100644 docs/workspace/aibuilder/index.rst
 create mode 100644 docs/workspace/database/database.rst
 create mode 100644 docs/workspace/database/index.rst
 create mode 100644 docs/workspace/qualitymonitorv2/index.rst
 create mode 100644 docs/workspace/qualitymonitorv2/quality_monitor_v2.rst
 create mode 100644 docs/workspace/settings/dashboard_email_subscriptions.rst
 create mode 100644 docs/workspace/settings/sql_results_download.rst

diff --git a/.codegen.json b/.codegen.json
index 65077c1cc..592104872 100644
--- a/.codegen.json
+++ b/.codegen.json
@@ -14,8 +14,6 @@
     "pip install '.[dev]'"
   ],
   "post_generate": [
-    "make fmt",
-    "pytest -m 'not integration' --cov=databricks --cov-report html tests",
     "pip install .",
     "python3.12 docs/gen-client-docs.py"
   ]
diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 3e6708180..ac1c24d10 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-2cee201b2e8d656f7306b2f9ec98edfa721e9829
\ No newline at end of file
+a8f547d3728fba835fbdda301e846829c5cbbef5
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index a0bfc0940..3bfcfa2e1 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,12 +1,14 @@
 databricks/sdk/__init__.py linguist-generated=true
 databricks/sdk/errors/overrides.py linguist-generated=true
 databricks/sdk/errors/platform.py linguist-generated=true
+databricks/sdk/service/aibuilder.py linguist-generated=true
 databricks/sdk/service/apps.py linguist-generated=true
 databricks/sdk/service/billing.py linguist-generated=true
 databricks/sdk/service/catalog.py linguist-generated=true
 databricks/sdk/service/cleanrooms.py linguist-generated=true
 databricks/sdk/service/compute.py linguist-generated=true
 databricks/sdk/service/dashboards.py linguist-generated=true
+databricks/sdk/service/database.py linguist-generated=true
 databricks/sdk/service/files.py linguist-generated=true
 databricks/sdk/service/iam.py linguist-generated=true
 databricks/sdk/service/jobs.py linguist-generated=true
@@ -15,6 +17,7 @@ databricks/sdk/service/ml.py linguist-generated=true
 databricks/sdk/service/oauth2.py linguist-generated=true
 databricks/sdk/service/pipelines.py linguist-generated=true
 databricks/sdk/service/provisioning.py linguist-generated=true
+databricks/sdk/service/qualitymonitorv2.py linguist-generated=true
 databricks/sdk/service/serving.py linguist-generated=true
 databricks/sdk/service/settings.py linguist-generated=true
 databricks/sdk/service/sharing.py linguist-generated=true
diff --git a/.github/workflows/tagging.yml b/.github/workflows/tagging.yml
index 558f2993a..d4486fb51 100644
--- a/.github/workflows/tagging.yml
+++ b/.github/workflows/tagging.yml
@@ -21,7 +21,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@v2
        with:
           app-id: ${{ secrets.DECO_SDK_TAGGING_APP_ID }}
           private-key: ${{ secrets.DECO_SDK_TAGGING_PRIVATE_KEY }}
@@ -49,4 +49,3 @@ jobs:
           GITHUB_REPOSITORY: ${{ github.repository }}
         run: |
           python tagging.py
-
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 8ac78ce49..ad8511e48 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -11,3 +11,77 @@

 ### Internal Changes

 ### API Changes
+* Added `databricks.sdk.service.aibuilder`, `databricks.sdk.service.database` and `databricks.sdk.service.qualitymonitorv2` packages.
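+
+  The new packages import like any other generated service module; a minimal sketch (only the package names come from this change):
+
+  ```python
+  from databricks.sdk.service import aibuilder, database, qualitymonitorv2
+  ```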
+* Added [w.custom_llms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/aibuilder/custom_llms.html) workspace-level service.
+* Added [w.dashboard_email_subscriptions](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/dashboard_email_subscriptions.html) workspace-level service and [w.sql_results_download](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/sql_results_download.html) workspace-level service.
+* Added [w.database](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/database/database.html) workspace-level service.
+* Added [w.quality_monitor_v2](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/qualitymonitorv2/quality_monitor_v2.html) workspace-level service.
+* Added `update_private_endpoint_rule()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service.
+* Added `list_spaces()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/genie.html) workspace-level service.
+* Added `page_token` field for `databricks.sdk.service.billing.ListLogDeliveryRequest`.
+* Added `next_page_token` field for `databricks.sdk.service.billing.WrappedLogDeliveryConfigurations`.
+* Added `next_page_token` field for `databricks.sdk.service.catalog.EffectivePermissionsList`.
+* Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetEffectiveRequest`.
+* Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetGrantRequest`.
+* Added `next_page_token` field for `databricks.sdk.service.catalog.ListMetastoresResponse`.
+* Added `clean_room_name` field for `databricks.sdk.service.cleanrooms.CleanRoomAsset`.
+* [Breaking] Added `name` field for `databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`.
+* [Breaking] Added `name` field for `databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`.
+* Added `trigger_state` field for `databricks.sdk.service.jobs.BaseJob`.
+* Added `trigger_state` field for `databricks.sdk.service.jobs.Job`.
+* Added `dbt_cloud_output` field for `databricks.sdk.service.jobs.RunOutput`.
+* Added `dbt_cloud_task` field for `databricks.sdk.service.jobs.RunTask`.
+* Added `dbt_cloud_task` field for `databricks.sdk.service.jobs.SubmitTask`.
+* Added `dbt_cloud_task` field for `databricks.sdk.service.jobs.Task`.
+* Added `tags` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+* Added `tags` field for `databricks.sdk.service.pipelines.EditPipeline`.
+* Added `tags` field for `databricks.sdk.service.pipelines.PipelineSpec`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedEntityInput`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedEntityOutput`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedModelInput`.
+* Added `max_provisioned_concurrency` and `min_provisioned_concurrency` fields for `databricks.sdk.service.serving.ServedModelOutput`.
+* Added `endpoint_service` and `resource_names` fields for `databricks.sdk.service.settings.CreatePrivateEndpointRule`.
+* Added `aws_private_endpoint_rules` field for `databricks.sdk.service.settings.NccEgressTargetRules`.
+* Added `task_time_over_time_range` field for `databricks.sdk.service.sql.QueryMetrics`.
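+
+  For instance, the new serving concurrency bounds are plain optional fields on the generated dataclasses. A minimal sketch (the entity name and version are placeholders; the two concurrency fields are the ones added above):
+
+  ```python
+  from databricks.sdk.service.serving import ServedEntityInput
+
+  # A served-entity definition that pins provisioned concurrency between 1 and 4.
+  entity = ServedEntityInput(
+      entity_name="main.default.my_model",  # placeholder UC model name
+      entity_version="1",
+      min_provisioned_concurrency=1,
+      max_provisioned_concurrency=4,
+  )
+  ```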
+* Added `deltasharing_catalog`, `foreign_catalog`, `internal_catalog`, `managed_catalog`, `managed_online_catalog`, `system_catalog` and `unknown_catalog_type` enum values for `databricks.sdk.service.catalog.CatalogType`.
+* Added `ga4_raw_data`, `power_bi`, `salesforce`, `salesforce_data_cloud`, `servicenow`, `unknown_connection_type` and `workday_raas` enum values for `databricks.sdk.service.catalog.ConnectionType`.
+* Added `oauth_access_token`, `oauth_m2m`, `oauth_refresh_token`, `oauth_resource_owner_password`, `oauth_u2m`, `oauth_u2m_mapping`, `oidc_token`, `pem_private_key`, `service_credential` and `unknown_credential_type` enum values for `databricks.sdk.service.catalog.CredentialType`.
+* Added `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.DeltaSharingScopeEnum`.
+* Added `catalog`, `clean_room`, `connection`, `credential`, `external_location`, `external_metadata`, `function`, `metastore`, `pipeline`, `provider`, `recipient`, `schema`, `share`, `staging_table`, `storage_credential`, `table`, `unknown_securable_type` and `volume` enum values for `databricks.sdk.service.catalog.SecurableType`.
+* Added `cluster_migrated` enum value for `databricks.sdk.service.compute.EventType`.
+* Added `driver_unhealthy` enum value for `databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `teradata` enum value for `databricks.sdk.service.pipelines.IngestionSourceType`.
+* Added `oidc_federation` enum value for `databricks.sdk.service.sharing.AuthenticationType`.
+* [Breaking] Changed `create()` method for [a.log_delivery](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/log_delivery.html) account-level service with new required argument order.
+* [Breaking] Changed `get()` method for [a.log_delivery](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/log_delivery.html) account-level service to return `databricks.sdk.service.billing.GetLogDeliveryConfigurationResponse` dataclass.
+* [Breaking] Changed `create_private_endpoint_rule()`, `delete_private_endpoint_rule()` and `get_private_endpoint_rule()` methods for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service to return `databricks.sdk.service.settings.NccPrivateEndpointRule` dataclass.
+* [Breaking] Changed `list_private_endpoint_rules()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service to return `databricks.sdk.service.settings.ListPrivateEndpointRulesResponse` dataclass.
+* [Breaking] Changed `delete()` and `get()` methods for [w.clean_room_assets](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cleanrooms/clean_room_assets.html) workspace-level service. Method path has changed.
+* [Breaking] Changed `delete()` and `get()` methods for [w.clean_room_assets](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cleanrooms/clean_room_assets.html) workspace-level service with new required argument order.
+* [Breaking] Changed `get()` method for [w.grants](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/grants.html) workspace-level service to return `databricks.sdk.service.catalog.GetPermissionsResponse` dataclass.
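+
+  A sketch of the clean room asset migration: assets are now fetched and deleted by `name` instead of `asset_full_name`. The argument order shown below is an assumption based on the bullets above; the clean room and asset names are placeholders:
+
+  ```python
+  from databricks.sdk import WorkspaceClient
+  from databricks.sdk.service.cleanrooms import CleanRoomAssetAssetType
+
+  w = WorkspaceClient()
+  # before: the request carried an `asset_full_name` field
+  asset = w.clean_room_assets.get(
+      clean_room_name="my-clean-room",
+      asset_type=CleanRoomAssetAssetType.TABLE,
+      name="catalog.schema.table",
+  )
+  ```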
+* [Breaking] Changed `update()` method for [w.grants](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/grants.html) workspace-level service to return `databricks.sdk.service.catalog.UpdatePermissionsResponse` dataclass.
+* [Breaking] Changed `list()` method for [w.metastores](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/metastores.html) workspace-level service to require request of `databricks.sdk.service.catalog.ListMetastoresRequest` dataclass.
+* Changed `account_id`, `credentials_id`, `log_type`, `output_format` and `storage_configuration_id` fields for `databricks.sdk.service.billing.LogDeliveryConfiguration` to be required.
+* Changed `message` and `status` fields for `databricks.sdk.service.billing.LogDeliveryStatus` to be required.
+* [Breaking] Changed `log_delivery_configuration` field for `databricks.sdk.service.billing.WrappedCreateLogDeliveryConfiguration` to be required.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.ConnectionInfo` to type `databricks.sdk.service.catalog.SecurableType` dataclass.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.GetEffectiveRequest` to type `str` dataclass.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.GetGrantRequest` to type `str` dataclass.
+* [Breaking] Changed `delta_sharing_scope` field for `databricks.sdk.service.catalog.GetMetastoreSummaryResponse` to type `databricks.sdk.service.catalog.DeltaSharingScopeEnum` dataclass.
+* [Breaking] Changed `delta_sharing_scope` field for `databricks.sdk.service.catalog.MetastoreInfo` to type `databricks.sdk.service.catalog.DeltaSharingScopeEnum` dataclass.
+* [Breaking] Changed `catalog_type` field for `databricks.sdk.service.catalog.SchemaInfo` to type `databricks.sdk.service.catalog.CatalogType` dataclass.
+* [Breaking] Changed `delta_sharing_scope` field for `databricks.sdk.service.catalog.UpdateMetastore` to type `databricks.sdk.service.catalog.DeltaSharingScopeEnum` dataclass.
+* [Breaking] Changed `securable_type` field for `databricks.sdk.service.catalog.UpdatePermissions` to type `str` dataclass.
+* Changed `resource_id` field for `databricks.sdk.service.settings.CreatePrivateEndpointRule` to no longer be required.
+* [Breaking] Changed pagination for [NetworkConnectivityAPI.list_private_endpoint_rules](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html#databricks.sdk.service.settings.NetworkConnectivityAPI.list_private_endpoint_rules) method.
+* [Breaking] Removed [w.database_instances](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/database_instances.html) workspace-level service.
+* [Breaking] Removed [w.query_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/query_execution.html) workspace-level service.
+* [Breaking] Removed `update_ncc_azure_private_endpoint_rule_public()` method for [a.network_connectivity](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/network_connectivity.html) account-level service.
+* [Breaking] Removed `get_credentials_for_trace_data_download()`, `get_credentials_for_trace_data_upload()` and `list_logged_model_artifacts()` methods for [w.experiments](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/experiments.html) workspace-level service.
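+
+  A sketch of the permissions change: `securable_type` is now passed as a plain string rather than a `SecurableType` enum value, and `get()` returns the new response dataclass. The keyword arguments follow the request fields named above; the table name is a placeholder:
+
+  ```python
+  from databricks.sdk import WorkspaceClient
+
+  w = WorkspaceClient()
+  # "table" is now a string, not catalog.SecurableType.TABLE.
+  perms = w.grants.get("table", "main.default.my_table", max_results=100)
+  for assignment in perms.privilege_assignments or []:
+      print(assignment.principal, assignment.privileges)
+  ```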
+* [Breaking] Removed `get_published_dashboard_embedded()` method for [w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/dashboards/lakeview_embedded.html) workspace-level service.
+* [Breaking] Removed `asset_full_name` field for `databricks.sdk.service.cleanrooms.DeleteCleanRoomAssetRequest`.
+* [Breaking] Removed `asset_full_name` field for `databricks.sdk.service.cleanrooms.GetCleanRoomAssetRequest`.
+* [Breaking] Removed `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.GetMetastoreSummaryResponseDeltaSharingScope`.
+* [Breaking] Removed `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.MetastoreInfoDeltaSharingScope`.
+* [Breaking] Removed `catalog`, `clean_room`, `connection`, `credential`, `external_location`, `external_metadata`, `function`, `metastore`, `pipeline`, `provider`, `recipient`, `schema`, `share`, `staging_table`, `storage_credential`, `table`, `unknown_securable_type` and `volume` enum values for `databricks.sdk.service.catalog.SecurableType`.
+* [Breaking] Removed `internal` and `internal_and_external` enum values for `databricks.sdk.service.catalog.UpdateMetastoreDeltaSharingScope`.
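+
+  For callers of the removed `w.database_instances` service, the equivalent calls now live on the new `w.database` service. A hypothetical before/after (the method name is assumed to carry over from the old service):
+
+  ```python
+  from databricks.sdk import WorkspaceClient
+
+  w = WorkspaceClient()
+  # before: w.database_instances.list_database_instances()
+  for instance in w.database.list_database_instances():
+      print(instance.name)
+  ```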
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index f75645d25..4fbaddab8 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -1,24 +1,19 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

 import logging
-from typing import Optional

 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
-from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.data_plane import DataPlaneTokenSource
-from databricks.sdk.mixins.compute import ClustersExt
-from databricks.sdk.mixins.files import DbfsExt, FilesExt
-from databricks.sdk.mixins.jobs import JobsExt
-from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
-from databricks.sdk.mixins.workspace import WorkspaceExt
+from databricks.sdk.service import aibuilder as pkg_aibuilder
 from databricks.sdk.service import apps as pkg_apps
 from databricks.sdk.service import billing as pkg_billing
 from databricks.sdk.service import catalog as pkg_catalog
 from databricks.sdk.service import cleanrooms as pkg_cleanrooms
 from databricks.sdk.service import compute as pkg_compute
 from databricks.sdk.service import dashboards as pkg_dashboards
+from databricks.sdk.service import database as pkg_database
 from databricks.sdk.service import files as pkg_files
 from databricks.sdk.service import iam as pkg_iam
 from databricks.sdk.service import jobs as pkg_jobs
@@ -27,114 +22,182 @@
 from databricks.sdk.service import oauth2 as pkg_oauth2
 from databricks.sdk.service import pipelines as pkg_pipelines
 from databricks.sdk.service import provisioning as pkg_provisioning
+from databricks.sdk.service import qualitymonitorv2 as pkg_qualitymonitorv2
 from databricks.sdk.service import serving as pkg_serving
 from databricks.sdk.service import settings as pkg_settings
 from databricks.sdk.service import sharing as pkg_sharing
 from databricks.sdk.service import sql as pkg_sql
 from databricks.sdk.service import vectorsearch as pkg_vectorsearch
 from databricks.sdk.service import workspace as pkg_workspace
+
+from databricks.sdk.mixins.files import DbfsExt, FilesExt
+from databricks.sdk.mixins.compute import ClustersExt
+from databricks.sdk.mixins.workspace import WorkspaceExt
+from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
+from databricks.sdk.mixins.jobs import JobsExt
+from databricks.sdk.service.iam import AccessControlAPI
+from databricks.sdk.service.iam import AccountAccessControlAPI
+from databricks.sdk.service.iam import AccountAccessControlProxyAPI
+from databricks.sdk.service.settings import AibiDashboardEmbeddingAccessPolicyAPI
+from databricks.sdk.service.settings import AibiDashboardEmbeddingApprovedDomainsAPI
+from databricks.sdk.service.sql import AlertsAPI
+from databricks.sdk.service.sql import AlertsLegacyAPI
+from databricks.sdk.service.sql import AlertsV2API
 from databricks.sdk.service.apps import AppsAPI
-from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
-                                            BudgetsAPI, LogDeliveryAPI,
-                                            UsageDashboardsAPI)
-from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
-                                            AccountMetastoresAPI,
-                                            AccountStorageCredentialsAPI,
-                                            ArtifactAllowlistsAPI, CatalogsAPI,
-                                            ConnectionsAPI, CredentialsAPI,
-                                            DatabaseInstancesAPI,
-                                            ExternalLocationsAPI, FunctionsAPI,
-                                            GrantsAPI, MetastoresAPI,
-                                            ModelVersionsAPI, OnlineTablesAPI,
-                                            QualityMonitorsAPI,
-                                            RegisteredModelsAPI,
-                                            ResourceQuotasAPI, SchemasAPI,
-                                            StorageCredentialsAPI,
-                                            SystemSchemasAPI,
-                                            TableConstraintsAPI, TablesAPI,
-                                            TemporaryTableCredentialsAPI,
-                                            VolumesAPI, WorkspaceBindingsAPI)
-from databricks.sdk.service.cleanrooms import (CleanRoomAssetsAPI,
-                                               CleanRoomsAPI,
-                                               CleanRoomTaskRunsAPI)
-from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
-                                            CommandExecutionAPI,
-                                            GlobalInitScriptsAPI,
-                                            InstancePoolsAPI,
-                                            InstanceProfilesAPI, LibrariesAPI,
-                                            PolicyComplianceForClustersAPI,
-                                            PolicyFamiliesAPI)
-from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
-                                               LakeviewEmbeddedAPI,
-                                               QueryExecutionAPI)
-from databricks.sdk.service.files import DbfsAPI, FilesAPI
-from databricks.sdk.service.iam import (AccessControlAPI,
-                                        AccountAccessControlAPI,
-                                        AccountAccessControlProxyAPI,
-                                        AccountGroupsAPI,
-                                        AccountServicePrincipalsAPI,
-                                        AccountUsersAPI, CurrentUserAPI,
-                                        GroupsAPI, PermissionMigrationAPI,
-                                        PermissionsAPI, ServicePrincipalsAPI,
-                                        UsersAPI, WorkspaceAssignmentAPI)
-from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI
-from databricks.sdk.service.marketplace import (
-    ConsumerFulfillmentsAPI, ConsumerInstallationsAPI, ConsumerListingsAPI,
-    ConsumerPersonalizationRequestsAPI, ConsumerProvidersAPI,
-    ProviderExchangeFiltersAPI, ProviderExchangesAPI, ProviderFilesAPI,
-    ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
-    ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
-from databricks.sdk.service.ml import (ExperimentsAPI, ForecastingAPI,
-                                       ModelRegistryAPI)
-from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI,
-                                           CustomAppIntegrationAPI,
-                                           OAuthPublishedAppsAPI,
-                                           PublishedAppIntegrationAPI,
-                                           ServicePrincipalFederationPolicyAPI,
-                                           ServicePrincipalSecretsAPI)
+from databricks.sdk.service.catalog import ArtifactAllowlistsAPI
+from databricks.sdk.service.settings import AutomaticClusterUpdateAPI
+from databricks.sdk.service.billing import BillableUsageAPI
+from databricks.sdk.service.billing import BudgetPolicyAPI
+from databricks.sdk.service.catalog import CatalogsAPI
+from databricks.sdk.service.cleanrooms import CleanRoomAssetsAPI
+from databricks.sdk.service.cleanrooms import CleanRoomTaskRunsAPI
+from databricks.sdk.service.cleanrooms import CleanRoomsAPI
+from databricks.sdk.service.compute import ClusterPoliciesAPI
+from databricks.sdk.service.compute import ClustersAPI
+from databricks.sdk.service.compute import CommandExecutionAPI
+from databricks.sdk.service.settings import ComplianceSecurityProfileAPI
+from databricks.sdk.service.catalog import ConnectionsAPI
+from databricks.sdk.service.marketplace import ConsumerFulfillmentsAPI
+from databricks.sdk.service.marketplace import ConsumerInstallationsAPI
+from databricks.sdk.service.marketplace import ConsumerListingsAPI
+from databricks.sdk.service.marketplace import ConsumerPersonalizationRequestsAPI
+from databricks.sdk.service.marketplace import ConsumerProvidersAPI
+from databricks.sdk.service.catalog import CredentialsAPI
+from databricks.sdk.service.provisioning import CredentialsAPI
+from databricks.sdk.service.settings import CredentialsManagerAPI
+from databricks.sdk.service.settings import CspEnablementAccountAPI
+from databricks.sdk.service.iam import CurrentUserAPI
+from databricks.sdk.service.oauth2 import CustomAppIntegrationAPI
+from databricks.sdk.service.aibuilder import CustomLlmsAPI
+from databricks.sdk.service.settings import DashboardEmailSubscriptionsAPI
+from databricks.sdk.service.sql import DashboardWidgetsAPI
+from databricks.sdk.service.sql import DashboardsAPI
+from databricks.sdk.service.sql import DataSourcesAPI
+from databricks.sdk.service.database import DatabaseAPI
+from databricks.sdk.service.files import DbfsAPI
+from databricks.sdk.service.sql import DbsqlPermissionsAPI
+from databricks.sdk.service.settings import DefaultNamespaceAPI
+from databricks.sdk.service.settings import DisableLegacyAccessAPI
+from databricks.sdk.service.settings import DisableLegacyDbfsAPI
+from databricks.sdk.service.settings import DisableLegacyFeaturesAPI
+from databricks.sdk.service.settings import EnableExportNotebookAPI
+from databricks.sdk.service.settings import EnableIpAccessListsAPI
+from databricks.sdk.service.settings import EnableNotebookTableClipboardAPI
+from databricks.sdk.service.settings import EnableResultsDownloadingAPI
+from databricks.sdk.service.provisioning import EncryptionKeysAPI
+from databricks.sdk.service.settings import EnhancedSecurityMonitoringAPI
+from databricks.sdk.service.settings import EsmEnablementAccountAPI
+from databricks.sdk.service.ml import ExperimentsAPI
+from databricks.sdk.service.catalog import ExternalLocationsAPI
+from databricks.sdk.service.oauth2 import AccountFederationPolicyAPI
+from databricks.sdk.service.files import FilesAPI
+from databricks.sdk.service.catalog import FunctionsAPI
+from databricks.sdk.service.dashboards import GenieAPI
+from databricks.sdk.service.workspace import GitCredentialsAPI
+from databricks.sdk.service.compute import GlobalInitScriptsAPI
+from databricks.sdk.service.catalog import GrantsAPI
+from databricks.sdk.service.iam import GroupsAPI
+from databricks.sdk.service.iam import AccountGroupsAPI
+from databricks.sdk.service.compute import InstancePoolsAPI
+from databricks.sdk.service.compute import InstanceProfilesAPI
+from databricks.sdk.service.settings import IpAccessListsAPI
+from databricks.sdk.service.settings import AccountIpAccessListsAPI
+from databricks.sdk.service.jobs import JobsAPI
+from databricks.sdk.service.dashboards import LakeviewAPI
+from databricks.sdk.service.dashboards import LakeviewEmbeddedAPI
+from databricks.sdk.service.compute import LibrariesAPI
+from databricks.sdk.service.settings import LlmProxyPartnerPoweredAccountAPI
+from databricks.sdk.service.settings import LlmProxyPartnerPoweredEnforceAPI
+from databricks.sdk.service.settings import LlmProxyPartnerPoweredWorkspaceAPI
+from databricks.sdk.service.billing import LogDeliveryAPI
+from databricks.sdk.service.catalog import AccountMetastoreAssignmentsAPI
+from databricks.sdk.service.catalog import MetastoresAPI
+from databricks.sdk.service.catalog import AccountMetastoresAPI
+from databricks.sdk.service.ml import ModelRegistryAPI
+from databricks.sdk.service.catalog import ModelVersionsAPI
+from databricks.sdk.service.settings import NetworkConnectivityAPI
+from databricks.sdk.service.settings import NetworkPoliciesAPI
+from databricks.sdk.service.provisioning import NetworksAPI
+from databricks.sdk.service.settings import NotificationDestinationsAPI
+from databricks.sdk.service.oauth2 import OAuthPublishedAppsAPI
+from databricks.sdk.service.catalog import OnlineTablesAPI
+from databricks.sdk.service.iam import PermissionMigrationAPI
+from databricks.sdk.service.iam import PermissionsAPI
+from databricks.sdk.service.settings import PersonalComputeAPI
 from databricks.sdk.service.pipelines import PipelinesAPI
-from databricks.sdk.service.provisioning import (CredentialsAPI,
-                                                 EncryptionKeysAPI,
-                                                 NetworksAPI, PrivateAccessAPI,
-                                                 StorageAPI, VpcEndpointsAPI,
-                                                 Workspace, WorkspacesAPI)
-from databricks.sdk.service.serving import (ServingEndpointsAPI,
-                                            ServingEndpointsDataPlaneAPI)
-from databricks.sdk.service.settings import (
-    AccountIpAccessListsAPI, AccountSettingsAPI,
-    AibiDashboardEmbeddingAccessPolicyAPI,
-    AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
-    ComplianceSecurityProfileAPI, CredentialsManagerAPI,
-    CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
-    DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableExportNotebookAPI,
-    EnableIpAccessListsAPI, EnableNotebookTableClipboardAPI,
-    EnableResultsDownloadingAPI, EnhancedSecurityMonitoringAPI,
-    EsmEnablementAccountAPI, IpAccessListsAPI,
-    LlmProxyPartnerPoweredAccountAPI, LlmProxyPartnerPoweredEnforceAPI,
-    LlmProxyPartnerPoweredWorkspaceAPI, NetworkConnectivityAPI,
-    NetworkPoliciesAPI, NotificationDestinationsAPI, PersonalComputeAPI,
-    RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
-    WorkspaceConfAPI, WorkspaceNetworkConfigurationAPI)
-from databricks.sdk.service.sharing import (ProvidersAPI,
-                                            RecipientActivationAPI,
-                                            RecipientFederationPoliciesAPI,
-                                            RecipientsAPI, SharesAPI)
-from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
-                                        AlertsV2API, DashboardsAPI,
-                                        DashboardWidgetsAPI, DataSourcesAPI,
-                                        DbsqlPermissionsAPI, QueriesAPI,
-                                        QueriesLegacyAPI, QueryHistoryAPI,
-                                        QueryVisualizationsAPI,
-                                        QueryVisualizationsLegacyAPI,
-                                        RedashConfigAPI, StatementExecutionAPI,
-                                        WarehousesAPI)
-from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
-                                                 VectorSearchIndexesAPI)
-from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
-                                              SecretsAPI, WorkspaceAPI)
+from databricks.sdk.service.compute import PolicyComplianceForClustersAPI
+from databricks.sdk.service.jobs import PolicyComplianceForJobsAPI
+from databricks.sdk.service.compute import PolicyFamiliesAPI
+from databricks.sdk.service.provisioning import PrivateAccessAPI
+from databricks.sdk.service.marketplace import ProviderExchangeFiltersAPI
+from databricks.sdk.service.marketplace import ProviderExchangesAPI
+from databricks.sdk.service.marketplace import ProviderFilesAPI
+from databricks.sdk.service.marketplace import ProviderListingsAPI
+from databricks.sdk.service.marketplace import ProviderPersonalizationRequestsAPI
+from databricks.sdk.service.marketplace import ProviderProviderAnalyticsDashboardsAPI
+from databricks.sdk.service.marketplace import ProviderProvidersAPI
+from databricks.sdk.service.sharing import ProvidersAPI
+from databricks.sdk.service.oauth2 import PublishedAppIntegrationAPI
+from databricks.sdk.service.qualitymonitorv2 import QualityMonitorV2API
+from databricks.sdk.service.catalog import QualityMonitorsAPI
+from databricks.sdk.service.sql import QueriesAPI
+from databricks.sdk.service.sql import QueriesLegacyAPI
+from databricks.sdk.service.sql import QueryHistoryAPI
+from databricks.sdk.service.sql import QueryVisualizationsAPI
+from databricks.sdk.service.sql import QueryVisualizationsLegacyAPI
+from databricks.sdk.service.sharing import RecipientActivationAPI
+from databricks.sdk.service.sharing import RecipientFederationPoliciesAPI
+from databricks.sdk.service.sharing import RecipientsAPI
+from databricks.sdk.service.sql import RedashConfigAPI
+from databricks.sdk.service.catalog import RegisteredModelsAPI
+from databricks.sdk.service.workspace import ReposAPI
+from databricks.sdk.service.catalog import ResourceQuotasAPI
+from databricks.sdk.service.settings import RestrictWorkspaceAdminsAPI
+from databricks.sdk.service.catalog import SchemasAPI
+from databricks.sdk.service.workspace import SecretsAPI
+from databricks.sdk.service.oauth2 import ServicePrincipalFederationPolicyAPI
+from databricks.sdk.service.oauth2 import ServicePrincipalSecretsAPI
+from databricks.sdk.service.iam import ServicePrincipalsAPI
+from databricks.sdk.service.iam import AccountServicePrincipalsAPI
+from databricks.sdk.service.serving import ServingEndpointsAPI
+from databricks.sdk.service.serving import ServingEndpointsDataPlaneAPI
+from databricks.sdk.service.settings import SettingsAPI
+from databricks.sdk.service.settings import AccountSettingsAPI
+from databricks.sdk.service.sharing import SharesAPI
+from databricks.sdk.service.settings import SqlResultsDownloadAPI
+from databricks.sdk.service.sql import StatementExecutionAPI
+from databricks.sdk.service.provisioning import StorageAPI
+from databricks.sdk.service.catalog import StorageCredentialsAPI
+from databricks.sdk.service.catalog import AccountStorageCredentialsAPI
+from databricks.sdk.service.catalog import SystemSchemasAPI
+from databricks.sdk.service.catalog import TableConstraintsAPI
+from databricks.sdk.service.catalog import TablesAPI
+from databricks.sdk.service.catalog import TemporaryTableCredentialsAPI
+from databricks.sdk.service.settings import TokenManagementAPI
+from databricks.sdk.service.settings import TokensAPI
+from databricks.sdk.service.billing import UsageDashboardsAPI
+from databricks.sdk.service.iam import UsersAPI
+from databricks.sdk.service.iam import AccountUsersAPI
+from databricks.sdk.service.vectorsearch import VectorSearchEndpointsAPI
+from databricks.sdk.service.vectorsearch import VectorSearchIndexesAPI
+from databricks.sdk.service.catalog import VolumesAPI
+from databricks.sdk.service.provisioning import VpcEndpointsAPI
+from databricks.sdk.service.sql import WarehousesAPI
+from databricks.sdk.service.workspace import WorkspaceAPI
+from databricks.sdk.service.iam import WorkspaceAssignmentAPI
+from databricks.sdk.service.catalog import WorkspaceBindingsAPI
+from databricks.sdk.service.settings import WorkspaceConfAPI
+from databricks.sdk.service.settings import WorkspaceNetworkConfigurationAPI
+from databricks.sdk.service.provisioning import WorkspacesAPI
+from databricks.sdk.service.billing import BudgetsAPI
+from databricks.sdk.service.ml import ForecastingAPI
+from databricks.sdk.service.provisioning import Workspace
+from databricks.sdk import azure
+from typing import Optional
+

-_LOG = logging.getLogger(__name__)
+_LOG = logging.getLogger(__name__)

 def _make_dbutils(config: client.Config):
     # We try to directly check if we are in runtime, instead of
@@ -149,7 +212,6 @@ def _make_dbutils(config: client.Config):
     # We are in runtime, so we can use the runtime dbutils
     from databricks.sdk.runtime import dbutils as runtime_dbutils
-
     return runtime_dbutils
@@ -165,65 +227,25 @@ class WorkspaceClient:
     """
     The WorkspaceClient is a client for the workspace-level Databricks REST API.
     """
-
-    def __init__(
-        self,
-        *,
-        host: Optional[str] = None,
-        account_id: Optional[str] = None,
-        username: Optional[str] = None,
-        password: Optional[str] = None,
-        client_id: Optional[str] = None,
-        client_secret: Optional[str] = None,
-        token: Optional[str] = None,
-        profile: Optional[str] = None,
-        config_file: Optional[str] = None,
-        azure_workspace_resource_id: Optional[str] = None,
-        azure_client_secret: Optional[str] = None,
-        azure_client_id: Optional[str] = None,
-        azure_tenant_id: Optional[str] = None,
-        azure_environment: Optional[str] = None,
-        auth_type: Optional[str] = None,
-        cluster_id: Optional[str] = None,
-        google_credentials: Optional[str] = None,
-        google_service_account: Optional[str] = None,
-        debug_truncate_bytes: Optional[int] = None,
-        debug_headers: Optional[bool] = None,
-        product="unknown",
-        product_version="0.0.0",
-        credentials_strategy: Optional[CredentialsStrategy] = None,
-        credentials_provider: Optional[CredentialsStrategy] = None,
-        token_audience: Optional[str] = None,
-        config: Optional[client.Config] = None,
-    ):
+    def __init__(self, *, host: Optional[str] = None, account_id: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, client_id: Optional[str] = None, client_secret: Optional[str] = None, token: Optional[str] = None, profile: Optional[str] = None, config_file: Optional[str] = None, azure_workspace_resource_id: Optional[str] = None, azure_client_secret: Optional[str] = None, azure_client_id: Optional[str] = None, azure_tenant_id: Optional[str] = None, azure_environment: Optional[str] = None, auth_type: Optional[str] = None, cluster_id: Optional[str] = None, google_credentials: Optional[str] = None, google_service_account: Optional[str] = None,
+                 debug_truncate_bytes: Optional[int] = None,
+                 debug_headers: Optional[bool] = None,
+                 product="unknown",
+                 product_version="0.0.0",
+                 credentials_strategy: Optional[CredentialsStrategy] = None,
+                 credentials_provider: Optional[CredentialsStrategy] = None,
+                 token_audience: Optional[str] = None,
+                 config: Optional[client.Config] = None):
         if not config:
-            config = client.Config(
-                host=host,
-                account_id=account_id,
-                username=username,
-                password=password,
-                client_id=client_id,
-                client_secret=client_secret,
-                token=token,
-                profile=profile,
-                config_file=config_file,
-                azure_workspace_resource_id=azure_workspace_resource_id,
-                azure_client_secret=azure_client_secret,
-                azure_client_id=azure_client_id,
-                azure_tenant_id=azure_tenant_id,
-                azure_environment=azure_environment,
-                auth_type=auth_type,
-                cluster_id=cluster_id,
-                google_credentials=google_credentials,
-                google_service_account=google_service_account,
-                credentials_strategy=credentials_strategy,
-                credentials_provider=credentials_provider,
-                debug_truncate_bytes=debug_truncate_bytes,
-                debug_headers=debug_headers,
-                product=product,
-                product_version=product_version,
-                token_audience=token_audience,
-            )
+            config = client.Config(host=host, account_id=account_id, username=username, password=password, client_id=client_id, client_secret=client_secret, token=token, profile=profile, config_file=config_file, azure_workspace_resource_id=azure_workspace_resource_id, azure_client_secret=azure_client_secret, azure_client_id=azure_client_id, azure_tenant_id=azure_tenant_id, azure_environment=azure_environment, auth_type=auth_type, cluster_id=cluster_id, google_credentials=google_credentials, google_service_account=google_service_account,
+                credentials_strategy=credentials_strategy,
+                credentials_provider=credentials_provider,
+                debug_truncate_bytes=debug_truncate_bytes,
+                debug_headers=debug_headers,
+                product=product,
+                product_version=product_version,
+                token_audience=token_audience,
+                )
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
@@ -251,10 +273,11 @@ def __init__(
         self._credentials = pkg_catalog.CredentialsAPI(self._api_client)
         self._credentials_manager = pkg_settings.CredentialsManagerAPI(self._api_client)
         self._current_user = pkg_iam.CurrentUserAPI(self._api_client)
+        self._custom_llms = pkg_aibuilder.CustomLlmsAPI(self._api_client)
         self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client)
         self._dashboards = pkg_sql.DashboardsAPI(self._api_client)
         self._data_sources = pkg_sql.DataSourcesAPI(self._api_client)
-        self._database_instances = pkg_catalog.DatabaseInstancesAPI(self._api_client)
+        self._database = pkg_database.DatabaseAPI(self._api_client)
         self._dbfs = DbfsExt(self._api_client)
         self._dbsql_permissions = pkg_sql.DbsqlPermissionsAPI(self._api_client)
         self._experiments = pkg_ml.ExperimentsAPI(self._api_client)
@@ -289,15 +312,13 @@ def __init__(
         self._provider_files = pkg_marketplace.ProviderFilesAPI(self._api_client)
         self._provider_listings = pkg_marketplace.ProviderListingsAPI(self._api_client)
         self._provider_personalization_requests = pkg_marketplace.ProviderPersonalizationRequestsAPI(self._api_client)
-        self._provider_provider_analytics_dashboards = pkg_marketplace.ProviderProviderAnalyticsDashboardsAPI(
-            self._api_client
-        )
+        self._provider_provider_analytics_dashboards = pkg_marketplace.ProviderProviderAnalyticsDashboardsAPI(self._api_client)
         self._provider_providers = pkg_marketplace.ProviderProvidersAPI(self._api_client)
         self._providers = pkg_sharing.ProvidersAPI(self._api_client)
+        self._quality_monitor_v2 = pkg_qualitymonitorv2.QualityMonitorV2API(self._api_client)
         self._quality_monitors = pkg_catalog.QualityMonitorsAPI(self._api_client)
         self._queries = pkg_sql.QueriesAPI(self._api_client)
         self._queries_legacy = pkg_sql.QueriesLegacyAPI(self._api_client)
-        self._query_execution = pkg_dashboards.QueryExecutionAPI(self._api_client)
         self._query_history = pkg_sql.QueryHistoryAPI(self._api_client)
         self._query_visualizations = pkg_sql.QueryVisualizationsAPI(self._api_client)
         self._query_visualizations_legacy = pkg_sql.QueryVisualizationsLegacyAPI(self._api_client)
@@ -312,12 +333,8 @@ def __init__(
         self._secrets = pkg_workspace.SecretsAPI(self._api_client)
         self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
-        serving_endpoints_data_plane_token_source = DataPlaneTokenSource(
-            self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh
-        )
-        self._serving_endpoints_data_plane = pkg_serving.ServingEndpointsDataPlaneAPI(
-            self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
-        )
+        serving_endpoints_data_plane_token_source = DataPlaneTokenSource(self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh)
+        self._serving_endpoints_data_plane = pkg_serving.ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source)
         self._settings = pkg_settings.SettingsAPI(self._api_client)
         self._shares = pkg_sharing.SharesAPI(self._api_client)
         self._statement_execution = pkg_sql.StatementExecutionAPI(self._api_client)
@@ -349,589 +366,556 @@ def api_client(self) -> client.ApiClient:
     @property
     def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
-
     @property
     def access_control(self) -> pkg_iam.AccessControlAPI:
         """Rule based Access Control for Databricks Resources."""
         return self._access_control
-
+    
     @property
    def account_access_control_proxy(self) -> pkg_iam.AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
         return self._account_access_control_proxy
-
+    
     @property
     def alerts(self) -> pkg_sql.AlertsAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts
-
+    
     @property
     def alerts_legacy(self) -> pkg_sql.AlertsLegacyAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts_legacy
-
+    
     @property
     def alerts_v2(self) -> pkg_sql.AlertsV2API:
-        """TODO: Add description."""
+        """New version of SQL Alerts."""
        return self._alerts_v2
-
+    
     @property
     def apps(self) -> pkg_apps.AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
         return self._apps
-
+    
     @property
     def artifact_allowlists(self) -> pkg_catalog.ArtifactAllowlistsAPI:
         """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode."""
         return self._artifact_allowlists
-
+    
     @property
     def catalogs(self) -> pkg_catalog.CatalogsAPI:
         """A catalog is the first layer of Unity Catalog’s three-level namespace."""
         return self._catalogs
-
+    
     @property
     def clean_room_assets(self) -> pkg_cleanrooms.CleanRoomAssetsAPI:
         """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room."""
         return self._clean_room_assets
-
+    
     @property
     def clean_room_task_runs(self) -> pkg_cleanrooms.CleanRoomTaskRunsAPI:
         """Clean room task runs are the executions of notebooks in a clean room."""
         return self._clean_room_task_runs
-
+    
     @property
     def clean_rooms(self) -> pkg_cleanrooms.CleanRoomsAPI:
         """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data."""
         return self._clean_rooms
-
+    
     @property
     def cluster_policies(self) -> pkg_compute.ClusterPoliciesAPI:
         """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
         return self._cluster_policies
-
+    
     @property
     def clusters(self) -> ClustersExt:
         """The Clusters API allows you to create, start, edit, list, terminate, and delete clusters."""
         return self._clusters
-
+    
     @property
     def command_execution(self) -> pkg_compute.CommandExecutionAPI:
SQL, or R commands on running Databricks Clusters.""" return self._command_execution - + @property def connections(self) -> pkg_catalog.ConnectionsAPI: """Connections allow for creating a connection to an external data source.""" return self._connections - + @property def consumer_fulfillments(self) -> pkg_marketplace.ConsumerFulfillmentsAPI: """Fulfillments are entities that allow consumers to preview installations.""" return self._consumer_fulfillments - + @property def consumer_installations(self) -> pkg_marketplace.ConsumerInstallationsAPI: """Installations are entities that allow consumers to interact with Databricks Marketplace listings.""" return self._consumer_installations - + @property def consumer_listings(self) -> pkg_marketplace.ConsumerListingsAPI: """Listings are the core entities in the Marketplace.""" return self._consumer_listings - + @property def consumer_personalization_requests(self) -> pkg_marketplace.ConsumerPersonalizationRequestsAPI: """Personalization Requests allow customers to interact with the individualized Marketplace listing flow.""" return self._consumer_personalization_requests - + @property def consumer_providers(self) -> pkg_marketplace.ConsumerProvidersAPI: """Providers are the entities that publish listings to the Marketplace.""" return self._consumer_providers - + @property def credentials(self) -> pkg_catalog.CredentialsAPI: """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant.""" return self._credentials - + @property def credentials_manager(self) -> pkg_settings.CredentialsManagerAPI: """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens.""" return self._credentials_manager - + @property def current_user(self) -> pkg_iam.CurrentUserAPI: """This API allows retrieving information about currently authenticated user or service principal.""" return self._current_user - + + @property + def custom_llms(self) -> pkg_aibuilder.CustomLlmsAPI: + """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" + return self._custom_llms + @property def dashboard_widgets(self) -> pkg_sql.DashboardWidgetsAPI: """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace.""" return self._dashboard_widgets - + @property def dashboards(self) -> pkg_sql.DashboardsAPI: """In general, there is little need to modify dashboards using the API.""" return self._dashboards - + @property def data_sources(self) -> pkg_sql.DataSourcesAPI: """This API is provided to assist you in making new query objects.""" return self._data_sources - + @property - def database_instances(self) -> pkg_catalog.DatabaseInstancesAPI: + def database(self) -> pkg_database.DatabaseAPI: """Database Instances provide access to a database via REST API or direct SQL.""" - return self._database_instances - + return self._database + @property def dbfs(self) -> DbfsExt: """DBFS API makes it simple to interact with various data sources without having to include a users credentials every time to read a file.""" return self._dbfs - + @property def dbsql_permissions(self) -> pkg_sql.DbsqlPermissionsAPI: """The SQL Permissions API is similar to the endpoints of the :method:permissions/set.""" return self._dbsql_permissions - + @property def experiments(self) -> pkg_ml.ExperimentsAPI: """Experiments are the primary unit of organization in MLflow; all MLflow runs belong 
to an experiment.""" return self._experiments - + @property def external_locations(self) -> pkg_catalog.ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.""" return self._external_locations - + @property def files(self) -> pkg_files.FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI.""" return self._files - + @property def functions(self) -> pkg_catalog.FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" return self._functions - + @property def genie(self) -> pkg_dashboards.GenieAPI: """Genie provides a no-code experience for business users, powered by AI/BI.""" return self._genie - + @property def git_credentials(self) -> pkg_workspace.GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user.""" return self._git_credentials - + @property def global_init_scripts(self) -> pkg_compute.GlobalInitScriptsAPI: """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.""" return self._global_init_scripts - + @property def grants(self) -> pkg_catalog.GrantsAPI: """In Unity Catalog, data is secure by default.""" return self._grants - + @property def groups(self) -> pkg_iam.GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" return self._groups - + @property def instance_pools(self) -> pkg_compute.InstancePoolsAPI: """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times.""" return self._instance_pools - + @property def instance_profiles(self) -> pkg_compute.InstanceProfilesAPI: """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.""" return self._instance_profiles - + @property def ip_access_lists(self) -> pkg_settings.IpAccessListsAPI: """IP Access List enables admins to configure IP access lists.""" return self._ip_access_lists - + @property def jobs(self) -> JobsExt: """The Jobs API allows you to create, edit, and delete jobs.""" return self._jobs - + @property def lakeview(self) -> pkg_dashboards.LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards.""" return self._lakeview - + @property def lakeview_embedded(self) -> pkg_dashboards.LakeviewEmbeddedAPI: """Token-based Lakeview APIs for embedding dashboards in external applications.""" return self._lakeview_embedded - + @property def libraries(self) -> pkg_compute.LibrariesAPI: """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.""" return self._libraries - + @property def metastores(self) -> pkg_catalog.MetastoresAPI: """A metastore is the top-level container of objects in Unity Catalog.""" return self._metastores - + @property def model_registry(self) -> pkg_ml.ModelRegistryAPI: """Note: This API reference documents APIs for the Workspace Model Registry.""" return self._model_registry - + @property def model_versions(self) -> pkg_catalog.ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._model_versions - + @property def 
notification_destinations(self) -> pkg_settings.NotificationDestinationsAPI: """The notification destinations API lets you programmatically manage a workspace's notification destinations.""" return self._notification_destinations - + @property def online_tables(self) -> pkg_catalog.OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" return self._online_tables - + @property def permission_migration(self) -> pkg_iam.PermissionMigrationAPI: """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx.""" return self._permission_migration - + @property def permissions(self) -> pkg_iam.PermissionsAPI: """Permissions API are used to create, read, write, edit, update and manage access for various users on different objects and endpoints.""" return self._permissions - + @property def pipelines(self) -> pkg_pipelines.PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines - + @property def policy_compliance_for_clusters(self) -> pkg_compute.PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" return self._policy_compliance_for_clusters - + @property def policy_compliance_for_jobs(self) -> pkg_jobs.PolicyComplianceForJobsAPI: """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.""" return self._policy_compliance_for_jobs - + @property def policy_families(self) -> pkg_compute.PolicyFamiliesAPI: """View available policy families.""" return self._policy_families - + @property def provider_exchange_filters(self) -> pkg_marketplace.ProviderExchangeFiltersAPI: """Marketplace exchange filters curate which groups can access an exchange.""" return self._provider_exchange_filters - + @property def provider_exchanges(self) -> pkg_marketplace.ProviderExchangesAPI: """Marketplace exchanges allow providers to share their listings with a curated set of customers.""" return self._provider_exchanges - + @property def provider_files(self) -> pkg_marketplace.ProviderFilesAPI: """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons.""" return self._provider_files - + @property def provider_listings(self) -> pkg_marketplace.ProviderListingsAPI: """Listings are the core entities in the Marketplace.""" return self._provider_listings - + @property def provider_personalization_requests(self) -> pkg_marketplace.ProviderPersonalizationRequestsAPI: """Personalization requests are an alternate to instantly available listings.""" return self._provider_personalization_requests - + @property def provider_provider_analytics_dashboards(self) -> pkg_marketplace.ProviderProviderAnalyticsDashboardsAPI: """Manage templated analytics solution for providers.""" return self._provider_provider_analytics_dashboards - + @property def provider_providers(self) -> pkg_marketplace.ProviderProvidersAPI: """Providers are entities that manage assets in Marketplace.""" return self._provider_providers - + @property def providers(self) -> pkg_sharing.ProvidersAPI: """A data provider is an object representing the organization in the real world who shares the data.""" return self._providers - + + @property + def quality_monitor_v2(self) -> pkg_qualitymonitorv2.QualityMonitorV2API: + """Manage data quality of UC objects (currently support `schema`).""" + return
self._quality_monitor_v2 + @property def quality_monitors(self) -> pkg_catalog.QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time.""" return self._quality_monitors - + @property def queries(self) -> pkg_sql.QueriesAPI: """The queries API can be used to perform CRUD operations on queries.""" return self._queries - + @property def queries_legacy(self) -> pkg_sql.QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions.""" return self._queries_legacy - - @property - def query_execution(self) -> pkg_dashboards.QueryExecutionAPI: - """Query execution APIs for AI / BI Dashboards.""" - return self._query_execution - + @property def query_history(self) -> pkg_sql.QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" return self._query_history - + @property def query_visualizations(self) -> pkg_sql.QueryVisualizationsAPI: """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace.""" return self._query_visualizations - + @property def query_visualizations_legacy(self) -> pkg_sql.QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of visualizations from existing queries within the Databricks Workspace.""" return self._query_visualizations_legacy - + @property def recipient_activation(self) -> pkg_sharing.RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`.""" return self._recipient_activation - + @property def recipient_federation_policies(self) -> pkg_sharing.RecipientFederationPoliciesAPI: """The Recipient Federation Policies APIs are only applicable in the open sharing model where the recipient object has the authentication type of `OIDC_RECIPIENT`, enabling data sharing from Databricks to non-Databricks recipients.""" return self._recipient_federation_policies - + @property def recipients(self) -> pkg_sharing.RecipientsAPI: """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow to access shares.""" return self._recipients - + @property def redash_config(self) -> pkg_sql.RedashConfigAPI: """Redash V2 service for workspace configurations (internal).""" return self._redash_config - + @property def registered_models(self) -> pkg_catalog.RegisteredModelsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._registered_models - + @property def repos(self) -> pkg_workspace.ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos - + @property def resource_quotas(self) -> pkg_catalog.ResourceQuotasAPI: """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.""" return self._resource_quotas - + @property def schemas(self) -> pkg_catalog.SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.""" return self._schemas - + @property def secrets(self) -> pkg_workspace.SecretsAPI: """The Secrets API allows you to manage secrets, secret scopes, and access permissions.""" return self._secrets - + @property def service_principals(self) -> pkg_iam.ServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as
scripts, apps, and CI/CD platforms.""" return self._service_principals - + @property def serving_endpoints(self) -> ServingEndpointsExt: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints.""" return self._serving_endpoints - + @property def serving_endpoints_data_plane(self) -> pkg_serving.ServingEndpointsDataPlaneAPI: """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" return self._serving_endpoints_data_plane - + @property def settings(self) -> pkg_settings.SettingsAPI: """Workspace Settings API allows users to manage settings at the workspace level.""" return self._settings - + @property def shares(self) -> pkg_sharing.SharesAPI: """A share is a container instantiated with :method:shares/create.""" return self._shares - + @property def statement_execution(self) -> pkg_sql.StatementExecutionAPI: """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result.""" return self._statement_execution - + @property def storage_credentials(self) -> pkg_catalog.StorageCredentialsAPI: """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.""" return self._storage_credentials - + @property def system_schemas(self) -> pkg_catalog.SystemSchemasAPI: """A system schema is a schema that lives within the system catalog.""" return self._system_schemas - + @property def table_constraints(self) -> pkg_catalog.TableConstraintsAPI: """Primary key and foreign key constraints encode relationships between fields in tables.""" return self._table_constraints - + @property def tables(self) -> pkg_catalog.TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace.""" return self._tables - + @property def temporary_table_credentials(self) -> pkg_catalog.TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locations where table data is stored in Databricks.""" return self._temporary_table_credentials - + @property def token_management(self) -> pkg_settings.TokenManagementAPI: """Enables administrators to get all tokens and delete tokens for other users.""" return self._token_management - + @property def tokens(self) -> pkg_settings.TokensAPI: """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.""" return self._tokens - + @property def users(self) -> pkg_iam.UsersAPI: """User identities recognized by Databricks and represented by email addresses.""" return self._users - + @property def vector_search_endpoints(self) -> pkg_vectorsearch.VectorSearchEndpointsAPI: """**Endpoint**: Represents the compute resources to host vector search indexes.""" return self._vector_search_endpoints - + @property def vector_search_indexes(self) -> pkg_vectorsearch.VectorSearchIndexesAPI: """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries.""" return self._vector_search_indexes - + @property def volumes(self) -> pkg_catalog.VolumesAPI: """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.""" return self._volumes - + @property def warehouses(self) -> pkg_sql.WarehousesAPI: """A SQL warehouse is a compute resource that lets you run SQL commands
on data objects within Databricks SQL.""" return self._warehouses - + @property def workspace(self) -> WorkspaceExt: """The Workspace API allows you to list, import, export, and delete notebooks and folders.""" return self._workspace - + @property def workspace_bindings(self) -> pkg_catalog.WorkspaceBindingsAPI: """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__.""" return self._workspace_bindings - + @property def workspace_conf(self) -> pkg_settings.WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" return self._workspace_conf - + @property def forecasting(self) -> pkg_ml.ForecastingAPI: """The Forecasting API allows you to create and get serverless forecasting experiments.""" return self._forecasting + def get_workspace_id(self) -> int: """Get the workspace ID of the workspace that this client is connected to.""" - response = self._api_client.do("GET", "/api/2.0/preview/scim/v2/Me", response_headers=["X-Databricks-Org-Id"]) + response = self._api_client.do("GET", + "/api/2.0/preview/scim/v2/Me", + response_headers=['X-Databricks-Org-Id']) return int(response["X-Databricks-Org-Id"]) def __repr__(self): return f"WorkspaceClient(host='{self._config.host}', auth_type='{self._config.auth_type}', ...)" - class AccountClient: """ The AccountClient is a client for the account-level Databricks REST API. """ - def __init__( - self, - *, - host: Optional[str] = None, - account_id: Optional[str] = None, - username: Optional[str] = None, - password: Optional[str] = None, - client_id: Optional[str] = None, - client_secret: Optional[str] = None, - token: Optional[str] = None, - profile: Optional[str] = None, - config_file: Optional[str] = None, - azure_workspace_resource_id: Optional[str] = None, - azure_client_secret: Optional[str] = None, - azure_client_id: Optional[str] = None, - azure_tenant_id: Optional[str] = None, - azure_environment: Optional[str] = None, - auth_type: Optional[str] = None, - cluster_id: Optional[str] = None, - google_credentials: Optional[str] = None, - google_service_account: Optional[str] = None, - debug_truncate_bytes: Optional[int] = None, - debug_headers: Optional[bool] = None, - product="unknown", - product_version="0.0.0", - credentials_strategy: Optional[CredentialsStrategy] = None, - credentials_provider: Optional[CredentialsStrategy] = None, - token_audience: Optional[str] = None, - config: Optional[client.Config] = None, - ): + def __init__(self, *, host: Optional[str] = None, account_id: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, client_id: Optional[str] = None, client_secret: Optional[str] = None, token: Optional[str] = None, profile: Optional[str] = None, config_file: Optional[str] = None, azure_workspace_resource_id: Optional[str] = None, azure_client_secret: Optional[str] = None, azure_client_id: Optional[str] = None, azure_tenant_id: Optional[str] = None, azure_environment: Optional[str] = None, auth_type: Optional[str] = None, cluster_id: Optional[str] = None, google_credentials: Optional[str] = None, google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, + product="unknown", + product_version="0.0.0", + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + token_audience: Optional[str] = None, + config: Optional[client.Config] = None): if not config: - config = client.Config( - host=host, - 
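A minimal usage sketch for the workspace-ID lookup above, assuming credentials are resolved from environment variables or a ~/.databrickscfg profile: the client parses the numeric organization ID out of the X-Databricks-Org-Id response header returned by the SCIM Me endpoint.

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()  # auth picked up from env vars or a config profile
    print(w.get_workspace_id())  # int parsed from the X-Databricks-Org-Id header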
account_id=account_id, - username=username, - password=password, - client_id=client_id, - client_secret=client_secret, - token=token, - profile=profile, - config_file=config_file, - azure_workspace_resource_id=azure_workspace_resource_id, - azure_client_secret=azure_client_secret, - azure_client_id=azure_client_id, - azure_tenant_id=azure_tenant_id, - azure_environment=azure_environment, - auth_type=auth_type, - cluster_id=cluster_id, - google_credentials=google_credentials, - google_service_account=google_service_account, - credentials_strategy=credentials_strategy, - credentials_provider=credentials_provider, - debug_truncate_bytes=debug_truncate_bytes, - debug_headers=debug_headers, - product=product, - product_version=product_version, - token_audience=token_audience, - ) + config = client.Config(host=host, account_id=account_id, username=username, password=password, client_id=client_id, client_secret=client_secret, token=token, profile=profile, config_file=config_file, azure_workspace_resource_id=azure_workspace_resource_id, azure_client_secret=azure_client_secret, azure_client_id=azure_client_id, azure_tenant_id=azure_tenant_id, azure_environment=azure_environment, auth_type=auth_type, cluster_id=cluster_id, google_credentials=google_credentials, google_service_account=google_service_account, + credentials_strategy=credentials_strategy, + credentials_provider=credentials_provider, + debug_truncate_bytes=debug_truncate_bytes, + debug_headers=debug_headers, + product=product, + product_version=product_version, + token_audience=token_audience, + ) self._config = config.copy() self._api_client = client.ApiClient(self._config) self._access_control = pkg_iam.AccountAccessControlAPI(self._api_client) @@ -973,161 +957,161 @@ def config(self) -> client.Config: @property def api_client(self) -> client.ApiClient: return self._api_client - @property def access_control(self) -> pkg_iam.AccountAccessControlAPI: """These APIs manage access rules on resources in an account.""" return self._access_control - + @property def billable_usage(self) -> pkg_billing.BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range.""" return self._billable_usage - + @property def budget_policy(self) -> pkg_billing.BudgetPolicyAPI: """A service serves REST API about Budget policies.""" return self._budget_policy - + @property def credentials(self) -> pkg_provisioning.CredentialsAPI: """These APIs manage credential configurations for this workspace.""" return self._credentials - + @property def custom_app_integration(self) -> pkg_oauth2.CustomAppIntegrationAPI: """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" return self._custom_app_integration - + @property def encryption_keys(self) -> pkg_provisioning.EncryptionKeysAPI: """These APIs manage encryption key configurations for this workspace (optional).""" return self._encryption_keys - + @property def federation_policy(self) -> pkg_oauth2.AccountFederationPolicyAPI: """These APIs manage account federation policies.""" return self._federation_policy - + @property def groups(self) -> pkg_iam.AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" return self._groups - + @property def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI: """The Accounts IP Access List API 
enables account admins to configure IP access lists for access to the account console.""" return self._ip_access_lists - + @property def log_delivery(self) -> pkg_billing.LogDeliveryAPI: - """These APIs manage log delivery configurations for this account.""" + """These APIs manage Log delivery configurations for this account.""" return self._log_delivery - + @property def metastore_assignments(self) -> pkg_catalog.AccountMetastoreAssignmentsAPI: """These APIs manage metastore assignments to a workspace.""" return self._metastore_assignments - + @property def metastores(self) -> pkg_catalog.AccountMetastoresAPI: """These APIs manage Unity Catalog metastores for an account.""" return self._metastores - + @property def network_connectivity(self) -> pkg_settings.NetworkConnectivityAPI: """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.""" return self._network_connectivity - + @property def network_policies(self) -> pkg_settings.NetworkPoliciesAPI: """These APIs manage network policies for this account.""" return self._network_policies - + @property def networks(self) -> pkg_provisioning.NetworksAPI: """These APIs manage network configurations for customer-managed VPCs (optional).""" return self._networks - + @property def o_auth_published_apps(self) -> pkg_oauth2.OAuthPublishedAppsAPI: """These APIs enable administrators to view all the available published OAuth applications in Databricks.""" return self._o_auth_published_apps - + @property def private_access(self) -> pkg_provisioning.PrivateAccessAPI: """These APIs manage private access settings for this account.""" return self._private_access - + @property def published_app_integration(self) -> pkg_oauth2.PublishedAppIntegrationAPI: """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" return self._published_app_integration - + @property def service_principal_federation_policy(self) -> pkg_oauth2.ServicePrincipalFederationPolicyAPI: """These APIs manage service principal federation policies.""" return self._service_principal_federation_policy - + @property def service_principal_secrets(self) -> pkg_oauth2.ServicePrincipalSecretsAPI: """These APIs enable administrators to manage service principal secrets.""" return self._service_principal_secrets - + @property def service_principals(self) -> pkg_iam.AccountServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" return self._service_principals - + @property def settings(self) -> pkg_settings.AccountSettingsAPI: """Accounts Settings API allows users to manage settings at the account level.""" return self._settings - + @property def storage(self) -> pkg_provisioning.StorageAPI: """These APIs manage storage configurations for this workspace.""" return self._storage - + @property def storage_credentials(self) -> pkg_catalog.AccountStorageCredentialsAPI: """These APIs manage storage credentials for a particular metastore.""" return self._storage_credentials - + @property def usage_dashboards(self) -> pkg_billing.UsageDashboardsAPI: """These APIs manage usage dashboards for this account.""" return self._usage_dashboards - + @property def users(self) -> pkg_iam.AccountUsersAPI: """User identities recognized by Databricks and represented by email addresses.""" return self._users - + @property def vpc_endpoints(self) 
-> pkg_provisioning.VpcEndpointsAPI: """These APIs manage VPC endpoint configurations for this account.""" return self._vpc_endpoints - + @property def workspace_assignment(self) -> pkg_iam.WorkspaceAssignmentAPI: """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.""" return self._workspace_assignment - + @property def workspace_network_configuration(self) -> pkg_settings.WorkspaceNetworkConfigurationAPI: - """These APIs allow configuration of network settings for Databricks workspaces.""" + """These APIs allow configuration of network settings for Databricks workspaces by selecting which network policy to associate with the workspace.""" return self._workspace_network_configuration - + @property def workspaces(self) -> pkg_provisioning.WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces - + @property def budgets(self) -> pkg_billing.BudgetsAPI: """These APIs manage budget configurations for this account.""" return self._budgets + def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: """Constructs a ``WorkspaceClient`` for the given workspace. @@ -1159,4 +1143,4 @@ def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: return WorkspaceClient(config=config) def __repr__(self): - return f"AccountClient(account_id='{self._config.account_id}', auth_type='{self._config.auth_type}', ...)" + return f"AccountClient(account_id='{self._config.account_id}', auth_type='{self._config.auth_type}', ...)" \ No newline at end of file diff --git a/databricks/sdk/errors/overrides.py b/databricks/sdk/errors/overrides.py index 08311fa9f..714e1033b 100644 --- a/databricks/sdk/errors/overrides.py +++ b/databricks/sdk/errors/overrides.py @@ -1,36 +1,34 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
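A hedged sketch of the ``get_workspace_client`` flow described above, assuming account-level credentials are already configured: it reuses the account's auth to construct a client for each workspace in the account.

    from databricks.sdk import AccountClient

    a = AccountClient()  # account-level auth from env vars or a config profile
    for ws in a.workspaces.list():
        w = a.get_workspace_client(ws)  # WorkspaceClient bound to that workspace
        print(ws.workspace_name, w.get_workspace_id())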
+from .base import _ErrorOverride +from .platform import * import re -from .base import _ErrorOverride -from .platform import ResourceDoesNotExist _ALL_OVERRIDES = [ _ErrorOverride( debug_name="Clusters InvalidParameterValue=>ResourceDoesNotExist", - path_regex=re.compile(r"^/api/2\.\d/clusters/get"), + path_regex=re.compile(r'^/api/2\.\d/clusters/get'), verb="GET", - status_code_matcher=re.compile(r"^400$"), - error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"), - message_matcher=re.compile(r"Cluster .* does not exist"), + status_code_matcher=re.compile(r'^400$'), + error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'), + message_matcher=re.compile(r'Cluster .* does not exist'), custom_error=ResourceDoesNotExist, - ), - _ErrorOverride( + ),_ErrorOverride( debug_name="Jobs InvalidParameterValue=>ResourceDoesNotExist", - path_regex=re.compile(r"^/api/2\.\d/jobs/get"), + path_regex=re.compile(r'^/api/2\.\d/jobs/get'), verb="GET", - status_code_matcher=re.compile(r"^400$"), - error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"), - message_matcher=re.compile(r"Job .* does not exist"), + status_code_matcher=re.compile(r'^400$'), + error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'), + message_matcher=re.compile(r'Job .* does not exist'), custom_error=ResourceDoesNotExist, - ), - _ErrorOverride( + ),_ErrorOverride( debug_name="Job Runs InvalidParameterValue=>ResourceDoesNotExist", - path_regex=re.compile(r"^/api/2\.\d/jobs/runs/get"), + path_regex=re.compile(r'^/api/2\.\d/jobs/runs/get'), verb="GET", - status_code_matcher=re.compile(r"^400$"), - error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"), - message_matcher=re.compile(r"(Run .* does not exist|Run: .* in job: .* doesn\'t exist)"), + status_code_matcher=re.compile(r'^400$'), + error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'), + message_matcher=re.compile(r'(Run .* does not exist|Run: .* in job: .* doesn\'t exist)'), custom_error=ResourceDoesNotExist, ), -] +] \ No newline at end of file diff --git a/databricks/sdk/errors/platform.py b/databricks/sdk/errors/platform.py index a5b3cb111..c2ee30b8e 100755 --- a/databricks/sdk/errors/platform.py +++ b/databricks/sdk/errors/platform.py @@ -4,113 +4,93 @@ class BadRequest(DatabricksError): - """the request is invalid""" - + """the request is invalid""" class Unauthenticated(DatabricksError): - """the request does not have valid authentication (AuthN) credentials for the operation""" - + """the request does not have valid authentication (AuthN) credentials for the operation""" class PermissionDenied(DatabricksError): - """the caller does not have permission to execute the specified operation""" - + """the caller does not have permission to execute the specified operation""" class NotFound(DatabricksError): - """the operation was performed on a resource that does not exist""" - + """the operation was performed on a resource that does not exist""" class ResourceConflict(DatabricksError): - """maps to all HTTP 409 (Conflict) responses""" - + """maps to all HTTP 409 (Conflict) responses""" class TooManyRequests(DatabricksError): - """maps to HTTP code: 429 Too Many Requests""" - + """maps to HTTP code: 429 Too Many Requests""" class Cancelled(DatabricksError): - """the operation was explicitly canceled by the caller""" - + """the operation was explicitly canceled by the caller""" class InternalError(DatabricksError): - """some invariants expected by the underlying system have been broken""" - + """some invariants expected by the underlying system have been broken""" 
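The override table above remaps selected HTTP 400 INVALID_PARAMETER_VALUE responses (cluster, job, and job-run lookups) to ResourceDoesNotExist, so callers can handle a missing resource uniformly. A minimal sketch, using a hypothetical cluster ID:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.errors import ResourceDoesNotExist

    w = WorkspaceClient()
    try:
        w.clusters.get(cluster_id="0123-456789-notreal")  # hypothetical ID
    except ResourceDoesNotExist:
        # without the override this would surface as a generic 400 BadRequest
        print("cluster does not exist")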
class NotImplemented(DatabricksError): - """the operation is not implemented or is not supported/enabled in this service""" - + """the operation is not implemented or is not supported/enabled in this service""" class TemporarilyUnavailable(DatabricksError): - """the service is currently unavailable""" - + """the service is currently unavailable""" class DeadlineExceeded(DatabricksError): - """the deadline expired before the operation could complete""" - + """the deadline expired before the operation could complete""" class InvalidState(BadRequest): - """unexpected state""" - + """unexpected state""" class InvalidParameterValue(BadRequest): - """supplied value for a parameter was invalid""" - + """supplied value for a parameter was invalid""" class ResourceDoesNotExist(NotFound): - """operation was performed on a resource that does not exist""" - + """operation was performed on a resource that does not exist""" class Aborted(ResourceConflict): - """the operation was aborted, typically due to a concurrency issue such as a sequencer check + """the operation was aborted, typically due to a concurrency issue such as a sequencer check failure""" - class AlreadyExists(ResourceConflict): - """operation was rejected due a conflict with an existing resource""" - + """operation was rejected due a conflict with an existing resource""" class ResourceAlreadyExists(ResourceConflict): - """operation was rejected due a conflict with an existing resource""" - + """operation was rejected due a conflict with an existing resource""" class ResourceExhausted(TooManyRequests): - """operation is rejected due to per-user rate limiting""" - + """operation is rejected due to per-user rate limiting""" class RequestLimitExceeded(TooManyRequests): - """cluster request was rejected because it would exceed a resource limit""" - + """cluster request was rejected because it would exceed a resource limit""" class Unknown(InternalError): - """this error is used as a fallback if the platform-side mapping is missing some reason""" - + """this error is used as a fallback if the platform-side mapping is missing some reason""" class DataLoss(InternalError): - """unrecoverable data loss or corruption""" - - -STATUS_CODE_MAPPING = { - 400: BadRequest, - 401: Unauthenticated, - 403: PermissionDenied, - 404: NotFound, - 409: ResourceConflict, - 429: TooManyRequests, - 499: Cancelled, - 500: InternalError, - 501: NotImplemented, - 503: TemporarilyUnavailable, - 504: DeadlineExceeded, + """unrecoverable data loss or corruption""" + + +STATUS_CODE_MAPPING = { + 400: BadRequest, + 401: Unauthenticated, + 403: PermissionDenied, + 404: NotFound, + 409: ResourceConflict, + 429: TooManyRequests, + 499: Cancelled, + 500: InternalError, + 501: NotImplemented, + 503: TemporarilyUnavailable, + 504: DeadlineExceeded, } -ERROR_CODE_MAPPING = { - "INVALID_STATE": InvalidState, - "INVALID_PARAMETER_VALUE": InvalidParameterValue, - "RESOURCE_DOES_NOT_EXIST": ResourceDoesNotExist, - "ABORTED": Aborted, - "ALREADY_EXISTS": AlreadyExists, - "RESOURCE_ALREADY_EXISTS": ResourceAlreadyExists, - "RESOURCE_EXHAUSTED": ResourceExhausted, - "REQUEST_LIMIT_EXCEEDED": RequestLimitExceeded, - "UNKNOWN": Unknown, - "DATA_LOSS": DataLoss, -} +ERROR_CODE_MAPPING = { + 'INVALID_STATE': InvalidState, + 'INVALID_PARAMETER_VALUE': InvalidParameterValue, + 'RESOURCE_DOES_NOT_EXIST': ResourceDoesNotExist, + 'ABORTED': Aborted, + 'ALREADY_EXISTS': AlreadyExists, + 'RESOURCE_ALREADY_EXISTS': ResourceAlreadyExists, + 'RESOURCE_EXHAUSTED': ResourceExhausted, + 
'REQUEST_LIMIT_EXCEEDED': RequestLimitExceeded, + 'UNKNOWN': Unknown, + 'DATA_LOSS': DataLoss, +} \ No newline at end of file diff --git a/databricks/sdk/service/aibuilder.py b/databricks/sdk/service/aibuilder.py new file mode 100755 index 000000000..80e5ea033 --- /dev/null +++ b/databricks/sdk/service/aibuilder.py @@ -0,0 +1,368 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +from __future__ import annotations +from dataclasses import dataclass +from datetime import timedelta +from enum import Enum +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading + +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token + +_LOG = logging.getLogger('databricks.sdk') + + + +# all definitions in this file are in alphabetical order + +@dataclass +class CancelCustomLlmOptimizationRunRequest: + id: Optional[str] = None + + + + + +@dataclass +class CancelResponse: + def as_dict(self) -> dict: + """Serializes the CancelResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CancelResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CancelResponse: + """Deserializes the CancelResponse from a dictionary.""" + return cls() + + + + +@dataclass +class CustomLlm: + name: str + """Name of the custom LLM""" + + instructions: str + """Instructions for the custom LLM to follow""" + + optimization_state: State + """If optimization is kicked off, tracks the state of the custom LLM""" + + agent_artifact_path: Optional[str] = None + + creation_time: Optional[str] = None + """Creation timestamp of the custom LLM""" + + creator: Optional[str] = None + """Creator of the custom LLM""" + + datasets: Optional[List[Dataset]] = None + """Datasets used for training and evaluating the model, not for inference""" + + endpoint_name: Optional[str] = None + """Name of the endpoint that will be used to serve the custom LLM""" + + guidelines: Optional[List[str]] = None + """Guidelines for the custom LLM to adhere to""" + + id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the CustomLlm into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.agent_artifact_path is not None: body['agent_artifact_path'] = self.agent_artifact_path + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.creator is not None: body['creator'] = self.creator + if self.datasets: body['datasets'] = [v.as_dict() for v in self.datasets] + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.guidelines: body['guidelines'] = [v for v in self.guidelines] + if self.id is not None: body['id'] = self.id + if self.instructions is not None: body['instructions'] = self.instructions + if self.name is not None: body['name'] = self.name + if self.optimization_state is not None: body['optimization_state'] = self.optimization_state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomLlm into a shallow dictionary of its immediate attributes.""" + body = {} + if self.agent_artifact_path is not None: body['agent_artifact_path'] = self.agent_artifact_path 
+ if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.creator is not None: body['creator'] = self.creator + if self.datasets: body['datasets'] = self.datasets + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.guidelines: body['guidelines'] = self.guidelines + if self.id is not None: body['id'] = self.id + if self.instructions is not None: body['instructions'] = self.instructions + if self.name is not None: body['name'] = self.name + if self.optimization_state is not None: body['optimization_state'] = self.optimization_state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomLlm: + """Deserializes the CustomLlm from a dictionary.""" + return cls(agent_artifact_path=d.get('agent_artifact_path', None), creation_time=d.get('creation_time', None), creator=d.get('creator', None), datasets=_repeated_dict(d, 'datasets', Dataset), endpoint_name=d.get('endpoint_name', None), guidelines=d.get('guidelines', None), id=d.get('id', None), instructions=d.get('instructions', None), name=d.get('name', None), optimization_state=_enum(d, 'optimization_state', State)) + + + + +@dataclass +class Dataset: + table: Table + + def as_dict(self) -> dict: + """Serializes the Dataset into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.table: body['table'] = self.table.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Dataset into a shallow dictionary of its immediate attributes.""" + body = {} + if self.table: body['table'] = self.table + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Dataset: + """Deserializes the Dataset from a dictionary.""" + return cls(table=_from_dict(d, 'table', Table)) + + + + + + + +@dataclass +class StartCustomLlmOptimizationRunRequest: + id: Optional[str] = None + """The Id of the tile.""" + + + + + +class State(Enum): + """States of Custom LLM optimization lifecycle.""" + + CANCELLED = 'CANCELLED' + COMPLETED = 'COMPLETED' + CREATED = 'CREATED' + FAILED = 'FAILED' + PENDING = 'PENDING' + RUNNING = 'RUNNING' + +@dataclass +class Table: + table_path: str + """Full UC table path in catalog.schema.table_name format""" + + request_col: str + """Name of the request column""" + + response_col: Optional[str] = None + """Optional: Name of the response column if the data is labeled""" + + def as_dict(self) -> dict: + """Serializes the Table into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.request_col is not None: body['request_col'] = self.request_col + if self.response_col is not None: body['response_col'] = self.response_col + if self.table_path is not None: body['table_path'] = self.table_path + return body + + def as_shallow_dict(self) -> dict: + """Serializes the Table into a shallow dictionary of its immediate attributes.""" + body = {} + if self.request_col is not None: body['request_col'] = self.request_col + if self.response_col is not None: body['response_col'] = self.response_col + if self.table_path is not None: body['table_path'] = self.table_path + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Table: + """Deserializes the Table from a dictionary.""" + return cls(request_col=d.get('request_col', None), response_col=d.get('response_col', None), table_path=d.get('table_path', None)) + + + + +@dataclass +class UpdateCustomLlmRequest: + custom_llm: CustomLlm + """The CustomLlm containing the fields which should be updated.""" + + 
update_mask: str + """The list of the CustomLlm fields to update. These should correspond to the values (or lack + thereof) present in `custom_llm`. + + The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" + + id: Optional[str] = None + """The id of the custom llm""" + + def as_dict(self) -> dict: + """Serializes the UpdateCustomLlmRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.custom_llm: body['custom_llm'] = self.custom_llm.as_dict() + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateCustomLlmRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.custom_llm: body['custom_llm'] = self.custom_llm + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomLlmRequest: + """Deserializes the UpdateCustomLlmRequest from a dictionary.""" + return cls(custom_llm=_from_dict(d, 'custom_llm', CustomLlm), id=d.get('id', None), update_mask=d.get('update_mask', None)) + + + + + + +class CustomLlmsAPI: + """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" + + def __init__(self, api_client): + self._api = api_client + + + + + + + + + + def cancel(self + , id: str + ): + """Cancel a Custom LLM Optimization Run. + + :param id: str + + + """ + + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST',f'/api/2.0/custom-llms/{id}/optimize/cancel' + + , headers=headers + ) + + + + + + + def create(self + , id: str + ) -> CustomLlm: + """Start a Custom LLM Optimization Run. + + :param id: str + The Id of the tile. + + :returns: :class:`CustomLlm` + """ + + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/custom-llms/{id}/optimize' + + , headers=headers + ) + return CustomLlm.from_dict(res) + + + + + + def get(self + , id: str + ) -> CustomLlm: + """Get a Custom LLM. + + :param id: str + The id of the custom llm + + :returns: :class:`CustomLlm` + """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/custom-llms/{id}' + + , headers=headers + ) + return CustomLlm.from_dict(res) + + + + + + def update(self + , id: str, custom_llm: CustomLlm, update_mask: str + ) -> CustomLlm: + """Update a Custom LLM. + + :param id: str + The id of the custom llm + :param custom_llm: :class:`CustomLlm` + The CustomLlm containing the fields which should be updated. + :param update_mask: str + The list of the CustomLlm fields to update. These should correspond to the values (or lack thereof) + present in `custom_llm`. + + The field mask must be a single string, with multiple fields separated by commas (no spaces). 
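A hedged sketch of these field-mask semantics, using a hypothetical custom LLM id; only the fields named in update_mask are applied from the payload:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    llm = w.custom_llms.get(id="my-custom-llm-id")  # hypothetical id
    llm.instructions = "Respond in formal English."
    updated = w.custom_llms.update(
        id="my-custom-llm-id",
        custom_llm=llm,
        update_mask="instructions",  # comma-separated field paths, no spaces
    )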
The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`CustomLlm` + """ + body = {} + if custom_llm is not None: body['custom_llm'] = custom_llm.as_dict() + if update_mask is not None: body['update_mask'] = update_mask + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/custom-llms/{id}', body=body + + , headers=headers + ) + return CustomLlm.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index e0ca7d9a4..1b1e77629 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -1,1568 +1,1507 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class App: name: str """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace.""" - + active_deployment: Optional[AppDeployment] = None """The active deployment of the app. A deployment is considered active when it has been deployed to the app compute.""" - + app_status: Optional[ApplicationStatus] = None - + budget_policy_id: Optional[str] = None - + compute_status: Optional[ComputeStatus] = None - + create_time: Optional[str] = None """The creation time of the app. Formatted timestamp in ISO 6801.""" - + creator: Optional[str] = None """The email of the user that created the app.""" - + default_source_code_path: Optional[str] = None """The default workspace file system path of the source code from which app deployment are created. 
This field tracks the workspace source code path of the last active deployment.""" - + description: Optional[str] = None """The description of the app.""" - + effective_budget_policy_id: Optional[str] = None - + effective_user_api_scopes: Optional[List[str]] = None """The effective api scopes granted to the user access token.""" - + id: Optional[str] = None """The unique identifier of the app.""" - + oauth2_app_client_id: Optional[str] = None - + oauth2_app_integration_id: Optional[str] = None - + pending_deployment: Optional[AppDeployment] = None """The pending deployment of the app. A deployment is considered pending when it is being prepared for deployment to the app compute.""" - + resources: Optional[List[AppResource]] = None """Resources for the app.""" - + service_principal_client_id: Optional[str] = None - + service_principal_id: Optional[int] = None - + service_principal_name: Optional[str] = None - + update_time: Optional[str] = None """The update time of the app. Formatted timestamp in ISO 6801.""" - + updater: Optional[str] = None """The email of the user that last updated the app.""" - + url: Optional[str] = None """The URL of the app once it is deployed.""" - + user_api_scopes: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the App into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active_deployment: - body["active_deployment"] = self.active_deployment.as_dict() - if self.app_status: - body["app_status"] = self.app_status.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.compute_status: - body["compute_status"] = self.compute_status.as_dict() - if self.create_time is not None: - body["create_time"] = self.create_time - if self.creator is not None: - body["creator"] = self.creator - if self.default_source_code_path is not None: - body["default_source_code_path"] = self.default_source_code_path - if self.description is not None: - body["description"] = self.description - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.effective_user_api_scopes: - body["effective_user_api_scopes"] = [v for v in self.effective_user_api_scopes] - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.oauth2_app_client_id is not None: - body["oauth2_app_client_id"] = self.oauth2_app_client_id - if self.oauth2_app_integration_id is not None: - body["oauth2_app_integration_id"] = self.oauth2_app_integration_id - if self.pending_deployment: - body["pending_deployment"] = self.pending_deployment.as_dict() - if self.resources: - body["resources"] = [v.as_dict() for v in self.resources] - if self.service_principal_client_id is not None: - body["service_principal_client_id"] = self.service_principal_client_id - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.update_time is not None: - body["update_time"] = self.update_time - if self.updater is not None: - body["updater"] = self.updater - if self.url is not None: - body["url"] = self.url - if self.user_api_scopes: - body["user_api_scopes"] = [v for v in self.user_api_scopes] + if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict() + if self.app_status: body['app_status'] = self.app_status.as_dict() + if 
self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.compute_status: body['compute_status'] = self.compute_status.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.default_source_code_path is not None: body['default_source_code_path'] = self.default_source_code_path + if self.description is not None: body['description'] = self.description + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.effective_user_api_scopes: body['effective_user_api_scopes'] = [v for v in self.effective_user_api_scopes] + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.oauth2_app_client_id is not None: body['oauth2_app_client_id'] = self.oauth2_app_client_id + if self.oauth2_app_integration_id is not None: body['oauth2_app_integration_id'] = self.oauth2_app_integration_id + if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict() + if self.resources: body['resources'] = [v.as_dict() for v in self.resources] + if self.service_principal_client_id is not None: body['service_principal_client_id'] = self.service_principal_client_id + if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.update_time is not None: body['update_time'] = self.update_time + if self.updater is not None: body['updater'] = self.updater + if self.url is not None: body['url'] = self.url + if self.user_api_scopes: body['user_api_scopes'] = [v for v in self.user_api_scopes] return body def as_shallow_dict(self) -> dict: """Serializes the App into a shallow dictionary of its immediate attributes.""" body = {} - if self.active_deployment: - body["active_deployment"] = self.active_deployment - if self.app_status: - body["app_status"] = self.app_status - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.compute_status: - body["compute_status"] = self.compute_status - if self.create_time is not None: - body["create_time"] = self.create_time - if self.creator is not None: - body["creator"] = self.creator - if self.default_source_code_path is not None: - body["default_source_code_path"] = self.default_source_code_path - if self.description is not None: - body["description"] = self.description - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.effective_user_api_scopes: - body["effective_user_api_scopes"] = self.effective_user_api_scopes - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.oauth2_app_client_id is not None: - body["oauth2_app_client_id"] = self.oauth2_app_client_id - if self.oauth2_app_integration_id is not None: - body["oauth2_app_integration_id"] = self.oauth2_app_integration_id - if self.pending_deployment: - body["pending_deployment"] = self.pending_deployment - if self.resources: - body["resources"] = self.resources - if self.service_principal_client_id is not None: - body["service_principal_client_id"] = self.service_principal_client_id - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - if self.service_principal_name is not 
None: - body["service_principal_name"] = self.service_principal_name - if self.update_time is not None: - body["update_time"] = self.update_time - if self.updater is not None: - body["updater"] = self.updater - if self.url is not None: - body["url"] = self.url - if self.user_api_scopes: - body["user_api_scopes"] = self.user_api_scopes + if self.active_deployment: body['active_deployment'] = self.active_deployment + if self.app_status: body['app_status'] = self.app_status + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.compute_status: body['compute_status'] = self.compute_status + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.default_source_code_path is not None: body['default_source_code_path'] = self.default_source_code_path + if self.description is not None: body['description'] = self.description + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.effective_user_api_scopes: body['effective_user_api_scopes'] = self.effective_user_api_scopes + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.oauth2_app_client_id is not None: body['oauth2_app_client_id'] = self.oauth2_app_client_id + if self.oauth2_app_integration_id is not None: body['oauth2_app_integration_id'] = self.oauth2_app_integration_id + if self.pending_deployment: body['pending_deployment'] = self.pending_deployment + if self.resources: body['resources'] = self.resources + if self.service_principal_client_id is not None: body['service_principal_client_id'] = self.service_principal_client_id + if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.update_time is not None: body['update_time'] = self.update_time + if self.updater is not None: body['updater'] = self.updater + if self.url is not None: body['url'] = self.url + if self.user_api_scopes: body['user_api_scopes'] = self.user_api_scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> App: """Deserializes the App from a dictionary.""" - return cls( - active_deployment=_from_dict(d, "active_deployment", AppDeployment), - app_status=_from_dict(d, "app_status", ApplicationStatus), - budget_policy_id=d.get("budget_policy_id", None), - compute_status=_from_dict(d, "compute_status", ComputeStatus), - create_time=d.get("create_time", None), - creator=d.get("creator", None), - default_source_code_path=d.get("default_source_code_path", None), - description=d.get("description", None), - effective_budget_policy_id=d.get("effective_budget_policy_id", None), - effective_user_api_scopes=d.get("effective_user_api_scopes", None), - id=d.get("id", None), - name=d.get("name", None), - oauth2_app_client_id=d.get("oauth2_app_client_id", None), - oauth2_app_integration_id=d.get("oauth2_app_integration_id", None), - pending_deployment=_from_dict(d, "pending_deployment", AppDeployment), - resources=_repeated_dict(d, "resources", AppResource), - service_principal_client_id=d.get("service_principal_client_id", None), - service_principal_id=d.get("service_principal_id", None), - service_principal_name=d.get("service_principal_name", None), - update_time=d.get("update_time", None), - updater=d.get("updater", None), - url=d.get("url", None), - 
user_api_scopes=d.get("user_api_scopes", None), - ) + return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment), app_status=_from_dict(d, 'app_status', ApplicationStatus), budget_policy_id=d.get('budget_policy_id', None), compute_status=_from_dict(d, 'compute_status', ComputeStatus), create_time=d.get('create_time', None), creator=d.get('creator', None), default_source_code_path=d.get('default_source_code_path', None), description=d.get('description', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), effective_user_api_scopes=d.get('effective_user_api_scopes', None), id=d.get('id', None), name=d.get('name', None), oauth2_app_client_id=d.get('oauth2_app_client_id', None), oauth2_app_integration_id=d.get('oauth2_app_integration_id', None), pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment), resources=_repeated_dict(d, 'resources', AppResource), service_principal_client_id=d.get('service_principal_client_id', None), service_principal_id=d.get('service_principal_id', None), service_principal_name=d.get('service_principal_name', None), update_time=d.get('update_time', None), updater=d.get('updater', None), url=d.get('url', None), user_api_scopes=d.get('user_api_scopes', None)) + + @dataclass class AppAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[AppPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the AppAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AppAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppAccessControlRequest: """Deserializes the AppAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", AppPermissionLevel), - 
service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', AppPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class AppAccessControlResponse: all_permissions: Optional[List[AppPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the AppAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AppAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppAccessControlResponse: """Deserializes the AppAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", AppPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', AppPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class AppDeployment: create_time: Optional[str] = None """The creation time of the deployment. 
Formatted timestamp in ISO 8601.""" - + creator: Optional[str] = None """The email of the user who created the deployment.""" - + deployment_artifacts: Optional[AppDeploymentArtifacts] = None """The deployment artifacts for an app.""" - + deployment_id: Optional[str] = None """The unique id of the deployment.""" - + mode: Optional[AppDeploymentMode] = None """The mode in which the deployment will manage the source code.""" - + source_code_path: Optional[str] = None """The workspace file system path of the source code used to create the app deployment. This is different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. The former refers to the original source code location of the app in the workspace during deployment creation, whereas the latter provides a system-generated stable snapshotted source code path used by the deployment.""" - + status: Optional[AppDeploymentStatus] = None """Status and status message of the deployment""" - + update_time: Optional[str] = None """The update time of the deployment. Formatted timestamp in ISO 8601.""" - + def as_dict(self) -> dict: """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.creator is not None: - body["creator"] = self.creator - if self.deployment_artifacts: - body["deployment_artifacts"] = self.deployment_artifacts.as_dict() - if self.deployment_id is not None: - body["deployment_id"] = self.deployment_id - if self.mode is not None: - body["mode"] = self.mode.value - if self.source_code_path is not None: - body["source_code_path"] = self.source_code_path - if self.status: - body["status"] = self.status.as_dict() - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict() + if self.deployment_id is not None: body['deployment_id'] = self.deployment_id + if self.mode is not None: body['mode'] = self.mode.value + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + if self.status: body['status'] = self.status.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the AppDeployment into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.creator is not None: - body["creator"] = self.creator - if self.deployment_artifacts: - body["deployment_artifacts"] = self.deployment_artifacts - if self.deployment_id is not None: - body["deployment_id"] = self.deployment_id - if self.mode is not None: - body["mode"] = self.mode - if self.source_code_path is not None: - body["source_code_path"] = self.source_code_path - if self.status: - body["status"] = self.status - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.creator is not None: body['creator'] = self.creator + if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts + if self.deployment_id is not None: body['deployment_id'] = self.deployment_id + if self.mode is not None: body['mode'] = self.mode + if self.source_code_path is
not None: body['source_code_path'] = self.source_code_path + if self.status: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppDeployment: """Deserializes the AppDeployment from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - creator=d.get("creator", None), - deployment_artifacts=_from_dict(d, "deployment_artifacts", AppDeploymentArtifacts), - deployment_id=d.get("deployment_id", None), - mode=_enum(d, "mode", AppDeploymentMode), - source_code_path=d.get("source_code_path", None), - status=_from_dict(d, "status", AppDeploymentStatus), - update_time=d.get("update_time", None), - ) + return cls(create_time=d.get('create_time', None), creator=d.get('creator', None), deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts), deployment_id=d.get('deployment_id', None), mode=_enum(d, 'mode', AppDeploymentMode), source_code_path=d.get('source_code_path', None), status=_from_dict(d, 'status', AppDeploymentStatus), update_time=d.get('update_time', None)) + + @dataclass class AppDeploymentArtifacts: source_code_path: Optional[str] = None """The snapshotted workspace file system path of the source code loaded by the deployed app.""" - + def as_dict(self) -> dict: """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body.""" body = {} - if self.source_code_path is not None: - body["source_code_path"] = self.source_code_path + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path return body def as_shallow_dict(self) -> dict: """Serializes the AppDeploymentArtifacts into a shallow dictionary of its immediate attributes.""" body = {} - if self.source_code_path is not None: - body["source_code_path"] = self.source_code_path + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppDeploymentArtifacts: """Deserializes the AppDeploymentArtifacts from a dictionary.""" - return cls(source_code_path=d.get("source_code_path", None)) + return cls(source_code_path=d.get('source_code_path', None)) + -class AppDeploymentMode(Enum): - - AUTO_SYNC = "AUTO_SYNC" - SNAPSHOT = "SNAPSHOT" +class AppDeploymentMode(Enum): + + + AUTO_SYNC = 'AUTO_SYNC' + SNAPSHOT = 'SNAPSHOT' class AppDeploymentState(Enum): - - CANCELLED = "CANCELLED" - FAILED = "FAILED" - IN_PROGRESS = "IN_PROGRESS" - SUCCEEDED = "SUCCEEDED" - + + + CANCELLED = 'CANCELLED' + FAILED = 'FAILED' + IN_PROGRESS = 'IN_PROGRESS' + SUCCEEDED = 'SUCCEEDED' @dataclass class AppDeploymentStatus: message: Optional[str] = None """Message corresponding with the deployment state.""" - + state: Optional[AppDeploymentState] = None """State of the deployment.""" - + def as_dict(self) -> dict: """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state.value + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the AppDeploymentStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state + if 
self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppDeploymentStatus: """Deserializes the AppDeploymentStatus from a dictionary.""" - return cls(message=d.get("message", None), state=_enum(d, "state", AppDeploymentState)) + return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState)) + + @dataclass class AppPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[AppPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the AppPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the AppPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermission: """Deserializes the AppPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", AppPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', AppPermissionLevel)) + + class AppPermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = "CAN_MANAGE" - CAN_USE = "CAN_USE" - + + CAN_MANAGE = 'CAN_MANAGE' + CAN_USE = 'CAN_USE' @dataclass class AppPermissions: access_control_list: Optional[List[AppAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AppPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the AppPermissions into a shallow 
dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermissions: """Deserializes the AppPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class AppPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[AppPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the AppPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the AppPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsDescription: """Deserializes the AppPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), permission_level=_enum(d, "permission_level", AppPermissionLevel) - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', AppPermissionLevel)) + + @dataclass class AppPermissionsRequest: access_control_list: Optional[List[AppAccessControlRequest]] = None - + app_name: Optional[str] = None """The app for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the AppPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.app_name is not None: - body["app_name"] = self.app_name + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.app_name is not None: body['app_name'] = self.app_name return body def as_shallow_dict(self) -> dict: """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.app_name is not None: - 
body["app_name"] = self.app_name + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.app_name is not None: body['app_name'] = self.app_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsRequest: """Deserializes the AppPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlRequest), - app_name=d.get("app_name", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlRequest), app_name=d.get('app_name', None)) + + @dataclass class AppResource: name: str """Name of the App Resource.""" - + description: Optional[str] = None """Description of the App Resource.""" - + job: Optional[AppResourceJob] = None - + secret: Optional[AppResourceSecret] = None - + serving_endpoint: Optional[AppResourceServingEndpoint] = None - + sql_warehouse: Optional[AppResourceSqlWarehouse] = None - + uc_securable: Optional[AppResourceUcSecurable] = None - + def as_dict(self) -> dict: """Serializes the AppResource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.job: - body["job"] = self.job.as_dict() - if self.name is not None: - body["name"] = self.name - if self.secret: - body["secret"] = self.secret.as_dict() - if self.serving_endpoint: - body["serving_endpoint"] = self.serving_endpoint.as_dict() - if self.sql_warehouse: - body["sql_warehouse"] = self.sql_warehouse.as_dict() - if self.uc_securable: - body["uc_securable"] = self.uc_securable.as_dict() + if self.description is not None: body['description'] = self.description + if self.job: body['job'] = self.job.as_dict() + if self.name is not None: body['name'] = self.name + if self.secret: body['secret'] = self.secret.as_dict() + if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint.as_dict() + if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse.as_dict() + if self.uc_securable: body['uc_securable'] = self.uc_securable.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AppResource into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.job: - body["job"] = self.job - if self.name is not None: - body["name"] = self.name - if self.secret: - body["secret"] = self.secret - if self.serving_endpoint: - body["serving_endpoint"] = self.serving_endpoint - if self.sql_warehouse: - body["sql_warehouse"] = self.sql_warehouse - if self.uc_securable: - body["uc_securable"] = self.uc_securable + if self.description is not None: body['description'] = self.description + if self.job: body['job'] = self.job + if self.name is not None: body['name'] = self.name + if self.secret: body['secret'] = self.secret + if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint + if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse + if self.uc_securable: body['uc_securable'] = self.uc_securable return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResource: """Deserializes the AppResource from a dictionary.""" - return cls( - description=d.get("description", None), - job=_from_dict(d, "job", AppResourceJob), - name=d.get("name", None), - secret=_from_dict(d, "secret", AppResourceSecret), - serving_endpoint=_from_dict(d, "serving_endpoint", AppResourceServingEndpoint), - 
sql_warehouse=_from_dict(d, "sql_warehouse", AppResourceSqlWarehouse), - uc_securable=_from_dict(d, "uc_securable", AppResourceUcSecurable), - ) + return cls(description=d.get('description', None), job=_from_dict(d, 'job', AppResourceJob), name=d.get('name', None), secret=_from_dict(d, 'secret', AppResourceSecret), serving_endpoint=_from_dict(d, 'serving_endpoint', AppResourceServingEndpoint), sql_warehouse=_from_dict(d, 'sql_warehouse', AppResourceSqlWarehouse), uc_securable=_from_dict(d, 'uc_securable', AppResourceUcSecurable)) + + @dataclass class AppResourceJob: id: str """Id of the job to grant permission on.""" - + permission: AppResourceJobJobPermission """Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER", "CAN_MANAGE_RUN", "CAN_VIEW".""" - + def as_dict(self) -> dict: """Serializes the AppResourceJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.permission is not None: - body["permission"] = self.permission.value + if self.id is not None: body['id'] = self.id + if self.permission is not None: body['permission'] = self.permission.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.permission is not None: - body["permission"] = self.permission + if self.id is not None: body['id'] = self.id + if self.permission is not None: body['permission'] = self.permission return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceJob: """Deserializes the AppResourceJob from a dictionary.""" - return cls(id=d.get("id", None), permission=_enum(d, "permission", AppResourceJobJobPermission)) + return cls(id=d.get('id', None), permission=_enum(d, 'permission', AppResourceJobJobPermission)) + -class AppResourceJobJobPermission(Enum): - - CAN_MANAGE = "CAN_MANAGE" - CAN_MANAGE_RUN = "CAN_MANAGE_RUN" - CAN_VIEW = "CAN_VIEW" - IS_OWNER = "IS_OWNER" +class AppResourceJobJobPermission(Enum): + + + CAN_MANAGE = 'CAN_MANAGE' + CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' + CAN_VIEW = 'CAN_VIEW' + IS_OWNER = 'IS_OWNER' @dataclass class AppResourceSecret: scope: str """Scope of the secret to grant permission on.""" - + key: str """Key of the secret to grant permission on.""" - + permission: AppResourceSecretSecretPermission """Permission to grant on the secret scope. For secrets, only one permission is allowed. 
Permission must be one of: "READ", "WRITE", "MANAGE".""" - + def as_dict(self) -> dict: """Serializes the AppResourceSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.permission is not None: - body["permission"] = self.permission.value - if self.scope is not None: - body["scope"] = self.scope + if self.key is not None: body['key'] = self.key + if self.permission is not None: body['permission'] = self.permission.value + if self.scope is not None: body['scope'] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.permission is not None: - body["permission"] = self.permission - if self.scope is not None: - body["scope"] = self.scope + if self.key is not None: body['key'] = self.key + if self.permission is not None: body['permission'] = self.permission + if self.scope is not None: body['scope'] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceSecret: """Deserializes the AppResourceSecret from a dictionary.""" - return cls( - key=d.get("key", None), - permission=_enum(d, "permission", AppResourceSecretSecretPermission), - scope=d.get("scope", None), - ) + return cls(key=d.get('key', None), permission=_enum(d, 'permission', AppResourceSecretSecretPermission), scope=d.get('scope', None)) + + class AppResourceSecretSecretPermission(Enum): """Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE".""" - - MANAGE = "MANAGE" - READ = "READ" - WRITE = "WRITE" - + + MANAGE = 'MANAGE' + READ = 'READ' + WRITE = 'WRITE' @dataclass class AppResourceServingEndpoint: name: str """Name of the serving endpoint to grant permission on.""" - + permission: AppResourceServingEndpointServingEndpointPermission """Permission to grant on the serving endpoint. 
Supported permissions are: "CAN_MANAGE", "CAN_QUERY", "CAN_VIEW".""" - + def as_dict(self) -> dict: """Serializes the AppResourceServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.permission is not None: - body["permission"] = self.permission.value + if self.name is not None: body['name'] = self.name + if self.permission is not None: body['permission'] = self.permission.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceServingEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.permission is not None: - body["permission"] = self.permission + if self.name is not None: body['name'] = self.name + if self.permission is not None: body['permission'] = self.permission return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceServingEndpoint: """Deserializes the AppResourceServingEndpoint from a dictionary.""" - return cls( - name=d.get("name", None), - permission=_enum(d, "permission", AppResourceServingEndpointServingEndpointPermission), - ) + return cls(name=d.get('name', None), permission=_enum(d, 'permission', AppResourceServingEndpointServingEndpointPermission)) + -class AppResourceServingEndpointServingEndpointPermission(Enum): - - CAN_MANAGE = "CAN_MANAGE" - CAN_QUERY = "CAN_QUERY" - CAN_VIEW = "CAN_VIEW" +class AppResourceServingEndpointServingEndpointPermission(Enum): + + + CAN_MANAGE = 'CAN_MANAGE' + CAN_QUERY = 'CAN_QUERY' + CAN_VIEW = 'CAN_VIEW' @dataclass class AppResourceSqlWarehouse: id: str """Id of the SQL warehouse to grant permission on.""" - + permission: AppResourceSqlWarehouseSqlWarehousePermission """Permission to grant on the SQL warehouse. 
Supported permissions are: "CAN_MANAGE", "CAN_USE", "IS_OWNER".""" - + def as_dict(self) -> dict: """Serializes the AppResourceSqlWarehouse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.permission is not None: - body["permission"] = self.permission.value + if self.id is not None: body['id'] = self.id + if self.permission is not None: body['permission'] = self.permission.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceSqlWarehouse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.permission is not None: - body["permission"] = self.permission + if self.id is not None: body['id'] = self.id + if self.permission is not None: body['permission'] = self.permission return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceSqlWarehouse: """Deserializes the AppResourceSqlWarehouse from a dictionary.""" - return cls( - id=d.get("id", None), permission=_enum(d, "permission", AppResourceSqlWarehouseSqlWarehousePermission) - ) + return cls(id=d.get('id', None), permission=_enum(d, 'permission', AppResourceSqlWarehouseSqlWarehousePermission)) + -class AppResourceSqlWarehouseSqlWarehousePermission(Enum): - - CAN_MANAGE = "CAN_MANAGE" - CAN_USE = "CAN_USE" - IS_OWNER = "IS_OWNER" +class AppResourceSqlWarehouseSqlWarehousePermission(Enum): + + + CAN_MANAGE = 'CAN_MANAGE' + CAN_USE = 'CAN_USE' + IS_OWNER = 'IS_OWNER' @dataclass class AppResourceUcSecurable: securable_full_name: str - + securable_type: AppResourceUcSecurableUcSecurableType - + permission: AppResourceUcSecurableUcSecurablePermission - + def as_dict(self) -> dict: """Serializes the AppResourceUcSecurable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission is not None: - body["permission"] = self.permission.value - if self.securable_full_name is not None: - body["securable_full_name"] = self.securable_full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type.value + if self.permission is not None: body['permission'] = self.permission.value + if self.securable_full_name is not None: body['securable_full_name'] = self.securable_full_name + if self.securable_type is not None: body['securable_type'] = self.securable_type.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceUcSecurable into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission is not None: - body["permission"] = self.permission - if self.securable_full_name is not None: - body["securable_full_name"] = self.securable_full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + if self.permission is not None: body['permission'] = self.permission + if self.securable_full_name is not None: body['securable_full_name'] = self.securable_full_name + if self.securable_type is not None: body['securable_type'] = self.securable_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceUcSecurable: """Deserializes the AppResourceUcSecurable from a dictionary.""" - return cls( - permission=_enum(d, "permission", AppResourceUcSecurableUcSecurablePermission), - securable_full_name=d.get("securable_full_name", None), - securable_type=_enum(d, "securable_type", AppResourceUcSecurableUcSecurableType), - ) - + return cls(permission=_enum(d, 'permission', AppResourceUcSecurableUcSecurablePermission), 
securable_full_name=d.get('securable_full_name', None), securable_type=_enum(d, 'securable_type', AppResourceUcSecurableUcSecurableType)) + -class AppResourceUcSecurableUcSecurablePermission(Enum): - READ_VOLUME = "READ_VOLUME" - WRITE_VOLUME = "WRITE_VOLUME" +class AppResourceUcSecurableUcSecurablePermission(Enum): + + + READ_VOLUME = 'READ_VOLUME' + WRITE_VOLUME = 'WRITE_VOLUME' class AppResourceUcSecurableUcSecurableType(Enum): - - VOLUME = "VOLUME" - + + + VOLUME = 'VOLUME' class ApplicationState(Enum): - - CRASHED = "CRASHED" - DEPLOYING = "DEPLOYING" - RUNNING = "RUNNING" - UNAVAILABLE = "UNAVAILABLE" - + + + CRASHED = 'CRASHED' + DEPLOYING = 'DEPLOYING' + RUNNING = 'RUNNING' + UNAVAILABLE = 'UNAVAILABLE' @dataclass class ApplicationStatus: message: Optional[str] = None """Application status message""" - + state: Optional[ApplicationState] = None """State of the application.""" - + def as_dict(self) -> dict: """Serializes the ApplicationStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state.value + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the ApplicationStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApplicationStatus: """Deserializes the ApplicationStatus from a dictionary.""" - return cls(message=d.get("message", None), state=_enum(d, "state", ApplicationState)) + return cls(message=d.get('message', None), state=_enum(d, 'state', ApplicationState)) + -class ComputeState(Enum): - - ACTIVE = "ACTIVE" - DELETING = "DELETING" - ERROR = "ERROR" - STARTING = "STARTING" - STOPPED = "STOPPED" - STOPPING = "STOPPING" - UPDATING = "UPDATING" +class ComputeState(Enum): + + + ACTIVE = 'ACTIVE' + DELETING = 'DELETING' + ERROR = 'ERROR' + STARTING = 'STARTING' + STOPPED = 'STOPPED' + STOPPING = 'STOPPING' + UPDATING = 'UPDATING' @dataclass class ComputeStatus: message: Optional[str] = None """Compute status message""" - + state: Optional[ComputeState] = None """State of the app compute.""" - + def as_dict(self) -> dict: """Serializes the ComputeStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state.value + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the ComputeStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComputeStatus: """Deserializes the ComputeStatus from a dictionary.""" - return cls(message=d.get("message", None), state=_enum(d, "state", ComputeState)) + return 
cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState)) + + + + + + + + + + + + + + + + + @dataclass class GetAppPermissionLevelsResponse: permission_levels: Optional[List[AppPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetAppPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetAppPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetAppPermissionLevelsResponse: """Deserializes the GetAppPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", AppPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', AppPermissionsDescription)) + + + + + + + + + + + @dataclass class ListAppDeploymentsResponse: app_deployments: Optional[List[AppDeployment]] = None """Deployment history of the app.""" - + next_page_token: Optional[str] = None """Pagination token to request the next page of apps.""" - + def as_dict(self) -> dict: """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_deployments: - body["app_deployments"] = [v.as_dict() for v in self.app_deployments] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListAppDeploymentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_deployments: - body["app_deployments"] = self.app_deployments - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.app_deployments: body['app_deployments'] = self.app_deployments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAppDeploymentsResponse: """Deserializes the ListAppDeploymentsResponse from a dictionary.""" - return cls( - app_deployments=_repeated_dict(d, "app_deployments", AppDeployment), - next_page_token=d.get("next_page_token", None), - ) + return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListAppsResponse: apps: Optional[List[App]] = None - + next_page_token: Optional[str] = None """Pagination token to request the next page of apps.""" - + def as_dict(self) -> dict: """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: - body["apps"] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if 
self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListAppsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: - body["apps"] = self.apps - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAppsResponse: """Deserializes the ListAppsResponse from a dictionary.""" - return cls(apps=_repeated_dict(d, "apps", App), next_page_token=d.get("next_page_token", None)) + return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None)) + + @dataclass class StartAppRequest: name: Optional[str] = None """The name of the app.""" + + + @dataclass class StopAppRequest: name: Optional[str] = None """The name of the app.""" + + + + + + + + class AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" - + def __init__(self, api_client): self._api = api_client - - def wait_get_app_active( - self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None - ) -> App: - deadline = time.time() + timeout.total_seconds() - target_states = (ComputeState.ACTIVE,) - failure_states = ( - ComputeState.ERROR, - ComputeState.STOPPED, - ) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.compute_status.state - status_message = f"current status: {status}" - if poll.compute_status: - status_message = poll.compute_status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach ACTIVE, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def wait_get_deployment_app_succeeded( - self, - app_name: str, - deployment_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[AppDeployment], None]] = None, - ) -> AppDeployment: - deadline = time.time() + timeout.total_seconds() - target_states = (AppDeploymentState.SUCCEEDED,) - failure_states = (AppDeploymentState.FAILED,) - status_message = "polling..." 
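The waiters in this class all share one polling loop: compute a deadline from `timeout`, poll the GET endpoint, invoke the optional callback, fail fast on terminal failure states, and sleep `attempt` seconds (capped at 10) plus jitter. A usage sketch, assuming a configured `WorkspaceClient` `w` that exposes this API as `w.apps` (the app name is a placeholder):

    from datetime import timedelta

    # The callback fires on every poll, which is handy for progress logging.
    app = w.apps.wait_get_app_active(
        name="my-app",
        timeout=timedelta(minutes=5),
        callback=lambda a: print(a.compute_status.state if a.compute_status else None),
    )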
- attempt = 1 - while time.time() < deadline: - poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) - status = poll.status.state - status_message = f"current status: {status}" - if poll.status: - status_message = poll.status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach SUCCEEDED, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"app_name={app_name}, deployment_id={deployment_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def wait_get_app_stopped( - self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None - ) -> App: - deadline = time.time() + timeout.total_seconds() - target_states = (ComputeState.STOPPED,) - failure_states = (ComputeState.ERROR,) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.compute_status.state - status_message = f"current status: {status}" - if poll.compute_status: - status_message = poll.compute_status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach STOPPED, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]: + + + + + + def wait_get_app_active(self, name: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None) -> App: + deadline = time.time() + timeout.total_seconds() + target_states = (ComputeState.ACTIVE, ) + failure_states = (ComputeState.ERROR, ComputeState.STOPPED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.compute_status.state + status_message = f'current status: {status}' + if poll.compute_status: + status_message = poll.compute_status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach ACTIVE, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_deployment_app_succeeded(self, app_name: str, deployment_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: + deadline = time.time() + timeout.total_seconds() + target_states = (AppDeploymentState.SUCCEEDED, ) + failure_states = (AppDeploymentState.FAILED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) + status = poll.status.state + status_message = f'current status: {status}' + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"app_name={app_name}, deployment_id={deployment_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_app_stopped(self, name: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None) -> App: + deadline = time.time() + timeout.total_seconds() + target_states = (ComputeState.STOPPED, ) + failure_states = (ComputeState.ERROR, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.compute_status.state + status_message = f'current status: {status}' + if poll.compute_status: + status_message = poll.compute_status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach STOPPED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + + def create(self + , app: App + , * + , no_compute: Optional[bool] = None) -> Wait[App]: """Create an app. - + Creates a new app. - + :param app: :class:`App` :param no_compute: bool (optional) If true, the app will not be started after creation. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. """ body = app.as_dict() query = {} - if no_compute is not None: - query["no_compute"] = no_compute - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers) - return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"]) - - def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App: + if no_compute is not None: query['no_compute'] = no_compute + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.0/apps', query=query, body=body + + , headers=headers + ) + return Wait(self.wait_get_app_active + , response = App.from_dict(op_response) + , name=op_response['name']) + + + def create_and_wait(self + , app: App + , * + , no_compute: Optional[bool] = None, + timeout=timedelta(minutes=20)) -> App: return self.create(app=app, no_compute=no_compute).result(timeout=timeout) + + + - def delete(self, name: str) -> App: + def delete(self + , name: str + ) -> App: """Delete an app. - + Deletes an app. - + :param name: str The name of the app. 
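A sketch of the two creation flavors (the app name is a placeholder; `w` is assumed as above, and `App` is assumed to accept its required `name` field, as its serializers earlier in the module suggest):

    from datetime import timedelta

    # create() returns immediately with a Wait[App]; result() blocks until the
    # app reaches ACTIVE, which is exactly what create_and_wait() wraps.
    waiter = w.apps.create(app=App(name="my-app"))
    app = waiter.result(timeout=timedelta(minutes=20))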
- + :returns: :class:`App` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE',f'/api/2.0/apps/{name}' + + , headers=headers + ) return App.from_dict(res) - def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]: - """Create an app deployment. + + + + def deploy(self + , app_name: str, app_deployment: AppDeployment + ) -> Wait[AppDeployment]: + """Create an app deployment. + Creates an app deployment for the app with the supplied name. - + :param app_name: str The name of the app. :param app_deployment: :class:`AppDeployment` - + :returns: Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. """ body = app_deployment.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", f"/api/2.0/apps/{app_name}/deployments", body=body, headers=headers) - return Wait( - self.wait_get_deployment_app_succeeded, - response=AppDeployment.from_dict(op_response), - app_name=app_name, - deployment_id=op_response["deployment_id"], - ) - - def deploy_and_wait( - self, app_name: str, app_deployment: AppDeployment, timeout=timedelta(minutes=20) - ) -> AppDeployment: + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/apps/{app_name}/deployments', body=body + + , headers=headers + ) + return Wait(self.wait_get_deployment_app_succeeded + , response = AppDeployment.from_dict(op_response) + , app_name=app_name, deployment_id=op_response['deployment_id']) + + + def deploy_and_wait(self + , app_name: str, app_deployment: AppDeployment + , + timeout=timedelta(minutes=20)) -> AppDeployment: return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout) + + + - def get(self, name: str) -> App: + def get(self + , name: str + ) -> App: """Get an app. - + Retrieves information for the app with the supplied name. - + :param name: str The name of the app. - + :returns: :class:`App` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/apps/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/apps/{name}' + + , headers=headers + ) return App.from_dict(res) - def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: - """Get an app deployment. + + + + def get_deployment(self + , app_name: str, deployment_id: str + ) -> AppDeployment: + """Get an app deployment. + Retrieves information for the app deployment with the supplied name and deployment id. - + :param app_name: str The name of the app. :param deployment_id: str The unique id of the deployment. - + :returns: :class:`AppDeployment` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/apps/{app_name}/deployments/{deployment_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/apps/{app_name}/deployments/{deployment_id}' + + , headers=headers + ) return AppDeployment.from_dict(res) - def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse: - """Get app permission levels. 
+ + + + def get_permission_levels(self + , app_name: str + ) -> GetAppPermissionLevelsResponse: + """Get app permission levels. + Gets the permission levels that a user can have on an object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`GetAppPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/apps/{app_name}/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/apps/{app_name}/permissionLevels' + + , headers=headers + ) return GetAppPermissionLevelsResponse.from_dict(res) - def get_permissions(self, app_name: str) -> AppPermissions: - """Get app permissions. + + + + def get_permissions(self + , app_name: str + ) -> AppPermissions: + """Get app permissions. + Gets the permissions of an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`AppPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/apps/{app_name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/apps/{app_name}' + + , headers=headers + ) return AppPermissions.from_dict(res) - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: - """List apps. + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: + """List apps. + Lists all apps in the workspace. - + :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. - + :returns: Iterator over :class:`App` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/apps", query=query, headers=headers) - if "apps" in json: - for v in json["apps"]: - yield App.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_deployments( - self, app_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[AppDeployment]: + json = self._api.do('GET','/api/2.0/apps', query=query + + , headers=headers + ) + if 'apps' in json: + for v in json['apps']: + yield App.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def list_deployments(self + , app_name: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AppDeployment]: """List app deployments. - + Lists all app deployments for the app with the supplied name. - + :param app_name: str The name of the app. :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. 
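The iterator manages `next_page_token` internally, so callers never page by hand; a sketch (`w` assumed as above):

    # page_size is only a per-request upper bound; iteration keeps requesting
    # pages until the server stops returning next_page_token.
    for app in w.apps.list(page_size=50):
        print(app.name)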
- + :returns: Iterator over :class:`AppDeployment` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", f"/api/2.0/apps/{app_name}/deployments", query=query, headers=headers) - if "app_deployments" in json: - for v in json["app_deployments"]: - yield AppDeployment.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def set_permissions( - self, app_name: str, *, access_control_list: Optional[List[AppAccessControlRequest]] = None - ) -> AppPermissions: + json = self._api.do('GET',f'/api/2.0/apps/{app_name}/deployments', query=query + + , headers=headers + ) + if 'app_deployments' in json: + for v in json['app_deployments']: + yield AppDeployment.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def set_permissions(self + , app_name: str + , * + , access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: """Set app permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/apps/{app_name}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/apps/{app_name}', body=body + + , headers=headers + ) return AppPermissions.from_dict(res) - def start(self, name: str) -> Wait[App]: - """Start an app. + + + + def start(self + , name: str + ) -> Wait[App]: + """Start an app. + Start the last active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. 
""" - - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", f"/api/2.0/apps/{name}/start", headers=headers) - return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"]) - - def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App: + + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/apps/{name}/start' + + , headers=headers + ) + return Wait(self.wait_get_app_active + , response = App.from_dict(op_response) + , name=op_response['name']) + + + def start_and_wait(self + , name: str + , + timeout=timedelta(minutes=20)) -> App: return self.start(name=name).result(timeout=timeout) + + + - def stop(self, name: str) -> Wait[App]: + def stop(self + , name: str + ) -> Wait[App]: """Stop an app. - + Stops the active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_stopped for more details. """ - - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", f"/api/2.0/apps/{name}/stop", headers=headers) - return Wait(self.wait_get_app_stopped, response=App.from_dict(op_response), name=op_response["name"]) - - def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App: + + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/apps/{name}/stop' + + , headers=headers + ) + return Wait(self.wait_get_app_stopped + , response = App.from_dict(op_response) + , name=op_response['name']) + + + def stop_and_wait(self + , name: str + , + timeout=timedelta(minutes=20)) -> App: return self.stop(name=name).result(timeout=timeout) + + + - def update(self, name: str, app: App) -> App: + def update(self + , name: str, app: App + ) -> App: """Update an app. - + Updates the app with the supplied name. - + :param name: str The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace. :param app: :class:`App` - + :returns: :class:`App` """ body = app.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/apps/{name}", body=body, headers=headers) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/apps/{name}', body=body + + , headers=headers + ) return App.from_dict(res) - def update_permissions( - self, app_name: str, *, access_control_list: Optional[List[AppAccessControlRequest]] = None - ) -> AppPermissions: - """Update app permissions. + + + + def update_permissions(self + , app_name: str + , * + , access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: + """Update app permissions. + Updates the permissions on an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. 
:param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/permissions/apps/{app_name}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/apps/{app_name}', body=body + + , headers=headers + ) return AppPermissions.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 3595e4026..a09c58758 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -1,231 +1,191 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, BinaryIO, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _enum, _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') from databricks.sdk.service import compute # all definitions in this file are in alphabetical order - @dataclass class ActionConfiguration: action_configuration_id: Optional[str] = None """Databricks action configuration ID.""" - + action_type: Optional[ActionConfigurationType] = None """The type of the action.""" - + target: Optional[str] = None """Target for the action. 
For example, an email address.""" - + def as_dict(self) -> dict: """Serializes the ActionConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_configuration_id is not None: - body["action_configuration_id"] = self.action_configuration_id - if self.action_type is not None: - body["action_type"] = self.action_type.value - if self.target is not None: - body["target"] = self.target + if self.action_configuration_id is not None: body['action_configuration_id'] = self.action_configuration_id + if self.action_type is not None: body['action_type'] = self.action_type.value + if self.target is not None: body['target'] = self.target return body def as_shallow_dict(self) -> dict: """Serializes the ActionConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_configuration_id is not None: - body["action_configuration_id"] = self.action_configuration_id - if self.action_type is not None: - body["action_type"] = self.action_type - if self.target is not None: - body["target"] = self.target + if self.action_configuration_id is not None: body['action_configuration_id'] = self.action_configuration_id + if self.action_type is not None: body['action_type'] = self.action_type + if self.target is not None: body['target'] = self.target return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ActionConfiguration: """Deserializes the ActionConfiguration from a dictionary.""" - return cls( - action_configuration_id=d.get("action_configuration_id", None), - action_type=_enum(d, "action_type", ActionConfigurationType), - target=d.get("target", None), - ) - + return cls(action_configuration_id=d.get('action_configuration_id', None), action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None)) + -class ActionConfigurationType(Enum): - EMAIL_NOTIFICATION = "EMAIL_NOTIFICATION" +class ActionConfigurationType(Enum): + + + EMAIL_NOTIFICATION = 'EMAIL_NOTIFICATION' @dataclass class AlertConfiguration: action_configurations: Optional[List[ActionConfiguration]] = None """Configured actions for this alert. These define what happens when an alert enters a triggered state.""" - + alert_configuration_id: Optional[str] = None """Databricks alert configuration ID.""" - + quantity_threshold: Optional[str] = None """The threshold for the budget alert to determine if it is in a triggered state. The number is evaluated based on `quantity_type`.""" - + quantity_type: Optional[AlertConfigurationQuantityType] = None """The way to calculate cost for this budget alert. 
This is what `quantity_threshold` is measured in.""" - + time_period: Optional[AlertConfigurationTimePeriod] = None """The time window of usage data for the budget.""" - + trigger_type: Optional[AlertConfigurationTriggerType] = None """The evaluation method to determine when this budget alert is in a triggered state.""" - + def as_dict(self) -> dict: """Serializes the AlertConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_configurations: - body["action_configurations"] = [v.as_dict() for v in self.action_configurations] - if self.alert_configuration_id is not None: - body["alert_configuration_id"] = self.alert_configuration_id - if self.quantity_threshold is not None: - body["quantity_threshold"] = self.quantity_threshold - if self.quantity_type is not None: - body["quantity_type"] = self.quantity_type.value - if self.time_period is not None: - body["time_period"] = self.time_period.value - if self.trigger_type is not None: - body["trigger_type"] = self.trigger_type.value + if self.action_configurations: body['action_configurations'] = [v.as_dict() for v in self.action_configurations] + if self.alert_configuration_id is not None: body['alert_configuration_id'] = self.alert_configuration_id + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value + if self.time_period is not None: body['time_period'] = self.time_period.value + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value return body def as_shallow_dict(self) -> dict: """Serializes the AlertConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_configurations: - body["action_configurations"] = self.action_configurations - if self.alert_configuration_id is not None: - body["alert_configuration_id"] = self.alert_configuration_id - if self.quantity_threshold is not None: - body["quantity_threshold"] = self.quantity_threshold - if self.quantity_type is not None: - body["quantity_type"] = self.quantity_type - if self.time_period is not None: - body["time_period"] = self.time_period - if self.trigger_type is not None: - body["trigger_type"] = self.trigger_type + if self.action_configurations: body['action_configurations'] = self.action_configurations + if self.alert_configuration_id is not None: body['alert_configuration_id'] = self.alert_configuration_id + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type + if self.time_period is not None: body['time_period'] = self.time_period + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertConfiguration: """Deserializes the AlertConfiguration from a dictionary.""" - return cls( - action_configurations=_repeated_dict(d, "action_configurations", ActionConfiguration), - alert_configuration_id=d.get("alert_configuration_id", None), - quantity_threshold=d.get("quantity_threshold", None), - quantity_type=_enum(d, "quantity_type", AlertConfigurationQuantityType), - time_period=_enum(d, "time_period", AlertConfigurationTimePeriod), - trigger_type=_enum(d, "trigger_type", AlertConfigurationTriggerType), - ) - + return cls(action_configurations=_repeated_dict(d, 'action_configurations', ActionConfiguration), 
alert_configuration_id=d.get('alert_configuration_id', None), quantity_threshold=d.get('quantity_threshold', None), quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) + -class AlertConfigurationQuantityType(Enum): - LIST_PRICE_DOLLARS_USD = "LIST_PRICE_DOLLARS_USD" +class AlertConfigurationQuantityType(Enum): + + + LIST_PRICE_DOLLARS_USD = 'LIST_PRICE_DOLLARS_USD' class AlertConfigurationTimePeriod(Enum): - - MONTH = "MONTH" - + + + MONTH = 'MONTH' class AlertConfigurationTriggerType(Enum): - - CUMULATIVE_SPENDING_EXCEEDED = "CUMULATIVE_SPENDING_EXCEEDED" - + + + CUMULATIVE_SPENDING_EXCEEDED = 'CUMULATIVE_SPENDING_EXCEEDED' @dataclass class BudgetConfiguration: account_id: Optional[str] = None """Databricks account ID.""" - + alert_configurations: Optional[List[AlertConfiguration]] = None """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one alert configuration.""" - + budget_configuration_id: Optional[str] = None """Databricks budget configuration ID.""" - + create_time: Optional[int] = None """Creation time of this budget configuration.""" - + display_name: Optional[str] = None """Human-readable name of budget configuration. Max Length: 128""" - + filter: Optional[BudgetConfigurationFilter] = None """Configured filters for this budget. These are applied to your account's usage to limit the scope of what is considered for this budget. Leave empty to include all usage for this account. All provided filters must be matched for usage to be included.""" - + update_time: Optional[int] = None """Update time of this budget configuration.""" - + def as_dict(self) -> dict: """Serializes the BudgetConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.alert_configurations: - body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations] - if self.budget_configuration_id is not None: - body["budget_configuration_id"] = self.budget_configuration_id - if self.create_time is not None: - body["create_time"] = self.create_time - if self.display_name is not None: - body["display_name"] = self.display_name - if self.filter: - body["filter"] = self.filter.as_dict() - if self.update_time is not None: - body["update_time"] = self.update_time + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.alert_configurations: - body["alert_configurations"] = self.alert_configurations - if self.budget_configuration_id is not None: - body["budget_configuration_id"] = self.budget_configuration_id - if self.create_time is not None: - body["create_time"] = 
self.create_time - if self.display_name is not None: - body["display_name"] = self.display_name - if self.filter: - body["filter"] = self.filter - if self.update_time is not None: - body["update_time"] = self.update_time + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = self.alert_configurations + if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfiguration: """Deserializes the BudgetConfiguration from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - alert_configurations=_repeated_dict(d, "alert_configurations", AlertConfiguration), - budget_configuration_id=d.get("budget_configuration_id", None), - create_time=d.get("create_time", None), - display_name=d.get("display_name", None), - filter=_from_dict(d, "filter", BudgetConfigurationFilter), - update_time=d.get("update_time", None), - ) + return cls(account_id=d.get('account_id', None), alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), budget_configuration_id=d.get('budget_configuration_id', None), create_time=d.get('create_time', None), display_name=d.get('display_name', None), filter=_from_dict(d, 'filter', BudgetConfigurationFilter), update_time=d.get('update_time', None)) + + @dataclass @@ -234,186 +194,164 @@ class BudgetConfigurationFilter: """A list of tag keys and values that will limit the budget to usage that includes those specific custom tags. 
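Every dataclass in this module follows the serialization contract that `BudgetConfiguration` above illustrates: `as_dict()` recursively converts nested messages and enum values into the JSON wire shape, `as_shallow_dict()` leaves nested objects in place, and `from_dict()` inverts `as_dict()`. A round-trip sketch with illustrative values only:

    cfg = BudgetConfiguration(
        account_id="1234-abcd",      # placeholder account ID
        display_name="team-budget",
        create_time=1717500000,      # epoch-style timestamp, value illustrative
    )
    wire = cfg.as_dict()             # unset (None) fields are omitted from the body
    assert BudgetConfiguration.from_dict(wire) == cfg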
Tags are case-sensitive and should be entered exactly as they appear in your usage data.""" - + workspace_id: Optional[BudgetConfigurationFilterWorkspaceIdClause] = None """If provided, usage must match with the provided Databricks workspace IDs.""" - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.workspace_id: - body["workspace_id"] = self.workspace_id.as_dict() + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfigurationFilter into a shallow dictionary of its immediate attributes.""" body = {} - if self.tags: - body["tags"] = self.tags - if self.workspace_id: - body["workspace_id"] = self.workspace_id + if self.tags: body['tags'] = self.tags + if self.workspace_id: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilter: """Deserializes the BudgetConfigurationFilter from a dictionary.""" - return cls( - tags=_repeated_dict(d, "tags", BudgetConfigurationFilterTagClause), - workspace_id=_from_dict(d, "workspace_id", BudgetConfigurationFilterWorkspaceIdClause), - ) + return cls(tags=_repeated_dict(d, 'tags', BudgetConfigurationFilterTagClause), workspace_id=_from_dict(d, 'workspace_id', BudgetConfigurationFilterWorkspaceIdClause)) + + @dataclass class BudgetConfigurationFilterClause: operator: Optional[BudgetConfigurationFilterOperator] = None - + values: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilterClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.operator is not None: - body["operator"] = self.operator.value - if self.values: - body["values"] = [v for v in self.values] + if self.operator is not None: body['operator'] = self.operator.value + if self.values: body['values'] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfigurationFilterClause into a shallow dictionary of its immediate attributes.""" body = {} - if self.operator is not None: - body["operator"] = self.operator - if self.values: - body["values"] = self.values + if self.operator is not None: body['operator'] = self.operator + if self.values: body['values'] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilterClause: """Deserializes the BudgetConfigurationFilterClause from a dictionary.""" - return cls(operator=_enum(d, "operator", BudgetConfigurationFilterOperator), values=d.get("values", None)) - + return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), values=d.get('values', None)) + -class BudgetConfigurationFilterOperator(Enum): - IN = "IN" +class BudgetConfigurationFilterOperator(Enum): + + + IN = 'IN' @dataclass class BudgetConfigurationFilterTagClause: key: Optional[str] = None - + value: Optional[BudgetConfigurationFilterClause] = None - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilterTagClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value: - body["value"] = self.value.as_dict() + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value.as_dict() 
return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfigurationFilterTagClause into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilterTagClause: """Deserializes the BudgetConfigurationFilterTagClause from a dictionary.""" - return cls(key=d.get("key", None), value=_from_dict(d, "value", BudgetConfigurationFilterClause)) + return cls(key=d.get('key', None), value=_from_dict(d, 'value', BudgetConfigurationFilterClause)) + + @dataclass class BudgetConfigurationFilterWorkspaceIdClause: operator: Optional[BudgetConfigurationFilterOperator] = None - + values: Optional[List[int]] = None - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.operator is not None: - body["operator"] = self.operator.value - if self.values: - body["values"] = [v for v in self.values] + if self.operator is not None: body['operator'] = self.operator.value + if self.values: body['values'] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a shallow dictionary of its immediate attributes.""" body = {} - if self.operator is not None: - body["operator"] = self.operator - if self.values: - body["values"] = self.values + if self.operator is not None: body['operator'] = self.operator + if self.values: body['values'] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilterWorkspaceIdClause: """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary.""" - return cls(operator=_enum(d, "operator", BudgetConfigurationFilterOperator), values=d.get("values", None)) + return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), values=d.get('values', None)) + + @dataclass class BudgetPolicy: """Contains the BudgetPolicy details.""" - + binding_workspace_ids: Optional[List[int]] = None """List of workspaces that this budget policy will be exclusively bound to. An empty binding implies that this budget policy is open to any workspace in the account.""" - + custom_tags: Optional[List[compute.CustomPolicyTag]] = None """A list of tags defined by the customer. At most 20 entries are allowed per policy.""" - + policy_id: Optional[str] = None """The Id of the policy. This field is generated by Databricks and globally unique.""" - + policy_name: Optional[str] = None """The name of the policy. - Must be unique among active policies. - Can contain only characters from the ISO 8859-1 (latin1) set. 
- Can't start with reserved keywords such as `databricks:default-policy`.""" - + def as_dict(self) -> dict: """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.binding_workspace_ids: - body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids] - if self.custom_tags: - body["custom_tags"] = [v.as_dict() for v in self.custom_tags] - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.policy_name is not None: - body["policy_name"] = self.policy_name + if self.binding_workspace_ids: body['binding_workspace_ids'] = [v for v in self.binding_workspace_ids] + if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_name is not None: body['policy_name'] = self.policy_name return body def as_shallow_dict(self) -> dict: """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.binding_workspace_ids: - body["binding_workspace_ids"] = self.binding_workspace_ids - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.policy_name is not None: - body["policy_name"] = self.policy_name + if self.binding_workspace_ids: body['binding_workspace_ids'] = self.binding_workspace_ids + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_name is not None: body['policy_name'] = self.policy_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetPolicy: """Deserializes the BudgetPolicy from a dictionary.""" - return cls( - binding_workspace_ids=d.get("binding_workspace_ids", None), - custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag), - policy_id=d.get("policy_id", None), - policy_name=d.get("policy_name", None), - ) + return cls(binding_workspace_ids=d.get('binding_workspace_ids', None), custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag), policy_id=d.get('policy_id', None), policy_name=d.get('policy_name', None)) + + @dataclass @@ -421,147 +359,128 @@ class CreateBillingUsageDashboardRequest: dashboard_type: Optional[UsageDashboardType] = None """Workspace level usage dashboard shows usage data for the specified workspace ID. 
Global level usage dashboard shows usage data for all workspaces in the account.""" - + workspace_id: Optional[int] = None """The workspace ID of the workspace in which the usage dashboard is created.""" - + def as_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_type is not None: - body["dashboard_type"] = self.dashboard_type.value - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type.value + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_type is not None: - body["dashboard_type"] = self.dashboard_type - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBillingUsageDashboardRequest: """Deserializes the CreateBillingUsageDashboardRequest from a dictionary.""" - return cls( - dashboard_type=_enum(d, "dashboard_type", UsageDashboardType), workspace_id=d.get("workspace_id", None) - ) + return cls(dashboard_type=_enum(d, 'dashboard_type', UsageDashboardType), workspace_id=d.get('workspace_id', None)) + + @dataclass class CreateBillingUsageDashboardResponse: dashboard_id: Optional[str] = None """The unique id of the usage dashboard.""" - + def as_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBillingUsageDashboardResponse: """Deserializes the CreateBillingUsageDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get("dashboard_id", None)) + return cls(dashboard_id=d.get('dashboard_id', None)) + + @dataclass class CreateBudgetConfigurationBudget: account_id: Optional[str] = None """Databricks account ID.""" - + alert_configurations: Optional[List[CreateBudgetConfigurationBudgetAlertConfigurations]] = None """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one alert configuration.""" - + display_name: Optional[str] = None """Human-readable name of budget configuration. Max Length: 128""" - + filter: Optional[BudgetConfigurationFilter] = None """Configured filters for this budget. These are applied to your account's usage to limit the scope of what is considered for this budget. Leave empty to include all usage for this account. 
All provided filters must be matched for usage to be included.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.alert_configurations: - body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.filter: - body["filter"] = self.filter.as_dict() + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.alert_configurations: - body["alert_configurations"] = self.alert_configurations - if self.display_name is not None: - body["display_name"] = self.display_name - if self.filter: - body["filter"] = self.filter + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = self.alert_configurations + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationBudget: """Deserializes the CreateBudgetConfigurationBudget from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - alert_configurations=_repeated_dict( - d, "alert_configurations", CreateBudgetConfigurationBudgetAlertConfigurations - ), - display_name=d.get("display_name", None), - filter=_from_dict(d, "filter", BudgetConfigurationFilter), - ) + return cls(account_id=d.get('account_id', None), alert_configurations=_repeated_dict(d, 'alert_configurations', CreateBudgetConfigurationBudgetAlertConfigurations), display_name=d.get('display_name', None), filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) + + @dataclass class CreateBudgetConfigurationBudgetActionConfigurations: action_type: Optional[ActionConfigurationType] = None """The type of the action.""" - + target: Optional[str] = None """Target for the action. 
For example, an email address.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_type is not None: - body["action_type"] = self.action_type.value - if self.target is not None: - body["target"] = self.target + if self.action_type is not None: body['action_type'] = self.action_type.value + if self.target is not None: body['target'] = self.target return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_type is not None: - body["action_type"] = self.action_type - if self.target is not None: - body["target"] = self.target + if self.action_type is not None: body['action_type'] = self.action_type + if self.target is not None: body['target'] = self.target return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationBudgetActionConfigurations: """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary.""" - return cls(action_type=_enum(d, "action_type", ActionConfigurationType), target=d.get("target", None)) + return cls(action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None)) + + @dataclass @@ -569,211 +488,190 @@ class CreateBudgetConfigurationBudgetAlertConfigurations: action_configurations: Optional[List[CreateBudgetConfigurationBudgetActionConfigurations]] = None """Configured actions for this alert. These define what happens when an alert enters a triggered state.""" - + quantity_threshold: Optional[str] = None """The threshold for the budget alert to determine if it is in a triggered state. The number is evaluated based on `quantity_type`.""" - + quantity_type: Optional[AlertConfigurationQuantityType] = None """The way to calculate cost for this budget alert. 
This is what `quantity_threshold` is measured in.""" - + time_period: Optional[AlertConfigurationTimePeriod] = None """The time window of usage data for the budget.""" - + trigger_type: Optional[AlertConfigurationTriggerType] = None """The evaluation method to determine when this budget alert is in a triggered state.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_configurations: - body["action_configurations"] = [v.as_dict() for v in self.action_configurations] - if self.quantity_threshold is not None: - body["quantity_threshold"] = self.quantity_threshold - if self.quantity_type is not None: - body["quantity_type"] = self.quantity_type.value - if self.time_period is not None: - body["time_period"] = self.time_period.value - if self.trigger_type is not None: - body["trigger_type"] = self.trigger_type.value + if self.action_configurations: body['action_configurations'] = [v.as_dict() for v in self.action_configurations] + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value + if self.time_period is not None: body['time_period'] = self.time_period.value + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_configurations: - body["action_configurations"] = self.action_configurations - if self.quantity_threshold is not None: - body["quantity_threshold"] = self.quantity_threshold - if self.quantity_type is not None: - body["quantity_type"] = self.quantity_type - if self.time_period is not None: - body["time_period"] = self.time_period - if self.trigger_type is not None: - body["trigger_type"] = self.trigger_type + if self.action_configurations: body['action_configurations'] = self.action_configurations + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type + if self.time_period is not None: body['time_period'] = self.time_period + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationBudgetAlertConfigurations: """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary.""" - return cls( - action_configurations=_repeated_dict( - d, "action_configurations", CreateBudgetConfigurationBudgetActionConfigurations - ), - quantity_threshold=d.get("quantity_threshold", None), - quantity_type=_enum(d, "quantity_type", AlertConfigurationQuantityType), - time_period=_enum(d, "time_period", AlertConfigurationTimePeriod), - trigger_type=_enum(d, "trigger_type", AlertConfigurationTriggerType), - ) + return cls(action_configurations=_repeated_dict(d, 'action_configurations', CreateBudgetConfigurationBudgetActionConfigurations), quantity_threshold=d.get('quantity_threshold', None), quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) + + @dataclass class CreateBudgetConfigurationRequest: budget: 
CreateBudgetConfigurationBudget """Properties of the new budget configuration.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: - body["budget"] = self.budget.as_dict() + if self.budget: body['budget'] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: - body["budget"] = self.budget + if self.budget: body['budget'] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationRequest: """Deserializes the CreateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, "budget", CreateBudgetConfigurationBudget)) + return cls(budget=_from_dict(d, 'budget', CreateBudgetConfigurationBudget)) + + @dataclass class CreateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None """The created budget configuration.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: - body["budget"] = self.budget.as_dict() + if self.budget: body['budget'] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: - body["budget"] = self.budget + if self.budget: body['budget'] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationResponse: """Deserializes the CreateBudgetConfigurationResponse from a dictionary.""" - return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) + + @dataclass class CreateBudgetPolicyRequest: """A request to create a BudgetPolicy.""" - + policy: Optional[BudgetPolicy] = None """The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must be provided, custom_tags may need to be provided depending on the cloud provider. All other fields are optional.""" - + request_id: Optional[str] = None """A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is recommended. 
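Taken together, the `Create*` types above assemble into a budget-creation payload; note the docstring requirement that a budget carry exactly one alert configuration. A construction sketch using only enum members defined in this file (the display name, threshold, and email target are placeholders):

    budget = CreateBudgetConfigurationBudget(
        display_name="monthly-cap",
        alert_configurations=[
            CreateBudgetConfigurationBudgetAlertConfigurations(
                quantity_type=AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
                quantity_threshold="1000",  # evaluated in the units of quantity_type
                time_period=AlertConfigurationTimePeriod.MONTH,
                trigger_type=AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
                action_configurations=[
                    CreateBudgetConfigurationBudgetActionConfigurations(
                        action_type=ActionConfigurationType.EMAIL_NOTIFICATION,
                        target="admin@example.com",  # placeholder notification address
                    )
                ],
            )
        ],
    )
    request = CreateBudgetConfigurationRequest(budget=budget)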
This request is only idempotent if a `request_id` is provided.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.policy: - body["policy"] = self.policy.as_dict() - if self.request_id is not None: - body["request_id"] = self.request_id + if self.policy: body['policy'] = self.policy.as_dict() + if self.request_id is not None: body['request_id'] = self.request_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.policy: - body["policy"] = self.policy - if self.request_id is not None: - body["request_id"] = self.request_id + if self.policy: body['policy'] = self.policy + if self.request_id is not None: body['request_id'] = self.request_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetPolicyRequest: """Deserializes the CreateBudgetPolicyRequest from a dictionary.""" - return cls(policy=_from_dict(d, "policy", BudgetPolicy), request_id=d.get("request_id", None)) + return cls(policy=_from_dict(d, 'policy', BudgetPolicy), request_id=d.get('request_id', None)) + + @dataclass class CreateLogDeliveryConfigurationParams: - log_type: LogType - """Log delivery type. Supported values are: - - * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the - [View billable usage]. + """* Log Delivery Configuration""" - * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit - logging] + log_type: LogType + """Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log + delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure + [audit log delivery]. For the JSON schema, see [Configure audit logging] [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - - output_format: OutputFormat - """The file type of log delivery. - * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated - values) format is supported. For the schema, see the [View billable usage] * If `log_type` is - `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is - supported. For the schema, see the [Configuring audit logs]. + output_format: OutputFormat + """The file type of log delivery. * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. + Only the CSV (comma-separated values) format is supported. For the schema, see the [View + billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON + (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit + logs]. 
[Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html""" - + credentials_id: str """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page. See [Configure billable usage delivery]. [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + storage_configuration_id: str """The ID for a method:storage/create that represents the S3 bucket with bucket policy as described in the main billable usage documentation page. See [Configure billable usage delivery]. [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + config_name: Optional[str] = None """The optional human-readable name of the log delivery configuration. Defaults to empty.""" - + delivery_path_prefix: Optional[str] = None """The optional delivery path prefix within Amazon S3 storage. Defaults to empty, which means that logs are delivered to the root of the bucket. This must be a valid S3 object key. This must not start or end with a slash character.""" - + delivery_start_time: Optional[str] = None - """This field applies only if `log_type` is `BILLABLE_USAGE`. This is the optional start month and - year for delivery, specified in `YYYY-MM` format. Defaults to current year and month. - `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`).""" - + """This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and year + for delivery, specified in YYYY-MM format. Defaults to current year and month. BILLABLE_USAGE + logs are not available for usage before March 2019 (2019-03).""" + status: Optional[LogDeliveryConfigStatus] = None """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.""" - + workspace_ids_filter: Optional[List[int]] = None """Optional filter that specifies workspace IDs to deliver logs for. By default the workspace filter is empty and log delivery applies at the account level, delivering workspace-level logs @@ -785,67 +683,44 @@ class CreateLogDeliveryConfigurationParams: new workspaces created in the future, and delivery won't include account level logs. 
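The docstrings above encode a pairing the type system cannot express: `BILLABLE_USAGE` deliveries must use `CSV`, and `AUDIT_LOGS` must use `JSON`. A construction sketch; the `LogType.BILLABLE_USAGE` and `OutputFormat.CSV` member names are assumed from the documented values rather than visible in this excerpt, and both IDs are placeholders for objects created through the credentials and storage APIs:

    params = CreateLogDeliveryConfigurationParams(
        log_type=LogType.BILLABLE_USAGE,     # assumed member name; must pair with CSV
        output_format=OutputFormat.CSV,      # assumed member name
        credentials_id="<credentials-id>",   # placeholder
        storage_configuration_id="<storage-configuration-id>",  # placeholder
        delivery_start_time="2024-01",       # optional YYYY-MM start month
    )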
For some types of Databricks deployments there is only one workspace per account ID, so this field is unnecessary.""" - + def as_dict(self) -> dict: """Serializes the CreateLogDeliveryConfigurationParams into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config_name is not None: - body["config_name"] = self.config_name - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.delivery_path_prefix is not None: - body["delivery_path_prefix"] = self.delivery_path_prefix - if self.delivery_start_time is not None: - body["delivery_start_time"] = self.delivery_start_time - if self.log_type is not None: - body["log_type"] = self.log_type.value - if self.output_format is not None: - body["output_format"] = self.output_format.value - if self.status is not None: - body["status"] = self.status.value - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.workspace_ids_filter: - body["workspace_ids_filter"] = [v for v in self.workspace_ids_filter] + if self.config_name is not None: body['config_name'] = self.config_name + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix + if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time + if self.log_type is not None: body['log_type'] = self.log_type.value + if self.output_format is not None: body['output_format'] = self.output_format.value + if self.status is not None: body['status'] = self.status.value + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter] return body def as_shallow_dict(self) -> dict: """Serializes the CreateLogDeliveryConfigurationParams into a shallow dictionary of its immediate attributes.""" body = {} - if self.config_name is not None: - body["config_name"] = self.config_name - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.delivery_path_prefix is not None: - body["delivery_path_prefix"] = self.delivery_path_prefix - if self.delivery_start_time is not None: - body["delivery_start_time"] = self.delivery_start_time - if self.log_type is not None: - body["log_type"] = self.log_type - if self.output_format is not None: - body["output_format"] = self.output_format - if self.status is not None: - body["status"] = self.status - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.workspace_ids_filter: - body["workspace_ids_filter"] = self.workspace_ids_filter + if self.config_name is not None: body['config_name'] = self.config_name + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix + if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time + if self.log_type is not None: body['log_type'] = self.log_type + if self.output_format is not None: body['output_format'] = self.output_format + if self.status is not None: body['status'] = self.status + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.workspace_ids_filter: 
body['workspace_ids_filter'] = self.workspace_ids_filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateLogDeliveryConfigurationParams: """Deserializes the CreateLogDeliveryConfigurationParams from a dictionary.""" - return cls( - config_name=d.get("config_name", None), - credentials_id=d.get("credentials_id", None), - delivery_path_prefix=d.get("delivery_path_prefix", None), - delivery_start_time=d.get("delivery_start_time", None), - log_type=_enum(d, "log_type", LogType), - output_format=_enum(d, "output_format", OutputFormat), - status=_enum(d, "status", LogDeliveryConfigStatus), - storage_configuration_id=d.get("storage_configuration_id", None), - workspace_ids_filter=d.get("workspace_ids_filter", None), - ) + return cls(config_name=d.get('config_name', None), credentials_id=d.get('credentials_id', None), delivery_path_prefix=d.get('delivery_path_prefix', None), delivery_start_time=d.get('delivery_start_time', None), log_type=_enum(d, 'log_type', LogType), output_format=_enum(d, 'output_format', OutputFormat), status=_enum(d, 'status', LogDeliveryConfigStatus), storage_configuration_id=d.get('storage_configuration_id', None), workspace_ids_filter=d.get('workspace_ids_filter', None)) + + + + + @dataclass @@ -864,6 +739,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteBudgetConfigurationResponse: """Deserializes the DeleteBudgetConfigurationResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -882,155 +762,186 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + class DeliveryStatus(Enum): - """The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery - attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has - succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of - misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The + """* The status string for log delivery. Possible values are: `CREATED`: There were no log delivery + attempts since the config was created. `SUCCEEDED`: The latest attempt of log delivery has + succeeded completely. `USER_FAILURE`: The latest attempt of log delivery failed because of + misconfiguration of customer provided permissions on role or storage. `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal error. Contact support - if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been + if it doesn't go away soon. 
`NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.""" + + CREATED = 'CREATED' + NOT_FOUND = 'NOT_FOUND' + SUCCEEDED = 'SUCCEEDED' + SYSTEM_FAILURE = 'SYSTEM_FAILURE' + USER_FAILURE = 'USER_FAILURE' + - CREATED = "CREATED" - NOT_FOUND = "NOT_FOUND" - SUCCEEDED = "SUCCEEDED" - SYSTEM_FAILURE = "SYSTEM_FAILURE" - USER_FAILURE = "USER_FAILURE" @dataclass class DownloadResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the DownloadResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DownloadResponse: """Deserializes the DownloadResponse from a dictionary.""" - return cls(contents=d.get("contents", None)) + return cls(contents=d.get('contents', None)) + + @dataclass class Filter: """Structured representation of a filter to be applied to a list of policies. All specified filters will be applied in conjunction.""" - + creator_user_id: Optional[int] = None """The policy creator user id to be filtered on. If unspecified, all policies will be returned.""" - + creator_user_name: Optional[str] = None """The policy creator user name to be filtered on. If unspecified, all policies will be returned.""" - + policy_name: Optional[str] = None """The partial name of policies to be filtered on. 
If unspecified, all policies will be returned.""" - + def as_dict(self) -> dict: """Serializes the Filter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creator_user_id is not None: - body["creator_user_id"] = self.creator_user_id - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.policy_name is not None: - body["policy_name"] = self.policy_name + if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.policy_name is not None: body['policy_name'] = self.policy_name return body def as_shallow_dict(self) -> dict: """Serializes the Filter into a shallow dictionary of its immediate attributes.""" body = {} - if self.creator_user_id is not None: - body["creator_user_id"] = self.creator_user_id - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.policy_name is not None: - body["policy_name"] = self.policy_name + if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.policy_name is not None: body['policy_name'] = self.policy_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Filter: """Deserializes the Filter from a dictionary.""" - return cls( - creator_user_id=d.get("creator_user_id", None), - creator_user_name=d.get("creator_user_name", None), - policy_name=d.get("policy_name", None), - ) + return cls(creator_user_id=d.get('creator_user_id', None), creator_user_name=d.get('creator_user_name', None), policy_name=d.get('policy_name', None)) + + + + + @dataclass class GetBillingUsageDashboardResponse: dashboard_id: Optional[str] = None """The unique id of the usage dashboard.""" - + dashboard_url: Optional[str] = None """The URL of the usage dashboard.""" - + def as_dict(self) -> dict: """Serializes the GetBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.dashboard_url is not None: - body["dashboard_url"] = self.dashboard_url + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url return body def as_shallow_dict(self) -> dict: """Serializes the GetBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.dashboard_url is not None: - body["dashboard_url"] = self.dashboard_url + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetBillingUsageDashboardResponse: """Deserializes the GetBillingUsageDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get("dashboard_id", None), dashboard_url=d.get("dashboard_url", None)) + return cls(dashboard_id=d.get('dashboard_id', None), dashboard_url=d.get('dashboard_url', None)) + + + + + @dataclass class GetBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None - + def as_dict(self) -> dict: """Serializes the GetBudgetConfigurationResponse into a dictionary suitable for use as a JSON 
request body.""" body = {} - if self.budget: - body["budget"] = self.budget.as_dict() + if self.budget: body['budget'] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: - body["budget"] = self.budget + if self.budget: body['budget'] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetBudgetConfigurationResponse: """Deserializes the GetBudgetConfigurationResponse from a dictionary.""" - return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) + + + + + + + +@dataclass +class GetLogDeliveryConfigurationResponse: + log_delivery_configuration: Optional[LogDeliveryConfiguration] = None + """The fetched log delivery configuration""" + + def as_dict(self) -> dict: + """Serializes the GetLogDeliveryConfigurationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GetLogDeliveryConfigurationResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetLogDeliveryConfigurationResponse: + """Deserializes the GetLogDeliveryConfigurationResponse from a dictionary.""" + return cls(log_delivery_configuration=_from_dict(d, 'log_delivery_configuration', LogDeliveryConfiguration)) + + + + + @dataclass class LimitConfig: """The limit configuration of the policy. Limit configuration provide a budget policy level cost control by enforcing the limit.""" - + def as_dict(self) -> dict: """Serializes the LimitConfig into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1045,172 +956,167 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LimitConfig: """Deserializes the LimitConfig from a dictionary.""" return cls() + + + + + @dataclass class ListBudgetConfigurationsResponse: budgets: Optional[List[BudgetConfiguration]] = None - + next_page_token: Optional[str] = None """Token which can be sent as `page_token` to retrieve the next page of results. 
If this field is omitted, there are no subsequent budgets.""" - + def as_dict(self) -> dict: """Serializes the ListBudgetConfigurationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budgets: - body["budgets"] = [v.as_dict() for v in self.budgets] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListBudgetConfigurationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budgets: - body["budgets"] = self.budgets - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.budgets: body['budgets'] = self.budgets + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListBudgetConfigurationsResponse: """Deserializes the ListBudgetConfigurationsResponse from a dictionary.""" - return cls( - budgets=_repeated_dict(d, "budgets", BudgetConfiguration), next_page_token=d.get("next_page_token", None) - ) + return cls(budgets=_repeated_dict(d, 'budgets', BudgetConfiguration), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListBudgetPoliciesResponse: """A list of policies.""" - + next_page_token: Optional[str] = None """A token that can be sent as `page_token` to retrieve the next page. If this field is omitted, there are no subsequent pages.""" - + policies: Optional[List[BudgetPolicy]] = None - + previous_page_token: Optional[str] = None """A token that can be sent as `page_token` to retrieve the previous page. 
If this field is omitted, there are no previous pages."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the ListBudgetPoliciesResponse into a dictionary suitable for use as a JSON request
        body."""
        body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.policies:
-            body["policies"] = [v.as_dict() for v in self.policies]
-        if self.previous_page_token is not None:
-            body["previous_page_token"] = self.previous_page_token
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
+        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ListBudgetPoliciesResponse into a shallow dictionary of its immediate
        attributes."""
        body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.policies:
-            body["policies"] = self.policies
-        if self.previous_page_token is not None:
-            body["previous_page_token"] = self.previous_page_token
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = self.policies
+        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ListBudgetPoliciesResponse:
        """Deserializes the ListBudgetPoliciesResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None),
-            policies=_repeated_dict(d, "policies", BudgetPolicy),
-            previous_page_token=d.get("previous_page_token", None),
-        )
+        return cls(next_page_token=d.get('next_page_token', None), policies=_repeated_dict(d, 'policies', BudgetPolicy), previous_page_token=d.get('previous_page_token', None))
+
+

-class LogDeliveryConfigStatus(Enum):
-    """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled).
-    Defaults to `ENABLED`. You can [enable or disable the
-    configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration
-    is not supported, so disable a log delivery configuration that is no longer needed."""
-    DISABLED = "DISABLED"
-    ENABLED = "ENABLED"

+class LogDeliveryConfigStatus(Enum):
+    """* Log Delivery Status
+
+    `ENABLED`: Log delivery for this configuration is enabled. `DISABLED`: Log delivery for this
+    configuration is disabled."""
+
+    DISABLED = 'DISABLED'
+    ENABLED = 'ENABLED'
+

@dataclass
class LogDeliveryConfiguration:
-    account_id: Optional[str] = None
-    """The Databricks account ID that hosts the log delivery configuration."""
-
+    """* Log Delivery Configuration"""
+
+    log_type: LogType
+    """Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log
+    delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure
+    [audit log delivery]. For the JSON schema, see [Configure audit logging]
+
+    [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+    [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
+    [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+    [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html"""
+
+    output_format: OutputFormat
+    """The file type of log delivery.
* If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. + Only the CSV (comma-separated values) format is supported. For the schema, see the [View + billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON + (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit + logs]. + + [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html + [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html""" + + account_id: str + """Databricks account ID.""" + + credentials_id: str + """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust + relationship as described in the main billable usage documentation page. See [Configure billable + usage delivery]. + + [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" + + storage_configuration_id: str + """The ID for a method:storage/create that represents the S3 bucket with bucket policy as described + in the main billable usage documentation page. See [Configure billable usage delivery]. + + [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" + config_id: Optional[str] = None - """Databricks log delivery configuration ID.""" - + """The unique UUID of log delivery configuration""" + config_name: Optional[str] = None """The optional human-readable name of the log delivery configuration. Defaults to empty.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when the log delivery configuration was created.""" - - credentials_id: Optional[str] = None - """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust - relationship as described in the main billable usage documentation page. See [Configure billable - usage delivery]. - [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - delivery_path_prefix: Optional[str] = None """The optional delivery path prefix within Amazon S3 storage. Defaults to empty, which means that logs are delivered to the root of the bucket. This must be a valid S3 object key. This must not start or end with a slash character.""" - - delivery_start_time: Optional[str] = None - """This field applies only if `log_type` is `BILLABLE_USAGE`. This is the optional start month and - year for delivery, specified in `YYYY-MM` format. Defaults to current year and month. - `BILLABLE_USAGE` logs are not available for usage before March 2019 (`2019-03`).""" - - log_delivery_status: Optional[LogDeliveryStatus] = None - """Databricks log delivery status.""" - - log_type: Optional[LogType] = None - """Log delivery type. Supported values are: - * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the - [View billable usage]. - - * `AUDIT_LOGS` — Configure [audit log delivery]. 
For the JSON schema, see [Configure audit - logging] - - [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html - [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - - output_format: Optional[OutputFormat] = None - """The file type of log delivery. + delivery_start_time: Optional[str] = None + """This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and year + for delivery, specified in YYYY-MM format. Defaults to current year and month. BILLABLE_USAGE + logs are not available for usage before March 2019 (2019-03).""" - * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated - values) format is supported. For the schema, see the [View billable usage] * If `log_type` is - `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is - supported. For the schema, see the [Configuring audit logs]. + log_delivery_status: Optional[LogDeliveryStatus] = None + """The LogDeliveryStatus of this log delivery configuration""" - [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html""" - status: Optional[LogDeliveryConfigStatus] = None """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.""" - - storage_configuration_id: Optional[str] = None - """The ID for a method:storage/create that represents the S3 bucket with bucket policy as described - in the main billable usage documentation page. See [Configure billable usage delivery]. - [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - update_time: Optional[int] = None """Time in epoch milliseconds when the log delivery configuration was updated.""" - + workspace_ids_filter: Optional[List[int]] = None """Optional filter that specifies workspace IDs to deliver logs for. By default the workspace filter is empty and log delivery applies at the account level, delivering workspace-level logs @@ -1222,187 +1128,111 @@ class LogDeliveryConfiguration: new workspaces created in the future, and delivery won't include account level logs. 
For some types of Databricks deployments there is only one workspace per account ID, so this field is unnecessary.""" - + def as_dict(self) -> dict: """Serializes the LogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.config_id is not None: - body["config_id"] = self.config_id - if self.config_name is not None: - body["config_name"] = self.config_name - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.delivery_path_prefix is not None: - body["delivery_path_prefix"] = self.delivery_path_prefix - if self.delivery_start_time is not None: - body["delivery_start_time"] = self.delivery_start_time - if self.log_delivery_status: - body["log_delivery_status"] = self.log_delivery_status.as_dict() - if self.log_type is not None: - body["log_type"] = self.log_type.value - if self.output_format is not None: - body["output_format"] = self.output_format.value - if self.status is not None: - body["status"] = self.status.value - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.update_time is not None: - body["update_time"] = self.update_time - if self.workspace_ids_filter: - body["workspace_ids_filter"] = [v for v in self.workspace_ids_filter] + if self.account_id is not None: body['account_id'] = self.account_id + if self.config_id is not None: body['config_id'] = self.config_id + if self.config_name is not None: body['config_name'] = self.config_name + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix + if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time + if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status.as_dict() + if self.log_type is not None: body['log_type'] = self.log_type.value + if self.output_format is not None: body['output_format'] = self.output_format.value + if self.status is not None: body['status'] = self.status.value + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.update_time is not None: body['update_time'] = self.update_time + if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter] return body def as_shallow_dict(self) -> dict: """Serializes the LogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.config_id is not None: - body["config_id"] = self.config_id - if self.config_name is not None: - body["config_name"] = self.config_name - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.delivery_path_prefix is not None: - body["delivery_path_prefix"] = self.delivery_path_prefix - if self.delivery_start_time is not None: - body["delivery_start_time"] = self.delivery_start_time - if self.log_delivery_status: - body["log_delivery_status"] = self.log_delivery_status - if self.log_type is not None: - body["log_type"] = 
self.log_type - if self.output_format is not None: - body["output_format"] = self.output_format - if self.status is not None: - body["status"] = self.status - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.update_time is not None: - body["update_time"] = self.update_time - if self.workspace_ids_filter: - body["workspace_ids_filter"] = self.workspace_ids_filter + if self.account_id is not None: body['account_id'] = self.account_id + if self.config_id is not None: body['config_id'] = self.config_id + if self.config_name is not None: body['config_name'] = self.config_name + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix + if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time + if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status + if self.log_type is not None: body['log_type'] = self.log_type + if self.output_format is not None: body['output_format'] = self.output_format + if self.status is not None: body['status'] = self.status + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.update_time is not None: body['update_time'] = self.update_time + if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogDeliveryConfiguration: """Deserializes the LogDeliveryConfiguration from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - config_id=d.get("config_id", None), - config_name=d.get("config_name", None), - creation_time=d.get("creation_time", None), - credentials_id=d.get("credentials_id", None), - delivery_path_prefix=d.get("delivery_path_prefix", None), - delivery_start_time=d.get("delivery_start_time", None), - log_delivery_status=_from_dict(d, "log_delivery_status", LogDeliveryStatus), - log_type=_enum(d, "log_type", LogType), - output_format=_enum(d, "output_format", OutputFormat), - status=_enum(d, "status", LogDeliveryConfigStatus), - storage_configuration_id=d.get("storage_configuration_id", None), - update_time=d.get("update_time", None), - workspace_ids_filter=d.get("workspace_ids_filter", None), - ) + return cls(account_id=d.get('account_id', None), config_id=d.get('config_id', None), config_name=d.get('config_name', None), creation_time=d.get('creation_time', None), credentials_id=d.get('credentials_id', None), delivery_path_prefix=d.get('delivery_path_prefix', None), delivery_start_time=d.get('delivery_start_time', None), log_delivery_status=_from_dict(d, 'log_delivery_status', LogDeliveryStatus), log_type=_enum(d, 'log_type', LogType), output_format=_enum(d, 'output_format', OutputFormat), status=_enum(d, 'status', LogDeliveryConfigStatus), storage_configuration_id=d.get('storage_configuration_id', None), update_time=d.get('update_time', None), workspace_ids_filter=d.get('workspace_ids_filter', None)) + + @dataclass class LogDeliveryStatus: - """Databricks log delivery status.""" - - last_attempt_time: Optional[str] = None - """The UTC time for the latest log delivery attempt.""" - - last_successful_attempt_time: Optional[str] = None - """The UTC time for the latest successful log delivery.""" - - message: Optional[str] = None - 
"""Informative message about the latest log delivery attempt. If the log delivery fails with - USER_FAILURE, error details will be provided for fixing misconfigurations in cloud permissions.""" - - status: Optional[DeliveryStatus] = None - """The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery + status: DeliveryStatus + """Enum that describes the status. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.""" - + + message: str + """Informative message about the latest log delivery attempt. If the log delivery fails with + USER_FAILURE, error details will be provided for fixing misconfigurations in cloud permissions.""" + + last_attempt_time: Optional[str] = None + """The UTC time for the latest log delivery attempt.""" + + last_successful_attempt_time: Optional[str] = None + """The UTC time for the latest successful log delivery.""" + def as_dict(self) -> dict: """Serializes the LogDeliveryStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_attempt_time is not None: - body["last_attempt_time"] = self.last_attempt_time - if self.last_successful_attempt_time is not None: - body["last_successful_attempt_time"] = self.last_successful_attempt_time - if self.message is not None: - body["message"] = self.message - if self.status is not None: - body["status"] = self.status.value + if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time + if self.last_successful_attempt_time is not None: body['last_successful_attempt_time'] = self.last_successful_attempt_time + if self.message is not None: body['message'] = self.message + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the LogDeliveryStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_attempt_time is not None: - body["last_attempt_time"] = self.last_attempt_time - if self.last_successful_attempt_time is not None: - body["last_successful_attempt_time"] = self.last_successful_attempt_time - if self.message is not None: - body["message"] = self.message - if self.status is not None: - body["status"] = self.status + if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time + if self.last_successful_attempt_time is not None: body['last_successful_attempt_time'] = self.last_successful_attempt_time + if self.message is not None: body['message'] = self.message + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogDeliveryStatus: """Deserializes the LogDeliveryStatus from a dictionary.""" - return cls( - last_attempt_time=d.get("last_attempt_time", None), - last_successful_attempt_time=d.get("last_successful_attempt_time", None), - message=d.get("message", None), - status=_enum(d, "status", DeliveryStatus), - ) - - -class LogType(Enum): - """Log delivery type. 
Supported values are:
+        return cls(last_attempt_time=d.get('last_attempt_time', None), last_successful_attempt_time=d.get('last_successful_attempt_time', None), message=d.get('message', None), status=_enum(d, 'status', DeliveryStatus))
+    

-    * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the
-    [View billable usage].
-    * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit
-    logging]
-
-    [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-    [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html
-    [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-    [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
-    """
-
-    AUDIT_LOGS = "AUDIT_LOGS"
-    BILLABLE_USAGE = "BILLABLE_USAGE"

+class LogType(Enum):
+    """* Log Delivery Type"""
+
+    AUDIT_LOGS = 'AUDIT_LOGS'
+    BILLABLE_USAGE = 'BILLABLE_USAGE'


class OutputFormat(Enum):
-    """The file type of log delivery.
-
-    * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated
-    values) format is supported. For the schema, see the [View billable usage] * If `log_type` is
-    `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is
-    supported. For the schema, see the [Configuring audit logs].
-
-    [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-    [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html"""
-
-    CSV = "CSV"
-    JSON = "JSON"
-
+    """* Log Delivery Output Format"""
+
+    CSV = 'CSV'
+    JSON = 'JSON'

@dataclass
class PatchStatusResponse:
@@ -1420,303 +1250,307 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> PatchStatusResponse:
        """Deserializes the PatchStatusResponse from a dictionary."""
        return cls()
+
+

@dataclass
class SortSpec:
    descending: Optional[bool] = None
    """Whether to sort in descending order."""
-
+    
    field: Optional[SortSpecField] = None
    """The field to sort by"""
-
+    
    def as_dict(self) -> dict:
        """Serializes the SortSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.descending is not None:
-            body["descending"] = self.descending
-        if self.field is not None:
-            body["field"] = self.field.value
+        if self.descending is not None: body['descending'] = self.descending
+        if self.field is not None: body['field'] = self.field.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SortSpec into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.descending is not None:
-            body["descending"] = self.descending
-        if self.field is not None:
-            body["field"] = self.field
+        if self.descending is not None: body['descending'] = self.descending
+        if self.field is not None: body['field'] = self.field
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SortSpec:
        """Deserializes the SortSpec from a dictionary."""
-        return cls(descending=d.get("descending", None), field=_enum(d, "field", SortSpecField))
-
+        return cls(descending=d.get('descending', None), field=_enum(d, 'field', SortSpecField))
+

-class SortSpecField(Enum):
-    POLICY_NAME = "POLICY_NAME"

+class SortSpecField(Enum):
+
+
+    POLICY_NAME = 'POLICY_NAME'


@dataclass
class UpdateBudgetConfigurationBudget:
    account_id: Optional[str] =
None """Databricks account ID.""" - + alert_configurations: Optional[List[AlertConfiguration]] = None """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one alert configuration.""" - + budget_configuration_id: Optional[str] = None """Databricks budget configuration ID.""" - + display_name: Optional[str] = None """Human-readable name of budget configuration. Max Length: 128""" - + filter: Optional[BudgetConfigurationFilter] = None """Configured filters for this budget. These are applied to your account's usage to limit the scope of what is considered for this budget. Leave empty to include all usage for this account. All provided filters must be matched for usage to be included.""" - + def as_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.alert_configurations: - body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations] - if self.budget_configuration_id is not None: - body["budget_configuration_id"] = self.budget_configuration_id - if self.display_name is not None: - body["display_name"] = self.display_name - if self.filter: - body["filter"] = self.filter.as_dict() + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.alert_configurations: - body["alert_configurations"] = self.alert_configurations - if self.budget_configuration_id is not None: - body["budget_configuration_id"] = self.budget_configuration_id - if self.display_name is not None: - body["display_name"] = self.display_name - if self.filter: - body["filter"] = self.filter + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: body['alert_configurations'] = self.alert_configurations + if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationBudget: """Deserializes the UpdateBudgetConfigurationBudget from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - alert_configurations=_repeated_dict(d, "alert_configurations", AlertConfiguration), - budget_configuration_id=d.get("budget_configuration_id", None), - display_name=d.get("display_name", None), - filter=_from_dict(d, "filter", BudgetConfigurationFilter), - ) + return cls(account_id=d.get('account_id', None), alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), budget_configuration_id=d.get('budget_configuration_id', None), display_name=d.get('display_name', None), filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) + + @dataclass class UpdateBudgetConfigurationRequest: 
budget: UpdateBudgetConfigurationBudget """The updated budget. This will overwrite the budget specified by the budget ID.""" - + budget_id: Optional[str] = None """The Databricks budget configuration ID.""" - + def as_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: - body["budget"] = self.budget.as_dict() - if self.budget_id is not None: - body["budget_id"] = self.budget_id + if self.budget: body['budget'] = self.budget.as_dict() + if self.budget_id is not None: body['budget_id'] = self.budget_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: - body["budget"] = self.budget - if self.budget_id is not None: - body["budget_id"] = self.budget_id + if self.budget: body['budget'] = self.budget + if self.budget_id is not None: body['budget_id'] = self.budget_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationRequest: """Deserializes the UpdateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, "budget", UpdateBudgetConfigurationBudget), budget_id=d.get("budget_id", None)) + return cls(budget=_from_dict(d, 'budget', UpdateBudgetConfigurationBudget), budget_id=d.get('budget_id', None)) + + @dataclass class UpdateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None """The updated budget.""" - + def as_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: - body["budget"] = self.budget.as_dict() + if self.budget: body['budget'] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: - body["budget"] = self.budget + if self.budget: body['budget'] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationResponse: """Deserializes the UpdateBudgetConfigurationResponse from a dictionary.""" - return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) + + + + + @dataclass class UpdateLogDeliveryConfigurationStatusRequest: + """* Update Log Delivery Configuration""" + status: LogDeliveryConfigStatus """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. 
Deletion of a configuration
        is not supported, so disable a log delivery configuration that is no longer needed."""
-
+    
    log_delivery_configuration_id: Optional[str] = None
-    """Databricks log delivery configuration ID"""
-
+    """The log delivery configuration ID of the customer"""
+    
    def as_dict(self) -> dict:
        """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use
        as a JSON request body."""
        body = {}
-        if self.log_delivery_configuration_id is not None:
-            body["log_delivery_configuration_id"] = self.log_delivery_configuration_id
-        if self.status is not None:
-            body["status"] = self.status.value
+        if self.log_delivery_configuration_id is not None: body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
+        if self.status is not None: body['status'] = self.status.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its
        immediate attributes."""
        body = {}
-        if self.log_delivery_configuration_id is not None:
-            body["log_delivery_configuration_id"] = self.log_delivery_configuration_id
-        if self.status is not None:
-            body["status"] = self.status
+        if self.log_delivery_configuration_id is not None: body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
+        if self.status is not None: body['status'] = self.status
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> UpdateLogDeliveryConfigurationStatusRequest:
        """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary."""
-        return cls(
-            log_delivery_configuration_id=d.get("log_delivery_configuration_id", None),
-            status=_enum(d, "status", LogDeliveryConfigStatus),
-        )
-
+        return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None), status=_enum(d, 'status', LogDeliveryConfigStatus))
+

-class UsageDashboardType(Enum):
-    USAGE_DASHBOARD_TYPE_GLOBAL = "USAGE_DASHBOARD_TYPE_GLOBAL"
-    USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE"

+class UsageDashboardType(Enum):
+
+
+    USAGE_DASHBOARD_TYPE_GLOBAL = 'USAGE_DASHBOARD_TYPE_GLOBAL'
+    USAGE_DASHBOARD_TYPE_WORKSPACE = 'USAGE_DASHBOARD_TYPE_WORKSPACE'


@dataclass
class WrappedCreateLogDeliveryConfiguration:
-    log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams] = None
-
+    """* Properties of the new log delivery configuration."""
+
+    log_delivery_configuration: CreateLogDeliveryConfigurationParams
+    """* Log Delivery Configuration"""
+
    def as_dict(self) -> dict:
        """Serializes the WrappedCreateLogDeliveryConfiguration into a dictionary suitable for use as a
        JSON request body."""
        body = {}
-        if self.log_delivery_configuration:
-            body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict()
+        if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate
        attributes."""
        body = {}
-        if self.log_delivery_configuration:
-            body["log_delivery_configuration"] = self.log_delivery_configuration
+        if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> WrappedCreateLogDeliveryConfiguration:
        """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary."""
-        return cls(
-            log_delivery_configuration=_from_dict(d,
"log_delivery_configuration", CreateLogDeliveryConfigurationParams) - ) + return cls(log_delivery_configuration=_from_dict(d, 'log_delivery_configuration', CreateLogDeliveryConfigurationParams)) + + @dataclass class WrappedLogDeliveryConfiguration: log_delivery_configuration: Optional[LogDeliveryConfiguration] = None - + """The created log delivery configuration""" + def as_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configuration: - body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict() + if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_delivery_configuration: - body["log_delivery_configuration"] = self.log_delivery_configuration + if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WrappedLogDeliveryConfiguration: """Deserializes the WrappedLogDeliveryConfiguration from a dictionary.""" - return cls(log_delivery_configuration=_from_dict(d, "log_delivery_configuration", LogDeliveryConfiguration)) + return cls(log_delivery_configuration=_from_dict(d, 'log_delivery_configuration', LogDeliveryConfiguration)) + + @dataclass class WrappedLogDeliveryConfigurations: log_delivery_configurations: Optional[List[LogDeliveryConfiguration]] = None - + """Log delivery configurations were returned successfully.""" + + next_page_token: Optional[str] = None + """Token which can be sent as `page_token` to retrieve the next page of results. If this field is + omitted, there are no subsequent budgets.""" + def as_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfigurations into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configurations: - body["log_delivery_configurations"] = [v.as_dict() for v in self.log_delivery_configurations] + if self.log_delivery_configurations: body['log_delivery_configurations'] = [v.as_dict() for v in self.log_delivery_configurations] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfigurations into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_delivery_configurations: - body["log_delivery_configurations"] = self.log_delivery_configurations + if self.log_delivery_configurations: body['log_delivery_configurations'] = self.log_delivery_configurations + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WrappedLogDeliveryConfigurations: """Deserializes the WrappedLogDeliveryConfigurations from a dictionary.""" - return cls( - log_delivery_configurations=_repeated_dict(d, "log_delivery_configurations", LogDeliveryConfiguration) - ) + return cls(log_delivery_configurations=_repeated_dict(d, 'log_delivery_configurations', LogDeliveryConfiguration), next_page_token=d.get('next_page_token', None)) + + + + class BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range. 
This feature works with all account types."""
-
+    
    def __init__(self, api_client):
        self._api = api_client
+    

-    def download(self, start_month: str, end_month: str, *, personal_data: Optional[bool] = None) -> DownloadResponse:
-        """Return billable usage logs.
+    
+    
+    
+    
+    
+    
+    def download(self
+    , start_month: str, end_month: str
+    , *
+    , personal_data: Optional[bool] = None) -> DownloadResponse:
+        """Return billable usage logs.
+        
        Returns billable usage logs in CSV format for the specified account and date range. For the data
        schema, see [CSV file schema]. Note that this method might take multiple minutes to complete.
-
+        
        **Warning**: Depending on the queried date range, the number of workspaces in the account, the
        size of the response and the internet speed of the caller, this API may hit a timeout after a few
        minutes. If you experience this, try to mitigate by calling the API with narrower date ranges.
-
+        
        [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
-
+        
        :param start_month: str
          Format: `YYYY-MM`. First month to return billable usage logs for. This field is required.
        :param end_month: str
@@ -1725,38 +1559,46 @@ def download(self, start_month: str, end_month: str, *, personal_data: Optional[
          Specify whether to include personally identifiable information in the billable usage logs, for
          example the email addresses of cluster creators. Handle this information with care. Defaults to
          false.
-
+        
        :returns: :class:`DownloadResponse`
        """
-
+        
        query = {}
-        if end_month is not None:
-            query["end_month"] = end_month
-        if personal_data is not None:
-            query["personal_data"] = personal_data
-        if start_month is not None:
-            query["start_month"] = start_month
-        headers = {
-            "Accept": "text/plain",
-        }
-
-        res = self._api.do(
-            "GET", f"/api/2.0/accounts/{self._api.account_id}/usage/download", query=query, headers=headers, raw=True
-        )
+        if end_month is not None: query['end_month'] = end_month
+        if personal_data is not None: query['personal_data'] = personal_data
+        if start_month is not None: query['start_month'] = start_month
+        headers = {'Accept': 'text/plain',}
+        
+        res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/usage/download', query=query
+        
+        , headers=headers
+        , raw=True)
        return DownloadResponse.from_dict(res)
-
+    
+

class BudgetPolicyAPI:
    """A service that serves REST APIs for budget policies."""
-
+    
    def __init__(self, api_client):
        self._api = api_client
+    

-    def create(self, *, policy: Optional[BudgetPolicy] = None, request_id: Optional[str] = None) -> BudgetPolicy:
-        """Create a budget policy.

-        Creates a new policy.
+    
+    
+    
+    
+    
+    def create(self
+    
+    , *
+    , policy: Optional[BudgetPolicy] = None, request_id: Optional[str] = None) -> BudgetPolicy:
+        """Create a budget policy.
+        
+        Creates a new policy.
+        
        :param policy: :class:`BudgetPolicy` (optional)
          The policy to create. `policy_id` needs to be empty as it will be generated. `policy_name` must be
          provided, custom_tags may need to be provided depending on the cloud provider. All other fields are
          optional.
        :param request_id: str (optional)
          A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
          recommended. This request is only idempotent if a `request_id` is provided.
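+
+        Example (a sketch; it assumes the service is exposed as ``budget_policy`` on an
+        ``AccountClient`` and that ``BudgetPolicy`` fields are optional keywords):
+
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.billing import BudgetPolicy
+
+            a = AccountClient()
+            created = a.budget_policy.create(policy=BudgetPolicy(policy_name="eng-serverless"))
+            print(created.policy_id)  # generated by the service
+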
- + :returns: :class:`BudgetPolicy`
        """
        body = {}
-        if policy is not None:
-            body["policy"] = policy.as_dict()
-        if request_id is not None:
-            body["request_id"] = request_id
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "POST", f"/api/2.1/accounts/{self._api.account_id}/budget-policies", body=body, headers=headers
-        )
+        if policy is not None: body['policy'] = policy.as_dict()
+        if request_id is not None: body['request_id'] = request_id
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('POST',f'/api/2.1/accounts/{self._api.account_id}/budget-policies', body=body
+        
+        , headers=headers
+        )
        return BudgetPolicy.from_dict(res)

-    def delete(self, policy_id: str):
-        """Delete a budget policy.
+    
+    
+    
+    def delete(self
+    , policy_id: str
+    ):
+        """Delete a budget policy.
+        
        Deletes a policy
-
+        
        :param policy_id: str
          The Id of the policy.
-
-
+        
+        """
+        
+        headers = {'Accept': 'application/json',}
+        
+        self._api.do('DELETE',f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}'
+        
+        , headers=headers
+        )
+        

-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}", headers=headers)
+    
+    
+    

-    def get(self, policy_id: str) -> BudgetPolicy:
+    def get(self
+    , policy_id: str
+    ) -> BudgetPolicy:
        """Get a budget policy.
-
+        
        Retrieves a policy by its ID.
-
+        
        :param policy_id: str
          The Id of the policy.
-
+        
        :returns: :class:`BudgetPolicy`
        """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET", f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}", headers=headers
-        )
+        
+        headers = {'Accept': 'application/json',}
+        
+        res = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}'
+        
+        , headers=headers
+        )
        return BudgetPolicy.from_dict(res)

-    def list(
-        self,
-        *,
-        filter_by: Optional[Filter] = None,
-        page_size: Optional[int] = None,
-        page_token: Optional[str] = None,
-        sort_spec: Optional[SortSpec] = None,
-    ) -> Iterator[BudgetPolicy]:
-        """List policies.
+    
+    
+    
+    def list(self
+    
+    , *
+    , filter_by: Optional[Filter] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, sort_spec: Optional[SortSpec] = None) -> Iterator[BudgetPolicy]:
+        """List policies.
+        
        Lists all policies. Policies are returned in the alphabetically ascending order of their names.
-
+        
        :param filter_by: :class:`Filter` (optional)
          A filter to apply to the list of policies.
        :param page_size: int (optional)
@@ -1839,443 +1690,468 @@ def list(
          The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
          returned.
        :param page_token: str (optional)
          A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
          subsequent page. If unspecified, the first page will be returned.
-
+        
          When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
          call that provided the page token.
        :param sort_spec: :class:`SortSpec` (optional)
          The sort specification.
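+
+        Example (a sketch; ``Filter`` is the request dataclass defined earlier in this module, and
+        the creator name is a placeholder):
+
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.billing import Filter
+
+            a = AccountClient()
+            for policy in a.budget_policy.list(filter_by=Filter(creator_user_name="jane@example.com")):
+                print(policy.policy_name)
+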
- + :returns: Iterator over :class:`BudgetPolicy`
        """
-
+        
        query = {}
-        if filter_by is not None:
-            query["filter_by"] = filter_by.as_dict()
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        if sort_spec is not None:
-            query["sort_spec"] = sort_spec.as_dict()
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if filter_by is not None: query['filter_by'] = filter_by.as_dict()
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if sort_spec is not None: query['sort_spec'] = sort_spec.as_dict()
+        headers = {'Accept': 'application/json',}
+        
+        
        while True:
-            json = self._api.do(
-                "GET", f"/api/2.1/accounts/{self._api.account_id}/budget-policies", query=query, headers=headers
-            )
-            if "policies" in json:
-                for v in json["policies"]:
-                    yield BudgetPolicy.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def update(
-        self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None
-    ) -> BudgetPolicy:
-        """Update a budget policy.
+            json = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budget-policies', query=query
+            
+            , headers=headers
+            )
+            if 'policies' in json:
+                for v in json['policies']:
+                    yield BudgetPolicy.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+

-        Updates a policy
+    
+    
+    
+    def update(self
+    , policy_id: str, policy: BudgetPolicy
+    , *
+    , limit_config: Optional[LimitConfig] = None) -> BudgetPolicy:
+        """Update a budget policy.
+        
+        Updates a policy
+        
        :param policy_id: str
          The Id of the policy. This field is generated by Databricks and globally unique.
        :param policy: :class:`BudgetPolicy`
          Contains the BudgetPolicy details.
        :param limit_config: :class:`LimitConfig` (optional)
          DEPRECATED. This is a redundant field, as LimitConfig is part of the BudgetPolicy
-
+        
        :returns: :class:`BudgetPolicy`
        """
        body = policy.as_dict()
        query = {}
-        if limit_config is not None:
-            query["limit_config"] = limit_config.as_dict()
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "PATCH",
-            f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}",
-            query=query,
-            body=body,
-            headers=headers,
-        )
+        if limit_config is not None: query['limit_config'] = limit_config.as_dict()
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('PATCH',f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}', query=query, body=body
+        
+        , headers=headers
+        )
        return BudgetPolicy.from_dict(res)
-
+    
+

class BudgetsAPI:
    """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across
    your account. You can set up budgets to either track account-wide spending, or apply filters to track the
    spending of specific teams, projects, or workspaces."""
-
+    
    def __init__(self, api_client):
        self._api = api_client
+    

-    def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse:
-        """Create new budget.
+    
+    
+    
+    
+    
+    
+    def create(self
+    , budget: CreateBudgetConfigurationBudget
+    ) -> CreateBudgetConfigurationResponse:
+        """Create new budget.
+        
        Create a new budget configuration for an account.
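+
+        A minimal sketch (``display_name`` is a placeholder; alert configurations and filters,
+        which a real budget would normally carry, are omitted here):
+
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.billing import CreateBudgetConfigurationBudget
+
+            a = AccountClient()
+            created = a.budgets.create(budget=CreateBudgetConfigurationBudget(display_name="team-budget"))
+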
For full details, see https://docs.databricks.com/en/admin/account-settings/budgets.html. - + :param budget: :class:`CreateBudgetConfigurationBudget` Properties of the new budget configuration. - + :returns: :class:`CreateBudgetConfigurationResponse` """ body = {} - if budget is not None: - body["budget"] = budget.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.1/accounts/{self._api.account_id}/budgets", body=body, headers=headers) + if budget is not None: body['budget'] = budget.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.1/accounts/{self._api.account_id}/budgets', body=body + + , headers=headers + ) return CreateBudgetConfigurationResponse.from_dict(res) - def delete(self, budget_id: str): - """Delete budget. + + + + def delete(self + , budget_id: str + ): + """Delete budget. + Deletes a budget configuration for an account. Both account and budget configuration are specified by ID. This cannot be undone. - + :param budget_id: str The Databricks budget configuration ID. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}", headers=headers) + + + - def get(self, budget_id: str) -> GetBudgetConfigurationResponse: + def get(self + , budget_id: str + ) -> GetBudgetConfigurationResponse: """Get budget. - + Gets a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The budget configuration ID - + :returns: :class:`GetBudgetConfigurationResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}' + + , headers=headers + ) return GetBudgetConfigurationResponse.from_dict(res) - def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]: - """Get all budgets. + + + + def list(self + + , * + , page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]: + """Get all budgets. + Gets all budgets associated with this account. - + :param page_token: str (optional) A page token received from a previous get all budget configurations call. This token can be used to retrieve the subsequent page. Requests first page if absent. - + :returns: Iterator over :class:`BudgetConfiguration` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", f"/api/2.1/accounts/{self._api.account_id}/budgets", query=query, headers=headers - ) - if "budgets" in json: - for v in json["budgets"]: - yield BudgetConfiguration.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse: - """Modify budget. 
+ json = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budgets', query=query + + , headers=headers + ) + if 'budgets' in json: + for v in json['budgets']: + yield BudgetConfiguration.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , budget_id: str, budget: UpdateBudgetConfigurationBudget + ) -> UpdateBudgetConfigurationResponse: + """Modify budget. + Updates a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The Databricks budget configuration ID. :param budget: :class:`UpdateBudgetConfigurationBudget` The updated budget. This will overwrite the budget specified by the budget ID. - + :returns: :class:`UpdateBudgetConfigurationResponse` """ body = {} - if budget is not None: - body["budget"] = budget.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}", body=body, headers=headers - ) + if budget is not None: body['budget'] = budget.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', body=body + + , headers=headers + ) return UpdateBudgetConfigurationResponse.from_dict(res) - + + class LogDeliveryAPI: - """These APIs manage log delivery configurations for this account. The two supported log types for this API - are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all - account ID types. - - Log delivery works with all account types. However, if your account is on the E2 version of the platform - or on a select custom plan that allows multiple workspaces per account, you can optionally configure - different storage destinations for each workspace. Log delivery status is also provided to know the latest - status of log delivery attempts. The high-level flow of billable usage delivery: - - 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using - Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create) - that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For - full details, including the required IAM role policies and trust relationship, see [Billable usage log - delivery]. Using Databricks APIs, call the Account API to create a [credential configuration - object](:method:Credentials/Create) that uses the IAM role"s ARN. 3. **Create log delivery - configuration**: Using Databricks APIs, call the Account API to [create a log delivery - configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from - previous steps. You can specify if the logs should include all events of that log type in your account - (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery). - Account level log delivery applies to all current and future workspaces plus account level logs, while - workspace level log delivery solely delivers logs related to the specified workspaces. You can create - multiple types of delivery configurations per account. - - For billable usage delivery: * For more information about billable usage logs, see [Billable usage log - delivery]. 
For the CSV schema, see the [Usage page]. * The delivery location is - `//billable-usage/csv/`, where `` is the name of the optional delivery path - prefix you set up during log delivery configuration. Files are named - `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to specific - workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an - _account level_ delivery configuration that delivers logs for all current and future workspaces in your - account. * The files are delivered daily by overwriting the month's CSV file for each workspace. - - For audit log delivery: * For more information about audit log delivery, see [Audit log delivery], - which includes information about the used JSON schema. * The delivery location is - `//workspaceId=/date=/auditlogs_.json`. - Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the - audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for - those workspaces are delivered. If the log delivery configuration applies to the entire account (_account - level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all - workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. * - Auditable events are typically available in logs within 15 minutes. - - [Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html - [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html""" - + """These APIs manage log delivery configurations for this account. Log delivery configurations enable you to + configure the delivery of the specified type of logs to your storage account.""" + def __init__(self, api_client): self._api = api_client + - def create( - self, *, log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams] = None - ) -> WrappedLogDeliveryConfiguration: - """Create a new log delivery configuration. + + + + + + + def create(self + , log_delivery_configuration: CreateLogDeliveryConfigurationParams + ) -> WrappedLogDeliveryConfiguration: + """Create a new log delivery configuration. + Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket). - + For full details, including the required IAM role policies and bucket policies, see [Deliver and access billable usage logs] or [Configure audit logging]. - + **Note**: There is a limit on the number of log delivery configurations available per account (each limit applies separately to each log type including billable usage and audit logs). You can create a maximum of two enabled account-level delivery configurations (configurations without a workspace filter) per type.
Additionally, you can create two enabled workspace-level delivery configurations per workspace for each log type, which means that the same workspace ID can occur in the workspace filter for no more than two delivery configurations per log type. - + You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery configuration](:method:LogDelivery/PatchStatus)). - + [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - - :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` (optional) - + + :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` + The log delivery configuration to create. + :returns: :class:`WrappedLogDeliveryConfiguration` """ body = {} - if log_delivery_configuration is not None: - body["log_delivery_configuration"] = log_delivery_configuration.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/log-delivery", body=body, headers=headers) + if log_delivery_configuration is not None: body['log_delivery_configuration'] = log_delivery_configuration.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/log-delivery', body=body + + , headers=headers + ) return WrappedLogDeliveryConfiguration.from_dict(res) - def get(self, log_delivery_configuration_id: str) -> WrappedLogDeliveryConfiguration: - """Get log delivery configuration. + + + + def get(self + , log_delivery_configuration_id: str + ) -> GetLogDeliveryConfigurationResponse: + """Get log delivery configuration. + Gets a Databricks log delivery configuration object for an account, both specified by ID. - + :param log_delivery_configuration_id: str - Databricks log delivery configuration ID - - :returns: :class:`WrappedLogDeliveryConfiguration` + The log delivery configuration ID of the customer + + :returns: :class:`GetLogDeliveryConfigurationResponse` """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}' + + , headers=headers + ) + return GetLogDeliveryConfigurationResponse.from_dict(res) - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}", - headers=headers, - ) - return WrappedLogDeliveryConfiguration.from_dict(res) + + + - def list( - self, - *, - credentials_id: Optional[str] = None, - status: Optional[LogDeliveryConfigStatus] = None, - storage_configuration_id: Optional[str] = None, - ) -> Iterator[LogDeliveryConfiguration]: + def list(self + + , * + , credentials_id: Optional[str] = None, page_token: Optional[str] = None, status: Optional[LogDeliveryConfigStatus] = None, storage_configuration_id: Optional[str] = None) -> Iterator[LogDeliveryConfiguration]: """Get all log delivery configurations. - + Gets all Databricks log delivery configurations associated with an account specified by ID. - + :param credentials_id: str (optional) - Filter by credential configuration ID.
+ The credentials ID to filter the search results by + :param page_token: str (optional) + A page token received from a previous get all log delivery configurations call. This token can be used to + retrieve the subsequent page. Requests first page if absent. :param status: :class:`LogDeliveryConfigStatus` (optional) - Filter by status `ENABLED` or `DISABLED`. + The log delivery status to filter the search results by :param storage_configuration_id: str (optional) - Filter by storage configuration ID. - + The storage configuration ID to filter the search results by + :returns: Iterator over :class:`LogDeliveryConfiguration` """ - + query = {} - if credentials_id is not None: - query["credentials_id"] = credentials_id - if status is not None: - query["status"] = status.value - if storage_configuration_id is not None: - query["storage_configuration_id"] = storage_configuration_id - headers = { - "Accept": "application/json", - } - - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/log-delivery", query=query, headers=headers - ) - parsed = WrappedLogDeliveryConfigurations.from_dict(json).log_delivery_configurations - return parsed if parsed is not None else [] - - def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryConfigStatus): - """Enable or disable log delivery configuration. + if credentials_id is not None: query['credentials_id'] = credentials_id + if page_token is not None: query['page_token'] = page_token + if status is not None: query['status'] = status.value + if storage_configuration_id is not None: query['storage_configuration_id'] = storage_configuration_id + headers = {'Accept': 'application/json',} + + + + while True: + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/log-delivery', query=query + + , headers=headers + ) + if 'log_delivery_configurations' in json: + for v in json['log_delivery_configurations']: + yield LogDeliveryConfiguration.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def patch_status(self + , log_delivery_configuration_id: str, status: LogDeliveryConfigStatus + ): + """Enable or disable log delivery configuration. + Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create). - + :param log_delivery_configuration_id: str - Databricks log delivery configuration ID + The log delivery configuration ID of the customer :param status: :class:`LogDeliveryConfigStatus` Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.
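# --- Editor's aside: illustrative usage sketch, not part of the generated patch.
# Filtering and disabling log delivery configurations with the list/patch_status
# methods above; `a` is the same account-level AccountClient as in the earlier
# aside, and the configuration ID is a placeholder.
from databricks.sdk.service.billing import LogDeliveryConfigStatus

for cfg in a.log_delivery.list(status=LogDeliveryConfigStatus.ENABLED):
    print(cfg.as_dict())  # each item is a LogDeliveryConfiguration
a.log_delivery.patch_status(
    log_delivery_configuration_id="<config-id>",  # placeholder ID
    status=LogDeliveryConfigStatus.DISABLED,  # configs cannot be deleted, only disabled
)
# --- end of aside ------------------------------------------------------------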
- - + + """ body = {} - if status is not None: - body["status"] = status.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}", - body=body, - headers=headers, - ) - + if status is not None: body['status'] = status.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}', body=body + + , headers=headers + ) + + + class UsageDashboardsAPI: """These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost drivers.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, *, dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None - ) -> CreateBillingUsageDashboardResponse: - """Create new usage dashboard. + - Create a usage dashboard specified by workspaceId, accountId, and dashboard type. + + + + + def create(self + + , * + , dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None) -> CreateBillingUsageDashboardResponse: + """Create new usage dashboard. + + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. - + :returns: :class:`CreateBillingUsageDashboardResponse` """ body = {} - if dashboard_type is not None: - body["dashboard_type"] = dashboard_type.value - if workspace_id is not None: - body["workspace_id"] = workspace_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/dashboard", body=body, headers=headers) + if dashboard_type is not None: body['dashboard_type'] = dashboard_type.value + if workspace_id is not None: body['workspace_id'] = workspace_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/dashboard', body=body + + , headers=headers + ) return CreateBillingUsageDashboardResponse.from_dict(res) - def get( - self, *, dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None - ) -> GetBillingUsageDashboardResponse: - """Get usage dashboard. + + + + def get(self + + , * + , dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None) -> GetBillingUsageDashboardResponse: + """Get usage dashboard. + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. - + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. 
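# --- Editor's aside: illustrative usage sketch, not part of the generated patch.
# Creating and then fetching a workspace-level usage dashboard via the methods
# above. The enum member names and the workspace ID are assumptions for
# illustration; `a` is the same account-level AccountClient.
from databricks.sdk.service.billing import UsageDashboardType

a.usage_dashboards.create(
    dashboard_type=UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,  # assumed member name
    workspace_id=1234567890,  # placeholder workspace ID
)
resp = a.usage_dashboards.get(
    dashboard_type=UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890,
)
print(resp.as_dict())
# --- end of aside ------------------------------------------------------------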
- + :returns: :class:`GetBillingUsageDashboardResponse` """ - + query = {} - if dashboard_type is not None: - query["dashboard_type"] = dashboard_type.value - if workspace_id is not None: - query["workspace_id"] = workspace_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/dashboard", query=query, headers=headers) + if dashboard_type is not None: query['dashboard_type'] = dashboard_type.value + if workspace_id is not None: query['workspace_id'] = workspace_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/dashboard', query=query + + , headers=headers + ) return GetBillingUsageDashboardResponse.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 9553d8877..22be23b7a 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -1,403 +1,358 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AccountsCreateMetastore: metastore_info: Optional[CreateMetastore] = None - + def as_dict(self) -> dict: """Serializes the AccountsCreateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info.as_dict() + if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info + if self.metastore_info: body['metastore_info'] = self.metastore_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastore: """Deserializes the AccountsCreateMetastore from a dictionary.""" - return cls(metastore_info=_from_dict(d, "metastore_info", CreateMetastore)) + return cls(metastore_info=_from_dict(d, 'metastore_info', CreateMetastore)) + + @dataclass class AccountsCreateMetastoreAssignment: metastore_assignment: Optional[CreateMetastoreAssignment] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + workspace_id: Optional[int] = None """Workspace ID.""" - + def as_dict(self) -> dict: """Serializes the AccountsCreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment.as_dict() - if self.metastore_id 
is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreAssignment: """Deserializes the AccountsCreateMetastoreAssignment from a dictionary.""" - return cls( - metastore_assignment=_from_dict(d, "metastore_assignment", CreateMetastoreAssignment), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) + return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', CreateMetastoreAssignment), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None)) + + @dataclass class AccountsCreateStorageCredential: credential_info: Optional[CreateStorageCredential] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + def as_dict(self) -> dict: """Serializes the AccountsCreateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id + if self.credential_info: body['credential_info'] = self.credential_info.as_dict() + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id + if self.credential_info: body['credential_info'] = self.credential_info + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateStorageCredential: """Deserializes the AccountsCreateStorageCredential from a dictionary.""" - return cls( - credential_info=_from_dict(d, "credential_info", CreateStorageCredential), - metastore_id=d.get("metastore_id", None), - ) + return cls(credential_info=_from_dict(d, 'credential_info', CreateStorageCredential), metastore_id=d.get('metastore_id', None)) + + @dataclass class AccountsMetastoreAssignment: metastore_assignment: Optional[MetastoreAssignment] = None - + def as_dict(self) -> dict: """Serializes the AccountsMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment.as_dict() + if 
self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment + if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreAssignment: """Deserializes the AccountsMetastoreAssignment from a dictionary.""" - return cls(metastore_assignment=_from_dict(d, "metastore_assignment", MetastoreAssignment)) + return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', MetastoreAssignment)) + + @dataclass class AccountsMetastoreInfo: metastore_info: Optional[MetastoreInfo] = None - + def as_dict(self) -> dict: """Serializes the AccountsMetastoreInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info.as_dict() + if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info + if self.metastore_info: body['metastore_info'] = self.metastore_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreInfo: """Deserializes the AccountsMetastoreInfo from a dictionary.""" - return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo)) + return cls(metastore_info=_from_dict(d, 'metastore_info', MetastoreInfo)) + + @dataclass class AccountsStorageCredentialInfo: credential_info: Optional[StorageCredentialInfo] = None - + def as_dict(self) -> dict: """Serializes the AccountsStorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() + if self.credential_info: body['credential_info'] = self.credential_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info + if self.credential_info: body['credential_info'] = self.credential_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: """Deserializes the AccountsStorageCredentialInfo from a dictionary.""" - return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) + return cls(credential_info=_from_dict(d, 'credential_info', StorageCredentialInfo)) + + @dataclass class AccountsUpdateMetastore: metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + metastore_info: Optional[UpdateMetastore] = None - + def as_dict(self) -> dict: """Serializes the AccountsUpdateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.metastore_info: - body["metastore_info"] = self.metastore_info.as_dict() + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: 
"""Serializes the AccountsUpdateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.metastore_info: - body["metastore_info"] = self.metastore_info + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.metastore_info: body['metastore_info'] = self.metastore_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastore: """Deserializes the AccountsUpdateMetastore from a dictionary.""" - return cls( - metastore_id=d.get("metastore_id", None), metastore_info=_from_dict(d, "metastore_info", UpdateMetastore) - ) + return cls(metastore_id=d.get('metastore_id', None), metastore_info=_from_dict(d, 'metastore_info', UpdateMetastore)) + + @dataclass class AccountsUpdateMetastoreAssignment: metastore_assignment: Optional[UpdateMetastoreAssignment] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + workspace_id: Optional[int] = None """Workspace ID.""" - + def as_dict(self) -> dict: """Serializes the AccountsUpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreAssignment: """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary.""" - return cls( - metastore_assignment=_from_dict(d, "metastore_assignment", UpdateMetastoreAssignment), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) + return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', UpdateMetastoreAssignment), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None)) + + @dataclass class AccountsUpdateStorageCredential: credential_info: Optional[UpdateStorageCredential] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + storage_credential_name: Optional[str] = None """Name of the storage credential.""" - + def as_dict(self) -> dict: """Serializes the AccountsUpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if 
self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name + if self.credential_info: body['credential_info'] = self.credential_info.as_dict() + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name return body def as_shallow_dict(self) -> dict: """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name + if self.credential_info: body['credential_info'] = self.credential_info + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateStorageCredential: """Deserializes the AccountsUpdateStorageCredential from a dictionary.""" - return cls( - credential_info=_from_dict(d, "credential_info", UpdateStorageCredential), - metastore_id=d.get("metastore_id", None), - storage_credential_name=d.get("storage_credential_name", None), - ) + return cls(credential_info=_from_dict(d, 'credential_info', UpdateStorageCredential), metastore_id=d.get('metastore_id', None), storage_credential_name=d.get('storage_credential_name', None)) + + @dataclass class ArtifactAllowlistInfo: artifact_matchers: Optional[List[ArtifactMatcher]] = None """A list of allowed artifact match patterns.""" - + created_at: Optional[int] = None """Time at which this artifact allowlist was set, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of the user who set the artifact allowlist.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + def as_dict(self) -> dict: """Serializes the ArtifactAllowlistInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_matchers: - body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers] - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id + if self.artifact_matchers: body['artifact_matchers'] = [v.as_dict() for v in self.artifact_matchers] + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body def as_shallow_dict(self) -> dict: """Serializes the ArtifactAllowlistInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_matchers: - body["artifact_matchers"] = self.artifact_matchers - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id + if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = 
self.created_by + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ArtifactAllowlistInfo: """Deserializes the ArtifactAllowlistInfo from a dictionary.""" - return cls( - artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - metastore_id=d.get("metastore_id", None), - ) + return cls(artifact_matchers=_repeated_dict(d, 'artifact_matchers', ArtifactMatcher), created_at=d.get('created_at', None), created_by=d.get('created_by', None), metastore_id=d.get('metastore_id', None)) + + @dataclass class ArtifactMatcher: artifact: str """The artifact path or maven coordinate""" - + match_type: MatchType """The pattern matching type of the artifact""" - + def as_dict(self) -> dict: """Serializes the ArtifactMatcher into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact is not None: - body["artifact"] = self.artifact - if self.match_type is not None: - body["match_type"] = self.match_type.value + if self.artifact is not None: body['artifact'] = self.artifact + if self.match_type is not None: body['match_type'] = self.match_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ArtifactMatcher into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact is not None: - body["artifact"] = self.artifact - if self.match_type is not None: - body["match_type"] = self.match_type + if self.artifact is not None: body['artifact'] = self.artifact + if self.match_type is not None: body['match_type'] = self.match_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ArtifactMatcher: """Deserializes the ArtifactMatcher from a dictionary.""" - return cls(artifact=d.get("artifact", None), match_type=_enum(d, "match_type", MatchType)) + return cls(artifact=d.get('artifact', None), match_type=_enum(d, 'match_type', MatchType)) + + class ArtifactType(Enum): """The artifact type""" - - INIT_SCRIPT = "INIT_SCRIPT" - LIBRARY_JAR = "LIBRARY_JAR" - LIBRARY_MAVEN = "LIBRARY_MAVEN" - + + INIT_SCRIPT = 'INIT_SCRIPT' + LIBRARY_JAR = 'LIBRARY_JAR' + LIBRARY_MAVEN = 'LIBRARY_MAVEN' @dataclass class AssignResponse: @@ -415,209 +370,182 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AssignResponse: """Deserializes the AssignResponse from a dictionary.""" return cls() + + @dataclass class AwsCredentials: """AWS temporary credentials for API authentication. 
Read more at https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" - + access_key_id: Optional[str] = None """The access key ID that identifies the temporary credentials.""" - + access_point: Optional[str] = None """The Amazon Resource Name (ARN) of the S3 access point for temporary credentials related to the external location.""" - + secret_access_key: Optional[str] = None """The secret access key that can be used to sign AWS API requests.""" - + session_token: Optional[str] = None """The token that users must pass to the AWS API to use the temporary credentials.""" - + def as_dict(self) -> dict: """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_key_id is not None: - body["access_key_id"] = self.access_key_id - if self.access_point is not None: - body["access_point"] = self.access_point - if self.secret_access_key is not None: - body["secret_access_key"] = self.secret_access_key - if self.session_token is not None: - body["session_token"] = self.session_token + if self.access_key_id is not None: body['access_key_id'] = self.access_key_id + if self.access_point is not None: body['access_point'] = self.access_point + if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key + if self.session_token is not None: body['session_token'] = self.session_token return body def as_shallow_dict(self) -> dict: """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_key_id is not None: - body["access_key_id"] = self.access_key_id - if self.access_point is not None: - body["access_point"] = self.access_point - if self.secret_access_key is not None: - body["secret_access_key"] = self.secret_access_key - if self.session_token is not None: - body["session_token"] = self.session_token + if self.access_key_id is not None: body['access_key_id'] = self.access_key_id + if self.access_point is not None: body['access_point'] = self.access_point + if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key + if self.session_token is not None: body['session_token'] = self.session_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsCredentials: """Deserializes the AwsCredentials from a dictionary.""" - return cls( - access_key_id=d.get("access_key_id", None), - access_point=d.get("access_point", None), - secret_access_key=d.get("secret_access_key", None), - session_token=d.get("session_token", None), - ) + return cls(access_key_id=d.get('access_key_id', None), access_point=d.get('access_point', None), secret_access_key=d.get('secret_access_key', None), session_token=d.get('session_token', None)) + + @dataclass class AwsIamRole: """The AWS IAM role configuration""" - + external_id: Optional[str] = None """The external ID used in role assumption to prevent the confused deputy problem.""" - + role_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials.""" - + unity_catalog_iam_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks.
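# --- Editor's aside: illustrative round trip, not part of the generated patch.
# Every dataclass in this module follows the same serialization contract shown
# above: as_dict() emits a JSON-ready dict that omits unset fields, from_dict()
# rebuilds an instance, and as_shallow_dict() leaves nested objects unserialized.
creds = AwsCredentials(access_key_id="AKIA...", session_token="<token>")  # placeholder values
wire = creds.as_dict()  # {'access_key_id': 'AKIA...', 'session_token': '<token>'}
assert AwsCredentials.from_dict(wire) == creds  # @dataclass equality is field-wise
# --- end of aside ------------------------------------------------------------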
This is the identity that is going to assume the AWS IAM role.""" - + def as_dict(self) -> dict: """Serializes the AwsIamRole into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id - if self.role_arn is not None: - body["role_arn"] = self.role_arn - if self.unity_catalog_iam_arn is not None: - body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn return body def as_shallow_dict(self) -> dict: """Serializes the AwsIamRole into a shallow dictionary of its immediate attributes.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id - if self.role_arn is not None: - body["role_arn"] = self.role_arn - if self.unity_catalog_iam_arn is not None: - body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsIamRole: """Deserializes the AwsIamRole from a dictionary.""" - return cls( - external_id=d.get("external_id", None), - role_arn=d.get("role_arn", None), - unity_catalog_iam_arn=d.get("unity_catalog_iam_arn", None), - ) + return cls(external_id=d.get('external_id', None), role_arn=d.get('role_arn', None), unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None)) + + @dataclass class AwsIamRoleRequest: role_arn: str """The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access.""" - + def as_dict(self) -> dict: """Serializes the AwsIamRoleRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.role_arn is not None: - body["role_arn"] = self.role_arn + if self.role_arn is not None: body['role_arn'] = self.role_arn return body def as_shallow_dict(self) -> dict: """Serializes the AwsIamRoleRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.role_arn is not None: - body["role_arn"] = self.role_arn + if self.role_arn is not None: body['role_arn'] = self.role_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsIamRoleRequest: """Deserializes the AwsIamRoleRequest from a dictionary.""" - return cls(role_arn=d.get("role_arn", None)) + return cls(role_arn=d.get('role_arn', None)) + + @dataclass class AwsIamRoleResponse: role_arn: str """The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access.""" - + external_id: Optional[str] = None """The external ID used in role assumption to prevent the confused deputy problem.""" - + unity_catalog_iam_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks.
This is the identity that is going to assume the AWS IAM role.""" - + def as_dict(self) -> dict: """Serializes the AwsIamRoleResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id - if self.role_arn is not None: - body["role_arn"] = self.role_arn - if self.unity_catalog_iam_arn is not None: - body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn return body def as_shallow_dict(self) -> dict: """Serializes the AwsIamRoleResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id - if self.role_arn is not None: - body["role_arn"] = self.role_arn - if self.unity_catalog_iam_arn is not None: - body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsIamRoleResponse: """Deserializes the AwsIamRoleResponse from a dictionary.""" - return cls( - external_id=d.get("external_id", None), - role_arn=d.get("role_arn", None), - unity_catalog_iam_arn=d.get("unity_catalog_iam_arn", None), - ) + return cls(external_id=d.get('external_id', None), role_arn=d.get('role_arn', None), unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None)) + + @dataclass class AwsSqsQueue: managed_resource_id: Optional[str] = None """Unique identifier included in the name of file events managed cloud resources.""" - + queue_url: Optional[str] = None """The SQS queue url in the format https://sqs.{region}.amazonaws.com/{account id}/{queue name} REQUIRED for provided_sqs.""" - + def as_dict(self) -> dict: """Serializes the AwsSqsQueue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.queue_url is not None: - body["queue_url"] = self.queue_url + if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id + if self.queue_url is not None: body['queue_url'] = self.queue_url return body def as_shallow_dict(self) -> dict: """Serializes the AwsSqsQueue into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.queue_url is not None: - body["queue_url"] = self.queue_url + if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id + if self.queue_url is not None: body['queue_url'] = self.queue_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsSqsQueue: """Deserializes the AwsSqsQueue from a dictionary.""" - return cls(managed_resource_id=d.get("managed_resource_id", None), queue_url=d.get("queue_url", None)) + return cls(managed_resource_id=d.get('managed_resource_id', None), queue_url=d.get('queue_url', None)) + + @dataclass @@ -625,81 +553,73 @@ class AzureActiveDirectoryToken: """Azure Active Directory token, essentially the OAuth token for Azure
Service Principal or Managed Identity. Read more at https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" - + aad_token: Optional[str] = None """Opaque token that contains claims that you can use in Azure Active Directory to access cloud services.""" - + def as_dict(self) -> dict: """Serializes the AzureActiveDirectoryToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aad_token is not None: - body["aad_token"] = self.aad_token + if self.aad_token is not None: body['aad_token'] = self.aad_token return body def as_shallow_dict(self) -> dict: """Serializes the AzureActiveDirectoryToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.aad_token is not None: - body["aad_token"] = self.aad_token + if self.aad_token is not None: body['aad_token'] = self.aad_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureActiveDirectoryToken: """Deserializes the AzureActiveDirectoryToken from a dictionary.""" - return cls(aad_token=d.get("aad_token", None)) + return cls(aad_token=d.get('aad_token', None)) + + @dataclass class AzureManagedIdentity: """The Azure managed identity configuration.""" - + access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.""" - + credential_id: Optional[str] = None """The Databricks internal ID that represents this managed identity. This field is only used to persist the credential_id once it is fetched from the credentials manager - as we only use the protobuf serializer to store credentials, this ID gets persisted to the database.""" - + managed_identity_id: Optional[str] = None """The Azure resource ID of the managed identity. Use the format `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}` This is only available for user-assigned identities. For system-assigned identities, the access_connector_id is used to identify the identity.
If this field is not provided, then we assume the AzureManagedIdentity is using the system-assigned identity.""" - + def as_dict(self) -> dict: """Serializes the AzureManagedIdentity into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_connector_id is not None: - body["access_connector_id"] = self.access_connector_id - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.managed_identity_id is not None: - body["managed_identity_id"] = self.managed_identity_id + if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureManagedIdentity into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_connector_id is not None: - body["access_connector_id"] = self.access_connector_id - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.managed_identity_id is not None: - body["managed_identity_id"] = self.managed_identity_id + if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentity: """Deserializes the AzureManagedIdentity from a dictionary.""" - return cls( - access_connector_id=d.get("access_connector_id", None), - credential_id=d.get("credential_id", None), - managed_identity_id=d.get("managed_identity_id", None), - ) + return cls(access_connector_id=d.get('access_connector_id', None), credential_id=d.get('credential_id', None), managed_identity_id=d.get('managed_identity_id', None)) + + @dataclass @@ -707,39 +627,34 @@ class AzureManagedIdentityRequest: access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.""" - + managed_identity_id: Optional[str] = None """The Azure resource ID of the managed identity. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}. This is only available for user-assigned identities. For system-assigned identities, the access_connector_id is used to identify the identity.
If this field is not provided, then we assume the AzureManagedIdentity is for a system-assigned identity.""" - + def as_dict(self) -> dict: """Serializes the AzureManagedIdentityRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_connector_id is not None: - body["access_connector_id"] = self.access_connector_id - if self.managed_identity_id is not None: - body["managed_identity_id"] = self.managed_identity_id + if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id + if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureManagedIdentityRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_connector_id is not None: - body["access_connector_id"] = self.access_connector_id - if self.managed_identity_id is not None: - body["managed_identity_id"] = self.managed_identity_id + if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id + if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentityRequest: """Deserializes the AzureManagedIdentityRequest from a dictionary.""" - return cls( - access_connector_id=d.get("access_connector_id", None), - managed_identity_id=d.get("managed_identity_id", None), - ) + return cls(access_connector_id=d.get('access_connector_id', None), managed_identity_id=d.get('managed_identity_id', None)) + + @dataclass @@ -747,174 +662,150 @@ class AzureManagedIdentityResponse: access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.""" - + credential_id: Optional[str] = None """The Databricks internal ID that represents this managed identity.""" - + managed_identity_id: Optional[str] = None """The Azure resource ID of the managed identity. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}. This is only available for user-assigned identities. For system-assigned identities, the access_connector_id is used to identify the identity.
If this field is not provided, then we assume the AzureManagedIdentity is for a system-assigned identity.""" - + def as_dict(self) -> dict: """Serializes the AzureManagedIdentityResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_connector_id is not None: - body["access_connector_id"] = self.access_connector_id - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.managed_identity_id is not None: - body["managed_identity_id"] = self.managed_identity_id + if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureManagedIdentityResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_connector_id is not None: - body["access_connector_id"] = self.access_connector_id - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.managed_identity_id is not None: - body["managed_identity_id"] = self.managed_identity_id + if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentityResponse: """Deserializes the AzureManagedIdentityResponse from a dictionary.""" - return cls( - access_connector_id=d.get("access_connector_id", None), - credential_id=d.get("credential_id", None), - managed_identity_id=d.get("managed_identity_id", None), - ) + return cls(access_connector_id=d.get('access_connector_id', None), credential_id=d.get('credential_id', None), managed_identity_id=d.get('managed_identity_id', None)) + + @dataclass class AzureQueueStorage: managed_resource_id: Optional[str] = None """Unique identifier included in the name of file events managed cloud resources.""" - + queue_url: Optional[str] = None """The AQS queue url in the format https://{storage account}.queue.core.windows.net/{queue name} REQUIRED for provided_aqs.""" - + resource_group: Optional[str] = None """The resource group for the queue, event grid subscription, and external location storage account. ONLY REQUIRED for locations with a service principal storage credential""" - + subscription_id: Optional[str] = None """OPTIONAL: The subscription id for the queue, event grid subscription, and external location storage account. 
REQUIRED for locations with a service principal storage credential""" - + def as_dict(self) -> dict: """Serializes the AzureQueueStorage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.queue_url is not None: - body["queue_url"] = self.queue_url - if self.resource_group is not None: - body["resource_group"] = self.resource_group - if self.subscription_id is not None: - body["subscription_id"] = self.subscription_id + if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id + if self.queue_url is not None: body['queue_url'] = self.queue_url + if self.resource_group is not None: body['resource_group'] = self.resource_group + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureQueueStorage into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.queue_url is not None: - body["queue_url"] = self.queue_url - if self.resource_group is not None: - body["resource_group"] = self.resource_group - if self.subscription_id is not None: - body["subscription_id"] = self.subscription_id + if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id + if self.queue_url is not None: body['queue_url'] = self.queue_url + if self.resource_group is not None: body['resource_group'] = self.resource_group + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureQueueStorage: """Deserializes the AzureQueueStorage from a dictionary.""" - return cls( - managed_resource_id=d.get("managed_resource_id", None), - queue_url=d.get("queue_url", None), - resource_group=d.get("resource_group", None), - subscription_id=d.get("subscription_id", None), - ) + return cls(managed_resource_id=d.get('managed_resource_id', None), queue_url=d.get('queue_url', None), resource_group=d.get('resource_group', None), subscription_id=d.get('subscription_id', None)) + + @dataclass class AzureServicePrincipal: """The Azure service principal configuration. 
Only applicable when purpose is **STORAGE**.""" - + directory_id: str """The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.""" - + application_id: str """The application ID of the application registration within the referenced AAD tenant.""" - + client_secret: str """The client secret generated for the above app ID in AAD.""" - + def as_dict(self) -> dict: """Serializes the AzureServicePrincipal into a dictionary suitable for use as a JSON request body.""" body = {} - if self.application_id is not None: - body["application_id"] = self.application_id - if self.client_secret is not None: - body["client_secret"] = self.client_secret - if self.directory_id is not None: - body["directory_id"] = self.directory_id + if self.application_id is not None: body['application_id'] = self.application_id + if self.client_secret is not None: body['client_secret'] = self.client_secret + if self.directory_id is not None: body['directory_id'] = self.directory_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureServicePrincipal into a shallow dictionary of its immediate attributes.""" body = {} - if self.application_id is not None: - body["application_id"] = self.application_id - if self.client_secret is not None: - body["client_secret"] = self.client_secret - if self.directory_id is not None: - body["directory_id"] = self.directory_id + if self.application_id is not None: body['application_id'] = self.application_id + if self.client_secret is not None: body['client_secret'] = self.client_secret + if self.directory_id is not None: body['directory_id'] = self.directory_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureServicePrincipal: """Deserializes the AzureServicePrincipal from a dictionary.""" - return cls( - application_id=d.get("application_id", None), - client_secret=d.get("client_secret", None), - directory_id=d.get("directory_id", None), - ) + return cls(application_id=d.get('application_id', None), client_secret=d.get('client_secret', None), directory_id=d.get('directory_id', None)) + + @dataclass class AzureUserDelegationSas: """Azure temporary credentials for API authentication. 
     Read more at https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
-
+    
    sas_token: Optional[str] = None
     """The signed URI (SAS Token) used to access blob services for a given path"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the AzureUserDelegationSas into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.sas_token is not None:
-            body["sas_token"] = self.sas_token
+        if self.sas_token is not None: body['sas_token'] = self.sas_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the AzureUserDelegationSas into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.sas_token is not None:
-            body["sas_token"] = self.sas_token
+        if self.sas_token is not None: body['sas_token'] = self.sas_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> AzureUserDelegationSas:
         """Deserializes the AzureUserDelegationSas from a dictionary."""
-        return cls(sas_token=d.get("sas_token", None))
+        return cls(sas_token=d.get('sas_token', None))
+    
+    
+    
+    
+    
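The reformatted serializers above keep the generated contract intact: as_dict emits only fields that are actually set, and from_dict tolerates missing keys. A minimal round-trip sketch, illustrative only and not part of this patch (it assumes these classes are importable from databricks.sdk.service.catalog):

    from databricks.sdk.service.catalog import AzureUserDelegationSas

    # A set field round-trips through the JSON-shaped dict unchanged.
    sas = AzureUserDelegationSas(sas_token="sv=2020-08-04&sig=example")
    assert AzureUserDelegationSas.from_dict(sas.as_dict()) == sas

    # Unset optional fields are omitted from the request body entirely.
    assert AzureUserDelegationSas().as_dict() == {}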

 @dataclass
@@ -933,6 +824,8 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> CancelRefreshResponse:
         """Deserializes the CancelRefreshResponse from a dictionary."""
         return cls()
+    
+    

 @dataclass
@@ -940,822 +833,612 @@ class CatalogInfo:
     browse_only: Optional[bool] = None
     """Indicates whether the principal is limited to retrieving metadata for the associated object through the
     BROWSE privilege when include_browse is enabled in the request."""
-
+    
     catalog_type: Optional[CatalogType] = None
     """The type of the catalog."""
-
+    
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+    
     connection_name: Optional[str] = None
     """The name of the connection to an external data source."""
-
+    
     created_at: Optional[int] = None
     """Time at which this catalog was created, in epoch milliseconds."""
-
+    
     created_by: Optional[str] = None
     """Username of catalog creator."""
-
+    
     effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None
-
+    
     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
     """Whether predictive optimization should be enabled for this object and objects under it."""
-
+    
     full_name: Optional[str] = None
     """The full name of the catalog. Corresponds with the name field."""
-
+    
     isolation_mode: Optional[CatalogIsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
-
+    
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
-
+    
     name: Optional[str] = None
     """Name of catalog."""
-
-    options: Optional[Dict[str, str]] = None
+    
+    options: Optional[Dict[str,str]] = None
     """A map of key-value properties attached to the securable."""
-
+    
     owner: Optional[str] = None
     """Username of current owner of catalog."""
-
-    properties: Optional[Dict[str, str]] = None
+    
+    properties: Optional[Dict[str,str]] = None
     """A map of key-value properties attached to the securable."""
-
+    
     provider_name: Optional[str] = None
     """The name of delta sharing provider.
     A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server."""
-
+    
     provisioning_info: Optional[ProvisioningInfo] = None
     """Status of an asynchronously provisioned resource."""
-
+    
     securable_type: Optional[SecurableType] = None
     """The type of Unity Catalog securable."""
-
+    
     share_name: Optional[str] = None
     """The name of the share under the share provider."""
-
+    
     storage_location: Optional[str] = None
     """Storage Location URL (full path) for managed tables within catalog."""
-
+    
     storage_root: Optional[str] = None
     """Storage root URL for managed tables within catalog."""
-
+    
     updated_at: Optional[int] = None
     """Time at which this catalog was last modified, in epoch milliseconds."""
-
+    
     updated_by: Optional[str] = None
     """Username of user who last modified catalog."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CatalogInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.browse_only is not None:
-            body["browse_only"] = self.browse_only
-        if self.catalog_type is not None:
-            body["catalog_type"] = self.catalog_type.value
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.effective_predictive_optimization_flag:
-            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
-        if self.enable_predictive_optimization is not None:
-            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode.value
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties:
-            body["properties"] = self.properties
-        if self.provider_name is not None:
-            body["provider_name"] = self.provider_name
-        if self.provisioning_info:
-            body["provisioning_info"] = self.provisioning_info.as_dict()
-        if self.securable_type is not None:
-            body["securable_type"] = self.securable_type.value
-        if self.share_name is not None:
-            body["share_name"] = self.share_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
-        if self.storage_root is not None:
-            body["storage_root"] = self.storage_root
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type.value
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict()
+        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
+        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CatalogInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.browse_only is not None:
-            body["browse_only"] = self.browse_only
-        if self.catalog_type is not None:
-            body["catalog_type"] = self.catalog_type
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.effective_predictive_optimization_flag:
-            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
-        if self.enable_predictive_optimization is not None:
-            body["enable_predictive_optimization"] = self.enable_predictive_optimization
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties:
-            body["properties"] = self.properties
-        if self.provider_name is not None:
-            body["provider_name"] = self.provider_name
-        if self.provisioning_info:
-            body["provisioning_info"] = self.provisioning_info
-        if self.securable_type is not None:
-            body["securable_type"] = self.securable_type
-        if self.share_name is not None:
-            body["share_name"] = self.share_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
-        if self.storage_root is not None:
-            body["storage_root"] = self.storage_root
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CatalogInfo:
         """Deserializes the CatalogInfo from a dictionary."""
-        return cls(
-            browse_only=d.get("browse_only", None),
-            catalog_type=_enum(d, "catalog_type", CatalogType),
-            comment=d.get("comment", None),
-            connection_name=d.get("connection_name", None),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            effective_predictive_optimization_flag=_from_dict(
-                d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag
-            ),
-            enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization),
-            full_name=d.get("full_name", None),
-            isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode),
-            metastore_id=d.get("metastore_id", None),
-            name=d.get("name", None),
-            options=d.get("options", None),
-            owner=d.get("owner", None),
-            properties=d.get("properties", None),
-            provider_name=d.get("provider_name", None),
-            provisioning_info=_from_dict(d, "provisioning_info", ProvisioningInfo),
-            securable_type=_enum(d, "securable_type", SecurableType),
-            share_name=d.get("share_name", None),
-            storage_location=d.get("storage_location", None),
-            storage_root=d.get("storage_root", None),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-        )
-
+        return cls(browse_only=d.get('browse_only', None), catalog_type=_enum(d, 'catalog_type', CatalogType), comment=d.get('comment', None), connection_name=d.get('connection_name', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), effective_predictive_optimization_flag=_from_dict(d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), full_name=d.get('full_name', None), isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), options=d.get('options', None), owner=d.get('owner', None), properties=d.get('properties', None), provider_name=d.get('provider_name', None), provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo), securable_type=_enum(d, 'securable_type', SecurableType), share_name=d.get('share_name', None), storage_location=d.get('storage_location', None), storage_root=d.get('storage_root', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
+    

-class CatalogIsolationMode(Enum):
-    ISOLATED = "ISOLATED"
-    OPEN = "OPEN"
+class CatalogIsolationMode(Enum):
+    
+    
+    ISOLATED = 'ISOLATED'
+    OPEN = 'OPEN'


 class CatalogType(Enum):
     """The type of the catalog."""
-
-    DELTASHARING_CATALOG = "DELTASHARING_CATALOG"
-    FOREIGN_CATALOG = "FOREIGN_CATALOG"
-    INTERNAL_CATALOG = "INTERNAL_CATALOG"
-    MANAGED_CATALOG = "MANAGED_CATALOG"
-    MANAGED_ONLINE_CATALOG = "MANAGED_ONLINE_CATALOG"
-    SYSTEM_CATALOG = "SYSTEM_CATALOG"
-    UNKNOWN_CATALOG_TYPE = "UNKNOWN_CATALOG_TYPE"
-
+    
+    DELTASHARING_CATALOG = 'DELTASHARING_CATALOG'
+    FOREIGN_CATALOG = 'FOREIGN_CATALOG'
+    INTERNAL_CATALOG = 'INTERNAL_CATALOG'
+    MANAGED_CATALOG = 'MANAGED_CATALOG'
+    MANAGED_ONLINE_CATALOG = 'MANAGED_ONLINE_CATALOG'
+    SYSTEM_CATALOG = 'SYSTEM_CATALOG'
+    UNKNOWN_CATALOG_TYPE = 'UNKNOWN_CATALOG_TYPE'


 @dataclass
 class CloudflareApiToken:
     access_key_id: str
     """The Cloudflare access key id of the token."""
-
+    
     secret_access_key: str
     """The secret access token generated for the access key id"""
-
+    
     account_id: str
     """The account id associated with the API token."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CloudflareApiToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_key_id is not None:
-            body["access_key_id"] = self.access_key_id
-        if self.account_id is not None:
-            body["account_id"] = self.account_id
-        if self.secret_access_key is not None:
-            body["secret_access_key"] = self.secret_access_key
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CloudflareApiToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_key_id is not None:
-            body["access_key_id"] = self.access_key_id
-        if self.account_id is not None:
-            body["account_id"] = self.account_id
-        if self.secret_access_key is not None:
-            body["secret_access_key"] = self.secret_access_key
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CloudflareApiToken:
         """Deserializes the CloudflareApiToken from a dictionary."""
-        return cls(
-            access_key_id=d.get("access_key_id", None),
-            account_id=d.get("account_id", None),
-            secret_access_key=d.get("secret_access_key", None),
-        )
+        return cls(access_key_id=d.get('access_key_id', None), account_id=d.get('account_id', None), secret_access_key=d.get('secret_access_key', None))
+    
+    

 @dataclass
 class ColumnInfo:
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+    
     mask: Optional[ColumnMask] = None
-
+    
     name: Optional[str] = None
     """Name of Column."""
-
+    
     nullable: Optional[bool] = None
     """Whether field may be Null (default: true)."""
-
+    
     partition_index: Optional[int] = None
     """Partition index for column."""
-
+    
     position: Optional[int] = None
     """Ordinal position of column (starting at position 0)."""
-
+    
     type_interval_type: Optional[str] = None
     """Format of IntervalType."""
-
+    
     type_json: Optional[str] = None
     """Full data type specification, JSON-serialized."""
-
+    
     type_name: Optional[ColumnTypeName] = None
-
+    
     type_precision: Optional[int] = None
     """Digits of precision; required for DecimalTypes."""
-
+    
     type_scale: Optional[int] = None
     """Digits to right of decimal; Required for DecimalTypes."""
-
+    
     type_text: Optional[str] = None
     """Full data type specification as SQL/catalogString text."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.mask:
-            body["mask"] = self.mask.as_dict()
-        if self.name is not None:
-            body["name"] = self.name
-        if self.nullable is not None:
-            body["nullable"] = self.nullable
-        if self.partition_index is not None:
-            body["partition_index"] = self.partition_index
-        if self.position is not None:
-            body["position"] = self.position
-        if self.type_interval_type is not None:
-            body["type_interval_type"] = self.type_interval_type
-        if self.type_json is not None:
-            body["type_json"] = self.type_json
-        if self.type_name is not None:
-            body["type_name"] = self.type_name.value
-        if self.type_precision is not None:
-            body["type_precision"] = self.type_precision
-        if self.type_scale is not None:
-            body["type_scale"] = self.type_scale
-        if self.type_text is not None:
-            body["type_text"] = self.type_text
+        if self.comment is not None: body['comment'] = self.comment
+        if self.mask: body['mask'] = self.mask.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.nullable is not None: body['nullable'] = self.nullable
+        if self.partition_index is not None: body['partition_index'] = self.partition_index
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_json is not None: body['type_json'] = self.type_json
+        if self.type_name is not None: body['type_name'] = self.type_name.value
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.mask:
-            body["mask"] = self.mask
-        if self.name is not None:
-            body["name"] = self.name
-        if self.nullable is not None:
-            body["nullable"] = self.nullable
-        if self.partition_index is not None:
-            body["partition_index"] = self.partition_index
-        if self.position is not None:
-            body["position"] = self.position
-        if self.type_interval_type is not None:
-            body["type_interval_type"] = self.type_interval_type
-        if self.type_json is not None:
-            body["type_json"] = self.type_json
-        if self.type_name is not None:
-            body["type_name"] = self.type_name
-        if self.type_precision is not None:
-            body["type_precision"] = self.type_precision
-        if self.type_scale is not None:
-            body["type_scale"] = self.type_scale
-        if self.type_text is not None:
-            body["type_text"] = self.type_text
+        if self.comment is not None: body['comment'] = self.comment
+        if self.mask: body['mask'] = self.mask
+        if self.name is not None: body['name'] = self.name
+        if self.nullable is not None: body['nullable'] = self.nullable
+        if self.partition_index is not None: body['partition_index'] = self.partition_index
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_json is not None: body['type_json'] = self.type_json
+        if self.type_name is not None: body['type_name'] = self.type_name
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            mask=_from_dict(d, "mask", ColumnMask),
-            name=d.get("name", None),
-            nullable=d.get("nullable", None),
-            partition_index=d.get("partition_index", None),
-            position=d.get("position", None),
-            type_interval_type=d.get("type_interval_type", None),
-            type_json=d.get("type_json", None),
-            type_name=_enum(d, "type_name", ColumnTypeName),
-            type_precision=d.get("type_precision", None),
-            type_scale=d.get("type_scale", None),
-            type_text=d.get("type_text", None),
-        )
+        return cls(comment=d.get('comment', None), mask=_from_dict(d, 'mask', ColumnMask), name=d.get('name', None), nullable=d.get('nullable', None), partition_index=d.get('partition_index', None), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_json=d.get('type_json', None), type_name=_enum(d, 'type_name', ColumnTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None))
+    
+    

 @dataclass
 class ColumnMask:
     function_name: Optional[str] = None
     """The full name of the column mask SQL UDF."""
-
+    
     using_column_names: Optional[List[str]] = None
     """The list of additional table columns to be passed as input to the column mask function.
     The first arg of the mask function should be of the type of the column being masked and the
     types of the rest of the args should match the types of columns in 'using_column_names'."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ColumnMask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_name is not None:
-            body["function_name"] = self.function_name
-        if self.using_column_names:
-            body["using_column_names"] = [v for v in self.using_column_names]
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnMask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_name is not None:
-            body["function_name"] = self.function_name
-        if self.using_column_names:
-            body["using_column_names"] = self.using_column_names
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.using_column_names: body['using_column_names'] = self.using_column_names
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ColumnMask:
         """Deserializes the ColumnMask from a dictionary."""
-        return cls(function_name=d.get("function_name", None), using_column_names=d.get("using_column_names", None))
-
+        return cls(function_name=d.get('function_name', None), using_column_names=d.get('using_column_names', None))
+    
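Per the ColumnMask docstring above, the mask UDF's first parameter receives the masked column's value and the remaining parameters line up with using_column_names. A small illustrative sketch (the UDF name and column are hypothetical, not taken from this patch):

    from databricks.sdk.service.catalog import ColumnMask

    # Mask a column with a UDF that also receives the row's `region` column.
    mask = ColumnMask(function_name="main.security.mask_ssn", using_column_names=["region"])
    assert mask.as_dict() == {
        "function_name": "main.security.mask_ssn",
        "using_column_names": ["region"],
    }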

-class ColumnTypeName(Enum):
-    ARRAY = "ARRAY"
-    BINARY = "BINARY"
-    BOOLEAN = "BOOLEAN"
-    BYTE = "BYTE"
-    CHAR = "CHAR"
-    DATE = "DATE"
-    DECIMAL = "DECIMAL"
-    DOUBLE = "DOUBLE"
-    FLOAT = "FLOAT"
-    GEOGRAPHY = "GEOGRAPHY"
-    GEOMETRY = "GEOMETRY"
-    INT = "INT"
-    INTERVAL = "INTERVAL"
-    LONG = "LONG"
-    MAP = "MAP"
-    NULL = "NULL"
-    SHORT = "SHORT"
-    STRING = "STRING"
-    STRUCT = "STRUCT"
-    TABLE_TYPE = "TABLE_TYPE"
-    TIMESTAMP = "TIMESTAMP"
-    TIMESTAMP_NTZ = "TIMESTAMP_NTZ"
-    USER_DEFINED_TYPE = "USER_DEFINED_TYPE"
-    VARIANT = "VARIANT"
+class ColumnTypeName(Enum):
+    
+    
+    ARRAY = 'ARRAY'
+    BINARY = 'BINARY'
+    BOOLEAN = 'BOOLEAN'
+    BYTE = 'BYTE'
+    CHAR = 'CHAR'
+    DATE = 'DATE'
+    DECIMAL = 'DECIMAL'
+    DOUBLE = 'DOUBLE'
+    FLOAT = 'FLOAT'
+    GEOGRAPHY = 'GEOGRAPHY'
+    GEOMETRY = 'GEOMETRY'
+    INT = 'INT'
+    INTERVAL = 'INTERVAL'
+    LONG = 'LONG'
+    MAP = 'MAP'
+    NULL = 'NULL'
+    SHORT = 'SHORT'
+    STRING = 'STRING'
+    STRUCT = 'STRUCT'
+    TABLE_TYPE = 'TABLE_TYPE'
+    TIMESTAMP = 'TIMESTAMP'
+    TIMESTAMP_NTZ = 'TIMESTAMP_NTZ'
+    USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
+    VARIANT = 'VARIANT'


 @dataclass
 class ConnectionInfo:
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+    
     connection_id: Optional[str] = None
     """Unique identifier of the Connection."""
-
+    
     connection_type: Optional[ConnectionType] = None
     """The type of connection."""
-
+    
     created_at: Optional[int] = None
     """Time at which this connection was created, in epoch milliseconds."""
-
+    
     created_by: Optional[str] = None
     """Username of connection creator."""
-
+    
     credential_type: Optional[CredentialType] = None
     """The type of credential."""
-
+    
     full_name: Optional[str] = None
     """Full name of connection."""
-
+    
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
-
+    
     name: Optional[str] = None
     """Name of the connection."""
-
-    options: Optional[Dict[str, str]] = None
+    
+    options: Optional[Dict[str,str]] = None
     """A map of key-value properties attached to the securable."""
-
+    
     owner: Optional[str] = None
     """Username of current owner of the connection."""
-
-    properties: Optional[Dict[str, str]] = None
-    """An object containing map of key-value properties attached to the connection."""
-
+    
+    properties: Optional[Dict[str,str]] = None
+    """A map of key-value properties attached to the securable."""
+    
     provisioning_info: Optional[ProvisioningInfo] = None
     """Status of an asynchronously provisioned resource."""
-
+    
     read_only: Optional[bool] = None
     """If the connection is read only."""
-
-    securable_type: Optional[str] = None
-
+    
+    securable_type: Optional[SecurableType] = None
+    """The type of Unity Catalog securable."""
+    
     updated_at: Optional[int] = None
     """Time at which this connection was updated, in epoch milliseconds."""
-
+    
     updated_by: Optional[str] = None
     """Username of user who last modified connection."""
-
+    
     url: Optional[str] = None
     """URL of the remote data source, extracted from options."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ConnectionInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_id is not None:
-            body["connection_id"] = self.connection_id
-        if self.connection_type is not None:
-            body["connection_type"] = self.connection_type.value
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.credential_type is not None:
-            body["credential_type"] = self.credential_type.value
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties:
-            body["properties"] = self.properties
-        if self.provisioning_info:
-            body["provisioning_info"] = self.provisioning_info.as_dict()
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.securable_type is not None:
-            body["securable_type"] = self.securable_type
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.url is not None:
-            body["url"] = self.url
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_type is not None: body['connection_type'] = self.connection_type.value
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.credential_type is not None: body['credential_type'] = self.credential_type.value
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.url is not None: body['url'] = self.url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ConnectionInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_id is not None:
-            body["connection_id"] = self.connection_id
-        if self.connection_type is not None:
-            body["connection_type"] = self.connection_type
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.credential_type is not None:
-            body["credential_type"] = self.credential_type
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties:
-            body["properties"] = self.properties
-        if self.provisioning_info:
-            body["provisioning_info"] = self.provisioning_info
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.securable_type is not None:
-            body["securable_type"] = self.securable_type
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.url is not None:
-            body["url"] = self.url
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_type is not None: body['connection_type'] = self.connection_type
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.credential_type is not None: body['credential_type'] = self.credential_type
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.url is not None: body['url'] = self.url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo:
         """Deserializes the ConnectionInfo from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            connection_id=d.get("connection_id", None),
-            connection_type=_enum(d, "connection_type", ConnectionType),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            credential_type=_enum(d, "credential_type", CredentialType),
-            full_name=d.get("full_name", None),
-            metastore_id=d.get("metastore_id", None),
-            name=d.get("name", None),
-            options=d.get("options", None),
-            owner=d.get("owner", None),
-            properties=d.get("properties", None),
-            provisioning_info=_from_dict(d, "provisioning_info", ProvisioningInfo),
-            read_only=d.get("read_only", None),
-            securable_type=d.get("securable_type", None),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-            url=d.get("url", None),
-        )
-
+        return cls(comment=d.get('comment', None), connection_id=d.get('connection_id', None), connection_type=_enum(d, 'connection_type', ConnectionType), created_at=d.get('created_at', None), created_by=d.get('created_by', None), credential_type=_enum(d, 'credential_type', CredentialType), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), options=d.get('options', None), owner=d.get('owner', None), properties=d.get('properties', None), provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo), read_only=d.get('read_only', None), securable_type=_enum(d, 'securable_type', SecurableType), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), url=d.get('url', None))
+    

-class ConnectionType(Enum):
-    """The type of connection."""
-    BIGQUERY = "BIGQUERY"
-    DATABRICKS = "DATABRICKS"
-    GLUE = "GLUE"
-    HIVE_METASTORE = "HIVE_METASTORE"
-    HTTP = "HTTP"
-    MYSQL = "MYSQL"
-    ORACLE = "ORACLE"
-    POSTGRESQL = "POSTGRESQL"
-    REDSHIFT = "REDSHIFT"
-    SNOWFLAKE = "SNOWFLAKE"
-    SQLDW = "SQLDW"
-    SQLSERVER = "SQLSERVER"
-    TERADATA = "TERADATA"
+class ConnectionType(Enum):
+    """Next Id: 31"""
+    
+    BIGQUERY = 'BIGQUERY'
+    DATABRICKS = 'DATABRICKS'
+    GA4_RAW_DATA = 'GA4_RAW_DATA'
+    GLUE = 'GLUE'
+    HIVE_METASTORE = 'HIVE_METASTORE'
+    HTTP = 'HTTP'
+    MYSQL = 'MYSQL'
+    ORACLE = 'ORACLE'
+    POSTGRESQL = 'POSTGRESQL'
+    POWER_BI = 'POWER_BI'
+    REDSHIFT = 'REDSHIFT'
+    SALESFORCE = 'SALESFORCE'
+    SALESFORCE_DATA_CLOUD = 'SALESFORCE_DATA_CLOUD'
+    SERVICENOW = 'SERVICENOW'
+    SNOWFLAKE = 'SNOWFLAKE'
+    SQLDW = 'SQLDW'
+    SQLSERVER = 'SQLSERVER'
+    TERADATA = 'TERADATA'
+    UNKNOWN_CONNECTION_TYPE = 'UNKNOWN_CONNECTION_TYPE'
+    WORKDAY_RAAS = 'WORKDAY_RAAS'
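The regenerated ConnectionType enum widens the value set (GA4_RAW_DATA, POWER_BI, SALESFORCE, SERVICENOW, WORKDAY_RAAS, and an explicit UNKNOWN_CONNECTION_TYPE, among others), and ConnectionInfo.securable_type is now a typed SecurableType rather than a bare string. A hedged sketch of how a caller might guard plain enum-by-value lookups against server specs newer than the installed client (illustrative only, not generated code):

    from databricks.sdk.service.catalog import ConnectionType

    def parse_connection_type(raw: str) -> ConnectionType:
        # Standard Enum lookup by value; values unknown to this SDK build
        # raise ValueError, so fall back to the explicit unknown member.
        try:
            return ConnectionType(raw)
        except ValueError:
            return ConnectionType.UNKNOWN_CONNECTION_TYPE

    assert parse_connection_type("SALESFORCE") is ConnectionType.SALESFORCE
    assert parse_connection_type("SOME_FUTURE_TYPE") is ConnectionType.UNKNOWN_CONNECTION_TYPE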

 @dataclass
 class ContinuousUpdateStatus:
     """Detailed status of an online table. Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE
     or the ONLINE_UPDATING_PIPELINE_RESOURCES state."""
-
+    
     initial_pipeline_sync_progress: Optional[PipelineProgress] = None
     """Progress of the initial data synchronization."""
-
+    
     last_processed_commit_version: Optional[int] = None
     """The last source table Delta version that was synced to the online table.
     Note that this Delta version may not be completely synced to the online table yet."""
-
+    
     timestamp: Optional[str] = None
     """The timestamp of the last time any data was synchronized from the source table to the online table."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.initial_pipeline_sync_progress:
-            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict()
-        if self.last_processed_commit_version is not None:
-            body["last_processed_commit_version"] = self.last_processed_commit_version
-        if self.timestamp is not None:
-            body["timestamp"] = self.timestamp
+        if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
+        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ContinuousUpdateStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.initial_pipeline_sync_progress:
-            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress
-        if self.last_processed_commit_version is not None:
-            body["last_processed_commit_version"] = self.last_processed_commit_version
-        if self.timestamp is not None:
-            body["timestamp"] = self.timestamp
+        if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
+        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus:
         """Deserializes the ContinuousUpdateStatus from a dictionary."""
-        return cls(
-            initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress),
-            last_processed_commit_version=d.get("last_processed_commit_version", None),
-            timestamp=d.get("timestamp", None),
-        )
+        return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress), last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None))
+    
+    

 @dataclass
 class CreateCatalog:
     name: str
     """Name of catalog."""
-
+    
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+    
     connection_name: Optional[str] = None
     """The name of the connection to an external data source."""
-
-    options: Optional[Dict[str, str]] = None
+    
+    options: Optional[Dict[str,str]] = None
     """A map of key-value properties attached to the securable."""
-
-    properties: Optional[Dict[str, str]] = None
+    
+    properties: Optional[Dict[str,str]] = None
     """A map of key-value properties attached to the securable."""
-
+    
     provider_name: Optional[str] = None
     """The name of delta sharing provider.
     A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server."""
-
+    
     share_name: Optional[str] = None
     """The name of the share under the share provider."""
-
+    
     storage_root: Optional[str] = None
     """Storage root URL for managed tables within catalog."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateCatalog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.properties:
-            body["properties"] = self.properties
-        if self.provider_name is not None:
-            body["provider_name"] = self.provider_name
-        if self.share_name is not None:
-            body["share_name"] = self.share_name
-        if self.storage_root is not None:
-            body["storage_root"] = self.storage_root
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.properties:
-            body["properties"] = self.properties
-        if self.provider_name is not None:
-            body["provider_name"] = self.provider_name
-        if self.share_name is not None:
-            body["share_name"] = self.share_name
-        if self.storage_root is not None:
-            body["storage_root"] = self.storage_root
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateCatalog:
         """Deserializes the CreateCatalog from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            connection_name=d.get("connection_name", None),
-            name=d.get("name", None),
-            options=d.get("options", None),
-            properties=d.get("properties", None),
-            provider_name=d.get("provider_name", None),
-            share_name=d.get("share_name", None),
-            storage_root=d.get("storage_root", None),
-        )
+        return cls(comment=d.get('comment', None), connection_name=d.get('connection_name', None), name=d.get('name', None), options=d.get('options', None), properties=d.get('properties', None), provider_name=d.get('provider_name', None), share_name=d.get('share_name', None), storage_root=d.get('storage_root', None))
+    
+    
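Request types such as CreateCatalog follow the same sparse-serialization rule, so a minimal create payload carries only what the caller set. Illustrative sketch (the catalog name and comment are made up):

    from databricks.sdk.service.catalog import CreateCatalog

    req = CreateCatalog(name="main", comment="primary catalog")
    # Only populated fields reach the JSON request body.
    assert req.as_dict() == {"comment": "primary catalog", "name": "main"}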

 @dataclass
 class CreateConnection:
     name: str
     """Name of the connection."""
-
+    
     connection_type: ConnectionType
     """The type of connection."""
-
-    options: Dict[str, str]
+    
+    options: Dict[str,str]
     """A map of key-value properties attached to the securable."""
-
+    
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
-    properties: Optional[Dict[str, str]] = None
-    """An object containing map of key-value properties attached to the connection."""
-
+    
+    properties: Optional[Dict[str,str]] = None
+    """A map of key-value properties attached to the securable."""
+    
     read_only: Optional[bool] = None
     """If the connection is read only."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateConnection into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_type is not None:
-            body["connection_type"] = self.connection_type.value
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.properties:
-            body["properties"] = self.properties
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_type is not None: body['connection_type'] = self.connection_type.value
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.read_only is not None: body['read_only'] = self.read_only
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateConnection into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.connection_type is not None:
-            body["connection_type"] = self.connection_type
-        if self.name is not None:
-            body["name"] = self.name
-        if self.options:
-            body["options"] = self.options
-        if self.properties:
-            body["properties"] = self.properties
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_type is not None: body['connection_type'] = self.connection_type
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.read_only is not None: body['read_only'] = self.read_only
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateConnection:
         """Deserializes the CreateConnection from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            connection_type=_enum(d, "connection_type", ConnectionType),
-            name=d.get("name", None),
-            options=d.get("options", None),
-            properties=d.get("properties", None),
-            read_only=d.get("read_only", None),
-        )
+        return cls(comment=d.get('comment', None), connection_type=_enum(d, 'connection_type', ConnectionType), name=d.get('name', None), options=d.get('options', None), properties=d.get('properties', None), read_only=d.get('read_only', None))
+    
+    

 @dataclass
@@ -1763,413 +1446,295 @@ class CreateCredentialRequest:
     name: str
     """The credential name.
     The name must be unique among storage and service credentials within the metastore."""
-
+    
     aws_iam_role: Optional[AwsIamRole] = None
     """The AWS IAM role configuration"""
-
+    
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
-
+    
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
-
+    
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+    
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
-
+    
     purpose: Optional[CredentialPurpose] = None
     """Indicates the purpose of the credential."""
-
+    
     read_only: Optional[bool] = None
     """Whether the credential is usable only for read operations. Only applicable when purpose is **STORAGE**."""
-
+    
     skip_validation: Optional[bool] = None
     """Optional. Supplying true to this argument skips validation of the created set of credentials."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal.as_dict()
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
-        if self.name is not None:
-            body["name"] = self.name
-        if self.purpose is not None:
-            body["purpose"] = self.purpose.value
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
-        if self.name is not None:
-            body["name"] = self.name
-        if self.purpose is not None:
-            body["purpose"] = self.purpose
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.name is not None: body['name'] = self.name
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
-        return cls(
-            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
-            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
-            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
-            comment=d.get("comment", None),
-            databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount),
-            name=d.get("name", None),
-            purpose=_enum(d, "purpose", CredentialPurpose),
-            read_only=d.get("read_only", None),
-            skip_validation=d.get("skip_validation", None),
-        )
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), name=d.get('name', None), purpose=_enum(d, 'purpose', CredentialPurpose), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None))
+    
+    

 @dataclass
 class CreateExternalLocation:
     name: str
     """Name of the external location."""
-
+    
     url: str
     """Path URL of the external location."""
-
+    
     credential_name: str
     """Name of the storage credential used with this location."""
-
+    
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+    
     enable_file_events: Optional[bool] = None
     """[Create:OPT Update:OPT] Whether to enable file events on this external location."""
-
+    
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
-
+    
     fallback: Optional[bool] = None
     """Indicates whether fallback mode is enabled for this external location.
     When fallback mode is enabled, the access to the location falls back to cluster credentials if
     UC credentials are not sufficient."""
-
+    
     file_event_queue: Optional[FileEventQueue] = None
     """[Create:OPT Update:OPT] File event queue settings."""
-
+    
     read_only: Optional[bool] = None
     """Indicates whether the external location is read-only."""
-
+    
     skip_validation: Optional[bool] = None
     """Skips validation of the storage credential associated with the external location."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.credential_name is not None:
-            body["credential_name"] = self.credential_name
-        if self.enable_file_events is not None:
-            body["enable_file_events"] = self.enable_file_events
-        if self.encryption_details:
-            body["encryption_details"] = self.encryption_details.as_dict()
-        if self.fallback is not None:
-            body["fallback"] = self.fallback
-        if self.file_event_queue:
-            body["file_event_queue"] = self.file_event_queue.as_dict()
-        if self.name is not None:
-            body["name"] = self.name
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
-        if self.url is not None:
-            body["url"] = self.url
+        if self.comment is not None: body['comment'] = self.comment
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events
+        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.file_event_queue: body['file_event_queue'] = self.file_event_queue.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.url is not None: body['url'] = self.url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.credential_name is not None:
-            body["credential_name"] = self.credential_name
-        if self.enable_file_events is not None:
-            body["enable_file_events"] = self.enable_file_events
-        if self.encryption_details:
-            body["encryption_details"] = self.encryption_details
-        if self.fallback is not None:
-            body["fallback"] = self.fallback
-        if self.file_event_queue:
-            body["file_event_queue"] = self.file_event_queue
-        if self.name is not None:
-            body["name"] = self.name
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
-        if self.url is not None:
-            body["url"] = self.url
+        if self.comment is not None: body['comment'] = self.comment
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.file_event_queue: body['file_event_queue'] = self.file_event_queue
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.url is not None: body['url'] = self.url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateExternalLocation:
         """Deserializes the CreateExternalLocation from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            credential_name=d.get("credential_name", None),
-            enable_file_events=d.get("enable_file_events", None),
-            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
-            fallback=d.get("fallback", None),
-            file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue),
-            name=d.get("name", None),
-            read_only=d.get("read_only", None),
-            skip_validation=d.get("skip_validation", None),
-            url=d.get("url", None),
-        )
+        return cls(comment=d.get('comment', None), credential_name=d.get('credential_name', None), enable_file_events=d.get('enable_file_events', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), fallback=d.get('fallback', None), file_event_queue=_from_dict(d, 'file_event_queue', FileEventQueue), name=d.get('name', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None), url=d.get('url', None))
+    
+    

 @dataclass
 class CreateFunction:
     name: str
     """Name of function, relative to parent schema."""
-
+    
     catalog_name: str
     """Name of parent catalog."""
-
+    
     schema_name: str
     """Name of parent schema relative to its parent catalog."""
-
+    
     input_params: FunctionParameterInfos
-
+    
     data_type: ColumnTypeName
     """Scalar function return data type."""
-
+    
     full_data_type: str
     """Pretty printed function data type."""
-
+    
     routine_body: CreateFunctionRoutineBody
     """Function language. When **EXTERNAL** is used, the language of the routine function should be
     specified in the __external_language__ field, and the __return_params__ of the function cannot
     be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
     **NO_SQL**."""
-
+    
     routine_definition: str
     """Function body."""
-
+    
     parameter_style: CreateFunctionParameterStyle
     """Function parameter style.


 @dataclass
 class CreateFunction:
     name: str
     """Name of function, relative to parent schema."""
-
+    
     catalog_name: str
     """Name of parent catalog."""
-
+    
     schema_name: str
     """Name of parent schema relative to its parent catalog."""
-
+    
     input_params: FunctionParameterInfos
-
+    
     data_type: ColumnTypeName
     """Scalar function return data type."""
-
+    
     full_data_type: str
     """Pretty printed function data type."""
-
+    
     routine_body: CreateFunctionRoutineBody
     """Function language. When **EXTERNAL** is used, the language of the routine function should be
     specified in the __external_language__ field, and the __return_params__ of the function cannot
     be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
     **NO_SQL**."""
-
+    
     routine_definition: str
     """Function body."""
-
+    
     parameter_style: CreateFunctionParameterStyle
     """Function parameter style.
 
     **S** is the value for SQL."""
-
+    
     is_deterministic: bool
     """Whether the function is deterministic."""
-
+    
     sql_data_access: CreateFunctionSqlDataAccess
     """Function SQL data access."""
-
+    
     is_null_call: bool
     """Function null call."""
-
+    
     security_type: CreateFunctionSecurityType
     """Function security type."""
-
+    
     specific_name: str
     """Specific name of the function; Reserved for future use."""
-
+    
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+    
     external_language: Optional[str] = None
     """External function language."""
-
+    
     external_name: Optional[str] = None
     """External function name."""
-
+    
     properties: Optional[str] = None
     """JSON-serialized key-value pair map, encoded (escaped) as a string."""
-
+    
     return_params: Optional[FunctionParameterInfos] = None
     """Table function return parameters."""
-
+    
     routine_dependencies: Optional[DependencyList] = None
     """Function dependencies."""
-
+    
     sql_path: Optional[str] = None
     """List of schemes whose objects can be referenced without qualification."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateFunction into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.data_type is not None:
-            body["data_type"] = self.data_type.value
-        if self.external_language is not None:
-            body["external_language"] = self.external_language
-        if self.external_name is not None:
-            body["external_name"] = self.external_name
-        if self.full_data_type is not None:
-            body["full_data_type"] = self.full_data_type
-        if self.input_params:
-            body["input_params"] = self.input_params.as_dict()
-        if self.is_deterministic is not None:
-            body["is_deterministic"] = self.is_deterministic
-        if self.is_null_call is not None:
-            body["is_null_call"] = self.is_null_call
-        if self.name is not None:
-            body["name"] = self.name
-        if self.parameter_style is not None:
-            body["parameter_style"] = self.parameter_style.value
-        if self.properties is not None:
-            body["properties"] = self.properties
-        if self.return_params:
-            body["return_params"] = self.return_params.as_dict()
-        if self.routine_body is not None:
-            body["routine_body"] = self.routine_body.value
-        if self.routine_definition is not None:
-            body["routine_definition"] = self.routine_definition
-        if self.routine_dependencies:
-            body["routine_dependencies"] = self.routine_dependencies.as_dict()
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.security_type is not None:
-            body["security_type"] = self.security_type.value
-        if self.specific_name is not None:
-            body["specific_name"] = self.specific_name
-        if self.sql_data_access is not None:
-            body["sql_data_access"] = self.sql_data_access.value
-        if self.sql_path is not None:
-            body["sql_path"] = self.sql_path
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.data_type is not None: body['data_type'] = self.data_type.value
+        if self.external_language is not None: body['external_language'] = self.external_language
+        if self.external_name is not None: body['external_name'] = self.external_name
+        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
+        if self.input_params: body['input_params'] = self.input_params.as_dict()
+        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
+        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
+        if self.name is not None: body['name'] = self.name
+        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params.as_dict()
+        if self.routine_body is not None: body['routine_body'] = self.routine_body.value
+        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict()
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.security_type is not None: body['security_type'] = self.security_type.value
+        if self.specific_name is not None: body['specific_name'] = self.specific_name
+        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access.value
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFunction into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.data_type is not None:
-            body["data_type"] = self.data_type
-        if self.external_language is not None:
-            body["external_language"] = self.external_language
-        if self.external_name is not None:
-            body["external_name"] = self.external_name
-        if self.full_data_type is not None:
-            body["full_data_type"] = self.full_data_type
-        if self.input_params:
-            body["input_params"] = self.input_params
-        if self.is_deterministic is not None:
-            body["is_deterministic"] = self.is_deterministic
-        if self.is_null_call is not None:
-            body["is_null_call"] = self.is_null_call
-        if self.name is not None:
-            body["name"] = self.name
-        if self.parameter_style is not None:
-            body["parameter_style"] = self.parameter_style
-        if self.properties is not None:
-            body["properties"] = self.properties
-        if self.return_params:
-            body["return_params"] = self.return_params
-        if self.routine_body is not None:
-            body["routine_body"] = self.routine_body
-        if self.routine_definition is not None:
-            body["routine_definition"] = self.routine_definition
-        if self.routine_dependencies:
-            body["routine_dependencies"] = self.routine_dependencies
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.security_type is not None:
-            body["security_type"] = self.security_type
-        if self.specific_name is not None:
-            body["specific_name"] = self.specific_name
-        if self.sql_data_access is not None:
-            body["sql_data_access"] = self.sql_data_access
-        if self.sql_path is not None:
-            body["sql_path"] = self.sql_path
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.data_type is not None: body['data_type'] = self.data_type
+        if self.external_language is not None: body['external_language'] = self.external_language
+        if self.external_name is not None: body['external_name'] = self.external_name
+        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
+        if self.input_params: body['input_params'] = self.input_params
+        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
+        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
+        if self.name is not None: body['name'] = self.name
+        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params
+        if self.routine_body is not None: body['routine_body'] = self.routine_body
+        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.security_type is not None: body['security_type'] = self.security_type
+        if self.specific_name is not None: body['specific_name'] = self.specific_name
+        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateFunction:
         """Deserializes the CreateFunction from a dictionary."""
-        return cls(
-            catalog_name=d.get("catalog_name", None),
-            comment=d.get("comment", None),
-            data_type=_enum(d, "data_type", ColumnTypeName),
-            external_language=d.get("external_language", None),
-            external_name=d.get("external_name", None),
-            full_data_type=d.get("full_data_type", None),
-            input_params=_from_dict(d, "input_params", FunctionParameterInfos),
-            is_deterministic=d.get("is_deterministic", None),
-            is_null_call=d.get("is_null_call", None),
-            name=d.get("name", None),
-            parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle),
-            properties=d.get("properties", None),
-            return_params=_from_dict(d, "return_params", FunctionParameterInfos),
-            routine_body=_enum(d, "routine_body", CreateFunctionRoutineBody),
-            routine_definition=d.get("routine_definition", None),
-            routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList),
-            schema_name=d.get("schema_name", None),
-            security_type=_enum(d, "security_type", CreateFunctionSecurityType),
-            specific_name=d.get("specific_name", None),
-            sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess),
-            sql_path=d.get("sql_path", None),
-        )
+        return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), data_type=_enum(d, 'data_type', ColumnTypeName), external_language=d.get('external_language', None), external_name=d.get('external_name', None), full_data_type=d.get('full_data_type', None), input_params=_from_dict(d, 'input_params', FunctionParameterInfos), is_deterministic=d.get('is_deterministic', None), is_null_call=d.get('is_null_call', None), name=d.get('name', None), parameter_style=_enum(d, 'parameter_style', CreateFunctionParameterStyle), properties=d.get('properties', None), return_params=_from_dict(d, 'return_params', FunctionParameterInfos), routine_body=_enum(d, 'routine_body', CreateFunctionRoutineBody), routine_definition=d.get('routine_definition', None), routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList), schema_name=d.get('schema_name', None), security_type=_enum(d, 'security_type', CreateFunctionSecurityType), specific_name=d.get('specific_name', None), sql_data_access=_enum(d, 'sql_data_access', CreateFunctionSqlDataAccess), sql_path=d.get('sql_path', None))
+    
+    

 class CreateFunctionParameterStyle(Enum):
     """Function parameter style.
 
     **S** is the value for SQL."""
-
-    S = "S"
-
+    
+    S = 'S'

 @dataclass
 class CreateFunctionRequest:
     function_info: CreateFunction
     """Partial __FunctionInfo__ specifying the function to be created."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateFunctionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_info:
-            body["function_info"] = self.function_info.as_dict()
+        if self.function_info: body['function_info'] = self.function_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFunctionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_info:
-            body["function_info"] = self.function_info
+        if self.function_info: body['function_info'] = self.function_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateFunctionRequest:
         """Deserializes the CreateFunctionRequest from a dictionary."""
-        return cls(function_info=_from_dict(d, "function_info", CreateFunction))
+        return cls(function_info=_from_dict(d, 'function_info', CreateFunction))
+    
+    

 class CreateFunctionRoutineBody(Enum):
@@ -2177,304 +1742,235 @@ class CreateFunctionRoutineBody(Enum):
     specified in the __external_language__ field, and the __return_params__ of the function cannot
     be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
     **NO_SQL**."""
-
-    EXTERNAL = "EXTERNAL"
-    SQL = "SQL"
-
+    
+    EXTERNAL = 'EXTERNAL'
+    SQL = 'SQL'

 class CreateFunctionSecurityType(Enum):
     """The security type of the function."""
-
-    DEFINER = "DEFINER"
-
+    
+    DEFINER = 'DEFINER'

 class CreateFunctionSqlDataAccess(Enum):
     """Function SQL data access."""
-
-    CONTAINS_SQL = "CONTAINS_SQL"
-    NO_SQL = "NO_SQL"
-    READS_SQL_DATA = "READS_SQL_DATA"
-
+    
+    CONTAINS_SQL = 'CONTAINS_SQL'
+    NO_SQL = 'NO_SQL'
+    READS_SQL_DATA = 'READS_SQL_DATA'
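A sketch of how the CreateFunction payload above might be populated for a simple SQL scalar function, assuming the FunctionsAPI.create(function_info=...) surface this module backs; the catalog, schema, and function names are illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import (ColumnTypeName, CreateFunction,
                                                CreateFunctionParameterStyle,
                                                CreateFunctionRoutineBody,
                                                CreateFunctionSecurityType,
                                                CreateFunctionSqlDataAccess,
                                                FunctionParameterInfos)

    w = WorkspaceClient()
    fn = CreateFunction(
        name="one",                      # illustrative names throughout
        catalog_name="main",
        schema_name="default",
        input_params=FunctionParameterInfos(parameters=[]),
        data_type=ColumnTypeName.INT,
        full_data_type="INT",
        routine_body=CreateFunctionRoutineBody.SQL,  # SQL body, so external_language stays unset
        routine_definition="SELECT 1",
        parameter_style=CreateFunctionParameterStyle.S,
        is_deterministic=True,
        sql_data_access=CreateFunctionSqlDataAccess.CONTAINS_SQL,
        is_null_call=False,
        security_type=CreateFunctionSecurityType.DEFINER,
        specific_name="one",
    )
    created = w.functions.create(function_info=fn)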

 @dataclass
 class CreateMetastore:
     name: str
     """The user-specified name of the metastore."""
-
+    
     region: Optional[str] = None
-    """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted
-    in the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is
-    omitted, the region of the workspace receiving the request will be used."""
-
+    """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`)."""
+    
     storage_root: Optional[str] = None
     """The storage root URL for metastore"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.region is not None:
-            body["region"] = self.region
-        if self.storage_root is not None:
-            body["storage_root"] = self.storage_root
+        if self.name is not None: body['name'] = self.name
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.region is not None:
-            body["region"] = self.region
-        if self.storage_root is not None:
-            body["storage_root"] = self.storage_root
+        if self.name is not None: body['name'] = self.name
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore:
         """Deserializes the CreateMetastore from a dictionary."""
-        return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None))
+        return cls(name=d.get('name', None), region=d.get('region', None), storage_root=d.get('storage_root', None))
+    
+    

 @dataclass
 class CreateMetastoreAssignment:
     metastore_id: str
     """The unique ID of the metastore."""
-
+    
     default_catalog_name: str
-    """The name of the default catalog in the metastore. This field is depracted. Please use "Default
+    """The name of the default catalog in the metastore. This field is deprecated. Please use "Default
     Namespace API" to configure the default catalog for a Databricks workspace."""
-
+    
     workspace_id: Optional[int] = None
     """A workspace ID."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_catalog_name is not None:
-            body["default_catalog_name"] = self.default_catalog_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.workspace_id is not None:
-            body["workspace_id"] = self.workspace_id
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_catalog_name is not None:
-            body["default_catalog_name"] = self.default_catalog_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.workspace_id is not None:
-            body["workspace_id"] = self.workspace_id
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateMetastoreAssignment:
         """Deserializes the CreateMetastoreAssignment from a dictionary."""
-        return cls(
-            default_catalog_name=d.get("default_catalog_name", None),
-            metastore_id=d.get("metastore_id", None),
-            workspace_id=d.get("workspace_id", None),
-        )
+        return cls(default_catalog_name=d.get('default_catalog_name', None), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None))
+    
+    

 @dataclass
 class CreateMonitor:
     assets_dir: str
     """The directory to store monitoring assets (e.g. dashboard, metric tables)."""
-
+    
     output_schema_name: str
     """Schema where output metric tables are created."""
-
+    
     baseline_table_name: Optional[str] = None
     """Name of the baseline table from which drift metrics are computed from. Columns in the
     monitored table should also be present in the baseline table."""
-
+    
     custom_metrics: Optional[List[MonitorMetric]] = None
     """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived
     metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across
     time windows)."""
-
+    
     data_classification_config: Optional[MonitorDataClassificationConfig] = None
     """The data classification config for the monitor."""
-
+    
     inference_log: Optional[MonitorInferenceLog] = None
     """Configuration for monitoring inference logs."""
-
+    
     notifications: Optional[MonitorNotifications] = None
     """The notification settings for the monitor."""
-
+    
     schedule: Optional[MonitorCronSchedule] = None
     """The schedule for automatically updating and refreshing metric tables."""
-
+    
     skip_builtin_dashboard: Optional[bool] = None
     """Whether to skip creating a default dashboard summarizing data quality metrics."""
-
+    
     slicing_exprs: Optional[List[str]] = None
     """List of column expressions to slice data with for targeted analysis. The data is grouped by
     each expression independently, resulting in a separate slice for each predicate and its
     complements. For high-cardinality columns, only the top 100 unique values by frequency will
     generate slices."""
-
+    
     snapshot: Optional[MonitorSnapshot] = None
     """Configuration for monitoring snapshot tables."""
-
+    
     table_name: Optional[str] = None
     """Full name of the table."""
-
+    
     time_series: Optional[MonitorTimeSeries] = None
     """Configuration for monitoring time series tables."""
-
+    
     warehouse_id: Optional[str] = None
     """Optional argument to specify the warehouse for dashboard creation. If not specified, the first
     running warehouse will be used."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateMonitor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets_dir is not None:
-            body["assets_dir"] = self.assets_dir
-        if self.baseline_table_name is not None:
-            body["baseline_table_name"] = self.baseline_table_name
-        if self.custom_metrics:
-            body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics]
-        if self.data_classification_config:
-            body["data_classification_config"] = self.data_classification_config.as_dict()
-        if self.inference_log:
-            body["inference_log"] = self.inference_log.as_dict()
-        if self.notifications:
-            body["notifications"] = self.notifications.as_dict()
-        if self.output_schema_name is not None:
-            body["output_schema_name"] = self.output_schema_name
-        if self.schedule:
-            body["schedule"] = self.schedule.as_dict()
-        if self.skip_builtin_dashboard is not None:
-            body["skip_builtin_dashboard"] = self.skip_builtin_dashboard
-        if self.slicing_exprs:
-            body["slicing_exprs"] = [v for v in self.slicing_exprs]
-        if self.snapshot:
-            body["snapshot"] = self.snapshot.as_dict()
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.time_series:
-            body["time_series"] = self.time_series.as_dict()
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
+        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config.as_dict()
+        if self.inference_log: body['inference_log'] = self.inference_log.as_dict()
+        if self.notifications: body['notifications'] = self.notifications.as_dict()
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule.as_dict()
+        if self.skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
+        if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series.as_dict()
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMonitor into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets_dir is not None:
-            body["assets_dir"] = self.assets_dir
-        if self.baseline_table_name is not None:
-            body["baseline_table_name"] = self.baseline_table_name
-        if self.custom_metrics:
-            body["custom_metrics"] = self.custom_metrics
-        if self.data_classification_config:
-            body["data_classification_config"] = self.data_classification_config
-        if self.inference_log:
-            body["inference_log"] = self.inference_log
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.output_schema_name is not None:
-            body["output_schema_name"] = self.output_schema_name
-        if self.schedule:
-            body["schedule"] = self.schedule
-        if self.skip_builtin_dashboard is not None:
-            body["skip_builtin_dashboard"] = self.skip_builtin_dashboard
-        if self.slicing_exprs:
-            body["slicing_exprs"] = self.slicing_exprs
-        if self.snapshot:
-            body["snapshot"] = self.snapshot
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.time_series:
-            body["time_series"] = self.time_series
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateMonitor:
         """Deserializes the CreateMonitor from a dictionary."""
-        return cls(
-            assets_dir=d.get("assets_dir", None),
-            baseline_table_name=d.get("baseline_table_name", None),
-            custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric),
-            data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig),
-            inference_log=_from_dict(d, "inference_log", MonitorInferenceLog),
-            notifications=_from_dict(d, "notifications", MonitorNotifications),
-            output_schema_name=d.get("output_schema_name", None),
-            schedule=_from_dict(d, "schedule", MonitorCronSchedule),
-            skip_builtin_dashboard=d.get("skip_builtin_dashboard", None),
-            slicing_exprs=d.get("slicing_exprs", None),
-            snapshot=_from_dict(d, "snapshot", MonitorSnapshot),
-            table_name=d.get("table_name", None),
-            time_series=_from_dict(d, "time_series", MonitorTimeSeries),
-            warehouse_id=d.get("warehouse_id", None),
-        )
+        return cls(assets_dir=d.get('assets_dir', None), baseline_table_name=d.get('baseline_table_name', None), custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric), data_classification_config=_from_dict(d, 'data_classification_config', MonitorDataClassificationConfig), inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog), notifications=_from_dict(d, 'notifications', MonitorNotifications), output_schema_name=d.get('output_schema_name', None), schedule=_from_dict(d, 'schedule', MonitorCronSchedule), skip_builtin_dashboard=d.get('skip_builtin_dashboard', None), slicing_exprs=d.get('slicing_exprs', None), snapshot=_from_dict(d, 'snapshot', MonitorSnapshot), table_name=d.get('table_name', None), time_series=_from_dict(d, 'time_series', MonitorTimeSeries), warehouse_id=d.get('warehouse_id', None))
+    
+    
+    
+    
+    
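A sketch of creating a snapshot-style monitor with the request shape above, assuming the QualityMonitorsAPI.create method takes these fields as arguments; the table, assets directory, and output schema are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import MonitorSnapshot

    w = WorkspaceClient()
    monitor = w.quality_monitors.create(
        table_name="main.default.events",                        # placeholder table
        assets_dir="/Workspace/Users/me@example.com/monitors",   # where dashboards and metric tables go
        output_schema_name="main.default",
        snapshot=MonitorSnapshot(),                              # snapshot monitoring, no time series config
    )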

 @dataclass
 class CreateRegisteredModelRequest:
     catalog_name: str
     """The name of the catalog where the schema and the registered model reside"""
-
+    
     schema_name: str
     """The name of the schema where the registered model resides"""
-
+    
     name: str
     """The name of the registered model"""
-
+    
     comment: Optional[str] = None
     """The comment attached to the registered model"""
-
+    
     storage_location: Optional[str] = None
     """The storage location on the cloud under which model version data files are stored"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateRegisteredModelRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateRegisteredModelRequest:
         """Deserializes the CreateRegisteredModelRequest from a dictionary."""
-        return cls(
-            catalog_name=d.get("catalog_name", None),
-            comment=d.get("comment", None),
-            name=d.get("name", None),
-            schema_name=d.get("schema_name", None),
-            storage_location=d.get("storage_location", None),
-        )
+        return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), name=d.get('name', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None))
+    
+    

 @dataclass
@@ -2493,725 +1989,434 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
         return cls()
+    
+    
"""Storage root URL for managed tables within schema.""" - + def as_dict(self) -> dict: """Serializes the CreateSchema into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.properties: - body["properties"] = self.properties - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.properties: body['properties'] = self.properties + if self.storage_root is not None: body['storage_root'] = self.storage_root return body def as_shallow_dict(self) -> dict: """Serializes the CreateSchema into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.properties: - body["properties"] = self.properties - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.properties: body['properties'] = self.properties + if self.storage_root is not None: body['storage_root'] = self.storage_root return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateSchema: """Deserializes the CreateSchema from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - name=d.get("name", None), - properties=d.get("properties", None), - storage_root=d.get("storage_root", None), - ) + return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), name=d.get('name', None), properties=d.get('properties', None), storage_root=d.get('storage_root', None)) + + @dataclass class CreateStorageCredential: name: str """The credential name. 

 @dataclass
 class CreateStorageCredential:
     name: str
     """The credential name. The name must be unique within the metastore."""
-
+    
     aws_iam_role: Optional[AwsIamRoleRequest] = None
     """The AWS IAM role configuration."""
-
+    
     azure_managed_identity: Optional[AzureManagedIdentityRequest] = None
     """The Azure managed identity configuration."""
-
+    
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration."""
-
+    
     cloudflare_api_token: Optional[CloudflareApiToken] = None
     """The Cloudflare API token configuration."""
-
+    
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+    
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
     """The Databricks managed GCP service account configuration."""
-
+    
     read_only: Optional[bool] = None
     """Whether the storage credential is only usable for read operations."""
-
+    
     skip_validation: Optional[bool] = None
     """Supplying true to this argument skips validation of the created credential."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal.as_dict()
-        if self.cloudflare_api_token:
-            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
-        if self.name is not None:
-            body["name"] = self.name
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal
-        if self.cloudflare_api_token:
-            body["cloudflare_api_token"] = self.cloudflare_api_token
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
-        if self.name is not None:
-            body["name"] = self.name
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential:
         """Deserializes the CreateStorageCredential from a dictionary."""
-        return cls(
-            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest),
-            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest),
-            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
-            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
-            comment=d.get("comment", None),
-            databricks_gcp_service_account=_from_dict(
-                d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest
-            ),
-            name=d.get("name", None),
-            read_only=d.get("read_only", None),
-            skip_validation=d.get("skip_validation", None),
-        )
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityRequest), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountRequest), name=d.get('name', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None))
+    
+    

 @dataclass
 class CreateTableConstraint:
     full_name_arg: str
     """The full name of the table referenced by the constraint."""
-
+    
     constraint: TableConstraint
     """A table constraint, as defined by *one* of the following fields being set:
     __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateTableConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.constraint:
-            body["constraint"] = self.constraint.as_dict()
-        if self.full_name_arg is not None:
-            body["full_name_arg"] = self.full_name_arg
+        if self.constraint: body['constraint'] = self.constraint.as_dict()
+        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.constraint:
-            body["constraint"] = self.constraint
-        if self.full_name_arg is not None:
-            body["full_name_arg"] = self.full_name_arg
+        if self.constraint: body['constraint'] = self.constraint
+        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateTableConstraint:
         """Deserializes the CreateTableConstraint from a dictionary."""
-        return cls(constraint=_from_dict(d, "constraint", TableConstraint), full_name_arg=d.get("full_name_arg", None))
+        return cls(constraint=_from_dict(d, 'constraint', TableConstraint), full_name_arg=d.get('full_name_arg', None))
+    
+    

 @dataclass
 class CreateVolumeRequestContent:
     catalog_name: str
     """The name of the catalog where the schema and the volume are"""
-
+    
     schema_name: str
     """The name of the schema where the volume is"""
-
+    
     name: str
     """The name of the volume"""
-
+    
     volume_type: VolumeType
     """The type of the volume. An external volume is located in the specified external location. A
     managed volume is located in the default location which is specified by the parent schema, or
     the parent catalog, or the Metastore. [Learn more]
 
     [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
-
+    
     comment: Optional[str] = None
     """The comment attached to the volume"""
-
+    
     storage_location: Optional[str] = None
     """The storage location on the cloud"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreateVolumeRequestContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
-        if self.volume_type is not None:
-            body["volume_type"] = self.volume_type.value
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.volume_type is not None: body['volume_type'] = self.volume_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
-        if self.volume_type is not None:
-            body["volume_type"] = self.volume_type
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.volume_type is not None: body['volume_type'] = self.volume_type
        return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateVolumeRequestContent:
         """Deserializes the CreateVolumeRequestContent from a dictionary."""
-        return cls(
-            catalog_name=d.get("catalog_name", None),
-            comment=d.get("comment", None),
-            name=d.get("name", None),
-            schema_name=d.get("schema_name", None),
-            storage_location=d.get("storage_location", None),
-            volume_type=_enum(d, "volume_type", VolumeType),
-        )
+        return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), name=d.get('name', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None), volume_type=_enum(d, 'volume_type', VolumeType))
+    
+    
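A sketch of the matching VolumesAPI call; the catalog, schema, and volume names are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import VolumeType

    w = WorkspaceClient()
    volume = w.volumes.create(catalog_name="main", schema_name="default",
                              name="raw_files", volume_type=VolumeType.MANAGED)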

 @dataclass
 class CredentialInfo:
     aws_iam_role: Optional[AwsIamRole] = None
     """The AWS IAM role configuration"""
-
+    
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
-
+    
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
-
+    
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+    
     created_at: Optional[int] = None
     """Time at which this credential was created, in epoch milliseconds."""
-
+    
     created_by: Optional[str] = None
     """Username of credential creator."""
-
+    
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
-
+    
     full_name: Optional[str] = None
     """The full name of the credential."""
-
+    
     id: Optional[str] = None
     """The unique identifier of the credential."""
-
+    
     isolation_mode: Optional[IsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of
     workspaces."""
-
+    
     metastore_id: Optional[str] = None
     """Unique identifier of the parent metastore."""
-
+    
     name: Optional[str] = None
     """The credential name. The name must be unique among storage and service credentials within the
     metastore."""
-
+    
     owner: Optional[str] = None
     """Username of current owner of credential."""
-
+    
     purpose: Optional[CredentialPurpose] = None
     """Indicates the purpose of the credential."""
-
+    
     read_only: Optional[bool] = None
     """Whether the credential is usable only for read operations. Only applicable when purpose is
     **STORAGE**."""
-
+    
     updated_at: Optional[int] = None
     """Time at which this credential was last modified, in epoch milliseconds."""
-
+    
     updated_by: Optional[str] = None
     """Username of user who last modified the credential."""
-
+    
     used_for_managed_storage: Optional[bool] = None
     """Whether this credential is the current metastore's root storage credential. Only applicable
     when purpose is **STORAGE**."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal.as_dict()
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.id is not None:
-            body["id"] = self.id
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode.value
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.purpose is not None:
-            body["purpose"] = self.purpose.value
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.used_for_managed_storage is not None:
-            body["used_for_managed_storage"] = self.used_for_managed_storage
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None: body['used_for_managed_storage'] = self.used_for_managed_storage
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.id is not None:
-            body["id"] = self.id
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.purpose is not None:
-            body["purpose"] = self.purpose
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.used_for_managed_storage is not None:
-            body["used_for_managed_storage"] = self.used_for_managed_storage
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None: body['used_for_managed_storage'] = self.used_for_managed_storage
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
-        return cls(
-            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
-            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
-            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
-            comment=d.get("comment", None),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount),
-            full_name=d.get("full_name", None),
-            id=d.get("id", None),
-            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
-            metastore_id=d.get("metastore_id", None),
-            name=d.get("name", None),
-            owner=d.get("owner", None),
-            purpose=_enum(d, "purpose", CredentialPurpose),
-            read_only=d.get("read_only", None),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-            used_for_managed_storage=d.get("used_for_managed_storage", None),
-        )
-
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), full_name=d.get('full_name', None), id=d.get('id', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), purpose=_enum(d, 'purpose', CredentialPurpose), read_only=d.get('read_only', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), used_for_managed_storage=d.get('used_for_managed_storage', None))
+    

-class CredentialPurpose(Enum):
-    SERVICE = "SERVICE"
-    STORAGE = "STORAGE"
+class CredentialPurpose(Enum):
+    
+    
+    SERVICE = 'SERVICE'
+    STORAGE = 'STORAGE'

 class CredentialType(Enum):
-    """The type of credential."""
-
-    BEARER_TOKEN = "BEARER_TOKEN"
-    USERNAME_PASSWORD = "USERNAME_PASSWORD"
-
+    """Next Id: 12"""
+    
+    BEARER_TOKEN = 'BEARER_TOKEN'
+    OAUTH_ACCESS_TOKEN = 'OAUTH_ACCESS_TOKEN'
+    OAUTH_M2M = 'OAUTH_M2M'
+    OAUTH_REFRESH_TOKEN = 'OAUTH_REFRESH_TOKEN'
+    OAUTH_RESOURCE_OWNER_PASSWORD = 'OAUTH_RESOURCE_OWNER_PASSWORD'
+    OAUTH_U2M = 'OAUTH_U2M'
+    OAUTH_U2M_MAPPING = 'OAUTH_U2M_MAPPING'
+    OIDC_TOKEN = 'OIDC_TOKEN'
+    PEM_PRIVATE_KEY = 'PEM_PRIVATE_KEY'
+    SERVICE_CREDENTIAL = 'SERVICE_CREDENTIAL'
+    UNKNOWN_CREDENTIAL_TYPE = 'UNKNOWN_CREDENTIAL_TYPE'
+    USERNAME_PASSWORD = 'USERNAME_PASSWORD'

 @dataclass
 class CredentialValidationResult:
     message: Optional[str] = None
     """Error message would exist when the result does not equal to **PASS**."""
-
+    
     result: Optional[ValidateCredentialResult] = None
     """The results of the tested operation."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None:
-            body["message"] = self.message
-        if self.result is not None:
-            body["result"] = self.result.value
+        if self.message is not None: body['message'] = self.message
+        if self.result is not None: body['result'] = self.result.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None:
-            body["message"] = self.message
-        if self.result is not None:
-            body["result"] = self.result
+        if self.message is not None: body['message'] = self.message
+        if self.result is not None: body['result'] = self.result
        return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CredentialValidationResult:
         """Deserializes the CredentialValidationResult from a dictionary."""
-        return cls(message=d.get("message", None), result=_enum(d, "result", ValidateCredentialResult))
+        return cls(message=d.get('message', None), result=_enum(d, 'result', ValidateCredentialResult))
+    
+    
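A sketch of how a validation payload deserializes through the dataclass above; the wire payload is hypothetical, and this assumes ValidateCredentialResult carries a PASS member, as the docstring implies:

    from databricks.sdk.service.catalog import (CredentialValidationResult,
                                                ValidateCredentialResult)

    payload = {"message": "", "result": "PASS"}  # hypothetical response body
    res = CredentialValidationResult.from_dict(payload)
    if res.result != ValidateCredentialResult.PASS:
        raise RuntimeError(res.message)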

 class DataSourceFormat(Enum):
     """Data source format"""
-
-    AVRO = "AVRO"
-    BIGQUERY_FORMAT = "BIGQUERY_FORMAT"
-    CSV = "CSV"
-    DATABRICKS_FORMAT = "DATABRICKS_FORMAT"
-    DELTA = "DELTA"
-    DELTASHARING = "DELTASHARING"
-    HIVE_CUSTOM = "HIVE_CUSTOM"
-    HIVE_SERDE = "HIVE_SERDE"
-    JSON = "JSON"
-    MYSQL_FORMAT = "MYSQL_FORMAT"
-    NETSUITE_FORMAT = "NETSUITE_FORMAT"
-    ORC = "ORC"
-    PARQUET = "PARQUET"
-    POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT"
-    REDSHIFT_FORMAT = "REDSHIFT_FORMAT"
-    SALESFORCE_FORMAT = "SALESFORCE_FORMAT"
-    SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT"
-    SQLDW_FORMAT = "SQLDW_FORMAT"
-    SQLSERVER_FORMAT = "SQLSERVER_FORMAT"
-    TEXT = "TEXT"
-    UNITY_CATALOG = "UNITY_CATALOG"
-    VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT"
-    WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT"
-
-
-@dataclass
-class DatabaseCatalog:
-    name: str
-    """The name of the catalog in UC."""
-
-    database_instance_name: str
-    """The name of the DatabaseInstance housing the database."""
-
-    database_name: str
-    """The name of the database (in a instance) associated with the catalog."""
-
-    create_database_if_not_exists: Optional[bool] = None
-
-    uid: Optional[str] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the DatabaseCatalog into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.create_database_if_not_exists is not None:
-            body["create_database_if_not_exists"] = self.create_database_if_not_exists
-        if self.database_instance_name is not None:
-            body["database_instance_name"] = self.database_instance_name
-        if self.database_name is not None:
-            body["database_name"] = self.database_name
-        if self.name is not None:
-            body["name"] = self.name
-        if self.uid is not None:
-            body["uid"] = self.uid
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DatabaseCatalog into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.create_database_if_not_exists is not None:
-            body["create_database_if_not_exists"] = self.create_database_if_not_exists
-        if self.database_instance_name is not None:
-            body["database_instance_name"] = self.database_instance_name
-        if self.database_name is not None:
-            body["database_name"] = self.database_name
-        if self.name is not None:
-            body["name"] = self.name
-        if self.uid is not None:
-            body["uid"] = self.uid
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog:
-        """Deserializes the DatabaseCatalog from a dictionary."""
-        return cls(
-            create_database_if_not_exists=d.get("create_database_if_not_exists", None),
-            database_instance_name=d.get("database_instance_name", None),
-            database_name=d.get("database_name", None),
-            name=d.get("name", None),
-            uid=d.get("uid", None),
-        )
-
-
-@dataclass
-class DatabaseInstance:
-    """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and
-    storage."""
-
-    name: str
-    """The name of the instance. This is the unique identifier for the instance."""
-
-    admin_password: Optional[str] = None
-    """Password for admin user to create. If not provided, no user will be created."""
-
-    admin_rolename: Optional[str] = None
-    """Name of the admin role for the instance. If not provided, defaults to 'databricks_admin'."""
-
-    capacity: Optional[str] = None
-    """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4"."""
-
-    creation_time: Optional[str] = None
-    """The timestamp when the instance was created."""
-
-    creator: Optional[str] = None
-    """The email of the creator of the instance."""
-
-    pg_version: Optional[str] = None
-    """The version of Postgres running on the instance."""
-
-    read_write_dns: Optional[str] = None
-    """The DNS endpoint to connect to the instance for read+write access."""
-
-    state: Optional[DatabaseInstanceState] = None
-    """The current state of the instance."""
-
-    stopped: Optional[bool] = None
-    """Whether the instance is stopped."""
-
-    uid: Optional[str] = None
-    """An immutable UUID identifier for the instance."""
-
-    def as_dict(self) -> dict:
-        """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.admin_password is not None:
-            body["admin_password"] = self.admin_password
-        if self.admin_rolename is not None:
-            body["admin_rolename"] = self.admin_rolename
-        if self.capacity is not None:
-            body["capacity"] = self.capacity
-        if self.creation_time is not None:
-            body["creation_time"] = self.creation_time
-        if self.creator is not None:
-            body["creator"] = self.creator
-        if self.name is not None:
-            body["name"] = self.name
-        if self.pg_version is not None:
-            body["pg_version"] = self.pg_version
-        if self.read_write_dns is not None:
-            body["read_write_dns"] = self.read_write_dns
-        if self.state is not None:
-            body["state"] = self.state.value
-        if self.stopped is not None:
-            body["stopped"] = self.stopped
-        if self.uid is not None:
-            body["uid"] = self.uid
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.admin_password is not None:
-            body["admin_password"] = self.admin_password
-        if self.admin_rolename is not None:
-            body["admin_rolename"] = self.admin_rolename
-        if self.capacity is not None:
-            body["capacity"] = self.capacity
-        if self.creation_time is not None:
-            body["creation_time"] = self.creation_time
-        if self.creator is not None:
-            body["creator"] = self.creator
-        if self.name is not None:
-            body["name"] = self.name
-        if self.pg_version is not None:
-            body["pg_version"] = self.pg_version
-        if self.read_write_dns is not None:
-            body["read_write_dns"] = self.read_write_dns
-        if self.state is not None:
-            body["state"] = self.state
-        if self.stopped is not None:
-            body["stopped"] = self.stopped
-        if self.uid is not None:
-            body["uid"] = self.uid
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance:
-        """Deserializes the DatabaseInstance from a dictionary."""
-        return cls(
-            admin_password=d.get("admin_password", None),
-            admin_rolename=d.get("admin_rolename", None),
-            capacity=d.get("capacity", None),
-            creation_time=d.get("creation_time", None),
-            creator=d.get("creator", None),
-            name=d.get("name", None),
-            pg_version=d.get("pg_version", None),
-            read_write_dns=d.get("read_write_dns", None),
-            state=_enum(d, "state", DatabaseInstanceState),
-            stopped=d.get("stopped", None),
-            uid=d.get("uid", None),
-        )
-
-
-class DatabaseInstanceState(Enum):
-
-    AVAILABLE = "AVAILABLE"
-    DELETING = "DELETING"
-    FAILING_OVER = "FAILING_OVER"
-    STARTING = "STARTING"
-    STOPPED = "STOPPED"
-    UPDATING = "UPDATING"
-
+    
+    AVRO = 'AVRO'
+    BIGQUERY_FORMAT = 'BIGQUERY_FORMAT'
+    CSV = 'CSV'
+    DATABRICKS_FORMAT = 'DATABRICKS_FORMAT'
+    DELTA = 'DELTA'
+    DELTASHARING = 'DELTASHARING'
+    HIVE_CUSTOM = 'HIVE_CUSTOM'
+    HIVE_SERDE = 'HIVE_SERDE'
+    JSON = 'JSON'
+    MYSQL_FORMAT = 'MYSQL_FORMAT'
+    NETSUITE_FORMAT = 'NETSUITE_FORMAT'
+    ORC = 'ORC'
+    PARQUET = 'PARQUET'
+    POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT'
+    REDSHIFT_FORMAT = 'REDSHIFT_FORMAT'
+    SALESFORCE_FORMAT = 'SALESFORCE_FORMAT'
+    SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT'
+    SQLDW_FORMAT = 'SQLDW_FORMAT'
+    SQLSERVER_FORMAT = 'SQLSERVER_FORMAT'
+    TEXT = 'TEXT'
+    UNITY_CATALOG = 'UNITY_CATALOG'
+    VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT'
+    WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT'

 @dataclass
 class DatabricksGcpServiceAccount:
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
-
+    
     credential_id: Optional[str] = None
     """The Databricks internal ID that represents this managed identity. This field is only used to
     persist the credential_id once it is fetched from the credentials manager - as we only use the
     protobuf serializer to store credentials, this ID gets persisted to the database"""
-
+    
     email: Optional[str] = None
     """The email of the service account."""
-
+    
     private_key_id: Optional[str] = None
     """The ID that represents the private key for this Service Account"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None:
-            body["credential_id"] = self.credential_id
-        if self.email is not None:
-            body["email"] = self.email
-        if self.private_key_id is not None:
-            body["private_key_id"] = self.private_key_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None:
-            body["credential_id"] = self.credential_id
-        if self.email is not None:
-            body["email"] = self.email
-        if self.private_key_id is not None:
-            body["private_key_id"] = self.private_key_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccount:
         """Deserializes the DatabricksGcpServiceAccount from a dictionary."""
-        return cls(
-            credential_id=d.get("credential_id", None),
-            email=d.get("email", None),
-            private_key_id=d.get("private_key_id", None),
-        )
+        return cls(credential_id=d.get('credential_id', None), email=d.get('email', None), private_key_id=d.get('private_key_id', None))
+    
+    

 @dataclass
@@ -3230,38 +2435,50 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountRequest:
         """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary."""
         return cls()
+    
+    

 @dataclass
 class DatabricksGcpServiceAccountResponse:
     credential_id: Optional[str] = None
     """The Databricks internal ID that represents this service account. This is an output-only
     field."""
-
+    
     email: Optional[str] = None
     """The email of the service account.
This is an output-only field.""" - + def as_dict(self) -> dict: """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.email is not None: body['email'] = self.email return body def as_shallow_dict(self) -> dict: """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.email is not None: body['email'] = self.email return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary.""" - return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) + return cls(credential_id=d.get('credential_id', None), email=d.get('email', None)) + + + + + + + + + + + + + + @dataclass @@ -3280,6 +2497,17 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse: """Deserializes the DeleteAliasResponse from a dictionary.""" return cls() + + + + + + + + + + + @dataclass @@ -3298,42 +2526,29 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialResponse: """Deserializes the DeleteCredentialResponse from a dictionary.""" return cls() + + + + + + + + + + + + + + + -@dataclass -class DeleteDatabaseCatalogResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse: - """Deserializes the DeleteDatabaseCatalogResponse from a dictionary.""" - return cls() -@dataclass -class DeleteDatabaseInstanceResponse: - def as_dict(self) -> dict: - """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - def as_shallow_dict(self) -> dict: - """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse: - """Deserializes the DeleteDatabaseInstanceResponse from a dictionary.""" - return cls() @dataclass @@ -3352,116 +2567,120 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + + + + + + + + + -@dataclass -class DeleteSyncedDatabaseTableResponse: - def as_dict(self) -> dict: - """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - def as_shallow_dict(self) -> dict: - """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - @classmethod - def 
from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse: - """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary.""" - return cls() @dataclass class DeltaRuntimePropertiesKvPairs: """Properties pertaining to the current state of the delta table as given by the commit server. This does not contain **delta.*** (input) properties in __TableInfo.properties__.""" - - delta_runtime_properties: Dict[str, str] + + delta_runtime_properties: Dict[str,str] """A map of key-value properties attached to the securable.""" - + def as_dict(self) -> dict: """Serializes the DeltaRuntimePropertiesKvPairs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.delta_runtime_properties: - body["delta_runtime_properties"] = self.delta_runtime_properties + if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties return body def as_shallow_dict(self) -> dict: """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" body = {} - if self.delta_runtime_properties: - body["delta_runtime_properties"] = self.delta_runtime_properties + if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaRuntimePropertiesKvPairs: """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary.""" - return cls(delta_runtime_properties=d.get("delta_runtime_properties", None)) + return cls(delta_runtime_properties=d.get('delta_runtime_properties', None)) + -@dataclass -class Dependency: - """A dependency of a SQL object. Either the __table__ field or the __function__ field must be - defined.""" +class DeltaSharingScopeEnum(Enum): + + + INTERNAL = 'INTERNAL' + INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL' + +@dataclass +class Dependency: + """A dependency of a SQL object. 
Either the __table__ field or the __function__ field must be + defined.""" + function: Optional[FunctionDependency] = None """A function that is dependent on a SQL object.""" - + table: Optional[TableDependency] = None """A table that is dependent on a SQL object.""" - + def as_dict(self) -> dict: """Serializes the Dependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function: - body["function"] = self.function.as_dict() - if self.table: - body["table"] = self.table.as_dict() + if self.function: body['function'] = self.function.as_dict() + if self.table: body['table'] = self.table.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Dependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.function: - body["function"] = self.function - if self.table: - body["table"] = self.table + if self.function: body['function'] = self.function + if self.table: body['table'] = self.table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Dependency: """Deserializes the Dependency from a dictionary.""" - return cls( - function=_from_dict(d, "function", FunctionDependency), table=_from_dict(d, "table", TableDependency) - ) + return cls(function=_from_dict(d, 'function', FunctionDependency), table=_from_dict(d, 'table', TableDependency)) + + @dataclass class DependencyList: """A list of dependencies.""" - + dependencies: Optional[List[Dependency]] = None """Array of dependencies.""" - + def as_dict(self) -> dict: """Serializes the DependencyList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dependencies: - body["dependencies"] = [v.as_dict() for v in self.dependencies] + if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies] return body def as_shallow_dict(self) -> dict: """Serializes the DependencyList into a shallow dictionary of its immediate attributes.""" body = {} - if self.dependencies: - body["dependencies"] = self.dependencies + if self.dependencies: body['dependencies'] = self.dependencies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DependencyList: """Deserializes the DependencyList from a dictionary.""" - return cls(dependencies=_repeated_dict(d, "dependencies", Dependency)) + return cls(dependencies=_repeated_dict(d, 'dependencies', Dependency)) + + + + + @dataclass @@ -3480,211 +2699,192 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: """Deserializes the DisableResponse from a dictionary.""" return cls() + + @dataclass class EffectivePermissionsList: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + privilege_assignments: Optional[List[EffectivePrivilegeAssignment]] = None """The privileges conveyed to each principal (either directly or via inheritance)""" - + def as_dict(self) -> dict: """Serializes the EffectivePermissionsList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EffectivePermissionsList: """Deserializes the EffectivePermissionsList from a dictionary.""" - return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", EffectivePrivilegeAssignment)) + return cls(next_page_token=d.get('next_page_token', None), privilege_assignments=_repeated_dict(d, 'privilege_assignments', EffectivePrivilegeAssignment)) + + @dataclass class EffectivePredictiveOptimizationFlag: value: EnablePredictiveOptimization """Whether predictive optimization should be enabled for this object and objects under it.""" - + inherited_from_name: Optional[str] = None """The name of the object from which the flag was inherited. If there was no inheritance, this field is left blank.""" - + inherited_from_type: Optional[EffectivePredictiveOptimizationFlagInheritedFromType] = None """The type of the object from which the flag was inherited. 
If there was no inheritance, this field is left blank.""" - + def as_dict(self) -> dict: """Serializes the EffectivePredictiveOptimizationFlag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type.value - if self.value is not None: - body["value"] = self.value.value + if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name + if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type.value + if self.value is not None: body['value'] = self.value.value return body def as_shallow_dict(self) -> dict: """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type - if self.value is not None: - body["value"] = self.value + if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name + if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag: """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary.""" - return cls( - inherited_from_name=d.get("inherited_from_name", None), - inherited_from_type=_enum(d, "inherited_from_type", EffectivePredictiveOptimizationFlagInheritedFromType), - value=_enum(d, "value", EnablePredictiveOptimization), - ) + return cls(inherited_from_name=d.get('inherited_from_name', None), inherited_from_type=_enum(d, 'inherited_from_type', EffectivePredictiveOptimizationFlagInheritedFromType), value=_enum(d, 'value', EnablePredictiveOptimization)) + + class EffectivePredictiveOptimizationFlagInheritedFromType(Enum): """The type of the object from which the flag was inherited. If there was no inheritance, this field is left blank.""" - - CATALOG = "CATALOG" - SCHEMA = "SCHEMA" - + + CATALOG = 'CATALOG' + SCHEMA = 'SCHEMA' @dataclass class EffectivePrivilege: inherited_from_name: Optional[str] = None """The full name of the object that conveys this privilege via inheritance. This field is omitted when privilege is not inherited (it's assigned to the securable itself).""" - + inherited_from_type: Optional[SecurableType] = None """The type of the object that conveys this privilege via inheritance. 
This field is omitted when privilege is not inherited (it's assigned to the securable itself).""" - + privilege: Optional[Privilege] = None """The privilege assigned to the principal.""" - + def as_dict(self) -> dict: """Serializes the EffectivePrivilege into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type.value - if self.privilege is not None: - body["privilege"] = self.privilege.value + if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name + if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type.value + if self.privilege is not None: body['privilege'] = self.privilege.value return body def as_shallow_dict(self) -> dict: """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type - if self.privilege is not None: - body["privilege"] = self.privilege + if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name + if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type + if self.privilege is not None: body['privilege'] = self.privilege return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilege: """Deserializes the EffectivePrivilege from a dictionary.""" - return cls( - inherited_from_name=d.get("inherited_from_name", None), - inherited_from_type=_enum(d, "inherited_from_type", SecurableType), - privilege=_enum(d, "privilege", Privilege), - ) + return cls(inherited_from_name=d.get('inherited_from_name', None), inherited_from_type=_enum(d, 'inherited_from_type', SecurableType), privilege=_enum(d, 'privilege', Privilege)) + + @dataclass class EffectivePrivilegeAssignment: principal: Optional[str] = None """The principal (user email address or group name).""" - + privileges: Optional[List[EffectivePrivilege]] = None """The privileges conveyed to the principal (either directly or via inheritance).""" - + def as_dict(self) -> dict: """Serializes the EffectivePrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = [v.as_dict() for v in self.privileges] + if self.principal is not None: body['principal'] = self.principal + if self.privileges: body['privileges'] = [v.as_dict() for v in self.privileges] return body def as_shallow_dict(self) -> dict: """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = self.privileges + if self.principal is not None: body['principal'] = self.principal + if self.privileges: body['privileges'] = self.privileges return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilegeAssignment: """Deserializes the EffectivePrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_dict(d, "privileges", EffectivePrivilege)) - + return 
cls(principal=d.get('principal', None), privileges=_repeated_dict(d, 'privileges', EffectivePrivilege)) + -class EnablePredictiveOptimization(Enum): - DISABLE = "DISABLE" - ENABLE = "ENABLE" - INHERIT = "INHERIT" +class EnablePredictiveOptimization(Enum): + + + DISABLE = 'DISABLE' + ENABLE = 'ENABLE' + INHERIT = 'INHERIT' @dataclass class EnableRequest: catalog_name: Optional[str] = None """the catalog in which the system schema is to be enabled""" - + metastore_id: Optional[str] = None """The metastore ID under which the system schema lives.""" - + schema_name: Optional[str] = None """Full name of the system schema.""" - + def as_dict(self) -> dict: """Serializes the EnableRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.schema_name is not None: body['schema_name'] = self.schema_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.schema_name is not None: body['schema_name'] = self.schema_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableRequest: """Deserializes the EnableRequest from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - metastore_id=d.get("metastore_id", None), - schema_name=d.get("schema_name", None), - ) + return cls(catalog_name=d.get('catalog_name', None), metastore_id=d.get('metastore_id', None), schema_name=d.get('schema_name', None)) + + @dataclass @@ -3703,33 +2903,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: """Deserializes the EnableResponse from a dictionary.""" return cls() + + @dataclass class EncryptionDetails: """Encryption options that apply to clients connecting to cloud storage.""" - + sse_encryption_details: Optional[SseEncryptionDetails] = None """Server-Side Encryption properties for clients communicating with AWS S3.""" - + def as_dict(self) -> dict: """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sse_encryption_details: - body["sse_encryption_details"] = self.sse_encryption_details.as_dict() + if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.sse_encryption_details: - body["sse_encryption_details"] = self.sse_encryption_details + if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: """Deserializes the EncryptionDetails from a
dictionary.""" - return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails)) + return cls(sse_encryption_details=_from_dict(d, 'sse_encryption_details', SseEncryptionDetails)) + + + + + @dataclass @@ -3737,345 +2942,259 @@ class ExternalLocationInfo: browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this external location was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of external location creator.""" - + credential_id: Optional[str] = None """Unique ID of the location's storage credential.""" - + credential_name: Optional[str] = None """Name of the storage credential used with this location.""" - + enable_file_events: Optional[bool] = None """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" - + encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" - + fallback: Optional[bool] = None """Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient.""" - + file_event_queue: Optional[FileEventQueue] = None """[Create:OPT Update:OPT] File event queue settings.""" - + isolation_mode: Optional[IsolationMode] = None - + metastore_id: Optional[str] = None """Unique identifier of metastore hosting the external location.""" - + name: Optional[str] = None """Name of the external location.""" - + owner: Optional[str] = None """The owner of the external location.""" - + read_only: Optional[bool] = None """Indicates whether the external location is read-only.""" - + updated_at: Optional[int] = None """Time at which external location this was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified the external location.""" - + url: Optional[str] = None """Path URL of the external location.""" - + def as_dict(self) -> dict: """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue.as_dict() - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - 
body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.url is not None: - body["url"] = self.url + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events + if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() + if self.fallback is not None: body['fallback'] = self.fallback + if self.file_event_queue: body['file_event_queue'] = self.file_event_queue.as_dict() + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.url is not None: - body["url"] = self.url + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.enable_file_events is not None: body['enable_file_events'] = 
self.enable_file_events + if self.encryption_details: body['encryption_details'] = self.encryption_details + if self.fallback is not None: body['fallback'] = self.fallback + if self.file_event_queue: body['file_event_queue'] = self.file_event_queue + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo: """Deserializes the ExternalLocationInfo from a dictionary.""" - return cls( - browse_only=d.get("browse_only", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - credential_id=d.get("credential_id", None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - url=d.get("url", None), - ) + return cls(browse_only=d.get('browse_only', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), credential_id=d.get('credential_id', None), credential_name=d.get('credential_name', None), enable_file_events=d.get('enable_file_events', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), fallback=d.get('fallback', None), file_event_queue=_from_dict(d, 'file_event_queue', FileEventQueue), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), url=d.get('url', None)) + + @dataclass class FailedStatus: """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the ONLINE_PIPELINE_FAILED state.""" - + last_processed_commit_version: Optional[int] = None """The last source table Delta version that was synced to the online table. Note that this Delta version may only be partially synced to the online table. Only populated if the table is still online and available for serving.""" - + timestamp: Optional[str] = None """The timestamp of the last time any data was synchronized from the source table to the online table. 
Only populated if the table is still online and available for serving.""" - + def as_dict(self) -> dict: """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the FailedStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FailedStatus: """Deserializes the FailedStatus from a dictionary.""" - return cls( - last_processed_commit_version=d.get("last_processed_commit_version", None), - timestamp=d.get("timestamp", None), - ) + return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None)) + + @dataclass class FileEventQueue: managed_aqs: Optional[AzureQueueStorage] = None - + managed_pubsub: Optional[GcpPubsub] = None - + managed_sqs: Optional[AwsSqsQueue] = None - + provided_aqs: Optional[AzureQueueStorage] = None - + provided_pubsub: Optional[GcpPubsub] = None - + provided_sqs: Optional[AwsSqsQueue] = None - + def as_dict(self) -> dict: """Serializes the FileEventQueue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_aqs: - body["managed_aqs"] = self.managed_aqs.as_dict() - if self.managed_pubsub: - body["managed_pubsub"] = self.managed_pubsub.as_dict() - if self.managed_sqs: - body["managed_sqs"] = self.managed_sqs.as_dict() - if self.provided_aqs: - body["provided_aqs"] = self.provided_aqs.as_dict() - if self.provided_pubsub: - body["provided_pubsub"] = self.provided_pubsub.as_dict() - if self.provided_sqs: - body["provided_sqs"] = self.provided_sqs.as_dict() + if self.managed_aqs: body['managed_aqs'] = self.managed_aqs.as_dict() + if self.managed_pubsub: body['managed_pubsub'] = self.managed_pubsub.as_dict() + if self.managed_sqs: body['managed_sqs'] = self.managed_sqs.as_dict() + if self.provided_aqs: body['provided_aqs'] = self.provided_aqs.as_dict() + if self.provided_pubsub: body['provided_pubsub'] = self.provided_pubsub.as_dict() + if self.provided_sqs: body['provided_sqs'] = self.provided_sqs.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the FileEventQueue into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_aqs: - body["managed_aqs"] = self.managed_aqs - if self.managed_pubsub: - body["managed_pubsub"] = self.managed_pubsub - if self.managed_sqs: - body["managed_sqs"] = self.managed_sqs - if self.provided_aqs: - body["provided_aqs"] = self.provided_aqs - if self.provided_pubsub: - body["provided_pubsub"] = self.provided_pubsub - if self.provided_sqs: - body["provided_sqs"] = self.provided_sqs + if self.managed_aqs: body['managed_aqs'] = self.managed_aqs + 
if self.managed_pubsub: body['managed_pubsub'] = self.managed_pubsub + if self.managed_sqs: body['managed_sqs'] = self.managed_sqs + if self.provided_aqs: body['provided_aqs'] = self.provided_aqs + if self.provided_pubsub: body['provided_pubsub'] = self.provided_pubsub + if self.provided_sqs: body['provided_sqs'] = self.provided_sqs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileEventQueue: """Deserializes the FileEventQueue from a dictionary.""" - return cls( - managed_aqs=_from_dict(d, "managed_aqs", AzureQueueStorage), - managed_pubsub=_from_dict(d, "managed_pubsub", GcpPubsub), - managed_sqs=_from_dict(d, "managed_sqs", AwsSqsQueue), - provided_aqs=_from_dict(d, "provided_aqs", AzureQueueStorage), - provided_pubsub=_from_dict(d, "provided_pubsub", GcpPubsub), - provided_sqs=_from_dict(d, "provided_sqs", AwsSqsQueue), - ) + return cls(managed_aqs=_from_dict(d, 'managed_aqs', AzureQueueStorage), managed_pubsub=_from_dict(d, 'managed_pubsub', GcpPubsub), managed_sqs=_from_dict(d, 'managed_sqs', AwsSqsQueue), provided_aqs=_from_dict(d, 'provided_aqs', AzureQueueStorage), provided_pubsub=_from_dict(d, 'provided_pubsub', GcpPubsub), provided_sqs=_from_dict(d, 'provided_sqs', AwsSqsQueue)) + + @dataclass class ForeignKeyConstraint: name: str """The name of the constraint.""" - + child_columns: List[str] """Column names for this constraint.""" - + parent_table: str """The full name of the parent table.""" - + parent_columns: List[str] """Column names for this constraint.""" - + def as_dict(self) -> dict: """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.child_columns: - body["child_columns"] = [v for v in self.child_columns] - if self.name is not None: - body["name"] = self.name - if self.parent_columns: - body["parent_columns"] = [v for v in self.parent_columns] - if self.parent_table is not None: - body["parent_table"] = self.parent_table + if self.child_columns: body['child_columns'] = [v for v in self.child_columns] + if self.name is not None: body['name'] = self.name + if self.parent_columns: body['parent_columns'] = [v for v in self.parent_columns] + if self.parent_table is not None: body['parent_table'] = self.parent_table return body def as_shallow_dict(self) -> dict: """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.child_columns: - body["child_columns"] = self.child_columns - if self.name is not None: - body["name"] = self.name - if self.parent_columns: - body["parent_columns"] = self.parent_columns - if self.parent_table is not None: - body["parent_table"] = self.parent_table + if self.child_columns: body['child_columns'] = self.child_columns + if self.name is not None: body['name'] = self.name + if self.parent_columns: body['parent_columns'] = self.parent_columns + if self.parent_table is not None: body['parent_table'] = self.parent_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: """Deserializes the ForeignKeyConstraint from a dictionary.""" - return cls( - child_columns=d.get("child_columns", None), - name=d.get("name", None), - parent_columns=d.get("parent_columns", None), - parent_table=d.get("parent_table", None), - ) + return cls(child_columns=d.get('child_columns', None), name=d.get('name', None), parent_columns=d.get('parent_columns', None), parent_table=d.get('parent_table', None)) + + @dataclass class FunctionDependency: """A function that is dependent on a SQL
object.""" - + function_full_name: str """Full name of the dependent function, in the form of __catalog_name__.__schema_name__.__function_name__.""" - + def as_dict(self) -> dict: """Serializes the FunctionDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_full_name is not None: - body["function_full_name"] = self.function_full_name + if self.function_full_name is not None: body['function_full_name'] = self.function_full_name return body def as_shallow_dict(self) -> dict: """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_full_name is not None: - body["function_full_name"] = self.function_full_name + if self.function_full_name is not None: body['function_full_name'] = self.function_full_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionDependency: """Deserializes the FunctionDependency from a dictionary.""" - return cls(function_full_name=d.get("function_full_name", None)) + return cls(function_full_name=d.get('function_full_name', None)) + + @dataclass @@ -4083,606 +3202,464 @@ class FunctionInfo: browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """Name of parent catalog.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this function was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of function creator.""" - + data_type: Optional[ColumnTypeName] = None """Scalar function return data type.""" - + external_language: Optional[str] = None """External function language.""" - + external_name: Optional[str] = None """External function name.""" - + full_data_type: Optional[str] = None """Pretty printed function data type.""" - + full_name: Optional[str] = None """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" - + function_id: Optional[str] = None """Id of Function, relative to parent schema.""" - + input_params: Optional[FunctionParameterInfos] = None - + is_deterministic: Optional[bool] = None """Whether the function is deterministic.""" - + is_null_call: Optional[bool] = None """Function null call.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """Name of function, relative to parent schema.""" - + owner: Optional[str] = None """Username of current owner of function.""" - + parameter_style: Optional[FunctionInfoParameterStyle] = None """Function parameter style. **S** is the value for SQL.""" - + properties: Optional[str] = None """JSON-serialized key-value pair map, encoded (escaped) as a string.""" - + return_params: Optional[FunctionParameterInfos] = None """Table function return parameters.""" - + routine_body: Optional[FunctionInfoRoutineBody] = None """Function language. 
When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**.""" - + routine_definition: Optional[str] = None """Function body.""" - + routine_dependencies: Optional[DependencyList] = None """Function dependencies.""" - + schema_name: Optional[str] = None """Name of parent schema relative to its parent catalog.""" - + security_type: Optional[FunctionInfoSecurityType] = None """Function security type.""" - + specific_name: Optional[str] = None """Specific name of the function; Reserved for future use.""" - + sql_data_access: Optional[FunctionInfoSqlDataAccess] = None """Function SQL data access.""" - + sql_path: Optional[str] = None """List of schemes whose objects can be referenced without qualification.""" - + updated_at: Optional[int] = None """Time at which this function was last updated, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified function.""" - + def as_dict(self) -> dict: """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_type is not None: - body["data_type"] = self.data_type.value - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function_id is not None: - body["function_id"] = self.function_id - if self.input_params: - body["input_params"] = self.input_params.as_dict() - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style.value - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params.as_dict() - if self.routine_body is not None: - body["routine_body"] = self.routine_body.value - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies.as_dict() - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type.value - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access.value - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.updated_at is not None: - body["updated_at"] =
self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.data_type is not None: body['data_type'] = self.data_type.value + if self.external_language is not None: body['external_language'] = self.external_language + if self.external_name is not None: body['external_name'] = self.external_name + if self.full_data_type is not None: body['full_data_type'] = self.full_data_type + if self.full_name is not None: body['full_name'] = self.full_name + if self.function_id is not None: body['function_id'] = self.function_id + if self.input_params: body['input_params'] = self.input_params.as_dict() + if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic + if self.is_null_call is not None: body['is_null_call'] = self.is_null_call + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value + if self.properties is not None: body['properties'] = self.properties + if self.return_params: body['return_params'] = self.return_params.as_dict() + if self.routine_body is not None: body['routine_body'] = self.routine_body.value + if self.routine_definition is not None: body['routine_definition'] = self.routine_definition + if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict() + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.security_type is not None: body['security_type'] = self.security_type.value + if self.specific_name is not None: body['specific_name'] = self.specific_name + if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access.value + if self.sql_path is not None: body['sql_path'] = self.sql_path + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_type is not None: - body["data_type"] = self.data_type - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function_id is not None: - body["function_id"] = self.function_id - if self.input_params: - body["input_params"] = self.input_params - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if 
self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params - if self.routine_body is not None: - body["routine_body"] = self.routine_body - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.data_type is not None: body['data_type'] = self.data_type + if self.external_language is not None: body['external_language'] = self.external_language + if self.external_name is not None: body['external_name'] = self.external_name + if self.full_data_type is not None: body['full_data_type'] = self.full_data_type + if self.full_name is not None: body['full_name'] = self.full_name + if self.function_id is not None: body['function_id'] = self.function_id + if self.input_params: body['input_params'] = self.input_params + if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic + if self.is_null_call is not None: body['is_null_call'] = self.is_null_call + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.parameter_style is not None: body['parameter_style'] = self.parameter_style + if self.properties is not None: body['properties'] = self.properties + if self.return_params: body['return_params'] = self.return_params + if self.routine_body is not None: body['routine_body'] = self.routine_body + if self.routine_definition is not None: body['routine_definition'] = self.routine_definition + if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.security_type is not None: body['security_type'] = self.security_type + if self.specific_name is not None: body['specific_name'] = self.specific_name + if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access + if self.sql_path is not None: body['sql_path'] = self.sql_path + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by 
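+ # Shallow serialization: nested dataclasses and enum members are returned as-is (no .as_dict()/.value conversion), unlike as_dict above.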
return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: """Deserializes the FunctionInfo from a dictionary.""" - return cls( - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - data_type=_enum(d, "data_type", ColumnTypeName), - external_language=d.get("external_language", None), - external_name=d.get("external_name", None), - full_data_type=d.get("full_data_type", None), - full_name=d.get("full_name", None), - function_id=d.get("function_id", None), - input_params=_from_dict(d, "input_params", FunctionParameterInfos), - is_deterministic=d.get("is_deterministic", None), - is_null_call=d.get("is_null_call", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - parameter_style=_enum(d, "parameter_style", FunctionInfoParameterStyle), - properties=d.get("properties", None), - return_params=_from_dict(d, "return_params", FunctionParameterInfos), - routine_body=_enum(d, "routine_body", FunctionInfoRoutineBody), - routine_definition=d.get("routine_definition", None), - routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), - schema_name=d.get("schema_name", None), - security_type=_enum(d, "security_type", FunctionInfoSecurityType), - specific_name=d.get("specific_name", None), - sql_data_access=_enum(d, "sql_data_access", FunctionInfoSqlDataAccess), - sql_path=d.get("sql_path", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_type=_enum(d, 'data_type', ColumnTypeName), external_language=d.get('external_language', None), external_name=d.get('external_name', None), full_data_type=d.get('full_data_type', None), full_name=d.get('full_name', None), function_id=d.get('function_id', None), input_params=_from_dict(d, 'input_params', FunctionParameterInfos), is_deterministic=d.get('is_deterministic', None), is_null_call=d.get('is_null_call', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), parameter_style=_enum(d, 'parameter_style', FunctionInfoParameterStyle), properties=d.get('properties', None), return_params=_from_dict(d, 'return_params', FunctionParameterInfos), routine_body=_enum(d, 'routine_body', FunctionInfoRoutineBody), routine_definition=d.get('routine_definition', None), routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList), schema_name=d.get('schema_name', None), security_type=_enum(d, 'security_type', FunctionInfoSecurityType), specific_name=d.get('specific_name', None), sql_data_access=_enum(d, 'sql_data_access', FunctionInfoSqlDataAccess), sql_path=d.get('sql_path', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + class FunctionInfoParameterStyle(Enum): """Function parameter style. **S** is the value for SQL.""" - - S = "S" - + + S = 'S' class FunctionInfoRoutineBody(Enum): """Function language. 
When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**.""" - - EXTERNAL = "EXTERNAL" - SQL = "SQL" - + + EXTERNAL = 'EXTERNAL' + SQL = 'SQL' class FunctionInfoSecurityType(Enum): """The security type of the function.""" - - DEFINER = "DEFINER" - + + DEFINER = 'DEFINER' class FunctionInfoSqlDataAccess(Enum): """Function SQL data access.""" - - CONTAINS_SQL = "CONTAINS_SQL" - NO_SQL = "NO_SQL" - READS_SQL_DATA = "READS_SQL_DATA" - + + CONTAINS_SQL = 'CONTAINS_SQL' + NO_SQL = 'NO_SQL' + READS_SQL_DATA = 'READS_SQL_DATA' @dataclass class FunctionParameterInfo: name: str """Name of parameter.""" - + type_text: str """Full data type spec, SQL/catalogString text.""" - + type_name: ColumnTypeName - + position: int """Ordinal position of column (starting at position 0).""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + parameter_default: Optional[str] = None """Default value of the parameter.""" - + parameter_mode: Optional[FunctionParameterMode] = None """The mode of the function parameter.""" - + parameter_type: Optional[FunctionParameterType] = None """The type of function parameter.""" - + type_interval_type: Optional[str] = None """Format of IntervalType.""" - + type_json: Optional[str] = None """Full data type spec, JSON-serialized.""" - + type_precision: Optional[int] = None """Digits of precision; required on Create for DecimalTypes.""" - + type_scale: Optional[int] = None """Digits to right of decimal; Required on Create for DecimalTypes.""" - + def as_dict(self) -> dict: """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode.value - if self.parameter_type is not None: - body["parameter_type"] = self.parameter_type.value - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name.value - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.parameter_default is not None: body['parameter_default'] = self.parameter_default + if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode.value + if self.parameter_type is not None: body['parameter_type'] = self.parameter_type.value + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_json is not None: body['type_json'] = self.type_json + if self.type_name is not None: body['type_name'] = self.type_name.value + if self.type_precision is not None: body['type_precision'] = 
self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text return body def as_shallow_dict(self) -> dict: """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode - if self.parameter_type is not None: - body["parameter_type"] = self.parameter_type - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.parameter_default is not None: body['parameter_default'] = self.parameter_default + if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode + if self.parameter_type is not None: body['parameter_type'] = self.parameter_type + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_json is not None: body['type_json'] = self.type_json + if self.type_name is not None: body['type_name'] = self.type_name + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: """Deserializes the FunctionParameterInfo from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - parameter_default=d.get("parameter_default", None), - parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode), - parameter_type=_enum(d, "parameter_type", FunctionParameterType), - position=d.get("position", None), - type_interval_type=d.get("type_interval_type", None), - type_json=d.get("type_json", None), - type_name=_enum(d, "type_name", ColumnTypeName), - type_precision=d.get("type_precision", None), - type_scale=d.get("type_scale", None), - type_text=d.get("type_text", None), - ) + return cls(comment=d.get('comment', None), name=d.get('name', None), parameter_default=d.get('parameter_default', None), parameter_mode=_enum(d, 'parameter_mode', FunctionParameterMode), parameter_type=_enum(d, 'parameter_type', FunctionParameterType), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_json=d.get('type_json', None), type_name=_enum(d, 'type_name', ColumnTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None)) + + @dataclass class FunctionParameterInfos: parameters: Optional[List[FunctionParameterInfo]] = None """The array of __FunctionParameterInfo__ definitions 
of the function's parameters.""" - + def as_dict(self) -> dict: """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] return body def as_shallow_dict(self) -> dict: """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: - body["parameters"] = self.parameters + if self.parameters: body['parameters'] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos: """Deserializes the FunctionParameterInfos from a dictionary.""" - return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo)) + return cls(parameters=_repeated_dict(d, 'parameters', FunctionParameterInfo)) + + class FunctionParameterMode(Enum): """The mode of the function parameter.""" - - IN = "IN" - + + IN = 'IN' class FunctionParameterType(Enum): """The type of function parameter.""" - - COLUMN = "COLUMN" - PARAM = "PARAM" - + + COLUMN = 'COLUMN' + PARAM = 'PARAM' @dataclass class GcpOauthToken: """GCP temporary credentials for API authentication. Read more at https://developers.google.com/identity/protocols/oauth2/service-account""" - + oauth_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token + if self.oauth_token is not None: body['oauth_token'] = self.oauth_token return body def as_shallow_dict(self) -> dict: """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token + if self.oauth_token is not None: body['oauth_token'] = self.oauth_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpOauthToken: """Deserializes the GcpOauthToken from a dictionary.""" - return cls(oauth_token=d.get("oauth_token", None)) + return cls(oauth_token=d.get('oauth_token', None)) + + @dataclass class GcpPubsub: managed_resource_id: Optional[str] = None """Unique identifier included in the name of file events managed cloud resources.""" - + subscription_name: Optional[str] = None """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription name} REQUIRED for provided_pubsub.""" - + def as_dict(self) -> dict: """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.subscription_name is not None: - body["subscription_name"] = self.subscription_name + if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id + if self.subscription_name is not None: body['subscription_name'] = self.subscription_name return body def as_shallow_dict(self) -> dict: """Serializes the GcpPubsub into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.subscription_name is not None: - body["subscription_name"] = self.subscription_name + if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id + if self.subscription_name is 
not None: body['subscription_name'] = self.subscription_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: """Deserializes the GcpPubsub from a dictionary.""" - return cls( - managed_resource_id=d.get("managed_resource_id", None), subscription_name=d.get("subscription_name", None) - ) + return cls(managed_resource_id=d.get('managed_resource_id', None), subscription_name=d.get('subscription_name', None)) + + @dataclass class GenerateTemporaryServiceCredentialAzureOptions: """The Azure cloud options to customize the requested temporary credential""" - + resources: Optional[List[str]] = None """The resources to which the temporary Azure credential should apply. These resources are the scopes that are passed to the token provider (see https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)""" - + def as_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.resources: - body["resources"] = [v for v in self.resources] + if self.resources: body['resources'] = [v for v in self.resources] return body def as_shallow_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.resources: - body["resources"] = self.resources + if self.resources: body['resources'] = self.resources return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialAzureOptions: """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary.""" - return cls(resources=d.get("resources", None)) + return cls(resources=d.get('resources', None)) + + @dataclass class GenerateTemporaryServiceCredentialGcpOptions: """The GCP cloud options to customize the requested temporary credential""" - + scopes: Optional[List[str]] = None """The scopes to which the temporary GCP credential should apply. 
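
# A short sketch of the cloud-specific option payloads; the resource and
# scope strings below are placeholders, not values mandated by the API.
azure_opts = GenerateTemporaryServiceCredentialAzureOptions(
    resources=["https://storage.azure.com/"]  # assumed Azure token-provider scope
)
gcp_opts = GenerateTemporaryServiceCredentialGcpOptions(
    scopes=["https://www.googleapis.com/auth/devstorage.read_only"]  # assumed GCP scope
)
assert azure_opts.as_dict() == {"resources": ["https://storage.azure.com/"]}
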
These resources are the scopes that are passed to the token provider (see https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)""" - + def as_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.scopes: - body["scopes"] = [v for v in self.scopes] + if self.scopes: body['scopes'] = [v for v in self.scopes] return body def as_shallow_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.scopes: - body["scopes"] = self.scopes + if self.scopes: body['scopes'] = self.scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialGcpOptions: """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary.""" - return cls(scopes=d.get("scopes", None)) + return cls(scopes=d.get('scopes', None)) + + @dataclass class GenerateTemporaryServiceCredentialRequest: credential_name: str """The name of the service credential used to generate a temporary credential""" - + azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None """The Azure cloud options to customize the requested temporary credential""" - + gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None """The GCP cloud options to customize the requested temporary credential""" - + def as_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.azure_options: - body["azure_options"] = self.azure_options.as_dict() - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.gcp_options: - body["gcp_options"] = self.gcp_options.as_dict() + if self.azure_options: body['azure_options'] = self.azure_options.as_dict() + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.gcp_options: body['gcp_options'] = self.gcp_options.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.azure_options: - body["azure_options"] = self.azure_options - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.gcp_options: - body["gcp_options"] = self.gcp_options + if self.azure_options: body['azure_options'] = self.azure_options + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.gcp_options: body['gcp_options'] = self.gcp_options return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialRequest: """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary.""" - return cls( - azure_options=_from_dict(d, "azure_options", GenerateTemporaryServiceCredentialAzureOptions), - credential_name=d.get("credential_name", None), - gcp_options=_from_dict(d, "gcp_options", GenerateTemporaryServiceCredentialGcpOptions), - ) + return cls(azure_options=_from_dict(d, 'azure_options', GenerateTemporaryServiceCredentialAzureOptions), credential_name=d.get('credential_name', None), gcp_options=_from_dict(d, 'gcp_options', GenerateTemporaryServiceCredentialGcpOptions)) + + @dataclass @@ -4691,32 +3668,30 @@ class GenerateTemporaryTableCredentialRequest: """The 
operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
    specified, the credentials returned will have write permissions; otherwise, they will be read-only."""
-
+
    table_id: Optional[str] = None
    """UUID of the table to read or write."""
-
+
    def as_dict(self) -> dict:
        """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.operation is not None:
-            body["operation"] = self.operation.value
-        if self.table_id is not None:
-            body["table_id"] = self.table_id
+        if self.operation is not None: body['operation'] = self.operation.value
+        if self.table_id is not None: body['table_id'] = self.table_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.operation is not None:
-            body["operation"] = self.operation
-        if self.table_id is not None:
-            body["table_id"] = self.table_id
+        if self.operation is not None: body['operation'] = self.operation
+        if self.table_id is not None: body['table_id'] = self.table_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialRequest:
        """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
-        return cls(operation=_enum(d, "operation", TableOperation), table_id=d.get("table_id", None))
+        return cls(operation=_enum(d, 'operation', TableOperation), table_id=d.get('table_id', None))
+
+
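
# A hedged sketch of building the request; the table UUID is a placeholder,
# and TableOperation.READ is assumed to exist as suggested by the operation
# docstring above (READ or READ_WRITE). as_dict() lowers the enum to its
# wire value, while as_shallow_dict() keeps the member itself.
req = GenerateTemporaryTableCredentialRequest(
    operation=TableOperation.READ,
    table_id="11111111-2222-3333-4444-555555555555",
)
assert req.as_dict() == {
    "operation": "READ",
    "table_id": "11111111-2222-3333-4444-555555555555",
}
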
@dataclass
@@ -4724,676 +3699,663 @@ class GenerateTemporaryTableCredentialResponse:
    aws_temp_credentials: Optional[AwsCredentials] = None
    """AWS temporary credentials for API authentication. Read more at
    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
-
+
    azure_aad: Optional[AzureActiveDirectoryToken] = None
    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or
    Managed Identity. Read more at
    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
-
+
    azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None
    """Azure temporary credentials for API authentication. Read more at
    https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
-
+
    expiration_time: Optional[int] = None
    """Server time when the credential will expire, in epoch milliseconds. The API client is advised
    to cache the credential given this expiration time."""
-
+
    gcp_oauth_token: Optional[GcpOauthToken] = None
    """GCP temporary credentials for API authentication. Read more at
    https://developers.google.com/identity/protocols/oauth2/service-account"""
-
+
    r2_temp_credentials: Optional[R2Credentials] = None
    """R2 temporary credentials for API authentication. Read more at
    https://developers.cloudflare.com/r2/api/s3/tokens/."""
-
+
    url: Optional[str] = None
    """The URL of the storage path accessible by the temporary credential."""
-
+
    def as_dict(self) -> dict:
        """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.aws_temp_credentials:
-            body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict()
-        if self.azure_aad:
-            body["azure_aad"] = self.azure_aad.as_dict()
-        if self.azure_user_delegation_sas:
-            body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict()
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.gcp_oauth_token:
-            body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict()
-        if self.r2_temp_credentials:
-            body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict()
-        if self.url is not None:
-            body["url"] = self.url
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
+        if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
+        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials.as_dict()
+        if self.url is not None: body['url'] = self.url
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.aws_temp_credentials:
-            body["aws_temp_credentials"] = self.aws_temp_credentials
-        if self.azure_aad:
-            body["azure_aad"] = self.azure_aad
-        if self.azure_user_delegation_sas:
-            body["azure_user_delegation_sas"] = self.azure_user_delegation_sas
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.gcp_oauth_token:
-            body["gcp_oauth_token"] = self.gcp_oauth_token
-        if self.r2_temp_credentials:
-            body["r2_temp_credentials"] = self.r2_temp_credentials
-        if self.url is not None:
-            body["url"] = self.url
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
+        if self.azure_aad: body['azure_aad'] = self.azure_aad
+        if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
+        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials
+        if self.url is not None: body['url'] = self.url
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialResponse:
        """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
-        return cls(
-            aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials),
-            azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken),
-            azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas),
-            expiration_time=d.get("expiration_time", None),
-            gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken),
-            r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials),
-            url=d.get("url", None),
-        )
+        return cls(aws_temp_credentials=_from_dict(d, 
'aws_temp_credentials', AwsCredentials), azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken), azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas', AzureUserDelegationSas), expiration_time=d.get('expiration_time', None), gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken), r2_temp_credentials=_from_dict(d, 'r2_temp_credentials', R2Credentials), url=d.get('url', None)) + + + + + + + + + + + + + + + + + + + + + + + @dataclass class GetCatalogWorkspaceBindingsResponse: workspaces: Optional[List[int]] = None """A list of workspace IDs""" - + def as_dict(self) -> dict: """Serializes the GetCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspaces: - body["workspaces"] = [v for v in self.workspaces] + if self.workspaces: body['workspaces'] = [v for v in self.workspaces] return body def as_shallow_dict(self) -> dict: """Serializes the GetCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspaces: - body["workspaces"] = self.workspaces + if self.workspaces: body['workspaces'] = self.workspaces return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetCatalogWorkspaceBindingsResponse: """Deserializes the GetCatalogWorkspaceBindingsResponse from a dictionary.""" - return cls(workspaces=d.get("workspaces", None)) + return cls(workspaces=d.get('workspaces', None)) + + + + + + + + + + + + + + + + + + + + + + + @dataclass class GetMetastoreSummaryResponse: cloud: Optional[str] = None """Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`).""" - + created_at: Optional[int] = None """Time at which this metastore was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of metastore creator.""" - + default_data_access_config_id: Optional[str] = None """Unique identifier of the metastore's (Default) Data Access Configuration.""" - + delta_sharing_organization_name: Optional[str] = None """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.""" - + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None """The lifetime of delta sharing recipient token in seconds.""" - - delta_sharing_scope: Optional[GetMetastoreSummaryResponseDeltaSharingScope] = None + + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None """The scope of Delta Sharing enabled for the metastore.""" - + external_access_enabled: Optional[bool] = None """Whether to allow non-DBR clients to directly access entities under the metastore.""" - + global_metastore_id: Optional[str] = None """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`.""" - + metastore_id: Optional[str] = None """Unique identifier of metastore.""" - + name: Optional[str] = None """The user-specified name of the metastore.""" - + owner: Optional[str] = None """The owner of the metastore.""" - + privilege_model_version: Optional[str] = None """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" - + region: Optional[str] = None """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" - + storage_root: Optional[str] = None """The storage root URL for metastore""" - + storage_root_credential_id: Optional[str] = None """UUID of storage credential to access the metastore storage_root.""" - + storage_root_credential_name: Optional[str] = None """Name of the storage 
credential to access the metastore storage_root.""" - + updated_at: Optional[int] = None """Time at which the metastore was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified the metastore.""" - + def as_dict(self) -> dict: """Serializes the GetMetastoreSummaryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.default_data_access_config_id is not None: - body["default_data_access_config_id"] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.external_access_enabled is not None: - body["external_access_enabled"] = self.external_access_enabled - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: - body["storage_root_credential_name"] = self.storage_root_credential_name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.cloud is not None: body['cloud'] = self.cloud + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value + if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version + if self.region is not None: body['region'] = self.region + if 
self.storage_root is not None: body['storage_root'] = self.storage_root + if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.default_data_access_config_id is not None: - body["default_data_access_config_id"] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope - if self.external_access_enabled is not None: - body["external_access_enabled"] = self.external_access_enabled - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: - body["storage_root_credential_name"] = self.storage_root_credential_name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.cloud is not None: body['cloud'] = self.cloud + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope + if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: 
body['owner'] = self.owner + if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version + if self.region is not None: body['region'] = self.region + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetMetastoreSummaryResponse: """Deserializes the GetMetastoreSummaryResponse from a dictionary.""" - return cls( - cloud=d.get("cloud", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - default_data_access_config_id=d.get("default_data_access_config_id", None), - delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), - delta_sharing_recipient_token_lifetime_in_seconds=d.get( - "delta_sharing_recipient_token_lifetime_in_seconds", None - ), - delta_sharing_scope=_enum(d, "delta_sharing_scope", GetMetastoreSummaryResponseDeltaSharingScope), - external_access_enabled=d.get("external_access_enabled", None), - global_metastore_id=d.get("global_metastore_id", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - privilege_model_version=d.get("privilege_model_version", None), - region=d.get("region", None), - storage_root=d.get("storage_root", None), - storage_root_credential_id=d.get("storage_root_credential_id", None), - storage_root_credential_name=d.get("storage_root_credential_name", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) - - -class GetMetastoreSummaryResponseDeltaSharingScope(Enum): - """The scope of Delta Sharing enabled for the metastore.""" + return cls(cloud=d.get('cloud', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), default_data_access_config_id=d.get('default_data_access_config_id', None), delta_sharing_organization_name=d.get('delta_sharing_organization_name', None), delta_sharing_recipient_token_lifetime_in_seconds=d.get('delta_sharing_recipient_token_lifetime_in_seconds', None), delta_sharing_scope=_enum(d, 'delta_sharing_scope', DeltaSharingScopeEnum), external_access_enabled=d.get('external_access_enabled', None), global_metastore_id=d.get('global_metastore_id', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), privilege_model_version=d.get('privilege_model_version', None), region=d.get('region', None), storage_root=d.get('storage_root', None), storage_root_credential_id=d.get('storage_root_credential_id', None), storage_root_credential_name=d.get('storage_root_credential_name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + + + + + + + + + +@dataclass +class GetPermissionsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
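
# A small deserialization sketch for the metastore summary; the keys and
# values are placeholders, and "INTERNAL" is assumed to be a valid
# DeltaSharingScopeEnum value, mirroring the members of the enum this field
# previously used. as_dict() re-emits the enum's string value.
summary = GetMetastoreSummaryResponse.from_dict({
    "name": "primary",                # placeholder metastore name
    "cloud": "aws",
    "delta_sharing_scope": "INTERNAL",
})
assert summary.as_dict()["delta_sharing_scope"] == "INTERNAL"
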
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
+    privilege_assignments: Optional[List[PrivilegeAssignment]] = None
+    """The privileges assigned to each principal"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetPermissionsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPermissionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> GetPermissionsResponse:
+        """Deserializes the GetPermissionsResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None), privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment))
+
+
+
+
+
+
+
-    INTERNAL = "INTERNAL"
-    INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL"


@dataclass
class GetQuotaResponse:
    quota_info: Optional[QuotaInfo] = None
    """The returned QuotaInfo."""
-
+
    def as_dict(self) -> dict:
        """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.quota_info:
-            body["quota_info"] = self.quota_info.as_dict()
+        if self.quota_info: body['quota_info'] = self.quota_info.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.quota_info:
-            body["quota_info"] = self.quota_info
+        if self.quota_info: body['quota_info'] = self.quota_info
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> GetQuotaResponse:
        """Deserializes the GetQuotaResponse from a dictionary."""
-        return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo))
+        return cls(quota_info=_from_dict(d, 'quota_info', QuotaInfo))
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
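
# A hedged paging sketch for the next_page_token contract these responses
# share: resend the returned token as page_token until it comes back absent.
# fetch_page stands in for whichever client call returns a
# GetPermissionsResponse (or any sibling carrying the same two fields).
from typing import Callable, Iterator, Optional

def iter_privilege_assignments(
    fetch_page: Callable[[Optional[str]], "GetPermissionsResponse"],
) -> Iterator["PrivilegeAssignment"]:
    token: Optional[str] = None
    while True:
        resp = fetch_page(token)
        for assignment in resp.privilege_assignments or []:
            yield assignment
        token = resp.next_page_token
        if token is None:  # an absent token marks the last page
            return
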
@dataclass
class GetWorkspaceBindingsResponse:
    bindings: Optional[List[WorkspaceBinding]] = None
    """List of workspace bindings"""
-
+
    next_page_token: Optional[str] = None
    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
    def as_dict(self) -> dict:
        """Serializes the GetWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.bindings:
-            body["bindings"] = [v.as_dict() for v in self.bindings]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the GetWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.bindings:
-            body["bindings"] = self.bindings
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.bindings: body['bindings'] = self.bindings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceBindingsResponse:
        """Deserializes the GetWorkspaceBindingsResponse from a dictionary."""
-        return cls(
-            bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None)
-        )
+        return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding), next_page_token=d.get('next_page_token', None))
+
+

class IsolationMode(Enum):
+
+
+    ISOLATION_MODE_ISOLATED = 'ISOLATION_MODE_ISOLATED'
+    ISOLATION_MODE_OPEN = 'ISOLATION_MODE_OPEN'
+
-    ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED"
-    ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN"


@dataclass
class ListAccountMetastoreAssignmentsResponse:
    """The list of workspaces to which the given metastore is assigned."""
-
+
    workspace_ids: Optional[List[int]] = None
-
+
    def as_dict(self) -> dict:
        """Serializes the ListAccountMetastoreAssignmentsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.workspace_ids:
-            body["workspace_ids"] = [v for v in self.workspace_ids]
+        if self.workspace_ids: body['workspace_ids'] = [v for v in self.workspace_ids]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.workspace_ids:
-            body["workspace_ids"] = self.workspace_ids
+        if self.workspace_ids: body['workspace_ids'] = self.workspace_ids
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse:
        """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary."""
-        return cls(workspace_ids=d.get("workspace_ids", None))
+        return cls(workspace_ids=d.get('workspace_ids', None))
+
+
+
+

@dataclass
class ListAccountStorageCredentialsResponse:
    storage_credentials: Optional[List[StorageCredentialInfo]] = None
    """An array of metastore storage credentials."""
-
+
    def as_dict(self) -> dict:
        """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.storage_credentials:
-            body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials]
+        if self.storage_credentials: body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.storage_credentials:
-
body["storage_credentials"] = self.storage_credentials + if self.storage_credentials: body['storage_credentials'] = self.storage_credentials return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAccountStorageCredentialsResponse: """Deserializes the ListAccountStorageCredentialsResponse from a dictionary.""" - return cls(storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo)) + return cls(storage_credentials=_repeated_dict(d, 'storage_credentials', StorageCredentialInfo)) + + + + + @dataclass class ListCatalogsResponse: catalogs: Optional[List[CatalogInfo]] = None """An array of catalog information objects.""" - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. __page_token__ should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalogs: - body["catalogs"] = [v.as_dict() for v in self.catalogs] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.catalogs: body['catalogs'] = [v.as_dict() for v in self.catalogs] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalogs: - body["catalogs"] = self.catalogs - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.catalogs: body['catalogs'] = self.catalogs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCatalogsResponse: """Deserializes the ListCatalogsResponse from a dictionary.""" - return cls(catalogs=_repeated_dict(d, "catalogs", CatalogInfo), next_page_token=d.get("next_page_token", None)) + return cls(catalogs=_repeated_dict(d, 'catalogs', CatalogInfo), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListConnectionsResponse: connections: Optional[List[ConnectionInfo]] = None """An array of connection information objects.""" - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
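
# A brief sketch contrasting the two serializers on a list-bearing response;
# the CatalogInfo value is a placeholder (its optional name field is assumed,
# consistent with the other dataclasses in this module). as_dict() recurses
# into each element, while as_shallow_dict() keeps the instances themselves.
resp = ListCatalogsResponse(catalogs=[CatalogInfo(name="main")])
assert isinstance(resp.as_dict()["catalogs"][0], dict)
assert isinstance(resp.as_shallow_dict()["catalogs"][0], CatalogInfo)
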
__page_token__ should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListConnectionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connections: - body["connections"] = [v.as_dict() for v in self.connections] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.connections: body['connections'] = [v.as_dict() for v in self.connections] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.connections: - body["connections"] = self.connections - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.connections: body['connections'] = self.connections + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListConnectionsResponse: """Deserializes the ListConnectionsResponse from a dictionary.""" - return cls( - connections=_repeated_dict(d, "connections", ConnectionInfo), next_page_token=d.get("next_page_token", None) - ) + return cls(connections=_repeated_dict(d, 'connections', ConnectionInfo), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListCredentialsResponse: credentials: Optional[List[CredentialInfo]] = None - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. __page_token__ should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credentials: - body["credentials"] = [v.as_dict() for v in self.credentials] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credentials: - body["credentials"] = self.credentials - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.credentials: body['credentials'] = self.credentials + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse: """Deserializes the ListCredentialsResponse from a dictionary.""" - return cls( - credentials=_repeated_dict(d, "credentials", CredentialInfo), next_page_token=d.get("next_page_token", None) - ) + return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo), next_page_token=d.get('next_page_token', None)) + -@dataclass -class ListDatabaseInstancesResponse: - database_instances: Optional[List[DatabaseInstance]] = None - """List of instances.""" - next_page_token: Optional[str] = None - """Pagination token to request the next page of instances.""" - - def as_dict(self) -> dict: - """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if 
self.database_instances: - body["database_instances"] = [v.as_dict() for v in self.database_instances] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.database_instances: - body["database_instances"] = self.database_instances - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse: - """Deserializes the ListDatabaseInstancesResponse from a dictionary.""" - return cls( - database_instances=_repeated_dict(d, "database_instances", DatabaseInstance), - next_page_token=d.get("next_page_token", None), - ) @dataclass class ListExternalLocationsResponse: external_locations: Optional[List[ExternalLocationInfo]] = None """An array of external locations.""" - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. __page_token__ should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListExternalLocationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_locations: - body["external_locations"] = [v.as_dict() for v in self.external_locations] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.external_locations: body['external_locations'] = [v.as_dict() for v in self.external_locations] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.external_locations: - body["external_locations"] = self.external_locations - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.external_locations: body['external_locations'] = self.external_locations + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListExternalLocationsResponse: """Deserializes the ListExternalLocationsResponse from a dictionary.""" - return cls( - external_locations=_repeated_dict(d, "external_locations", ExternalLocationInfo), - next_page_token=d.get("next_page_token", None), - ) + return cls(external_locations=_repeated_dict(d, 'external_locations', ExternalLocationInfo), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListFunctionsResponse: functions: Optional[List[FunctionInfo]] = None """An array of function information objects.""" - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
__page_token__ should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListFunctionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.functions: - body["functions"] = [v.as_dict() for v in self.functions] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.functions: body['functions'] = [v.as_dict() for v in self.functions] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.functions: - body["functions"] = self.functions - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.functions: body['functions'] = self.functions + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListFunctionsResponse: """Deserializes the ListFunctionsResponse from a dictionary.""" - return cls( - functions=_repeated_dict(d, "functions", FunctionInfo), next_page_token=d.get("next_page_token", None) - ) + return cls(functions=_repeated_dict(d, 'functions', FunctionInfo), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListMetastoresResponse: metastores: Optional[List[MetastoreInfo]] = None """An array of metastore information objects.""" - + + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + def as_dict(self) -> dict: """Serializes the ListMetastoresResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastores: - body["metastores"] = [v.as_dict() for v in self.metastores] + if self.metastores: body['metastores'] = [v.as_dict() for v in self.metastores] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastores: - body["metastores"] = self.metastores + if self.metastores: body['metastores'] = self.metastores + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListMetastoresResponse: """Deserializes the ListMetastoresResponse from a dictionary.""" - return cls(metastores=_repeated_dict(d, "metastores", MetastoreInfo)) + return cls(metastores=_repeated_dict(d, 'metastores', MetastoreInfo), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListModelVersionsResponse: model_versions: Optional[List[ModelVersionInfo]] = None - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
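
# ListMetastoresResponse now carries next_page_token as well, so metastore
# listings page like the other list endpoints. A from_dict sketch with
# placeholder data:
resp = ListMetastoresResponse.from_dict({
    "metastores": [],                   # one page of MetastoreInfo dicts
    "next_page_token": "opaque-token",  # placeholder; resend as page_token
})
assert resp.next_page_token == "opaque-token"
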
__page_token__ should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListModelVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_versions: - body["model_versions"] = [v.as_dict() for v in self.model_versions] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_versions: - body["model_versions"] = self.model_versions - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.model_versions: body['model_versions'] = self.model_versions + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: """Deserializes the ListModelVersionsResponse from a dictionary.""" - return cls( - model_versions=_repeated_dict(d, "model_versions", ModelVersionInfo), - next_page_token=d.get("next_page_token", None), - ) + return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersionInfo), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass @@ -5401,32 +4363,36 @@ class ListQuotasResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. __page_token__ should be set to this value for the next request.""" - + quotas: Optional[List[QuotaInfo]] = None """An array of returned QuotaInfos.""" - + def as_dict(self) -> dict: """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.quotas: - body["quotas"] = [v.as_dict() for v in self.quotas] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas] return body def as_shallow_dict(self) -> dict: """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.quotas: - body["quotas"] = self.quotas + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.quotas: body['quotas'] = self.quotas return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListQuotasResponse: """Deserializes the ListQuotasResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), quotas=_repeated_dict(d, "quotas", QuotaInfo)) + return cls(next_page_token=d.get('next_page_token', None), quotas=_repeated_dict(d, 'quotas', QuotaInfo)) + + + + + + + + @dataclass @@ -5434,34 +4400,32 @@ class ListRegisteredModelsResponse: next_page_token: Optional[str] = None """Opaque token for pagination. Omitted if there are no more results. 
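The next_page_token / __page_token__ contract spelled out in these docstrings is identical across the List* responses: echo the returned token back as page_token until it comes back absent. A hedged sketch of driving that loop by hand, where fetch_page is a hypothetical callable standing in for whichever list endpoint is being paged:

    from typing import Any, Callable, Dict, Iterator

    def paginate(fetch_page: Callable[[Dict[str, Any]], Dict[str, Any]],
                 items_key: str) -> Iterator[Dict[str, Any]]:
        """Yield items across pages until next_page_token is absent."""
        query: Dict[str, Any] = {}
        while True:
            page = fetch_page(query)
            yield from page.get(items_key, [])
            token = page.get("next_page_token")
            if not token:  # absent token: no more pages
                return
            query["page_token"] = token  # feed the token back, per the docstring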
page_token should be set to this value for fetching the next page.""" - + registered_models: Optional[List[RegisteredModelInfo]] = None - + def as_dict(self) -> dict: """Serializes the ListRegisteredModelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = [v.as_dict() for v in self.registered_models] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models] return body def as_shallow_dict(self) -> dict: """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = self.registered_models + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = self.registered_models return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListRegisteredModelsResponse: """Deserializes the ListRegisteredModelsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - registered_models=_repeated_dict(d, "registered_models", RegisteredModelInfo), - ) + return cls(next_page_token=d.get('next_page_token', None), registered_models=_repeated_dict(d, 'registered_models', RegisteredModelInfo)) + + + + + @dataclass @@ -5469,32 +4433,33 @@ class ListSchemasResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. __page_token__ should be set to this value for the next request (for the next page of results).""" - + schemas: Optional[List[SchemaInfo]] = None """An array of schema information objects.""" - + def as_dict(self) -> dict: """Serializes the ListSchemasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = [v.as_dict() for v in self.schemas] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = self.schemas + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schemas: body['schemas'] = self.schemas return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListSchemasResponse: """Deserializes the ListSchemasResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SchemaInfo)) + return cls(next_page_token=d.get('next_page_token', None), schemas=_repeated_dict(d, 'schemas', SchemaInfo)) + + + + + @dataclass @@ -5502,34 +4467,35 @@ class ListStorageCredentialsResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
__page_token__ should be set to this value for the next request (for the next page of results).""" - + storage_credentials: Optional[List[StorageCredentialInfo]] = None - + def as_dict(self) -> dict: """Serializes the ListStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.storage_credentials: - body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.storage_credentials: body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials] return body def as_shallow_dict(self) -> dict: """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.storage_credentials: - body["storage_credentials"] = self.storage_credentials + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.storage_credentials: body['storage_credentials'] = self.storage_credentials return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListStorageCredentialsResponse: """Deserializes the ListStorageCredentialsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo), - ) + return cls(next_page_token=d.get('next_page_token', None), storage_credentials=_repeated_dict(d, 'storage_credentials', StorageCredentialInfo)) + + + + + + + + @dataclass @@ -5537,34 +4503,30 @@ class ListSystemSchemasResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
__page_token__ should be set to this value for the next request (for the next page of results).""" - + schemas: Optional[List[SystemSchemaInfo]] = None """An array of system schema information objects.""" - + def as_dict(self) -> dict: """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = [v.as_dict() for v in self.schemas] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = self.schemas + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schemas: body['schemas'] = self.schemas return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListSystemSchemasResponse: """Deserializes the ListSystemSchemasResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SystemSchemaInfo) - ) + return cls(next_page_token=d.get('next_page_token', None), schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo)) + + @dataclass @@ -5572,32 +4534,33 @@ class ListTableSummariesResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. __page_token__ should be set to this value for the next request (for the next page of results).""" - + tables: Optional[List[TableSummary]] = None """List of table summaries.""" - + def as_dict(self) -> dict: """Serializes the ListTableSummariesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = [v.as_dict() for v in self.tables] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.tables: body['tables'] = [v.as_dict() for v in self.tables] return body def as_shallow_dict(self) -> dict: """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = self.tables + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.tables: body['tables'] = self.tables return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListTableSummariesResponse: """Deserializes the ListTableSummariesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableSummary)) + return cls(next_page_token=d.get('next_page_token', None), tables=_repeated_dict(d, 'tables', TableSummary)) + + + + + @dataclass @@ -5605,32 +4568,33 @@ class ListTablesResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
__page_token__ should be set to this value for the next request (for the next page of results).""" - + tables: Optional[List[TableInfo]] = None """An array of table information objects.""" - + def as_dict(self) -> dict: """Serializes the ListTablesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = [v.as_dict() for v in self.tables] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.tables: body['tables'] = [v.as_dict() for v in self.tables] return body def as_shallow_dict(self) -> dict: """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = self.tables + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.tables: body['tables'] = self.tables return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListTablesResponse: """Deserializes the ListTablesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableInfo)) + return cls(next_page_token=d.get('next_page_token', None), tables=_repeated_dict(d, 'tables', TableInfo)) + + + + + @dataclass @@ -5639,450 +4603,313 @@ class ListVolumesResponseContent: """Opaque token to retrieve the next page of results. Absent if there are no more pages. __page_token__ should be set to this value for the next request to retrieve the next page of results.""" - + volumes: Optional[List[VolumeInfo]] = None - + def as_dict(self) -> dict: """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.volumes: - body["volumes"] = [v.as_dict() for v in self.volumes] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes] return body def as_shallow_dict(self) -> dict: """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.volumes: - body["volumes"] = self.volumes + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.volumes: body['volumes'] = self.volumes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: """Deserializes the ListVolumesResponseContent from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) + return cls(next_page_token=d.get('next_page_token', None), volumes=_repeated_dict(d, 'volumes', VolumeInfo)) + + class MatchType(Enum): """The artifact pattern matching type""" - - PREFIX_MATCH = "PREFIX_MATCH" - + + PREFIX_MATCH = 'PREFIX_MATCH' @dataclass class MetastoreAssignment: - metastore_id: str - """The unique ID of the metastore.""" - workspace_id: int """The unique ID of the Databricks workspace.""" - + + metastore_id: str + """The unique ID of the metastore.""" + default_catalog_name: Optional[str] = None """The name of the default catalog in the metastore.""" - + def as_dict(self) -> dict: 
"""Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MetastoreAssignment: """Deserializes the MetastoreAssignment from a dictionary.""" - return cls( - default_catalog_name=d.get("default_catalog_name", None), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) + return cls(default_catalog_name=d.get('default_catalog_name', None), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None)) + + @dataclass class MetastoreInfo: cloud: Optional[str] = None """Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`).""" - + created_at: Optional[int] = None """Time at which this metastore was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of metastore creator.""" - + default_data_access_config_id: Optional[str] = None """Unique identifier of the metastore's (Default) Data Access Configuration.""" - + delta_sharing_organization_name: Optional[str] = None """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.""" - + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None """The lifetime of delta sharing recipient token in seconds.""" - - delta_sharing_scope: Optional[MetastoreInfoDeltaSharingScope] = None + + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None """The scope of Delta Sharing enabled for the metastore.""" - + external_access_enabled: Optional[bool] = None """Whether to allow non-DBR clients to directly access entities under the metastore.""" - + global_metastore_id: Optional[str] = None """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`.""" - + metastore_id: Optional[str] = None """Unique identifier of metastore.""" - + name: Optional[str] = None """The user-specified name of the metastore.""" - + owner: Optional[str] = None """The owner of the metastore.""" - + privilege_model_version: Optional[str] = None """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" - + region: Optional[str] = None """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" - + storage_root: Optional[str] = None """The 
storage root URL for metastore""" - + storage_root_credential_id: Optional[str] = None """UUID of storage credential to access the metastore storage_root.""" - + storage_root_credential_name: Optional[str] = None """Name of the storage credential to access the metastore storage_root.""" - + updated_at: Optional[int] = None """Time at which the metastore was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified the metastore.""" - + def as_dict(self) -> dict: """Serializes the MetastoreInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.default_data_access_config_id is not None: - body["default_data_access_config_id"] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.external_access_enabled is not None: - body["external_access_enabled"] = self.external_access_enabled - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: - body["storage_root_credential_name"] = self.storage_root_credential_name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.cloud is not None: body['cloud'] = self.cloud + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value + if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if 
self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version + if self.region is not None: body['region'] = self.region + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.default_data_access_config_id is not None: - body["default_data_access_config_id"] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope - if self.external_access_enabled is not None: - body["external_access_enabled"] = self.external_access_enabled - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: - body["storage_root_credential_name"] = self.storage_root_credential_name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.cloud is not None: body['cloud'] = self.cloud + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope + if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled + if self.global_metastore_id is not None: 
body['global_metastore_id'] = self.global_metastore_id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version + if self.region is not None: body['region'] = self.region + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MetastoreInfo: """Deserializes the MetastoreInfo from a dictionary.""" - return cls( - cloud=d.get("cloud", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - default_data_access_config_id=d.get("default_data_access_config_id", None), - delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), - delta_sharing_recipient_token_lifetime_in_seconds=d.get( - "delta_sharing_recipient_token_lifetime_in_seconds", None - ), - delta_sharing_scope=_enum(d, "delta_sharing_scope", MetastoreInfoDeltaSharingScope), - external_access_enabled=d.get("external_access_enabled", None), - global_metastore_id=d.get("global_metastore_id", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - privilege_model_version=d.get("privilege_model_version", None), - region=d.get("region", None), - storage_root=d.get("storage_root", None), - storage_root_credential_id=d.get("storage_root_credential_id", None), - storage_root_credential_name=d.get("storage_root_credential_name", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) - - -class MetastoreInfoDeltaSharingScope(Enum): - """The scope of Delta Sharing enabled for the metastore.""" + return cls(cloud=d.get('cloud', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), default_data_access_config_id=d.get('default_data_access_config_id', None), delta_sharing_organization_name=d.get('delta_sharing_organization_name', None), delta_sharing_recipient_token_lifetime_in_seconds=d.get('delta_sharing_recipient_token_lifetime_in_seconds', None), delta_sharing_scope=_enum(d, 'delta_sharing_scope', DeltaSharingScopeEnum), external_access_enabled=d.get('external_access_enabled', None), global_metastore_id=d.get('global_metastore_id', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), privilege_model_version=d.get('privilege_model_version', None), region=d.get('region', None), storage_root=d.get('storage_root', None), storage_root_credential_id=d.get('storage_root_credential_id', None), storage_root_credential_name=d.get('storage_root_credential_name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + - INTERNAL = "INTERNAL" - INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL" @dataclass class ModelVersionInfo: aliases: Optional[List[RegisteredModelAlias]] = None """List of aliases associated with the model version""" - + browse_only: Optional[bool] = None """Indicates whether the principal is 
limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """The name of the catalog containing the model version""" - + comment: Optional[str] = None """The comment attached to the model version""" - + created_at: Optional[int] = None - + created_by: Optional[str] = None """The identifier of the user who created the model version""" - + id: Optional[str] = None """The unique identifier of the model version""" - + metastore_id: Optional[str] = None """The unique identifier of the metastore containing the model version""" - + model_name: Optional[str] = None """The name of the parent registered model of the model version, relative to parent schema""" - + model_version_dependencies: Optional[DependencyList] = None """Model version dependencies, for feature-store packaged models""" - + run_id: Optional[str] = None """MLflow run ID used when creating the model version, if ``source`` was generated by an experiment run stored in an MLflow tracking server""" - + run_workspace_id: Optional[int] = None """ID of the Databricks workspace containing the MLflow run that generated this model version, if applicable""" - + schema_name: Optional[str] = None """The name of the schema containing the model version, relative to parent catalog""" - + source: Optional[str] = None """URI indicating the location of the source artifacts (files) for the model version""" - + status: Optional[ModelVersionInfoStatus] = None """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION status, then move to READY status once the model version files are uploaded and the model version is finalized. Only model versions in READY status can be loaded for inference or served.""" - + storage_location: Optional[str] = None """The storage location on the cloud under which model version data files are stored""" - + updated_at: Optional[int] = None - + updated_by: Optional[str] = None """The identifier of the user who updated the model version last time""" - + version: Optional[int] = None """Integer model version number, used to reference the model version in API requests.""" - + def as_dict(self) -> dict: """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aliases: - body["aliases"] = [v.as_dict() for v in self.aliases] - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.id is not None: - body["id"] = self.id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version_dependencies: - body["model_version_dependencies"] = self.model_version_dependencies.as_dict() - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_workspace_id is not None: - body["run_workspace_id"] = self.run_workspace_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.source is not None: - body["source"] = self.source - if self.status is not None: - body["status"] = self.status.value - if self.storage_location is not None: - body["storage_location"] = 
self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.version is not None: - body["version"] = self.version + if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.id is not None: body['id'] = self.id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version_dependencies: body['model_version_dependencies'] = self.model_version_dependencies.as_dict() + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status.value + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.aliases: - body["aliases"] = self.aliases - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.id is not None: - body["id"] = self.id - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version_dependencies: - body["model_version_dependencies"] = self.model_version_dependencies - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_workspace_id is not None: - body["run_workspace_id"] = self.run_workspace_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.source is not None: - body["source"] = self.source - if self.status is not None: - body["status"] = self.status - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.version is not None: - body["version"] = self.version + if self.aliases: body['aliases'] = self.aliases + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if 
self.id is not None: body['id'] = self.id + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version_dependencies: body['model_version_dependencies'] = self.model_version_dependencies + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: """Deserializes the ModelVersionInfo from a dictionary.""" - return cls( - aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - id=d.get("id", None), - metastore_id=d.get("metastore_id", None), - model_name=d.get("model_name", None), - model_version_dependencies=_from_dict(d, "model_version_dependencies", DependencyList), - run_id=d.get("run_id", None), - run_workspace_id=d.get("run_workspace_id", None), - schema_name=d.get("schema_name", None), - source=d.get("source", None), - status=_enum(d, "status", ModelVersionInfoStatus), - storage_location=d.get("storage_location", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - version=d.get("version", None), - ) + return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), id=d.get('id', None), metastore_id=d.get('metastore_id', None), model_name=d.get('model_name', None), model_version_dependencies=_from_dict(d, 'model_version_dependencies', DependencyList), run_id=d.get('run_id', None), run_workspace_id=d.get('run_workspace_id', None), schema_name=d.get('schema_name', None), source=d.get('source', None), status=_enum(d, 'status', ModelVersionInfoStatus), storage_location=d.get('storage_location', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), version=d.get('version', None)) + + class ModelVersionInfoStatus(Enum): """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION status, then move to READY status once the model version files are uploaded and the model version is finalized. Only model versions in READY status can be loaded for inference or served.""" - - FAILED_REGISTRATION = "FAILED_REGISTRATION" - PENDING_REGISTRATION = "PENDING_REGISTRATION" - READY = "READY" - + + FAILED_REGISTRATION = 'FAILED_REGISTRATION' + PENDING_REGISTRATION = 'PENDING_REGISTRATION' + READY = 'READY' @dataclass class MonitorCronSchedule: @@ -6090,75 +4917,66 @@ class MonitorCronSchedule: """The expression that determines when to run the monitor. See [examples]. 
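Since only READY model versions can be loaded for inference or served (per the ModelVersionInfoStatus docstring above), callers typically poll until registration finishes. A sketch under that assumption, where get_model_version is a hypothetical zero-argument callable returning a ModelVersionInfo:

    import time

    from databricks.sdk.service.catalog import ModelVersionInfo, ModelVersionInfoStatus

    def wait_until_ready(get_model_version, timeout_s: float = 300.0) -> ModelVersionInfo:
        """Poll the hypothetical getter until the version leaves PENDING_REGISTRATION."""
        deadline = time.time() + timeout_s
        while time.time() < deadline:
            mv = get_model_version()
            if mv.status == ModelVersionInfoStatus.READY:
                return mv
            if mv.status == ModelVersionInfoStatus.FAILED_REGISTRATION:
                raise RuntimeError("model version failed registration")
            time.sleep(5)  # still PENDING_REGISTRATION; retry
        raise TimeoutError("model version was not READY within the timeout")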
[examples]: https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - + timezone_id: str """The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression.""" - + pause_status: Optional[MonitorCronSchedulePauseStatus] = None """Read only field that indicates whether a schedule is paused or not.""" - + def as_dict(self) -> dict: """Serializes the MonitorCronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status.value - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.pause_status is not None: body['pause_status'] = self.pause_status.value + if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorCronSchedule: """Deserializes the MonitorCronSchedule from a dictionary.""" - return cls( - pause_status=_enum(d, "pause_status", MonitorCronSchedulePauseStatus), - quartz_cron_expression=d.get("quartz_cron_expression", None), - timezone_id=d.get("timezone_id", None), - ) + return cls(pause_status=_enum(d, 'pause_status', MonitorCronSchedulePauseStatus), quartz_cron_expression=d.get('quartz_cron_expression', None), timezone_id=d.get('timezone_id', None)) + + class MonitorCronSchedulePauseStatus(Enum): """Read only field that indicates whether a schedule is paused or not.""" - - PAUSED = "PAUSED" - UNPAUSED = "UNPAUSED" - + + PAUSED = 'PAUSED' + UNPAUSED = 'UNPAUSED' @dataclass class MonitorDataClassificationConfig: enabled: Optional[bool] = None """Whether data classification is enabled.""" - + def as_dict(self) -> dict: """Serializes the MonitorDataClassificationConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.enabled is not None: body['enabled'] = self.enabled return body def as_shallow_dict(self) -> dict: """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.enabled is not None: body['enabled'] = self.enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorDataClassificationConfig: """Deserializes the MonitorDataClassificationConfig from a dictionary.""" - return cls(enabled=d.get("enabled", None)) + return cls(enabled=d.get('enabled', None)) + + @dataclass @@ -6166,25 +4984,25 @@ class MonitorDestination: email_addresses: Optional[List[str]] = None """The list of email addresses to send 
the notification to. A maximum of 5 email addresses is supported."""
-
+
    def as_dict(self) -> dict:
        """Serializes the MonitorDestination into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.email_addresses:
-            body["email_addresses"] = [v for v in self.email_addresses]
+        if self.email_addresses: body['email_addresses'] = [v for v in self.email_addresses]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.email_addresses:
-            body["email_addresses"] = self.email_addresses
+        if self.email_addresses: body['email_addresses'] = self.email_addresses
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> MonitorDestination:
        """Deserializes the MonitorDestination from a dictionary."""
-        return cls(email_addresses=d.get("email_addresses", None))
+        return cls(email_addresses=d.get('email_addresses', None))
+
+

@dataclass
@@ -6195,290 +5013,215 @@ class MonitorInferenceLog:
    pyspark ``to_timestamp`` [function].

    [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html"""
-
+
    granularities: List[str]
    """Granularities for aggregating data into time windows based on their timestamp. Currently the
    following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``,
    ``"1 day"``, ``"n week(s)"``, ``"1 month"``, ``"1 year"``}."""
-
+
    model_id_col: str
    """Column that contains the id of the model generating the predictions. Metrics will be computed
    per model id by default, and also across all model ids."""
-
+
    problem_type: MonitorInferenceLogProblemType
    """Problem type the model aims to solve. Determines the type of model-quality metrics that will be
    computed."""
-
+
    prediction_col: str
    """Column that contains the output/prediction from the model."""
-
+
    label_col: Optional[str] = None
    """Optional column that contains the ground truth for the prediction."""
-
+
    prediction_proba_col: Optional[str] = None
    """Optional column that contains the prediction probabilities for each class in a classification
    problem type. The values in this column should be a map, mapping each class label to the
    prediction probability for a given sample.
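MonitorCronSchedule and MonitorDestination above are plain value objects; a small construction sketch (the cron expression is the standard "noon every day" example from the Quartz docs linked earlier, and the address is illustrative):

    from databricks.sdk.service.catalog import MonitorCronSchedule, MonitorDestination

    schedule = MonitorCronSchedule(
        quartz_cron_expression="0 0 12 * * ?",  # fire at 12:00 every day
        timezone_id="PST",                      # timezone in which to evaluate it
    )
    alerts = MonitorDestination(email_addresses=["ml-oncall@example.com"])  # max 5 addresses

    assert schedule.as_dict()["quartz_cron_expression"] == "0 0 12 * * ?"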
The map should be of PySpark MapType().""" - + def as_dict(self) -> dict: """Serializes the MonitorInferenceLog into a dictionary suitable for use as a JSON request body.""" body = {} - if self.granularities: - body["granularities"] = [v for v in self.granularities] - if self.label_col is not None: - body["label_col"] = self.label_col - if self.model_id_col is not None: - body["model_id_col"] = self.model_id_col - if self.prediction_col is not None: - body["prediction_col"] = self.prediction_col - if self.prediction_proba_col is not None: - body["prediction_proba_col"] = self.prediction_proba_col - if self.problem_type is not None: - body["problem_type"] = self.problem_type.value - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col + if self.granularities: body['granularities'] = [v for v in self.granularities] + if self.label_col is not None: body['label_col'] = self.label_col + if self.model_id_col is not None: body['model_id_col'] = self.model_id_col + if self.prediction_col is not None: body['prediction_col'] = self.prediction_col + if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col + if self.problem_type is not None: body['problem_type'] = self.problem_type.value + if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col return body def as_shallow_dict(self) -> dict: """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes.""" body = {} - if self.granularities: - body["granularities"] = self.granularities - if self.label_col is not None: - body["label_col"] = self.label_col - if self.model_id_col is not None: - body["model_id_col"] = self.model_id_col - if self.prediction_col is not None: - body["prediction_col"] = self.prediction_col - if self.prediction_proba_col is not None: - body["prediction_proba_col"] = self.prediction_proba_col - if self.problem_type is not None: - body["problem_type"] = self.problem_type - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col + if self.granularities: body['granularities'] = self.granularities + if self.label_col is not None: body['label_col'] = self.label_col + if self.model_id_col is not None: body['model_id_col'] = self.model_id_col + if self.prediction_col is not None: body['prediction_col'] = self.prediction_col + if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col + if self.problem_type is not None: body['problem_type'] = self.problem_type + if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorInferenceLog: """Deserializes the MonitorInferenceLog from a dictionary.""" - return cls( - granularities=d.get("granularities", None), - label_col=d.get("label_col", None), - model_id_col=d.get("model_id_col", None), - prediction_col=d.get("prediction_col", None), - prediction_proba_col=d.get("prediction_proba_col", None), - problem_type=_enum(d, "problem_type", MonitorInferenceLogProblemType), - timestamp_col=d.get("timestamp_col", None), - ) + return cls(granularities=d.get('granularities', None), label_col=d.get('label_col', None), model_id_col=d.get('model_id_col', None), prediction_col=d.get('prediction_col', None), prediction_proba_col=d.get('prediction_proba_col', None), problem_type=_enum(d, 'problem_type', MonitorInferenceLogProblemType), timestamp_col=d.get('timestamp_col', None)) + + class MonitorInferenceLogProblemType(Enum): """Problem 
type the model aims to solve. Determines the type of model-quality metrics that will be
    computed."""
-
-    PROBLEM_TYPE_CLASSIFICATION = "PROBLEM_TYPE_CLASSIFICATION"
-    PROBLEM_TYPE_REGRESSION = "PROBLEM_TYPE_REGRESSION"
-
+
+    PROBLEM_TYPE_CLASSIFICATION = 'PROBLEM_TYPE_CLASSIFICATION'
+    PROBLEM_TYPE_REGRESSION = 'PROBLEM_TYPE_REGRESSION'

@dataclass
class MonitorInfo:
    table_name: str
    """The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__."""
-
+
    status: MonitorInfoStatus
    """The status of the monitor."""
-
+
    monitor_version: str
    """The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted."""
-
+
    profile_metrics_table_name: str
    """The full name of the profile metrics table. Format:
    __catalog_name__.__schema_name__.__table_name__."""
-
+
    drift_metrics_table_name: str
    """The full name of the drift metrics table. Format:
    __catalog_name__.__schema_name__.__table_name__."""
-
+
    assets_dir: Optional[str] = None
    """The directory to store monitoring assets (e.g. dashboard, metric tables)."""
-
+
    baseline_table_name: Optional[str] = None
    """Name of the baseline table from which drift metrics are computed. Columns in the monitored
    table should also be present in the baseline table."""
-
+
    custom_metrics: Optional[List[MonitorMetric]] = None
    """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived
    metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across
    time windows)."""
-
+
    dashboard_id: Optional[str] = None
    """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in
    PENDING state."""
-
+
    data_classification_config: Optional[MonitorDataClassificationConfig] = None
    """The data classification config for the monitor."""
-
+
    inference_log: Optional[MonitorInferenceLog] = None
    """Configuration for monitoring inference logs."""
-
+
    latest_monitor_failure_msg: Optional[str] = None
    """The latest failure message of the monitor (if any)."""
-
+
    notifications: Optional[MonitorNotifications] = None
    """The notification settings for the monitor."""
-
+
    output_schema_name: Optional[str] = None
    """Schema where output metric tables are created."""
-
+
    schedule: Optional[MonitorCronSchedule] = None
    """The schedule for automatically updating and refreshing metric tables."""
-
+
    slicing_exprs: Optional[List[str]] = None
    """List of column expressions to slice data with for targeted analysis. The data is grouped by
    each expression independently, resulting in a separate slice for each predicate and its
    complements.
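Putting the inference-log pieces together: a hedged configuration sketch for a classification monitor (column names are illustrative; the granularity string comes from the granularities docstring):

    from databricks.sdk.service.catalog import (
        MonitorInferenceLog,
        MonitorInferenceLogProblemType,
    )

    inference_log = MonitorInferenceLog(
        timestamp_col="ts",
        granularities=["1 day"],
        model_id_col="model_id",
        problem_type=MonitorInferenceLogProblemType.PROBLEM_TYPE_CLASSIFICATION,
        prediction_col="prediction",
        label_col="label",  # optional ground-truth column
    )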
 For high-cardinality columns, only the top 100 unique values by frequency will generate slices."""
-
+
     snapshot: Optional[MonitorSnapshot] = None
     """Configuration for monitoring snapshot tables."""
-
+
     time_series: Optional[MonitorTimeSeries] = None
     """Configuration for monitoring time series tables."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MonitorInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets_dir is not None:
-            body["assets_dir"] = self.assets_dir
-        if self.baseline_table_name is not None:
-            body["baseline_table_name"] = self.baseline_table_name
-        if self.custom_metrics:
-            body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics]
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.data_classification_config:
-            body["data_classification_config"] = self.data_classification_config.as_dict()
-        if self.drift_metrics_table_name is not None:
-            body["drift_metrics_table_name"] = self.drift_metrics_table_name
-        if self.inference_log:
-            body["inference_log"] = self.inference_log.as_dict()
-        if self.latest_monitor_failure_msg is not None:
-            body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg
-        if self.monitor_version is not None:
-            body["monitor_version"] = self.monitor_version
-        if self.notifications:
-            body["notifications"] = self.notifications.as_dict()
-        if self.output_schema_name is not None:
-            body["output_schema_name"] = self.output_schema_name
-        if self.profile_metrics_table_name is not None:
-            body["profile_metrics_table_name"] = self.profile_metrics_table_name
-        if self.schedule:
-            body["schedule"] = self.schedule.as_dict()
-        if self.slicing_exprs:
-            body["slicing_exprs"] = [v for v in self.slicing_exprs]
-        if self.snapshot:
-            body["snapshot"] = self.snapshot.as_dict()
-        if self.status is not None:
-            body["status"] = self.status.value
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.time_series:
-            body["time_series"] = self.time_series.as_dict()
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config.as_dict()
+        if self.drift_metrics_table_name is not None: body['drift_metrics_table_name'] = self.drift_metrics_table_name
+        if self.inference_log: body['inference_log'] = self.inference_log.as_dict()
+        if self.latest_monitor_failure_msg is not None: body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg
+        if self.monitor_version is not None: body['monitor_version'] = self.monitor_version
+        if self.notifications: body['notifications'] = self.notifications.as_dict()
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.profile_metrics_table_name is not None: body['profile_metrics_table_name'] = self.profile_metrics_table_name
+        if self.schedule: body['schedule'] = self.schedule.as_dict()
+        if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
+        if self.status is not None: body['status'] = self.status.value
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets_dir is not None:
-            body["assets_dir"] = self.assets_dir
-        if self.baseline_table_name is not None:
-            body["baseline_table_name"] = self.baseline_table_name
-        if self.custom_metrics:
-            body["custom_metrics"] = self.custom_metrics
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.data_classification_config:
-            body["data_classification_config"] = self.data_classification_config
-        if self.drift_metrics_table_name is not None:
-            body["drift_metrics_table_name"] = self.drift_metrics_table_name
-        if self.inference_log:
-            body["inference_log"] = self.inference_log
-        if self.latest_monitor_failure_msg is not None:
-            body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg
-        if self.monitor_version is not None:
-            body["monitor_version"] = self.monitor_version
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.output_schema_name is not None:
-            body["output_schema_name"] = self.output_schema_name
-        if self.profile_metrics_table_name is not None:
-            body["profile_metrics_table_name"] = self.profile_metrics_table_name
-        if self.schedule:
-            body["schedule"] = self.schedule
-        if self.slicing_exprs:
-            body["slicing_exprs"] = self.slicing_exprs
-        if self.snapshot:
-            body["snapshot"] = self.snapshot
-        if self.status is not None:
-            body["status"] = self.status
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.time_series:
-            body["time_series"] = self.time_series
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config
+        if self.drift_metrics_table_name is not None: body['drift_metrics_table_name'] = self.drift_metrics_table_name
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.latest_monitor_failure_msg is not None: body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg
+        if self.monitor_version is not None: body['monitor_version'] = self.monitor_version
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.profile_metrics_table_name is not None: body['profile_metrics_table_name'] = self.profile_metrics_table_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.status is not None: body['status'] = self.status
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MonitorInfo:
         """Deserializes the MonitorInfo from a dictionary."""
-        return cls(
-            assets_dir=d.get("assets_dir", None),
-            baseline_table_name=d.get("baseline_table_name", None),
-            custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric),
-            dashboard_id=d.get("dashboard_id", None),
-            data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig),
-            drift_metrics_table_name=d.get("drift_metrics_table_name", None),
-            inference_log=_from_dict(d, "inference_log", MonitorInferenceLog),
-            latest_monitor_failure_msg=d.get("latest_monitor_failure_msg", None),
-            monitor_version=d.get("monitor_version", None),
-            notifications=_from_dict(d, "notifications", MonitorNotifications),
-            output_schema_name=d.get("output_schema_name", None),
-            profile_metrics_table_name=d.get("profile_metrics_table_name", None),
-            schedule=_from_dict(d, "schedule", MonitorCronSchedule),
-            slicing_exprs=d.get("slicing_exprs", None),
-            snapshot=_from_dict(d, "snapshot", MonitorSnapshot),
-            status=_enum(d, "status", MonitorInfoStatus),
-            table_name=d.get("table_name", None),
-            time_series=_from_dict(d, "time_series", MonitorTimeSeries),
-        )
+        return cls(assets_dir=d.get('assets_dir', None), baseline_table_name=d.get('baseline_table_name', None), custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric), dashboard_id=d.get('dashboard_id', None), data_classification_config=_from_dict(d, 'data_classification_config', MonitorDataClassificationConfig), drift_metrics_table_name=d.get('drift_metrics_table_name', None), inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog), latest_monitor_failure_msg=d.get('latest_monitor_failure_msg', None), monitor_version=d.get('monitor_version', None), notifications=_from_dict(d, 'notifications', MonitorNotifications), output_schema_name=d.get('output_schema_name', None), profile_metrics_table_name=d.get('profile_metrics_table_name', None), schedule=_from_dict(d, 'schedule', MonitorCronSchedule), slicing_exprs=d.get('slicing_exprs', None), snapshot=_from_dict(d, 'snapshot', MonitorSnapshot), status=_enum(d, 'status', MonitorInfoStatus), table_name=d.get('table_name', None), time_series=_from_dict(d, 'time_series', MonitorTimeSeries))
+
+

 class MonitorInfoStatus(Enum):
     """The status of the monitor."""
-
-    MONITOR_STATUS_ACTIVE = "MONITOR_STATUS_ACTIVE"
-    MONITOR_STATUS_DELETE_PENDING = "MONITOR_STATUS_DELETE_PENDING"
-    MONITOR_STATUS_ERROR = "MONITOR_STATUS_ERROR"
-    MONITOR_STATUS_FAILED = "MONITOR_STATUS_FAILED"
-    MONITOR_STATUS_PENDING = "MONITOR_STATUS_PENDING"
-
+
+    MONITOR_STATUS_ACTIVE = 'MONITOR_STATUS_ACTIVE'
+    MONITOR_STATUS_DELETE_PENDING = 'MONITOR_STATUS_DELETE_PENDING'
+    MONITOR_STATUS_ERROR = 'MONITOR_STATUS_ERROR'
+    MONITOR_STATUS_FAILED = 'MONITOR_STATUS_FAILED'
+    MONITOR_STATUS_PENDING = 'MONITOR_STATUS_PENDING'

 @dataclass
 class MonitorMetric:
     name: str
     """Name of the metric in the output tables."""
-
+
     definition: str
     """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric
     definition].

     [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition"""
-
+
     input_columns: List[str]
     """A list of column names in the input table the metric should be computed for. Can use
     ``":table"`` to indicate that the metric needs information from multiple columns."""
-
+
     output_data_type: str
     """The output type of the custom metric."""
-
+
     type: MonitorMetricType
     """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or
     ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and
@@ -6487,47 +5230,33 @@ class MonitorMetric:
     two consecutive time windows.

     - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table
     - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics
     - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics"""
-
+
     def as_dict(self) -> dict:
         """Serializes the MonitorMetric into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.definition is not None:
-            body["definition"] = self.definition
-        if self.input_columns:
-            body["input_columns"] = [v for v in self.input_columns]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.output_data_type is not None:
-            body["output_data_type"] = self.output_data_type
-        if self.type is not None:
-            body["type"] = self.type.value
+        if self.definition is not None: body['definition'] = self.definition
+        if self.input_columns: body['input_columns'] = [v for v in self.input_columns]
+        if self.name is not None: body['name'] = self.name
+        if self.output_data_type is not None: body['output_data_type'] = self.output_data_type
+        if self.type is not None: body['type'] = self.type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.definition is not None:
-            body["definition"] = self.definition
-        if self.input_columns:
-            body["input_columns"] = self.input_columns
-        if self.name is not None:
-            body["name"] = self.name
-        if self.output_data_type is not None:
-            body["output_data_type"] = self.output_data_type
-        if self.type is not None:
-            body["type"] = self.type
+        if self.definition is not None: body['definition'] = self.definition
+        if self.input_columns: body['input_columns'] = self.input_columns
+        if self.name is not None: body['name'] = self.name
+        if self.output_data_type is not None: body['output_data_type'] = self.output_data_type
+        if self.type is not None: body['type'] = self.type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MonitorMetric:
         """Deserializes the MonitorMetric from a dictionary."""
-        return cls(
-            definition=d.get("definition", None),
-            input_columns=d.get("input_columns", None),
-            name=d.get("name", None),
-            output_data_type=d.get("output_data_type", None),
-            type=_enum(d, "type", MonitorMetricType),
-        )
+        return cls(definition=d.get('definition', None), input_columns=d.get('input_columns', None), name=d.get('name', None), output_data_type=d.get('output_data_type', None), type=_enum(d, 'type', MonitorMetricType))
+
+
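For orientation, a minimal sketch of how a custom metric might be declared with these fields (values illustrative; the `{{input_column}}` placeholder follows the custom-metrics documentation linked above):

    from databricks.sdk.service.catalog import MonitorMetric, MonitorMetricType

    # An aggregate metric: depends only on columns already present in the table.
    avg_f1 = MonitorMetric(
        name="avg_f1",
        definition="avg(`{{input_column}}`)",
        input_columns=["f1"],
        output_data_type="double",
        type=MonitorMetricType.CUSTOM_METRIC_TYPE_AGGREGATE,
    )
    # The generated helpers round-trip cleanly between the dataclass and JSON.
    assert MonitorMetric.from_dict(avg_f1.as_dict()) == avg_f1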
 class MonitorMetricType(Enum):
@@ -6538,154 +5267,129 @@ class MonitorMetricType(Enum):
     two consecutive time windows.

     - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table
     - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics
     - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics"""
-
-    CUSTOM_METRIC_TYPE_AGGREGATE = "CUSTOM_METRIC_TYPE_AGGREGATE"
-    CUSTOM_METRIC_TYPE_DERIVED = "CUSTOM_METRIC_TYPE_DERIVED"
-    CUSTOM_METRIC_TYPE_DRIFT = "CUSTOM_METRIC_TYPE_DRIFT"
-
+
+    CUSTOM_METRIC_TYPE_AGGREGATE = 'CUSTOM_METRIC_TYPE_AGGREGATE'
+    CUSTOM_METRIC_TYPE_DERIVED = 'CUSTOM_METRIC_TYPE_DERIVED'
+    CUSTOM_METRIC_TYPE_DRIFT = 'CUSTOM_METRIC_TYPE_DRIFT'

 @dataclass
 class MonitorNotifications:
     on_failure: Optional[MonitorDestination] = None
     """Who to send notifications to on monitor failure."""
-
+
     on_new_classification_tag_detected: Optional[MonitorDestination] = None
     """Who to send notifications to when new data classification tags are detected."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MonitorNotifications into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.on_failure:
-            body["on_failure"] = self.on_failure.as_dict()
-        if self.on_new_classification_tag_detected:
-            body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected.as_dict()
+        if self.on_failure: body['on_failure'] = self.on_failure.as_dict()
+        if self.on_new_classification_tag_detected: body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.on_failure:
-            body["on_failure"] = self.on_failure
-        if self.on_new_classification_tag_detected:
-            body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_new_classification_tag_detected: body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MonitorNotifications:
         """Deserializes the MonitorNotifications from a dictionary."""
-        return cls(
-            on_failure=_from_dict(d, "on_failure", MonitorDestination),
-            on_new_classification_tag_detected=_from_dict(d, "on_new_classification_tag_detected", MonitorDestination),
-        )
+        return cls(on_failure=_from_dict(d, 'on_failure', MonitorDestination), on_new_classification_tag_detected=_from_dict(d, 'on_new_classification_tag_detected', MonitorDestination))
+
+

 @dataclass
 class MonitorRefreshInfo:
     refresh_id: int
     """Unique id of the refresh operation."""
-
+
     state: MonitorRefreshInfoState
     """The current state of the refresh."""
-
+
     start_time_ms: int
     """Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC)."""
-
+
     end_time_ms: Optional[int] = None
     """Time at which refresh operation completed (milliseconds since 1/1/1970 UTC)."""
-
+
     message: Optional[str] = None
     """An optional message to give insight into the current state of the job (e.g.
     FAILURE messages)."""
-
+
     trigger: Optional[MonitorRefreshInfoTrigger] = None
     """The method by which the refresh was triggered."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MonitorRefreshInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.end_time_ms is not None:
-            body["end_time_ms"] = self.end_time_ms
-        if self.message is not None:
-            body["message"] = self.message
-        if self.refresh_id is not None:
-            body["refresh_id"] = self.refresh_id
-        if self.start_time_ms is not None:
-            body["start_time_ms"] = self.start_time_ms
-        if self.state is not None:
-            body["state"] = self.state.value
-        if self.trigger is not None:
-            body["trigger"] = self.trigger.value
+        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
+        if self.message is not None: body['message'] = self.message
+        if self.refresh_id is not None: body['refresh_id'] = self.refresh_id
+        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        if self.state is not None: body['state'] = self.state.value
+        if self.trigger is not None: body['trigger'] = self.trigger.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.end_time_ms is not None:
-            body["end_time_ms"] = self.end_time_ms
-        if self.message is not None:
-            body["message"] = self.message
-        if self.refresh_id is not None:
-            body["refresh_id"] = self.refresh_id
-        if self.start_time_ms is not None:
-            body["start_time_ms"] = self.start_time_ms
-        if self.state is not None:
-            body["state"] = self.state
-        if self.trigger is not None:
-            body["trigger"] = self.trigger
+        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
+        if self.message is not None: body['message'] = self.message
+        if self.refresh_id is not None: body['refresh_id'] = self.refresh_id
+        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        if self.state is not None: body['state'] = self.state
+        if self.trigger is not None: body['trigger'] = self.trigger
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshInfo:
         """Deserializes the MonitorRefreshInfo from a dictionary."""
-        return cls(
-            end_time_ms=d.get("end_time_ms", None),
-            message=d.get("message", None),
-            refresh_id=d.get("refresh_id", None),
-            start_time_ms=d.get("start_time_ms", None),
-            state=_enum(d, "state", MonitorRefreshInfoState),
-            trigger=_enum(d, "trigger", MonitorRefreshInfoTrigger),
-        )
+        return cls(end_time_ms=d.get('end_time_ms', None), message=d.get('message', None), refresh_id=d.get('refresh_id', None), start_time_ms=d.get('start_time_ms', None), state=_enum(d, 'state', MonitorRefreshInfoState), trigger=_enum(d, 'trigger', MonitorRefreshInfoTrigger))
+
+

 class MonitorRefreshInfoState(Enum):
     """The current state of the refresh."""
-
-    CANCELED = "CANCELED"
-    FAILED = "FAILED"
-    PENDING = "PENDING"
-    RUNNING = "RUNNING"
-    SUCCESS = "SUCCESS"
-
+
+    CANCELED = 'CANCELED'
+    FAILED = 'FAILED'
+    PENDING = 'PENDING'
+    RUNNING = 'RUNNING'
+    SUCCESS = 'SUCCESS'

 class MonitorRefreshInfoTrigger(Enum):
     """The method by which the refresh was triggered."""
-
-    MANUAL = "MANUAL"
-    SCHEDULE = "SCHEDULE"
-
+
+    MANUAL = 'MANUAL'
+    SCHEDULE = 'SCHEDULE'
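A refresh passes through PENDING/RUNNING before landing in a terminal state, so callers usually poll it; a minimal sketch, assuming a `WorkspaceClient` named `w` and an illustrative monitored table name:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import MonitorRefreshInfoState

    w = WorkspaceClient()
    TERMINAL = {MonitorRefreshInfoState.SUCCESS, MonitorRefreshInfoState.FAILED, MonitorRefreshInfoState.CANCELED}

    # Kick off a manual refresh, then poll until it reaches a terminal state.
    info = w.quality_monitors.run_refresh(table_name="main.sales.orders")
    while info.state not in TERMINAL:
        time.sleep(30)
        info = w.quality_monitors.get_refresh(table_name="main.sales.orders", refresh_id=info.refresh_id)
    print(info.state, info.trigger, info.message)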
 @dataclass
 class MonitorRefreshListResponse:
     refreshes: Optional[List[MonitorRefreshInfo]] = None
     """List of refreshes."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MonitorRefreshListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.refreshes:
-            body["refreshes"] = [v.as_dict() for v in self.refreshes]
+        if self.refreshes: body['refreshes'] = [v.as_dict() for v in self.refreshes]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.refreshes:
-            body["refreshes"] = self.refreshes
+        if self.refreshes: body['refreshes'] = self.refreshes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshListResponse:
         """Deserializes the MonitorRefreshListResponse from a dictionary."""
-        return cls(refreshes=_repeated_dict(d, "refreshes", MonitorRefreshInfo))
+        return cls(refreshes=_repeated_dict(d, 'refreshes', MonitorRefreshInfo))
+
+

 @dataclass
@@ -6704,6 +5408,8 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> MonitorSnapshot:
         """Deserializes the MonitorSnapshot from a dictionary."""
         return cls()
+
+

 @dataclass
@@ -6714,165 +5420,112 @@ class MonitorTimeSeries:
     pyspark ``to_timestamp`` [function].

     [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html"""
-
+
     granularities: List[str]
     """Granularities for aggregating data into time windows based on their timestamp. Currently the
     following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``,
     ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MonitorTimeSeries into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.granularities:
-            body["granularities"] = [v for v in self.granularities]
-        if self.timestamp_col is not None:
-            body["timestamp_col"] = self.timestamp_col
+        if self.granularities: body['granularities'] = [v for v in self.granularities]
+        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.granularities:
-            body["granularities"] = self.granularities
-        if self.timestamp_col is not None:
-            body["timestamp_col"] = self.timestamp_col
+        if self.granularities: body['granularities'] = self.granularities
+        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MonitorTimeSeries:
         """Deserializes the MonitorTimeSeries from a dictionary."""
-        return cls(granularities=d.get("granularities", None), timestamp_col=d.get("timestamp_col", None))
+        return cls(granularities=d.get('granularities', None), timestamp_col=d.get('timestamp_col', None))
+
+
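A sketch of a time-series profile using one of the static granularities listed in the docstring (column name illustrative):

    from databricks.sdk.service.catalog import MonitorTimeSeries

    ts = MonitorTimeSeries(
        timestamp_col="event_ts",  # TIMESTAMP, or convertible with pyspark's to_timestamp
        granularities=["1 day"],   # also e.g. "5 minutes", "30 minutes", "1 hour", "1 month"
    )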
 @dataclass
 class NamedTableConstraint:
     name: str
     """The name of the constraint."""
-
+
     def as_dict(self) -> dict:
         """Serializes the NamedTableConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
+        if self.name is not None: body['name'] = self.name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
+        if self.name is not None: body['name'] = self.name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NamedTableConstraint:
         """Deserializes the NamedTableConstraint from a dictionary."""
-        return cls(name=d.get("name", None))
-
-
-@dataclass
-class NewPipelineSpec:
-    """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other
-    fields of pipeline are still inferred by table def internally"""
-
-    storage_catalog: Optional[str] = None
-    """UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This
-    needs to be a standard catalog where the user has permissions to create Delta tables."""
-
-    storage_schema: Optional[str] = None
-    """UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs
-    to be in the standard catalog where the user has permissions to create Delta tables."""
-
-    def as_dict(self) -> dict:
-        """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.storage_catalog is not None:
-            body["storage_catalog"] = self.storage_catalog
-        if self.storage_schema is not None:
-            body["storage_schema"] = self.storage_schema
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.storage_catalog is not None:
-            body["storage_catalog"] = self.storage_catalog
-        if self.storage_schema is not None:
-            body["storage_schema"] = self.storage_schema
-        return body
+        return cls(name=d.get('name', None))
+

-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec:
-        """Deserializes the NewPipelineSpec from a dictionary."""
-        return cls(storage_catalog=d.get("storage_catalog", None), storage_schema=d.get("storage_schema", None))

 @dataclass
 class OnlineTable:
     """Online Table information."""
-
+
     name: Optional[str] = None
     """Full three-part (catalog, schema, table) name of the table."""
-
+
     spec: Optional[OnlineTableSpec] = None
     """Specification of the online table."""
-
+
     status: Optional[OnlineTableStatus] = None
     """Online Table data synchronization status"""
-
+
     table_serving_url: Optional[str] = None
     """Data serving REST API URL for this table"""
-
+
     unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
     """The provisioning state of the online table entity in Unity Catalog. This is distinct from the
     state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
     may be in "PROVISIONING" as it runs asynchronously)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.spec:
-            body["spec"] = self.spec.as_dict()
-        if self.status:
-            body["status"] = self.status.as_dict()
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
-        if self.unity_catalog_provisioning_state is not None:
-            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value
+        if self.name is not None: body['name'] = self.name
+        if self.spec: body['spec'] = self.spec.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
+        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OnlineTable into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.spec:
-            body["spec"] = self.spec
-        if self.status:
-            body["status"] = self.status
-        if self.table_serving_url is not None:
-            body["table_serving_url"] = self.table_serving_url
-        if self.unity_catalog_provisioning_state is not None:
-            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
+        if self.name is not None: body['name'] = self.name
+        if self.spec: body['spec'] = self.spec
+        if self.status: body['status'] = self.status
+        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OnlineTable:
         """Deserializes the OnlineTable from a dictionary."""
-        return cls(
-            name=d.get("name", None),
-            spec=_from_dict(d, "spec", OnlineTableSpec),
-            status=_from_dict(d, "status", OnlineTableStatus),
-            table_serving_url=d.get("table_serving_url", None),
-            unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
-        )
+        return cls(name=d.get('name', None), spec=_from_dict(d, 'spec', OnlineTableSpec), status=_from_dict(d, 'status', OnlineTableStatus), table_serving_url=d.get('table_serving_url', None), unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state', ProvisioningInfoState))
+
+

 @dataclass
 class OnlineTableSpec:
     """Specification of an online table."""
-
+
     perform_full_copy: Optional[bool] = None
     """Whether to create a full-copy pipeline -- a pipeline that stops after creating a full copy of
     the source table upon initialization and does not process any change data feeds (CDFs) afterwards.
@@ -6880,76 +5533,56 @@ class OnlineTableSpec:
     the source table and there are no incremental updates. This mode is useful for syncing views or
     tables without CDFs to online tables. Note that the full-copy pipeline only supports "triggered"
     scheduling policy."""
-
+
     pipeline_id: Optional[str] = None
     """ID of the associated pipeline. Generated by the server - cannot be set by the caller."""
-
+
     primary_key_columns: Optional[List[str]] = None
     """Primary Key columns to be used for data insert/update in the destination."""
-
+
     run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None
     """Pipeline runs continuously after generating the initial data."""
-
+
     run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None
     """Pipeline stops after generating the initial data and can be triggered later (manually, through a
     cron job or through data triggers)"""
-
+
     source_table_full_name: Optional[str] = None
     """Three-part (catalog, schema, table) name of the source Delta table."""
-
+
     timeseries_key: Optional[str] = None
     """Time series key to deduplicate (tie-break) rows with the same primary key."""
-
+
     def as_dict(self) -> dict:
         """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.perform_full_copy is not None:
-            body["perform_full_copy"] = self.perform_full_copy
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.primary_key_columns:
-            body["primary_key_columns"] = [v for v in self.primary_key_columns]
-        if self.run_continuously:
-            body["run_continuously"] = self.run_continuously.as_dict()
-        if self.run_triggered:
-            body["run_triggered"] = self.run_triggered.as_dict()
-        if self.source_table_full_name is not None:
-            body["source_table_full_name"] = self.source_table_full_name
-        if self.timeseries_key is not None:
-            body["timeseries_key"] = self.timeseries_key
+        if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.primary_key_columns: body['primary_key_columns'] = [v for v in self.primary_key_columns]
+        if self.run_continuously: body['run_continuously'] = self.run_continuously.as_dict()
+        if self.run_triggered: body['run_triggered'] = self.run_triggered.as_dict()
+        if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name
+        if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.perform_full_copy is not None:
-            body["perform_full_copy"] = self.perform_full_copy
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.primary_key_columns:
-            body["primary_key_columns"] = self.primary_key_columns
-        if self.run_continuously:
-            body["run_continuously"] = self.run_continuously
-        if self.run_triggered:
-            body["run_triggered"] = self.run_triggered
-        if self.source_table_full_name is not None:
-            body["source_table_full_name"] = self.source_table_full_name
-        if self.timeseries_key is not None:
-            body["timeseries_key"] = self.timeseries_key
+        if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns
+        if self.run_continuously: body['run_continuously'] = self.run_continuously
+        if self.run_triggered: body['run_triggered'] = self.run_triggered
+        if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name
+        if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpec:
         """Deserializes the OnlineTableSpec from a dictionary."""
-        return cls(
-            perform_full_copy=d.get("perform_full_copy", None),
-            pipeline_id=d.get("pipeline_id", None),
-            primary_key_columns=d.get("primary_key_columns", None),
-            run_continuously=_from_dict(d, "run_continuously", OnlineTableSpecContinuousSchedulingPolicy),
-            run_triggered=_from_dict(d, "run_triggered", OnlineTableSpecTriggeredSchedulingPolicy),
-            source_table_full_name=d.get("source_table_full_name", None),
-            timeseries_key=d.get("timeseries_key", None),
-        )
+        return cls(perform_full_copy=d.get('perform_full_copy', None), pipeline_id=d.get('pipeline_id', None), primary_key_columns=d.get('primary_key_columns', None), run_continuously=_from_dict(d, 'run_continuously', OnlineTableSpecContinuousSchedulingPolicy), run_triggered=_from_dict(d, 'run_triggered', OnlineTableSpecTriggeredSchedulingPolicy), source_table_full_name=d.get('source_table_full_name', None), timeseries_key=d.get('timeseries_key', None))
+
+

 @dataclass
@@ -6968,6 +5601,8 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecContinuousSchedulingPolicy:
         """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary."""
         return cls()
+
+

 @dataclass
@@ -6986,1971 +5621,1397 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecTriggeredSchedulingPolicy:
         """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary."""
         return cls()
+
+
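Because a full-copy pipeline only supports the "triggered" policy (per the docstring above), a spec pairing perform_full_copy with run_triggered might look like this (names illustrative):

    from databricks.sdk.service.catalog import (
        OnlineTableSpec,
        OnlineTableSpecTriggeredSchedulingPolicy,
    )

    spec = OnlineTableSpec(
        source_table_full_name="main.default.transactions",
        primary_key_columns=["txn_id"],
        timeseries_key="txn_ts",  # tie-breaks rows sharing a primary key
        perform_full_copy=True,   # full copy => triggered scheduling only
        run_triggered=OnlineTableSpecTriggeredSchedulingPolicy(),
    )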
 class OnlineTableState(Enum):
     """The state of an online table."""
-
-    OFFLINE = "OFFLINE"
-    OFFLINE_FAILED = "OFFLINE_FAILED"
-    ONLINE = "ONLINE"
-    ONLINE_CONTINUOUS_UPDATE = "ONLINE_CONTINUOUS_UPDATE"
-    ONLINE_NO_PENDING_UPDATE = "ONLINE_NO_PENDING_UPDATE"
-    ONLINE_PIPELINE_FAILED = "ONLINE_PIPELINE_FAILED"
-    ONLINE_TRIGGERED_UPDATE = "ONLINE_TRIGGERED_UPDATE"
-    ONLINE_UPDATING_PIPELINE_RESOURCES = "ONLINE_UPDATING_PIPELINE_RESOURCES"
-    PROVISIONING = "PROVISIONING"
-    PROVISIONING_INITIAL_SNAPSHOT = "PROVISIONING_INITIAL_SNAPSHOT"
-    PROVISIONING_PIPELINE_RESOURCES = "PROVISIONING_PIPELINE_RESOURCES"
-
+
+    OFFLINE = 'OFFLINE'
+    OFFLINE_FAILED = 'OFFLINE_FAILED'
+    ONLINE = 'ONLINE'
+    ONLINE_CONTINUOUS_UPDATE = 'ONLINE_CONTINUOUS_UPDATE'
+    ONLINE_NO_PENDING_UPDATE = 'ONLINE_NO_PENDING_UPDATE'
+    ONLINE_PIPELINE_FAILED = 'ONLINE_PIPELINE_FAILED'
+    ONLINE_TRIGGERED_UPDATE = 'ONLINE_TRIGGERED_UPDATE'
+    ONLINE_UPDATING_PIPELINE_RESOURCES = 'ONLINE_UPDATING_PIPELINE_RESOURCES'
+    PROVISIONING = 'PROVISIONING'
+    PROVISIONING_INITIAL_SNAPSHOT = 'PROVISIONING_INITIAL_SNAPSHOT'
+    PROVISIONING_PIPELINE_RESOURCES = 'PROVISIONING_PIPELINE_RESOURCES'

 @dataclass
 class OnlineTableStatus:
     """Status of an online table."""
-
+
     continuous_update_status: Optional[ContinuousUpdateStatus] = None
     """Detailed status of an online table. Shown if the online table is in the
     ONLINE_CONTINUOUS_UPDATE or the ONLINE_UPDATING_PIPELINE_RESOURCES state."""
-
+
     detailed_state: Optional[OnlineTableState] = None
     """The state of the online table."""
-
+
     failed_status: Optional[FailedStatus] = None
     """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the
     ONLINE_PIPELINE_FAILED state."""
-
+
     message: Optional[str] = None
     """A text description of the current state of the online table."""
-
+
     provisioning_status: Optional[ProvisioningStatus] = None
     """Detailed status of an online table. Shown if the online table is in the
     PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""
-
+
     triggered_update_status: Optional[TriggeredUpdateStatus] = None
     """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE
     or the ONLINE_NO_PENDING_UPDATE state."""
-
+
     def as_dict(self) -> dict:
         """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.continuous_update_status:
-            body["continuous_update_status"] = self.continuous_update_status.as_dict()
-        if self.detailed_state is not None:
-            body["detailed_state"] = self.detailed_state.value
-        if self.failed_status:
-            body["failed_status"] = self.failed_status.as_dict()
-        if self.message is not None:
-            body["message"] = self.message
-        if self.provisioning_status:
-            body["provisioning_status"] = self.provisioning_status.as_dict()
-        if self.triggered_update_status:
-            body["triggered_update_status"] = self.triggered_update_status.as_dict()
+        if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status.as_dict()
+        if self.detailed_state is not None: body['detailed_state'] = self.detailed_state.value
+        if self.failed_status: body['failed_status'] = self.failed_status.as_dict()
+        if self.message is not None: body['message'] = self.message
+        if self.provisioning_status: body['provisioning_status'] = self.provisioning_status.as_dict()
+        if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.continuous_update_status:
-            body["continuous_update_status"] = self.continuous_update_status
-        if self.detailed_state is not None:
-            body["detailed_state"] = self.detailed_state
-        if self.failed_status:
-            body["failed_status"] = self.failed_status
-        if self.message is not None:
-            body["message"] = self.message
-        if self.provisioning_status:
-            body["provisioning_status"] = self.provisioning_status
-        if self.triggered_update_status:
-            body["triggered_update_status"] = self.triggered_update_status
+        if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status
+        if self.detailed_state is not None: body['detailed_state'] = self.detailed_state
+        if self.failed_status: body['failed_status'] = self.failed_status
+        if self.message is not None: body['message'] = self.message
+        if self.provisioning_status: body['provisioning_status'] = self.provisioning_status
+        if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus:
         """Deserializes the OnlineTableStatus from a dictionary."""
-        return cls(
-            continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus),
-            detailed_state=_enum(d, "detailed_state", OnlineTableState),
-            failed_status=_from_dict(d, "failed_status", FailedStatus),
-            message=d.get("message", None),
-            provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus),
-            triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus),
-        )
+        return cls(continuous_update_status=_from_dict(d, 'continuous_update_status', ContinuousUpdateStatus), detailed_state=_enum(d, 'detailed_state', OnlineTableState), failed_status=_from_dict(d, 'failed_status', FailedStatus), message=d.get('message', None), provisioning_status=_from_dict(d, 'provisioning_status', ProvisioningStatus), triggered_update_status=_from_dict(d, 'triggered_update_status', TriggeredUpdateStatus))
+
+

 @dataclass
 class PermissionsChange:
     add: Optional[List[Privilege]] = None
     """The set of privileges to add."""
-
+
     principal: Optional[str] = None
     """The principal whose privileges we are changing."""
-
+
     remove: Optional[List[Privilege]] = None
     """The set of privileges to remove."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.add:
-            body["add"] = [v.value for v in self.add]
-        if self.principal is not None:
-            body["principal"] = self.principal
-        if self.remove:
-            body["remove"] = [v.value for v in self.remove]
+        if self.add: body['add'] = [v.value for v in self.add]
+        if self.principal is not None: body['principal'] = self.principal
+        if self.remove: body['remove'] = [v.value for v in self.remove]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.add:
-            body["add"] = self.add
-        if self.principal is not None:
-            body["principal"] = self.principal
-        if self.remove:
-            body["remove"] = self.remove
+        if self.add: body['add'] = self.add
+        if self.principal is not None: body['principal'] = self.principal
+        if self.remove: body['remove'] = self.remove
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange:
         """Deserializes the PermissionsChange from a dictionary."""
-        return cls(
-            add=_repeated_enum(d, "add", Privilege),
-            principal=d.get("principal", None),
-            remove=_repeated_enum(d, "remove", Privilege),
-        )
-
-
-@dataclass
-class PermissionsList:
-    privilege_assignments: Optional[List[PrivilegeAssignment]] = None
-    """The privileges assigned to each principal"""
-
-    def as_dict(self) -> dict:
-        """Serializes the PermissionsList into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.privilege_assignments:
-            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the PermissionsList into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.privilege_assignments:
-            body["privilege_assignments"] = self.privilege_assignments
-        return body
+        return cls(add=_repeated_enum(d, 'add', Privilege), principal=d.get('principal', None), remove=_repeated_enum(d, 'remove', Privilege))
+

-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> PermissionsList:
-        """Deserializes the PermissionsList from a dictionary."""
-        return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment))
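A PermissionsChange is typically sent through the grants API; a minimal sketch, assuming a `WorkspaceClient` named `w` and the enum form of securable_type (securable and principal names illustrative):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import PermissionsChange, Privilege, SecurableType

    w = WorkspaceClient()
    w.grants.update(
        securable_type=SecurableType.TABLE,
        full_name="main.default.transactions",
        changes=[
            PermissionsChange(
                principal="data-engineers",
                add=[Privilege.SELECT],
                remove=[Privilege.MODIFY],
            )
        ],
    )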
 @dataclass
 class PipelineProgress:
     """Progress information of the Online Table data synchronization pipeline."""
-
+
     estimated_completion_time_seconds: Optional[float] = None
     """The estimated time remaining to complete this update in seconds."""
-
+
     latest_version_currently_processing: Optional[int] = None
     """The source table Delta version that was last processed by the pipeline. The pipeline may not
     have completely processed this version yet."""
-
+
     sync_progress_completion: Optional[float] = None
     """The completion ratio of this update. This is a number between 0 and 1."""
-
+
     synced_row_count: Optional[int] = None
     """The number of rows that have been synced in this update."""
-
+
     total_row_count: Optional[int] = None
     """The total number of rows that need to be synced in this update. This number may be an estimate."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.estimated_completion_time_seconds is not None:
-            body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
-        if self.latest_version_currently_processing is not None:
-            body["latest_version_currently_processing"] = self.latest_version_currently_processing
-        if self.sync_progress_completion is not None:
-            body["sync_progress_completion"] = self.sync_progress_completion
-        if self.synced_row_count is not None:
-            body["synced_row_count"] = self.synced_row_count
-        if self.total_row_count is not None:
-            body["total_row_count"] = self.total_row_count
+        if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds
+        if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing
+        if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion
+        if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count
+        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.estimated_completion_time_seconds is not None:
-            body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
-        if self.latest_version_currently_processing is not None:
-            body["latest_version_currently_processing"] = self.latest_version_currently_processing
-        if self.sync_progress_completion is not None:
-            body["sync_progress_completion"] = self.sync_progress_completion
-        if self.synced_row_count is not None:
-            body["synced_row_count"] = self.synced_row_count
-        if self.total_row_count is not None:
-            body["total_row_count"] = self.total_row_count
+        if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds
+        if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing
+        if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion
+        if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count
+        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress:
         """Deserializes the PipelineProgress from a dictionary."""
-        return cls(
-            estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None),
-            latest_version_currently_processing=d.get("latest_version_currently_processing", None),
-            sync_progress_completion=d.get("sync_progress_completion", None),
-            synced_row_count=d.get("synced_row_count", None),
-            total_row_count=d.get("total_row_count", None),
-        )
+        return cls(estimated_completion_time_seconds=d.get('estimated_completion_time_seconds', None), latest_version_currently_processing=d.get('latest_version_currently_processing', None), sync_progress_completion=d.get('sync_progress_completion', None), synced_row_count=d.get('synced_row_count', None), total_row_count=d.get('total_row_count', None))
+
+

 @dataclass
 class PrimaryKeyConstraint:
     name: str
     """The name of the constraint."""
-
+
     child_columns: List[str]
     """Column names for this constraint."""
-
+
     timeseries_columns: Optional[List[str]] = None
     """Column names that represent a timeseries."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.child_columns:
-            body["child_columns"] = [v for v in self.child_columns]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.timeseries_columns:
-            body["timeseries_columns"] = [v for v in self.timeseries_columns]
+        if self.child_columns: body['child_columns'] = [v for v in self.child_columns]
+        if self.name is not None: body['name'] = self.name
+        if self.timeseries_columns: body['timeseries_columns'] = [v for v in self.timeseries_columns]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.child_columns:
-            body["child_columns"] = self.child_columns
-        if self.name is not None:
-            body["name"] = self.name
-        if self.timeseries_columns:
-            body["timeseries_columns"] = self.timeseries_columns
+        if self.child_columns: body['child_columns'] = self.child_columns
+        if self.name is not None: body['name'] = self.name
+        if self.timeseries_columns: body['timeseries_columns'] = self.timeseries_columns
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint:
         """Deserializes the PrimaryKeyConstraint from a dictionary."""
-        return cls(
-            child_columns=d.get("child_columns", None),
-            name=d.get("name", None),
-            timeseries_columns=d.get("timeseries_columns", None),
-        )
-
+        return cls(child_columns=d.get('child_columns', None), name=d.get('name', None), timeseries_columns=d.get('timeseries_columns', None))
+

-class Privilege(Enum):
-    ACCESS = "ACCESS"
-    ALL_PRIVILEGES = "ALL_PRIVILEGES"
-    APPLY_TAG = "APPLY_TAG"
-    BROWSE = "BROWSE"
-    CREATE = "CREATE"
-    CREATE_CATALOG = "CREATE_CATALOG"
-    CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM"
-    CREATE_CONNECTION = "CREATE_CONNECTION"
-    CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
-    CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"
-    CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME"
-    CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG"
-    CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE"
-    CREATE_FUNCTION = "CREATE_FUNCTION"
-    CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE"
-    CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW"
-    CREATE_MODEL = "CREATE_MODEL"
-    CREATE_PROVIDER = "CREATE_PROVIDER"
-    CREATE_RECIPIENT = "CREATE_RECIPIENT"
-    CREATE_SCHEMA = "CREATE_SCHEMA"
-    CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL"
-    CREATE_SHARE = "CREATE_SHARE"
-    CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL"
-    CREATE_TABLE = "CREATE_TABLE"
-    CREATE_VIEW = "CREATE_VIEW"
-    CREATE_VOLUME = "CREATE_VOLUME"
-    EXECUTE = "EXECUTE"
-    EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK"
-    MANAGE = "MANAGE"
-    MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
-    MODIFY = "MODIFY"
-    MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM"
-    READ_FILES = "READ_FILES"
-    READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
-    READ_VOLUME = "READ_VOLUME"
-    REFRESH = "REFRESH"
-    SELECT = "SELECT"
-    SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION"
-    USAGE = "USAGE"
-    USE_CATALOG = "USE_CATALOG"
-    USE_CONNECTION = "USE_CONNECTION"
-    USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS"
-    USE_PROVIDER = "USE_PROVIDER"
-    USE_RECIPIENT = "USE_RECIPIENT"
-    USE_SCHEMA = "USE_SCHEMA"
-    USE_SHARE = "USE_SHARE"
-    WRITE_FILES = "WRITE_FILES"
-    WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES"
-    WRITE_VOLUME = "WRITE_VOLUME"
+class Privilege(Enum):
+
+
+    ACCESS = 'ACCESS'
+    ALL_PRIVILEGES = 'ALL_PRIVILEGES'
+    APPLY_TAG = 'APPLY_TAG'
+    BROWSE = 'BROWSE'
+    CREATE = 'CREATE'
+    CREATE_CATALOG = 'CREATE_CATALOG'
+    CREATE_CLEAN_ROOM = 'CREATE_CLEAN_ROOM'
+    CREATE_CONNECTION = 'CREATE_CONNECTION'
+    CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION'
+    CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
+    CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
+    CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
+    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
+    CREATE_FUNCTION = 'CREATE_FUNCTION'
+    CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
+    CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
+    CREATE_MODEL = 'CREATE_MODEL'
+    CREATE_PROVIDER = 'CREATE_PROVIDER'
+    CREATE_RECIPIENT = 'CREATE_RECIPIENT'
+    CREATE_SCHEMA = 'CREATE_SCHEMA'
+    CREATE_SERVICE_CREDENTIAL = 'CREATE_SERVICE_CREDENTIAL'
+    CREATE_SHARE = 'CREATE_SHARE'
+    CREATE_STORAGE_CREDENTIAL = 'CREATE_STORAGE_CREDENTIAL'
+    CREATE_TABLE = 'CREATE_TABLE'
+    CREATE_VIEW = 'CREATE_VIEW'
+    CREATE_VOLUME = 'CREATE_VOLUME'
+    EXECUTE = 'EXECUTE'
+    EXECUTE_CLEAN_ROOM_TASK = 'EXECUTE_CLEAN_ROOM_TASK'
+    MANAGE = 'MANAGE'
+    MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST'
+    MODIFY = 'MODIFY'
+    MODIFY_CLEAN_ROOM = 'MODIFY_CLEAN_ROOM'
+    READ_FILES = 'READ_FILES'
+    READ_PRIVATE_FILES = 'READ_PRIVATE_FILES'
+    READ_VOLUME = 'READ_VOLUME'
+    REFRESH = 'REFRESH'
+    SELECT = 'SELECT'
+    SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION'
+    USAGE = 'USAGE'
+    USE_CATALOG = 'USE_CATALOG'
+    USE_CONNECTION = 'USE_CONNECTION'
+    USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS'
+    USE_PROVIDER = 'USE_PROVIDER'
+    USE_RECIPIENT = 'USE_RECIPIENT'
+    USE_SCHEMA = 'USE_SCHEMA'
+    USE_SHARE = 'USE_SHARE'
+    WRITE_FILES = 'WRITE_FILES'
+    WRITE_PRIVATE_FILES = 'WRITE_PRIVATE_FILES'
+    WRITE_VOLUME = 'WRITE_VOLUME'

 @dataclass
 class PrivilegeAssignment:
     principal: Optional[str] = None
     """The principal (user email address or group name)."""
-
+
     privileges: Optional[List[Privilege]] = None
     """The privileges assigned to the principal."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.principal is not None:
-            body["principal"] = self.principal
-        if self.privileges:
-            body["privileges"] = [v.value for v in self.privileges]
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = [v.value for v in self.privileges]
        return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.principal is not None:
-            body["principal"] = self.principal
-        if self.privileges:
-            body["privileges"] = self.privileges
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = self.privileges
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment:
         """Deserializes the PrivilegeAssignment from a dictionary."""
-        return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege))
-
+        return cls(principal=d.get('principal', None), privileges=_repeated_enum(d, 'privileges', Privilege))
+

-PropertiesKvPairs = Dict[str, str]

 @dataclass
 class ProvisioningInfo:
     """Status of an asynchronously provisioned resource."""
-
+
     state: Optional[ProvisioningInfoState] = None
     """The provisioning state of the resource."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.state is not None:
-            body["state"] = self.state.value
+        if self.state is not None: body['state'] = self.state.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.state is not None:
-            body["state"] = self.state
+        if self.state is not None: body['state'] = self.state
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ProvisioningInfo:
         """Deserializes the ProvisioningInfo from a dictionary."""
-        return cls(state=_enum(d, "state", ProvisioningInfoState))
-
+        return cls(state=_enum(d, 'state', ProvisioningInfoState))
+

-class ProvisioningInfoState(Enum):
-    ACTIVE = "ACTIVE"
-    DEGRADED = "DEGRADED"
-    DELETING = "DELETING"
-    FAILED = "FAILED"
-    PROVISIONING = "PROVISIONING"
-    UPDATING = "UPDATING"
+class ProvisioningInfoState(Enum):
+
+
+    ACTIVE = 'ACTIVE'
+    DEGRADED = 'DEGRADED'
+    DELETING = 'DELETING'
+    FAILED = 'FAILED'
+    PROVISIONING = 'PROVISIONING'
+    UPDATING = 'UPDATING'

 @dataclass
 class ProvisioningStatus:
     """Detailed status of an online table. Shown if the online table is in the
     PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state."""
-
+
     initial_pipeline_sync_progress: Optional[PipelineProgress] = None
     """Details about initial data synchronization. Only populated when in the
     PROVISIONING_INITIAL_SNAPSHOT state."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.initial_pipeline_sync_progress:
-            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict()
+        if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.initial_pipeline_sync_progress:
-            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress
+        if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ProvisioningStatus:
         """Deserializes the ProvisioningStatus from a dictionary."""
-        return cls(initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress))
+        return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress))
+
+
 @dataclass
 class QuotaInfo:
     last_refreshed_at: Optional[int] = None
     """The timestamp that indicates when the quota count was last updated."""
-
+
     parent_full_name: Optional[str] = None
     """Name of the parent resource. Returns metastore ID if the parent is a metastore."""
-
+
     parent_securable_type: Optional[SecurableType] = None
     """The quota parent securable type."""
-
+
     quota_count: Optional[int] = None
     """The current usage of the resource quota."""
-
+
     quota_limit: Optional[int] = None
     """The current limit of the resource quota."""
-
+
     quota_name: Optional[str] = None
     """The name of the quota."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.last_refreshed_at is not None:
-            body["last_refreshed_at"] = self.last_refreshed_at
-        if self.parent_full_name is not None:
-            body["parent_full_name"] = self.parent_full_name
-        if self.parent_securable_type is not None:
-            body["parent_securable_type"] = self.parent_securable_type.value
-        if self.quota_count is not None:
-            body["quota_count"] = self.quota_count
-        if self.quota_limit is not None:
-            body["quota_limit"] = self.quota_limit
-        if self.quota_name is not None:
-            body["quota_name"] = self.quota_name
+        if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at
+        if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name
+        if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type.value
+        if self.quota_count is not None: body['quota_count'] = self.quota_count
+        if self.quota_limit is not None: body['quota_limit'] = self.quota_limit
+        if self.quota_name is not None: body['quota_name'] = self.quota_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.last_refreshed_at is not None:
-            body["last_refreshed_at"] = self.last_refreshed_at
-        if self.parent_full_name is not None:
-            body["parent_full_name"] = self.parent_full_name
-        if self.parent_securable_type is not None:
-            body["parent_securable_type"] = self.parent_securable_type
-        if self.quota_count is not None:
-            body["quota_count"] = self.quota_count
-        if self.quota_limit is not None:
-            body["quota_limit"] = self.quota_limit
-        if self.quota_name is not None:
-            body["quota_name"] = self.quota_name
+        if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at
+        if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name
+        if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type
+        if self.quota_count is not None: body['quota_count'] = self.quota_count
+        if self.quota_limit is not None: body['quota_limit'] = self.quota_limit
+        if self.quota_name is not None: body['quota_name'] = self.quota_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QuotaInfo:
         """Deserializes the QuotaInfo from a dictionary."""
-        return cls(
-            last_refreshed_at=d.get("last_refreshed_at", None),
-            parent_full_name=d.get("parent_full_name", None),
-            parent_securable_type=_enum(d, "parent_securable_type", SecurableType),
-            quota_count=d.get("quota_count", None),
-            quota_limit=d.get("quota_limit", None),
-            quota_name=d.get("quota_name", None),
-        )
+        return cls(last_refreshed_at=d.get('last_refreshed_at', None), parent_full_name=d.get('parent_full_name', None), parent_securable_type=_enum(d, 'parent_securable_type', SecurableType), quota_count=d.get('quota_count', None), quota_limit=d.get('quota_limit', None), quota_name=d.get('quota_name', None))
+
+

 @dataclass
 class R2Credentials:
     """R2 temporary credentials for API authentication. Read more at
     https://developers.cloudflare.com/r2/api/s3/tokens/."""
-
+
     access_key_id: Optional[str] = None
     """The access key ID that identifies the temporary credentials."""
-
+
     secret_access_key: Optional[str] = None
     """The secret access key associated with the access key."""
-
+
     session_token: Optional[str] = None
     """The generated JWT that users must pass to use the temporary credentials."""
-
+
     def as_dict(self) -> dict:
         """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_key_id is not None:
-            body["access_key_id"] = self.access_key_id
-        if self.secret_access_key is not None:
-            body["secret_access_key"] = self.secret_access_key
-        if self.session_token is not None:
-            body["session_token"] = self.session_token
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the R2Credentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_key_id is not None:
-            body["access_key_id"] = self.access_key_id
-        if self.secret_access_key is not None:
-            body["secret_access_key"] = self.secret_access_key
-        if self.session_token is not None:
-            body["session_token"] = self.session_token
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> R2Credentials:
         """Deserializes the R2Credentials from a dictionary."""
-        return cls(
-            access_key_id=d.get("access_key_id", None),
-            secret_access_key=d.get("secret_access_key", None),
-            session_token=d.get("session_token", None),
-        )
+        return cls(access_key_id=d.get('access_key_id', None), secret_access_key=d.get('secret_access_key', None), session_token=d.get('session_token', None))
+
+
+
+
+
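The three fields line up with the standard S3 credential triple, so they can be handed to any S3-compatible client pointed at an R2 endpoint; a sketch (the boto3 call is an assumption of this note, and the account id and bucket are placeholders):

    import boto3

    # `creds` is an R2Credentials value obtained elsewhere, e.g. from a
    # temporary-credentials response; shown here as a plain variable.
    s3 = boto3.client(
        "s3",
        endpoint_url="https://ACCOUNT_ID.r2.cloudflarestorage.com",
        aws_access_key_id=creds.access_key_id,
        aws_secret_access_key=creds.secret_access_key,
        aws_session_token=creds.session_token,
    )
    print(s3.list_objects_v2(Bucket="example-bucket").get("KeyCount"))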
 @dataclass
 class RegenerateDashboardRequest:
     table_name: Optional[str] = None
     """Full name of the table."""
-
+
     warehouse_id: Optional[str] = None
     """Optional argument to specify the warehouse for dashboard regeneration. If not specified, the
     first running warehouse will be used."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardRequest:
         """Deserializes the RegenerateDashboardRequest from a dictionary."""
-        return cls(table_name=d.get("table_name", None), warehouse_id=d.get("warehouse_id", None))
+        return cls(table_name=d.get('table_name', None), warehouse_id=d.get('warehouse_id', None))
+
+

 @dataclass
 class RegenerateDashboardResponse:
     dashboard_id: Optional[str] = None
     """Id of the regenerated monitoring dashboard."""
-
+
     parent_folder: Optional[str] = None
     """The directory where the regenerated dashboard is stored."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.parent_folder is not None:
-            body["parent_folder"] = self.parent_folder
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.parent_folder is not None:
-            body["parent_folder"] = self.parent_folder
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardResponse:
         """Deserializes the RegenerateDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get("dashboard_id", None), parent_folder=d.get("parent_folder", None))
+        return cls(dashboard_id=d.get('dashboard_id', None), parent_folder=d.get('parent_folder', None))
+
+

 @dataclass
 class RegisteredModelAlias:
     """Registered model alias."""
-
+
     alias_name: Optional[str] = None
     """Name of the alias, e.g. 'champion' or 'latest_stable'"""
-
+
     version_num: Optional[int] = None
     """Integer version number of the model version to which this alias points."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alias_name is not None:
-            body["alias_name"] = self.alias_name
-        if self.version_num is not None:
-            body["version_num"] = self.version_num
+        if self.alias_name is not None: body['alias_name'] = self.alias_name
+        if self.version_num is not None: body['version_num'] = self.version_num
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alias_name is not None:
-            body["alias_name"] = self.alias_name
-        if self.version_num is not None:
-            body["version_num"] = self.version_num
+        if self.alias_name is not None: body['alias_name'] = self.alias_name
+        if self.version_num is not None: body['version_num'] = self.version_num
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias:
         """Deserializes the RegisteredModelAlias from a dictionary."""
-        return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None))
+        return cls(alias_name=d.get('alias_name', None), version_num=d.get('version_num', None))
+
+
self.created_by - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.aliases: - body["aliases"] = self.aliases - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.aliases: body['aliases'] = self.aliases + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if 
self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: """Deserializes the RegisteredModelInfo from a dictionary.""" - return cls( - aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - full_name=d.get("full_name", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - schema_name=d.get("schema_name", None), - storage_location=d.get("storage_location", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + + + + @dataclass class SchemaInfo: + """Next ID: 40""" + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """Name of parent catalog.""" - - catalog_type: Optional[str] = None + + catalog_type: Optional[CatalogType] = None """The type of the parent catalog.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this schema was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of schema creator.""" - + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - + """Whether predictive optimization should be enabled for this object and objects under it.""" + full_name: Optional[str] = None """Full name of schema, in form of __catalog_name__.__schema_name__.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """Name of schema, relative to parent catalog.""" - + owner: Optional[str] = None """Username of current owner of schema.""" - - properties: Optional[Dict[str, str]] = None + + properties: Optional[Dict[str,str]] = None """A map of key-value properties attached to the securable.""" - + schema_id: Optional[str] = None """The unique identifier of the schema.""" - + storage_location: Optional[str] = None """Storage location for managed tables within schema.""" - + storage_root: Optional[str] = None """Storage root URL for managed tables within schema.""" - + updated_at: Optional[int] = None """Time at which this schema was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified schema.""" - + def as_dict(self) -> dict: """Serializes the SchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if
self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.catalog_type is not None: - body["catalog_type"] = self.catalog_type - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - if self.schema_id is not None: - body["schema_id"] = self.schema_id - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.catalog_type is not None: body['catalog_type'] = self.catalog_type.value + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict() + if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties + if self.schema_id is not None: body['schema_id'] = self.schema_id + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.catalog_type is not None: - body["catalog_type"] = self.catalog_type - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = 
self.effective_predictive_optimization_flag - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - if self.schema_id is not None: - body["schema_id"] = self.schema_id - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.catalog_type is not None: body['catalog_type'] = self.catalog_type + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties + if self.schema_id is not None: body['schema_id'] = self.schema_id + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo: """Deserializes the SchemaInfo from a dictionary.""" - return cls( - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - catalog_type=d.get("catalog_type", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - effective_predictive_optimization_flag=_from_dict( - d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag - ), - enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - full_name=d.get("full_name", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - properties=d.get("properties", None), - schema_id=d.get("schema_id", None), - storage_location=d.get("storage_location", None), - storage_root=d.get("storage_root", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) - - -SecurableOptionsMap = Dict[str, str] - - -SecurablePropertiesMap = Dict[str, str] + return cls(browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), catalog_type=_enum(d, 'catalog_type', CatalogType), 
comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), effective_predictive_optimization_flag=_from_dict(d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), properties=d.get('properties', None), schema_id=d.get('schema_id', None), storage_location=d.get('storage_location', None), storage_root=d.get('storage_root', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + class SecurableType(Enum): """The type of Unity Catalog securable.""" - - CATALOG = "CATALOG" - CLEAN_ROOM = "CLEAN_ROOM" - CONNECTION = "CONNECTION" - CREDENTIAL = "CREDENTIAL" - EXTERNAL_LOCATION = "EXTERNAL_LOCATION" - EXTERNAL_METADATA = "EXTERNAL_METADATA" - FUNCTION = "FUNCTION" - METASTORE = "METASTORE" - PIPELINE = "PIPELINE" - PROVIDER = "PROVIDER" - RECIPIENT = "RECIPIENT" - SCHEMA = "SCHEMA" - SHARE = "SHARE" - STAGING_TABLE = "STAGING_TABLE" - STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL" - TABLE = "TABLE" - UNKNOWN_SECURABLE_TYPE = "UNKNOWN_SECURABLE_TYPE" - VOLUME = "VOLUME" - + + CATALOG = 'CATALOG' + CLEAN_ROOM = 'CLEAN_ROOM' + CONNECTION = 'CONNECTION' + CREDENTIAL = 'CREDENTIAL' + EXTERNAL_LOCATION = 'EXTERNAL_LOCATION' + EXTERNAL_METADATA = 'EXTERNAL_METADATA' + FUNCTION = 'FUNCTION' + METASTORE = 'METASTORE' + PIPELINE = 'PIPELINE' + PROVIDER = 'PROVIDER' + RECIPIENT = 'RECIPIENT' + SCHEMA = 'SCHEMA' + SHARE = 'SHARE' + STAGING_TABLE = 'STAGING_TABLE' + STORAGE_CREDENTIAL = 'STORAGE_CREDENTIAL' + TABLE = 'TABLE' + UNKNOWN_SECURABLE_TYPE = 'UNKNOWN_SECURABLE_TYPE' + VOLUME = 'VOLUME' @dataclass class SetArtifactAllowlist: artifact_matchers: List[ArtifactMatcher] """A list of allowed artifact match patterns.""" - + artifact_type: Optional[ArtifactType] = None """The artifact type of the allowlist.""" - + created_at: Optional[int] = None """Time at which this artifact allowlist was set, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of the user who set the artifact allowlist.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + def as_dict(self) -> dict: """Serializes the SetArtifactAllowlist into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_matchers: - body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers] - if self.artifact_type is not None: - body["artifact_type"] = self.artifact_type.value - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id + if self.artifact_matchers: body['artifact_matchers'] = [v.as_dict() for v in self.artifact_matchers] + if self.artifact_type is not None: body['artifact_type'] = self.artifact_type.value + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body def as_shallow_dict(self) -> dict: """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_matchers: - body["artifact_matchers"] = 
self.artifact_matchers - if self.artifact_type is not None: - body["artifact_type"] = self.artifact_type - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id + if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers + if self.artifact_type is not None: body['artifact_type'] = self.artifact_type + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetArtifactAllowlist: """Deserializes the SetArtifactAllowlist from a dictionary.""" - return cls( - artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher), - artifact_type=_enum(d, "artifact_type", ArtifactType), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - metastore_id=d.get("metastore_id", None), - ) + return cls(artifact_matchers=_repeated_dict(d, 'artifact_matchers', ArtifactMatcher), artifact_type=_enum(d, 'artifact_type', ArtifactType), created_at=d.get('created_at', None), created_by=d.get('created_by', None), metastore_id=d.get('metastore_id', None)) + + @dataclass class SetRegisteredModelAliasRequest: full_name: str """Full name of the registered model""" - + alias: str """The name of the alias""" - + version_num: int """The version number of the model version to which the alias points""" - + def as_dict(self) -> dict: """Serializes the SetRegisteredModelAliasRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.full_name is not None: - body["full_name"] = self.full_name - if self.version_num is not None: - body["version_num"] = self.version_num + if self.alias is not None: body['alias'] = self.alias + if self.full_name is not None: body['full_name'] = self.full_name + if self.version_num is not None: body['version_num'] = self.version_num return body def as_shallow_dict(self) -> dict: """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.alias is not None: - body["alias"] = self.alias - if self.full_name is not None: - body["full_name"] = self.full_name - if self.version_num is not None: - body["version_num"] = self.version_num + if self.alias is not None: body['alias'] = self.alias + if self.full_name is not None: body['full_name'] = self.full_name + if self.version_num is not None: body['version_num'] = self.version_num return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetRegisteredModelAliasRequest: """Deserializes the SetRegisteredModelAliasRequest from a dictionary.""" - return cls( - alias=d.get("alias", None), full_name=d.get("full_name", None), version_num=d.get("version_num", None) - ) + return cls(alias=d.get('alias', None), full_name=d.get('full_name', None), version_num=d.get('version_num', None)) + + @dataclass class SseEncryptionDetails: """Server-Side Encryption properties for clients communicating with AWS s3.""" - + algorithm: Optional[SseEncryptionDetailsAlgorithm] = None """Sets the value of the 'x-amz-server-side-encryption' header in S3 request.""" - + aws_kms_key_arn: Optional[str] = None """Optional. 
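A quick sketch of the SetRegisteredModelAliasRequest hunk above; the model name is hypothetical:

from databricks.sdk.service.catalog import SetRegisteredModelAliasRequest

req = SetRegisteredModelAliasRequest(
    full_name="main.ml_models.churn_model",  # hypothetical three-level model name
    alias="champion",
    version_num=3,
)
# Only fields that are set end up in the JSON request body.
assert req.as_dict() == {"alias": "champion", "full_name": "main.ml_models.churn_model", "version_num": 3}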
The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". Sets the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header.""" - + def as_dict(self) -> dict: """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.algorithm is not None: - body["algorithm"] = self.algorithm.value - if self.aws_kms_key_arn is not None: - body["aws_kms_key_arn"] = self.aws_kms_key_arn + if self.algorithm is not None: body['algorithm'] = self.algorithm.value + if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn return body def as_shallow_dict(self) -> dict: """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.algorithm is not None: - body["algorithm"] = self.algorithm - if self.aws_kms_key_arn is not None: - body["aws_kms_key_arn"] = self.aws_kms_key_arn + if self.algorithm is not None: body['algorithm'] = self.algorithm + if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SseEncryptionDetails: """Deserializes the SseEncryptionDetails from a dictionary.""" - return cls( - algorithm=_enum(d, "algorithm", SseEncryptionDetailsAlgorithm), - aws_kms_key_arn=d.get("aws_kms_key_arn", None), - ) - + return cls(algorithm=_enum(d, 'algorithm', SseEncryptionDetailsAlgorithm), aws_kms_key_arn=d.get('aws_kms_key_arn', None)) + -class SseEncryptionDetailsAlgorithm(Enum): - AWS_SSE_KMS = "AWS_SSE_KMS" - AWS_SSE_S3 = "AWS_SSE_S3" +class SseEncryptionDetailsAlgorithm(Enum): + + + AWS_SSE_KMS = 'AWS_SSE_KMS' + AWS_SSE_S3 = 'AWS_SSE_S3' @dataclass class StorageCredentialInfo: aws_iam_role: Optional[AwsIamRoleResponse] = None """The AWS IAM role configuration.""" - + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None """The Azure managed identity configuration.""" - + azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" - + cloudflare_api_token: Optional[CloudflareApiToken] = None """The Cloudflare API token configuration.""" - + comment: Optional[str] = None """Comment associated with the credential.""" - + created_at: Optional[int] = None """Time at which this Credential was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of credential creator.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None """The Databricks managed GCP service account configuration.""" - + full_name: Optional[str] = None """The full name of the credential.""" - + id: Optional[str] = None """The unique identifier of the credential.""" - + isolation_mode: Optional[IsolationMode] = None - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """The credential name. 
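The SseEncryptionDetails hunk above shows how the two serializers treat enum fields differently; a sketch with a made-up KMS key ARN:

from databricks.sdk.service.catalog import SseEncryptionDetails, SseEncryptionDetailsAlgorithm

details = SseEncryptionDetails(
    algorithm=SseEncryptionDetailsAlgorithm.AWS_SSE_KMS,
    aws_kms_key_arn="arn:aws:kms:us-west-2:111122223333:key/example",  # hypothetical ARN
)
# as_dict() flattens the enum to its string value for the JSON body...
assert details.as_dict()["algorithm"] == "AWS_SSE_KMS"
# ...while as_shallow_dict() keeps the enum member itself.
assert details.as_shallow_dict()["algorithm"] is SseEncryptionDetailsAlgorithm.AWS_SSE_KMS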
The name must be unique within the metastore.""" - + owner: Optional[str] = None """Username of current owner of credential.""" - + read_only: Optional[bool] = None """Whether the storage credential is only usable for read operations.""" - + updated_at: Optional[int] = None """Time at which this credential was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified the credential.""" - + used_for_managed_storage: Optional[bool] = None """Whether this credential is the current metastore's root storage credential.""" - + def as_dict(self) -> dict: """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: - body["id"] = self.id - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() + if self.full_name is not None: body['full_name'] = self.full_name + if self.id is not None: body['id'] = self.id + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.used_for_managed_storage is not 
None: body['used_for_managed_storage'] = self.used_for_managed_storage return body def as_shallow_dict(self) -> dict: """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: - body["id"] = self.id - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.full_name is not None: body['full_name'] = self.full_name + if self.id is not None: body['id'] = self.id + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.used_for_managed_storage is not None: body['used_for_managed_storage'] = self.used_for_managed_storage return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StorageCredentialInfo: """Deserializes the StorageCredentialInfo from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleResponse), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - 
created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountResponse - ), - full_name=d.get("full_name", None), - id=d.get("id", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - used_for_managed_storage=d.get("used_for_managed_storage", None), - ) - - -@dataclass -class SyncedDatabaseTable: - """Next field marker: 10""" - - name: str - """Full three-part (catalog, schema, table) name of the table.""" - - data_synchronization_status: Optional[OnlineTableStatus] = None - """Synced Table data synchronization status""" - - database_instance_name: Optional[str] = None - """Name of the target database instance. This is required when creating synced database tables in - standard catalogs. This is optional when creating synced database tables in registered catalogs. - If this field is specified when creating synced database tables in registered catalogs, the - database instance name MUST match that of the registered catalog (or the request will be - rejected).""" - - logical_database_name: Optional[str] = None - """Target Postgres database object (logical database) name for this table. This field is optional - in all scenarios. - - When creating a synced table in a registered Postgres catalog, the target Postgres database name - is inferred to be that of the registered catalog. If this field is specified in this scenario, - the Postgres database name MUST match that of the registered catalog (or the request will be - rejected). + return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleResponse), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityResponse), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountResponse), full_name=d.get('full_name', None), id=d.get('id', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), used_for_managed_storage=d.get('used_for_managed_storage', None)) - When creating a synced table in a standard catalog, the target database name is inferred to be - that of the standard catalog. In this scenario, specifying this field will allow targeting an - arbitrary postgres database.""" - - spec: Optional[SyncedTableSpec] = None - """Specification of a synced database table.""" - - table_serving_url: Optional[str] = None - """Data serving REST API URL for this table""" - - unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None - """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the - state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline - may be in "PROVISIONING" as it runs asynchronously).""" - - def as_dict(self) -> dict: - """Serializes the SyncedDatabaseTable into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data_synchronization_status: - body["data_synchronization_status"] = self.data_synchronization_status.as_dict() - if self.database_instance_name is not None: - body["database_instance_name"] = self.database_instance_name - if self.logical_database_name is not None: - body["logical_database_name"] = self.logical_database_name - if self.name is not None: - body["name"] = self.name - if self.spec: - body["spec"] = self.spec.as_dict() - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: - body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SyncedDatabaseTable into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data_synchronization_status: - body["data_synchronization_status"] = self.data_synchronization_status - if self.database_instance_name is not None: - body["database_instance_name"] = self.database_instance_name - if self.logical_database_name is not None: - body["logical_database_name"] = self.logical_database_name - if self.name is not None: - body["name"] = self.name - if self.spec: - body["spec"] = self.spec - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: - body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable: - """Deserializes the SyncedDatabaseTable from a dictionary.""" - return cls( - data_synchronization_status=_from_dict(d, "data_synchronization_status", OnlineTableStatus), - database_instance_name=d.get("database_instance_name", None), - logical_database_name=d.get("logical_database_name", None), - name=d.get("name", None), - spec=_from_dict(d, "spec", SyncedTableSpec), - table_serving_url=d.get("table_serving_url", None), - unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), - ) - - -class SyncedTableSchedulingPolicy(Enum): - - CONTINUOUS = "CONTINUOUS" - SNAPSHOT = "SNAPSHOT" - TRIGGERED = "TRIGGERED" - - -@dataclass -class SyncedTableSpec: - """Specification of a synced database table.""" - - create_database_objects_if_missing: Optional[bool] = None - """If true, the synced table's logical database and schema resources in PG will be created if they - do not already exist.""" - - new_pipeline_spec: Optional[NewPipelineSpec] = None - """Spec of new pipeline. Should be empty if pipeline_id is set""" - - pipeline_id: Optional[str] = None - """ID of the associated pipeline. 
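This hunk removes SyncedTableSpec from catalog.py, so the following sketch only runs against a pre-patch SDK; it illustrates the either-or between pipeline_id and new_pipeline_spec that the docstrings describe (table and pipeline names are hypothetical):

from databricks.sdk.service.catalog import SyncedTableSchedulingPolicy, SyncedTableSpec

spec = SyncedTableSpec(
    source_table_full_name="main.sales.orders",  # hypothetical source Delta table
    primary_key_columns=["order_id"],
    scheduling_policy=SyncedTableSchedulingPolicy.TRIGGERED,
    pipeline_id="1234-567890-abcdef",  # set this OR new_pipeline_spec, never both
)
assert spec.as_dict()["scheduling_policy"] == "TRIGGERED"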
Should be empty if new_pipeline_spec is set""" - - primary_key_columns: Optional[List[str]] = None - """Primary Key columns to be used for data insert/update in the destination.""" - - scheduling_policy: Optional[SyncedTableSchedulingPolicy] = None - """Scheduling policy of the underlying pipeline.""" - - source_table_full_name: Optional[str] = None - """Three-part (catalog, schema, table) name of the source Delta table.""" - - timeseries_key: Optional[str] = None - """Time series key to deduplicate (tie-break) rows with the same primary key.""" - - def as_dict(self) -> dict: - """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.create_database_objects_if_missing is not None: - body["create_database_objects_if_missing"] = self.create_database_objects_if_missing - if self.new_pipeline_spec: - body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict() - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.primary_key_columns: - body["primary_key_columns"] = [v for v in self.primary_key_columns] - if self.scheduling_policy is not None: - body["scheduling_policy"] = self.scheduling_policy.value - if self.source_table_full_name is not None: - body["source_table_full_name"] = self.source_table_full_name - if self.timeseries_key is not None: - body["timeseries_key"] = self.timeseries_key - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.create_database_objects_if_missing is not None: - body["create_database_objects_if_missing"] = self.create_database_objects_if_missing - if self.new_pipeline_spec: - body["new_pipeline_spec"] = self.new_pipeline_spec - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.primary_key_columns: - body["primary_key_columns"] = self.primary_key_columns - if self.scheduling_policy is not None: - body["scheduling_policy"] = self.scheduling_policy - if self.source_table_full_name is not None: - body["source_table_full_name"] = self.source_table_full_name - if self.timeseries_key is not None: - body["timeseries_key"] = self.timeseries_key - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec: - """Deserializes the SyncedTableSpec from a dictionary.""" - return cls( - create_database_objects_if_missing=d.get("create_database_objects_if_missing", None), - new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec), - pipeline_id=d.get("pipeline_id", None), - primary_key_columns=d.get("primary_key_columns", None), - scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy), - source_table_full_name=d.get("source_table_full_name", None), - timeseries_key=d.get("timeseries_key", None), - ) @dataclass class SystemSchemaInfo: schema: str """Name of the system schema.""" - + state: str """The current state of enablement for the system schema. An empty string means the system schema is available and ready for opt-in. 
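A short sketch of the SystemSchemaInfo hunk that begins above ("access" is used here as an example schema name):

from databricks.sdk.service.catalog import SystemSchemaInfo

info = SystemSchemaInfo.from_dict({"schema": "access", "state": "ENABLE_COMPLETED"})
assert info.schema == "access"
# state is a plain string, not an enum; compare it against the documented values.
if info.state == "AVAILABLE":
    print(f"{info.schema} is ready for opt-in")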
Possible values: AVAILABLE | ENABLE_INITIALIZED | ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE""" - + def as_dict(self) -> dict: """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.schema is not None: - body["schema"] = self.schema - if self.state is not None: - body["state"] = self.state + if self.schema is not None: body['schema'] = self.schema + if self.state is not None: body['state'] = self.state return body def as_shallow_dict(self) -> dict: """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.schema is not None: - body["schema"] = self.schema - if self.state is not None: - body["state"] = self.state + if self.schema is not None: body['schema'] = self.schema + if self.state is not None: body['state'] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo: """Deserializes the SystemSchemaInfo from a dictionary.""" - return cls(schema=d.get("schema", None), state=d.get("state", None)) + return cls(schema=d.get('schema', None), state=d.get('state', None)) + + @dataclass class TableConstraint: """A table constraint, as defined by *one* of the following fields being set: __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.""" - + foreign_key_constraint: Optional[ForeignKeyConstraint] = None - + named_table_constraint: Optional[NamedTableConstraint] = None - + primary_key_constraint: Optional[PrimaryKeyConstraint] = None - + def as_dict(self) -> dict: """Serializes the TableConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.foreign_key_constraint: - body["foreign_key_constraint"] = self.foreign_key_constraint.as_dict() - if self.named_table_constraint: - body["named_table_constraint"] = self.named_table_constraint.as_dict() - if self.primary_key_constraint: - body["primary_key_constraint"] = self.primary_key_constraint.as_dict() + if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint.as_dict() + if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint.as_dict() + if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TableConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.foreign_key_constraint: - body["foreign_key_constraint"] = self.foreign_key_constraint - if self.named_table_constraint: - body["named_table_constraint"] = self.named_table_constraint - if self.primary_key_constraint: - body["primary_key_constraint"] = self.primary_key_constraint + if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint + if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint + if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableConstraint: """Deserializes the TableConstraint from a dictionary.""" - return cls( - foreign_key_constraint=_from_dict(d, "foreign_key_constraint", ForeignKeyConstraint), - named_table_constraint=_from_dict(d, "named_table_constraint", NamedTableConstraint), - primary_key_constraint=_from_dict(d, "primary_key_constraint", PrimaryKeyConstraint), - ) + return cls(foreign_key_constraint=_from_dict(d, 'foreign_key_constraint', 
ForeignKeyConstraint), named_table_constraint=_from_dict(d, 'named_table_constraint', NamedTableConstraint), primary_key_constraint=_from_dict(d, 'primary_key_constraint', PrimaryKeyConstraint)) + + @dataclass class TableDependency: """A table that is dependent on a SQL object.""" - + table_full_name: str """Full name of the dependent table, in the form of __catalog_name__.__schema_name__.__table_name__.""" - + def as_dict(self) -> dict: """Serializes the TableDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.table_full_name is not None: - body["table_full_name"] = self.table_full_name + if self.table_full_name is not None: body['table_full_name'] = self.table_full_name return body def as_shallow_dict(self) -> dict: """Serializes the TableDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.table_full_name is not None: - body["table_full_name"] = self.table_full_name + if self.table_full_name is not None: body['table_full_name'] = self.table_full_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableDependency: """Deserializes the TableDependency from a dictionary.""" - return cls(table_full_name=d.get("table_full_name", None)) + return cls(table_full_name=d.get('table_full_name', None)) + + @dataclass class TableExistsResponse: table_exists: Optional[bool] = None """Whether the table exists or not.""" - + def as_dict(self) -> dict: """Serializes the TableExistsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.table_exists is not None: - body["table_exists"] = self.table_exists + if self.table_exists is not None: body['table_exists'] = self.table_exists return body def as_shallow_dict(self) -> dict: """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.table_exists is not None: - body["table_exists"] = self.table_exists + if self.table_exists is not None: body['table_exists'] = self.table_exists return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableExistsResponse: """Deserializes the TableExistsResponse from a dictionary.""" - return cls(table_exists=d.get("table_exists", None)) + return cls(table_exists=d.get('table_exists', None)) + + @dataclass class TableInfo: access_point: Optional[str] = None """The AWS access point to use when accessing s3 for this external location.""" - + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """Name of parent catalog.""" - + columns: Optional[List[ColumnInfo]] = None """The array of __ColumnInfo__ definitions of the table's columns.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this table was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of table creator.""" - + data_access_configuration_id: Optional[str] = None """Unique ID of the Data Access Configuration to use with the table data.""" - + data_source_format: Optional[DataSourceFormat] = None """Data source format""" - + deleted_at: Optional[int] = None """Time at which this table was deleted, in epoch milliseconds.
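For the TableConstraint hunk above, a sketch of its one-of shape, assuming PrimaryKeyConstraint keeps the (name, child_columns) fields it carries elsewhere in this module; the constraint name and column are hypothetical:

from databricks.sdk.service.catalog import PrimaryKeyConstraint, TableConstraint

# Exactly one of the three constraint variants should be set.
constraint = TableConstraint(
    primary_key_constraint=PrimaryKeyConstraint(name="pk_orders", child_columns=["order_id"])
)
assert constraint.as_dict() == {
    "primary_key_constraint": {"name": "pk_orders", "child_columns": ["order_id"]}
}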
Field is omitted if table is not deleted.""" - + delta_runtime_properties_kvpairs: Optional[DeltaRuntimePropertiesKvPairs] = None """Information pertaining to current state of the delta table.""" - + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - + encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" - + full_name: Optional[str] = None """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """Name of table, relative to parent schema.""" - + owner: Optional[str] = None """Username of current owner of table.""" - + pipeline_id: Optional[str] = None """The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View, Streaming Table, etc.).""" - - properties: Optional[Dict[str, str]] = None + + properties: Optional[Dict[str,str]] = None """A map of key-value properties attached to the securable.""" - + row_filter: Optional[TableRowFilter] = None - + schema_name: Optional[str] = None """Name of parent schema relative to its parent catalog.""" - + sql_path: Optional[str] = None """List of schemes whose objects can be referenced without qualification.""" - + storage_credential_name: Optional[str] = None """Name of the storage credential, when a storage credential is configured for use with this table.""" - + storage_location: Optional[str] = None """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables)""" - + table_constraints: Optional[List[TableConstraint]] = None """List of table constraints. 
     Note: this field is not set in the output of the __listTables__ API."""
-
+    
     table_id: Optional[str] = None
     """The unique identifier of the table."""
-
+    
     table_type: Optional[TableType] = None
-
+    
     updated_at: Optional[int] = None
     """Time at which this table was last modified, in epoch milliseconds."""
-
+    
     updated_by: Optional[str] = None
     """Username of user who last modified the table."""
-
+    
     view_definition: Optional[str] = None
     """View definition SQL (when __table_type__ is **VIEW**, **MATERIALIZED_VIEW**, or
     **STREAMING_TABLE**)"""
-
+    
     view_dependencies: Optional[DependencyList] = None
     """View dependencies (when table_type == **VIEW**, **MATERIALIZED_VIEW**, or **STREAMING_TABLE**) -
     when DependencyList is None, the dependency is not provided; - when DependencyList is an empty
     list, the dependency is provided but is empty; - when DependencyList is not an empty list,
     dependencies are provided and recorded."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the TableInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
-        if self.browse_only is not None:
-            body["browse_only"] = self.browse_only
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.columns:
-            body["columns"] = [v.as_dict() for v in self.columns]
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.data_access_configuration_id is not None:
-            body["data_access_configuration_id"] = self.data_access_configuration_id
-        if self.data_source_format is not None:
-            body["data_source_format"] = self.data_source_format.value
-        if self.deleted_at is not None:
-            body["deleted_at"] = self.deleted_at
-        if self.delta_runtime_properties_kvpairs:
-            body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs.as_dict()
-        if self.effective_predictive_optimization_flag:
-            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
-        if self.enable_predictive_optimization is not None:
-            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
-        if self.encryption_details:
-            body["encryption_details"] = self.encryption_details.as_dict()
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.properties:
-            body["properties"] = self.properties
-        if self.row_filter:
-            body["row_filter"] = self.row_filter.as_dict()
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.sql_path is not None:
-            body["sql_path"] = self.sql_path
-        if self.storage_credential_name is not None:
-            body["storage_credential_name"] = self.storage_credential_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
-        if self.table_constraints:
-            body["table_constraints"] = [v.as_dict() for v in self.table_constraints]
-        if self.table_id is not None:
-            body["table_id"] = self.table_id
-        if self.table_type is not None:
-            body["table_type"] = self.table_type.value
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.view_definition is not None:
-            body["view_definition"] = self.view_definition
-        if self.view_dependencies:
-            body["view_dependencies"] = self.view_dependencies.as_dict()
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_access_configuration_id is not None: body['data_access_configuration_id'] = self.data_access_configuration_id
+        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format.value
+        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
+        if self.delta_runtime_properties_kvpairs: body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs.as_dict()
+        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict()
+        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
+        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.properties: body['properties'] = self.properties
+        if self.row_filter: body['row_filter'] = self.row_filter.as_dict()
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.table_constraints: body['table_constraints'] = [v.as_dict() for v in self.table_constraints]
+        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.table_type is not None: body['table_type'] = self.table_type.value
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.view_definition is not None: body['view_definition'] = self.view_definition
+        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
-        if self.browse_only is not None:
-            body["browse_only"] = self.browse_only
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.columns:
-            body["columns"] = self.columns
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.data_access_configuration_id is not None:
-            body["data_access_configuration_id"] = self.data_access_configuration_id
-        if self.data_source_format is not None:
-            body["data_source_format"] = self.data_source_format
-        if self.deleted_at is not None:
-            body["deleted_at"] = self.deleted_at
-        if self.delta_runtime_properties_kvpairs:
-            body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs
-        if self.effective_predictive_optimization_flag:
-            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
-        if self.enable_predictive_optimization is not None:
-            body["enable_predictive_optimization"] = self.enable_predictive_optimization
-        if self.encryption_details:
-            body["encryption_details"] = self.encryption_details
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.properties:
-            body["properties"] = self.properties
-        if self.row_filter:
-            body["row_filter"] = self.row_filter
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
-        if self.sql_path is not None:
-            body["sql_path"] = self.sql_path
-        if self.storage_credential_name is not None:
-            body["storage_credential_name"] = self.storage_credential_name
-        if self.storage_location is not None:
-            body["storage_location"] = self.storage_location
-        if self.table_constraints:
-            body["table_constraints"] = self.table_constraints
-        if self.table_id is not None:
-            body["table_id"] = self.table_id
-        if self.table_type is not None:
-            body["table_type"] = self.table_type
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.view_definition is not None:
-            body["view_definition"] = self.view_definition
-        if self.view_dependencies:
-            body["view_dependencies"] = self.view_dependencies
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.columns: body['columns'] = self.columns
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_access_configuration_id is not None: body['data_access_configuration_id'] = self.data_access_configuration_id
+        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format
+        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
+        if self.delta_runtime_properties_kvpairs: body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs
+        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.properties: body['properties'] = self.properties
+        if self.row_filter: body['row_filter'] = self.row_filter
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.table_constraints: body['table_constraints'] = self.table_constraints
+        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.table_type is not None: body['table_type'] = self.table_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.view_definition is not None: body['view_definition'] = self.view_definition
+        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableInfo:
         """Deserializes the TableInfo from a dictionary."""
-        return cls(
-            access_point=d.get("access_point", None),
-            browse_only=d.get("browse_only", None),
-            catalog_name=d.get("catalog_name", None),
-            columns=_repeated_dict(d, "columns", ColumnInfo),
-            comment=d.get("comment", None),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            data_access_configuration_id=d.get("data_access_configuration_id", None),
-            data_source_format=_enum(d, "data_source_format", DataSourceFormat),
-            deleted_at=d.get("deleted_at", None),
-            delta_runtime_properties_kvpairs=_from_dict(
-                d, "delta_runtime_properties_kvpairs", DeltaRuntimePropertiesKvPairs
-            ),
-            effective_predictive_optimization_flag=_from_dict(
-                d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag
-            ),
-            enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization),
-            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
-            full_name=d.get("full_name", None),
-            metastore_id=d.get("metastore_id", None),
-            name=d.get("name", None),
-            owner=d.get("owner", None),
-            pipeline_id=d.get("pipeline_id", None),
-            properties=d.get("properties", None),
-            row_filter=_from_dict(d, "row_filter", TableRowFilter),
-            schema_name=d.get("schema_name", None),
-            sql_path=d.get("sql_path", None),
-            storage_credential_name=d.get("storage_credential_name", None),
-            storage_location=d.get("storage_location", None),
-            table_constraints=_repeated_dict(d, "table_constraints", TableConstraint),
-            table_id=d.get("table_id", None),
-            table_type=_enum(d, "table_type", TableType),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-            view_definition=d.get("view_definition", None),
-            view_dependencies=_from_dict(d, "view_dependencies", DependencyList),
-        )
-
+        return cls(access_point=d.get('access_point', None), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), columns=_repeated_dict(d, 'columns', ColumnInfo), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_access_configuration_id=d.get('data_access_configuration_id', None), data_source_format=_enum(d, 'data_source_format', DataSourceFormat), deleted_at=d.get('deleted_at', None), delta_runtime_properties_kvpairs=_from_dict(d, 'delta_runtime_properties_kvpairs', DeltaRuntimePropertiesKvPairs), effective_predictive_optimization_flag=_from_dict(d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), pipeline_id=d.get('pipeline_id', None), properties=d.get('properties', None), row_filter=_from_dict(d, 'row_filter', TableRowFilter), schema_name=d.get('schema_name', None), sql_path=d.get('sql_path', None), storage_credential_name=d.get('storage_credential_name', None), storage_location=d.get('storage_location', None), table_constraints=_repeated_dict(d, 'table_constraints', TableConstraint), table_id=d.get('table_id', None), table_type=_enum(d, 'table_type', TableType), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), view_definition=d.get('view_definition', None), view_dependencies=_from_dict(d, 'view_dependencies', DependencyList))
+    
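
A minimal sketch of how the serialization pair above behaves, assuming only that the databricks-sdk package is importable; the table name and timestamp are placeholders. as_dict() emits only the fields that were set, so a decoded payload round-trips without null entries:

    from databricks.sdk.service.catalog import TableInfo, TableType

    payload = {
        "full_name": "main.default.trips",  # placeholder table
        "table_type": "MANAGED",
        "updated_at": 1717500000000,
    }
    info = TableInfo.from_dict(payload)
    assert info.table_type is TableType.MANAGED  # _enum() decodes the raw string
    assert info.as_dict() == payload             # unset fields are omitted, not null
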

-class TableOperation(Enum):
-    READ = "READ"
-    READ_WRITE = "READ_WRITE"
+class TableOperation(Enum):
+    
+    
+    READ = 'READ'
+    READ_WRITE = 'READ_WRITE'

 @dataclass
 class TableRowFilter:
     function_name: str
     """The full name of the row filter SQL UDF."""
-
+    
     input_column_names: List[str]
     """The list of table columns to be passed as input to the row filter function. The column types
     should match the types of the filter function arguments."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the TableRowFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_name is not None:
-            body["function_name"] = self.function_name
-        if self.input_column_names:
-            body["input_column_names"] = [v for v in self.input_column_names]
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.input_column_names: body['input_column_names'] = [v for v in self.input_column_names]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_name is not None:
-            body["function_name"] = self.function_name
-        if self.input_column_names:
-            body["input_column_names"] = self.input_column_names
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.input_column_names: body['input_column_names'] = self.input_column_names
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableRowFilter:
         """Deserializes the TableRowFilter from a dictionary."""
-        return cls(function_name=d.get("function_name", None), input_column_names=d.get("input_column_names", None))
+        return cls(function_name=d.get('function_name', None), input_column_names=d.get('input_column_names', None))
+    
+    
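
A small sketch of assembling this request body; the UDF name is hypothetical, and the column list has to line up with the UDF's argument types, as the docstring above requires:

    from databricks.sdk.service.catalog import TableRowFilter

    row_filter = TableRowFilter(
        function_name="main.security.region_filter",  # hypothetical row-filter UDF
        input_column_names=["region"],
    )
    # Both fields are required, so both keys appear in the request body.
    assert row_filter.as_dict() == {
        "function_name": "main.security.region_filter",
        "input_column_names": ["region"],
    }
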
 @dataclass
 class TableSummary:
     full_name: Optional[str] = None
     """The full name of the table."""
-
+    
     table_type: Optional[TableType] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the TableSummary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.table_type is not None:
-            body["table_type"] = self.table_type.value
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.table_type is not None: body['table_type'] = self.table_type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableSummary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.table_type is not None:
-            body["table_type"] = self.table_type
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.table_type is not None: body['table_type'] = self.table_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableSummary:
         """Deserializes the TableSummary from a dictionary."""
-        return cls(full_name=d.get("full_name", None), table_type=_enum(d, "table_type", TableType))
-
+        return cls(full_name=d.get('full_name', None), table_type=_enum(d, 'table_type', TableType))
+    

-class TableType(Enum):
-    EXTERNAL = "EXTERNAL"
-    EXTERNAL_SHALLOW_CLONE = "EXTERNAL_SHALLOW_CLONE"
-    FOREIGN = "FOREIGN"
-    MANAGED = "MANAGED"
-    MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE"
-    MATERIALIZED_VIEW = "MATERIALIZED_VIEW"
-    STREAMING_TABLE = "STREAMING_TABLE"
-    VIEW = "VIEW"
+class TableType(Enum):
+    
+    
+    EXTERNAL = 'EXTERNAL'
+    EXTERNAL_SHALLOW_CLONE = 'EXTERNAL_SHALLOW_CLONE'
+    FOREIGN = 'FOREIGN'
+    MANAGED = 'MANAGED'
+    MANAGED_SHALLOW_CLONE = 'MANAGED_SHALLOW_CLONE'
+    MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
+    STREAMING_TABLE = 'STREAMING_TABLE'
+    VIEW = 'VIEW'

 @dataclass
 class TagKeyValue:
     key: Optional[str] = None
     """name of the tag"""
-
+    
     value: Optional[str] = None
     """value of the tag associated with the key, could be optional"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the TagKeyValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None:
-            body["key"] = self.key
-        if self.value is not None:
-            body["value"] = self.value
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TagKeyValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None:
-            body["key"] = self.key
-        if self.value is not None:
-            body["value"] = self.value
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TagKeyValue:
         """Deserializes the TagKeyValue from a dictionary."""
-        return cls(key=d.get("key", None), value=d.get("value", None))
+        return cls(key=d.get('key', None), value=d.get('value', None))
+    
+    

 @dataclass
@@ -8958,103 +7019,87 @@ class TemporaryCredentials:
     aws_temp_credentials: Optional[AwsCredentials] = None
     """AWS temporary credentials for API authentication. Read more at
     https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
-
+    
     azure_aad: Optional[AzureActiveDirectoryToken] = None
     """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or
     Managed Identity. Read more at
     https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
-
+    
     expiration_time: Optional[int] = None
     """Server time when the credential will expire, in epoch milliseconds. The API client is advised
     to cache the credential given this expiration time."""
-
+    
     gcp_oauth_token: Optional[GcpOauthToken] = None
     """GCP temporary credentials for API authentication. Read more at
     https://developers.google.com/identity/protocols/oauth2/service-account"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_temp_credentials:
-            body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict()
-        if self.azure_aad:
-            body["azure_aad"] = self.azure_aad.as_dict()
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.gcp_oauth_token:
-            body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict()
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_temp_credentials:
-            body["aws_temp_credentials"] = self.aws_temp_credentials
-        if self.azure_aad:
-            body["azure_aad"] = self.azure_aad
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.gcp_oauth_token:
-            body["gcp_oauth_token"] = self.gcp_oauth_token
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
+        if self.azure_aad: body['azure_aad'] = self.azure_aad
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TemporaryCredentials:
         """Deserializes the TemporaryCredentials from a dictionary."""
-        return cls(
-            aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials),
-            azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken),
-            expiration_time=d.get("expiration_time", None),
-            gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken),
-        )
+        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials), azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken), expiration_time=d.get('expiration_time', None), gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken))
+    
+    
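
The expiration_time docstring advises caching the credential; one possible caching shape, as a sketch (the fetch callable standing in for the temporary-credentials API call is an assumption):

    import time
    from typing import Callable, Dict
    from databricks.sdk.service.catalog import TemporaryCredentials

    _cache: Dict[str, TemporaryCredentials] = {}

    def cached_credentials(key: str, fetch: Callable[[], TemporaryCredentials]) -> TemporaryCredentials:
        # Re-fetch when the cached credential is missing or within 60s of expiry.
        now_ms = int(time.time() * 1000)
        creds = _cache.get(key)
        if creds is None or (creds.expiration_time or 0) - now_ms < 60_000:
            creds = fetch()  # hypothetical call into the credentials API
            _cache[key] = creds
        return creds
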
 @dataclass
 class TriggeredUpdateStatus:
     """Detailed status of an online table. Shown if the online table is in the
     ONLINE_TRIGGERED_UPDATE or the ONLINE_NO_PENDING_UPDATE state."""
-
+    
     last_processed_commit_version: Optional[int] = None
     """The last source table Delta version that was synced to the online table. Note that this Delta
     version may not be completely synced to the online table yet."""
-
+    
     timestamp: Optional[str] = None
     """The timestamp of the last time any data was synchronized from the source table to the online
     table."""
-
+    
     triggered_update_progress: Optional[PipelineProgress] = None
     """Progress of the active data synchronization pipeline."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.last_processed_commit_version is not None:
-            body["last_processed_commit_version"] = self.last_processed_commit_version
-        if self.timestamp is not None:
-            body["timestamp"] = self.timestamp
-        if self.triggered_update_progress:
-            body["triggered_update_progress"] = self.triggered_update_progress.as_dict()
+        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.last_processed_commit_version is not None:
-            body["last_processed_commit_version"] = self.last_processed_commit_version
-        if self.timestamp is not None:
-            body["timestamp"] = self.timestamp
-        if self.triggered_update_progress:
-            body["triggered_update_progress"] = self.triggered_update_progress
+        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TriggeredUpdateStatus:
         """Deserializes the TriggeredUpdateStatus from a dictionary."""
-        return cls(
-            last_processed_commit_version=d.get("last_processed_commit_version", None),
-            timestamp=d.get("timestamp", None),
-            triggered_update_progress=_from_dict(d, "triggered_update_progress", PipelineProgress),
-        )
+        return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None), triggered_update_progress=_from_dict(d, 'triggered_update_progress', PipelineProgress))
+    
+    
+    
+    
+    

 @dataclass
@@ -9073,6 +7118,8 @@ def as_shallow_dict(self) -> dict:
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse:
         """Deserializes the UnassignResponse from a dictionary."""
         return cls()
+    
+    

 @dataclass
@@ -9091,414 +7138,306 @@ def as_shallow_dict(self) -> dict:
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse:
         """Deserializes the UpdateAssignmentResponse from a dictionary."""
         return cls()
+    
+    

 @dataclass
 class UpdateCatalog:
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+    
     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
     """Whether predictive optimization should be enabled for this object and objects under it."""
-
+    
     isolation_mode: Optional[CatalogIsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of
     workspaces."""
-
+    
     name: Optional[str] = None
     """The name of the catalog."""
-
+    
     new_name: Optional[str] = None
     """New name for the catalog."""
-
-    options: Optional[Dict[str, str]] = None
+    
+    options: Optional[Dict[str,str]] = None
     """A map of key-value properties attached to the securable."""
-
+    
     owner: Optional[str] = None
     """Username of current owner of catalog."""
-
-    properties: Optional[Dict[str, str]] = None
+    
+    properties: Optional[Dict[str,str]] = None
     """A map of key-value properties attached to the securable."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateCatalog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.enable_predictive_optimization is not None:
-            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode.value
-        if self.name is not None:
-            body["name"] = self.name
-        if self.new_name is not None:
-            body["new_name"] = self.new_name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties:
-            body["properties"] = self.properties
+        if self.comment is not None: body['comment'] = self.comment
+        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.enable_predictive_optimization is not None:
-            body["enable_predictive_optimization"] = self.enable_predictive_optimization
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode
-        if self.name is not None:
-            body["name"] = self.name
-        if self.new_name is not None:
-            body["new_name"] = self.new_name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties:
-            body["properties"] = self.properties
+        if self.comment is not None: body['comment'] = self.comment
+        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalog:
         """Deserializes the UpdateCatalog from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization),
-            isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode),
-            name=d.get("name", None),
-            new_name=d.get("new_name", None),
-            options=d.get("options", None),
-            owner=d.get("owner", None),
-            properties=d.get("properties", None),
-        )
+        return cls(comment=d.get('comment', None), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode), name=d.get('name', None), new_name=d.get('new_name', None), options=d.get('options', None), owner=d.get('owner', None), properties=d.get('properties', None))
+    
+    
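
A sketch of driving this request type through the workspace client; the catalog name and comment are placeholders, and it assumes the generated w.catalogs.update() wrapper exposes these same fields as keyword arguments:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import EnablePredictiveOptimization

    w = WorkspaceClient()
    updated = w.catalogs.update(
        name="my_catalog",  # placeholder catalog
        comment="owned by the data platform team",
        enable_predictive_optimization=EnablePredictiveOptimization.ENABLE,
    )
    print(updated.name)
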

 @dataclass
 class UpdateCatalogWorkspaceBindingsResponse:
     workspaces: Optional[List[int]] = None
     """A list of workspace IDs"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.workspaces:
-            body["workspaces"] = [v for v in self.workspaces]
+        if self.workspaces: body['workspaces'] = [v for v in self.workspaces]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.workspaces:
-            body["workspaces"] = self.workspaces
+        if self.workspaces: body['workspaces'] = self.workspaces
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse:
         """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary."""
-        return cls(workspaces=d.get("workspaces", None))
+        return cls(workspaces=d.get('workspaces', None))
+    
+    

 @dataclass
 class UpdateConnection:
-    options: Dict[str, str]
+    options: Dict[str,str]
     """A map of key-value properties attached to the securable."""
-
+    
     name: Optional[str] = None
     """Name of the connection."""
-
+    
     new_name: Optional[str] = None
     """New name for the connection."""
-
+    
     owner: Optional[str] = None
     """Username of current owner of the connection."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateConnection into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.new_name is not None:
-            body["new_name"] = self.new_name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateConnection into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.new_name is not None:
-            body["new_name"] = self.new_name
-        if self.options:
-            body["options"] = self.options
-        if self.owner is not None:
-            body["owner"] = self.owner
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateConnection:
         """Deserializes the UpdateConnection from a dictionary."""
-        return cls(
-            name=d.get("name", None),
-            new_name=d.get("new_name", None),
-            options=d.get("options", None),
-            owner=d.get("owner", None),
-        )
+        return cls(name=d.get('name', None), new_name=d.get('new_name', None), options=d.get('options', None), owner=d.get('owner', None))
+    
+    

 @dataclass
 class UpdateCredentialRequest:
     aws_iam_role: Optional[AwsIamRole] = None
     """The AWS IAM role configuration"""
-
+    
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
-
+    
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
-
+    
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+    
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
-
+    
     force: Optional[bool] = None
     """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
     external locations and external tables (when purpose is **STORAGE**)."""
-
+    
     isolation_mode: Optional[IsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of
     workspaces."""
-
+    
     name_arg: Optional[str] = None
     """Name of the credential."""
-
+    
     new_name: Optional[str] = None
     """New name of credential."""
-
+    
     owner: Optional[str] = None
     """Username of current owner of credential."""
-
+    
     read_only: Optional[bool] = None
     """Whether the credential is usable only for read operations. Only applicable when purpose is
     **STORAGE**."""
-
+    
     skip_validation: Optional[bool] = None
     """Supply true to this argument to skip validation of the updated credential."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal.as_dict()
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
-        if self.force is not None:
-            body["force"] = self.force
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode.value
-        if self.name_arg is not None:
-            body["name_arg"] = self.name_arg
-        if self.new_name is not None:
-            body["new_name"] = self.new_name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.name_arg is not None: body['name_arg'] = self.name_arg
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role:
-            body["aws_iam_role"] = self.aws_iam_role
-        if self.azure_managed_identity:
-            body["azure_managed_identity"] = self.azure_managed_identity
-        if self.azure_service_principal:
-            body["azure_service_principal"] = self.azure_service_principal
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.databricks_gcp_service_account:
-            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
-        if self.force is not None:
-            body["force"] = self.force
-        if self.isolation_mode is not None:
-            body["isolation_mode"] = self.isolation_mode
-        if self.name_arg is not None:
-            body["name_arg"] = self.name_arg
-        if self.new_name is not None:
-            body["new_name"] = self.new_name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.read_only is not None:
-            body["read_only"] = self.read_only
-        if self.skip_validation is not None:
-            body["skip_validation"] = self.skip_validation
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name_arg is not None: body['name_arg'] = self.name_arg
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialRequest:
         """Deserializes the UpdateCredentialRequest from a dictionary."""
-        return cls(
-            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
-            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
-            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
-            comment=d.get("comment", None),
-            databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount),
-            force=d.get("force", None),
-            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
-            name_arg=d.get("name_arg", None),
-            new_name=d.get("new_name", None),
-            owner=d.get("owner", None),
-            read_only=d.get("read_only", None),
-            skip_validation=d.get("skip_validation", None),
-        )
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name_arg=d.get('name_arg', None), new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None))
+    
+    
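
One way to read the fields above, as a sketch with a placeholder credential name and role ARN: name_arg identifies the credential being updated, while skip_validation=True defers the connectivity check:

    from databricks.sdk.service.catalog import AwsIamRole, UpdateCredentialRequest

    req = UpdateCredentialRequest(
        name_arg="my_storage_credential",  # placeholder credential name
        aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/uc-access"),
        skip_validation=True,
    )
    body = req.as_dict()
    # Nested configs serialize through their own as_dict(); flags pass through.
    assert body["aws_iam_role"]["role_arn"] == "arn:aws:iam::123456789012:role/uc-access"
    assert body["skip_validation"] is True
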
"""[Create:OPT Update:OPT] Whether to enable file events on this external location.""" - + encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" - + fallback: Optional[bool] = None """Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient.""" - + file_event_queue: Optional[FileEventQueue] = None """[Create:OPT Update:OPT] File event queue settings.""" - + force: Optional[bool] = None """Force update even if changing url invalidates dependent external tables or mounts.""" - + isolation_mode: Optional[IsolationMode] = None - + name: Optional[str] = None """Name of the external location.""" - + new_name: Optional[str] = None """New name for the external location.""" - + owner: Optional[str] = None """The owner of the external location.""" - + read_only: Optional[bool] = None """Indicates whether the external location is read-only.""" - + skip_validation: Optional[bool] = None """Skips validation of the storage credential associated with the external location.""" - + url: Optional[str] = None """Path URL of the external location.""" - + def as_dict(self) -> dict: """Serializes the UpdateExternalLocation into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url + if self.comment is not None: body['comment'] = self.comment + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events + if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() + if self.fallback is not None: body['fallback'] = self.fallback + if self.file_event_queue: body['file_event_queue'] = self.file_event_queue.as_dict() + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExternalLocation into 
a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url + if self.comment is not None: body['comment'] = self.comment + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events + if self.encryption_details: body['encryption_details'] = self.encryption_details + if self.fallback is not None: body['fallback'] = self.fallback + if self.file_event_queue: body['file_event_queue'] = self.file_event_queue + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExternalLocation: """Deserializes the UpdateExternalLocation from a dictionary.""" - return cls( - comment=d.get("comment", None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - url=d.get("url", None), - ) + return cls(comment=d.get('comment', None), credential_name=d.get('credential_name', None), enable_file_events=d.get('enable_file_events', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), fallback=d.get('fallback', None), file_event_queue=_from_dict(d, 'file_event_queue', FileEventQueue), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None), url=d.get('url', None)) + + @dataclass @@ -9506,32 +7445,30 @@ class UpdateFunction: name: Optional[str] = None 
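
A sketch of the force flag in practice, assuming an existing location (the location name and bucket path are placeholders): with force=True the URL change goes through even if dependent external tables still reference the old path:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    loc = w.external_locations.update(
        name="landing_zone",              # placeholder location name
        url="s3://my-bucket/landing-v2",  # placeholder bucket/path
        force=True,
    )
    print(loc.url)
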
"""The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__).""" - + owner: Optional[str] = None """Username of current owner of function.""" - + def as_dict(self) -> dict: """Serializes the UpdateFunction into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateFunction: """Deserializes the UpdateFunction from a dictionary.""" - return cls(name=d.get("name", None), owner=d.get("owner", None)) + return cls(name=d.get('name', None), owner=d.get('owner', None)) + + @dataclass @@ -9539,394 +7476,317 @@ class UpdateMetastore: delta_sharing_organization_name: Optional[str] = None """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.""" - + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None """The lifetime of delta sharing recipient token in seconds.""" - - delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope] = None + + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None """The scope of Delta Sharing enabled for the metastore.""" - + id: Optional[str] = None """Unique ID of the metastore.""" - + new_name: Optional[str] = None """New name for the metastore.""" - + owner: Optional[str] = None """The owner of the metastore.""" - + privilege_model_version: Optional[str] = None """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" - + storage_root_credential_id: Optional[str] = None """UUID of storage credential to access the metastore storage_root.""" - + def as_dict(self) -> dict: """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id + if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if 
self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value + if self.id is not None: body['id'] = self.id + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version + if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id + if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds + if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope + if self.id is not None: body['id'] = self.id + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version + if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: """Deserializes the UpdateMetastore from a dictionary.""" - return cls( - delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), - delta_sharing_recipient_token_lifetime_in_seconds=d.get( - "delta_sharing_recipient_token_lifetime_in_seconds", None - ), - delta_sharing_scope=_enum(d, "delta_sharing_scope", UpdateMetastoreDeltaSharingScope), - id=d.get("id", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - privilege_model_version=d.get("privilege_model_version", None), - storage_root_credential_id=d.get("storage_root_credential_id", None), - ) + return cls(delta_sharing_organization_name=d.get('delta_sharing_organization_name', None), delta_sharing_recipient_token_lifetime_in_seconds=d.get('delta_sharing_recipient_token_lifetime_in_seconds', None), delta_sharing_scope=_enum(d, 'delta_sharing_scope', DeltaSharingScopeEnum), id=d.get('id', None), new_name=d.get('new_name', None), owner=d.get('owner', None), privilege_model_version=d.get('privilege_model_version', None), storage_root_credential_id=d.get('storage_root_credential_id', None)) + + @dataclass class UpdateMetastoreAssignment: 
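
Note the type swap above: delta_sharing_scope is now typed as DeltaSharingScopeEnum rather than the removed UpdateMetastoreDeltaSharingScope. A sketch with a placeholder metastore ID, assuming the new enum carries the same INTERNAL / INTERNAL_AND_EXTERNAL values as the one it replaces:

    from databricks.sdk.service.catalog import DeltaSharingScopeEnum, UpdateMetastore

    req = UpdateMetastore(
        id="11111111-2222-3333-4444-555555555555",  # placeholder metastore ID
        delta_sharing_scope=DeltaSharingScopeEnum.INTERNAL_AND_EXTERNAL,
        delta_sharing_recipient_token_lifetime_in_seconds=3600,
    )
    # as_dict() serializes the enum through .value, exactly as before the rename.
    assert req.as_dict()["delta_sharing_scope"] == "INTERNAL_AND_EXTERNAL"
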

 @dataclass
 class UpdateMetastoreAssignment:
     default_catalog_name: Optional[str] = None
-    """The name of the default catalog in the metastore. This field is depracted. Please use "Default
+    """The name of the default catalog in the metastore. This field is deprecated. Please use "Default
     Namespace API" to configure the default catalog for a Databricks workspace."""
-
+    
     metastore_id: Optional[str] = None
     """The unique ID of the metastore."""
-
+    
     workspace_id: Optional[int] = None
     """A workspace ID."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_catalog_name is not None:
-            body["default_catalog_name"] = self.default_catalog_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.workspace_id is not None:
-            body["workspace_id"] = self.workspace_id
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_catalog_name is not None:
-            body["default_catalog_name"] = self.default_catalog_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.workspace_id is not None:
-            body["workspace_id"] = self.workspace_id
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastoreAssignment:
         """Deserializes the UpdateMetastoreAssignment from a dictionary."""
-        return cls(
-            default_catalog_name=d.get("default_catalog_name", None),
-            metastore_id=d.get("metastore_id", None),
-            workspace_id=d.get("workspace_id", None),
-        )
-
-
-class UpdateMetastoreDeltaSharingScope(Enum):
-    """The scope of Delta Sharing enabled for the metastore."""
+        return cls(default_catalog_name=d.get('default_catalog_name', None), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None))
+    

-    INTERNAL = "INTERNAL"
-    INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL"

 @dataclass
 class UpdateModelVersionRequest:
     comment: Optional[str] = None
     """The comment attached to the model version"""
-
+    
     full_name: Optional[str] = None
     """The three-level (fully qualified) name of the model version"""
-
+    
     version: Optional[int] = None
     """The integer version number of the model version"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.version is not None:
-            body["version"] = self.version
+        if self.comment is not None: body['comment'] = self.comment
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.version is not None: body['version'] = self.version
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.full_name is not None:
-            body["full_name"] = self.full_name
-        if self.version is not None:
-            body["version"] = self.version
+        if self.comment is not None: body['comment'] = self.comment
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.version is not None: body['version'] = self.version
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest:
         """Deserializes the UpdateModelVersionRequest from a dictionary."""
-        return cls(comment=d.get("comment", None), full_name=d.get("full_name", None), version=d.get("version", None))
+        return cls(comment=d.get('comment', None), full_name=d.get('full_name', None), version=d.get('version', None))
+    
+    

 @dataclass
 class UpdateMonitor:
     output_schema_name: str
     """Schema where output metric tables are created."""
-
+    
     baseline_table_name: Optional[str] = None
     """Name of the baseline table from which drift metrics are computed. Columns in the monitored
     table should also be present in the baseline table."""
-
+    
     custom_metrics: Optional[List[MonitorMetric]] = None
     """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived
     metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across
     time windows)."""
-
+    
     dashboard_id: Optional[str] = None
     """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in
     PENDING state."""
-
+    
     data_classification_config: Optional[MonitorDataClassificationConfig] = None
     """The data classification config for the monitor."""
-
+    
     inference_log: Optional[MonitorInferenceLog] = None
     """Configuration for monitoring inference logs."""
-
+    
     notifications: Optional[MonitorNotifications] = None
     """The notification settings for the monitor."""
-
+    
     schedule: Optional[MonitorCronSchedule] = None
     """The schedule for automatically updating and refreshing metric tables."""
-
+    
     slicing_exprs: Optional[List[str]] = None
     """List of column expressions to slice data with for targeted analysis. The data is grouped by
     each expression independently, resulting in a separate slice for each predicate and its
     complements. For high-cardinality columns, only the top 100 unique values by frequency will
     generate slices."""
-
+    
     snapshot: Optional[MonitorSnapshot] = None
     """Configuration for monitoring snapshot tables."""
-
+    
     table_name: Optional[str] = None
     """Full name of the table."""
-
+    
     time_series: Optional[MonitorTimeSeries] = None
     """Configuration for monitoring time series tables."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateMonitor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.baseline_table_name is not None:
-            body["baseline_table_name"] = self.baseline_table_name
-        if self.custom_metrics:
-            body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics]
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.data_classification_config:
-            body["data_classification_config"] = self.data_classification_config.as_dict()
-        if self.inference_log:
-            body["inference_log"] = self.inference_log.as_dict()
-        if self.notifications:
-            body["notifications"] = self.notifications.as_dict()
-        if self.output_schema_name is not None:
-            body["output_schema_name"] = self.output_schema_name
-        if self.schedule:
-            body["schedule"] = self.schedule.as_dict()
-        if self.slicing_exprs:
-            body["slicing_exprs"] = [v for v in self.slicing_exprs]
-        if self.snapshot:
-            body["snapshot"] = self.snapshot.as_dict()
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.time_series:
-            body["time_series"] = self.time_series.as_dict()
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config.as_dict()
+        if self.inference_log: body['inference_log'] = self.inference_log.as_dict()
+        if self.notifications: body['notifications'] = self.notifications.as_dict()
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule.as_dict()
+        if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
+        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.baseline_table_name is not None:
-            body["baseline_table_name"] = self.baseline_table_name
-        if self.custom_metrics:
-            body["custom_metrics"] = self.custom_metrics
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.data_classification_config:
-            body["data_classification_config"] = self.data_classification_config
-        if self.inference_log:
-            body["inference_log"] = self.inference_log
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.output_schema_name is not None:
-            body["output_schema_name"] = self.output_schema_name
-        if self.schedule:
-            body["schedule"] = self.schedule
-        if self.slicing_exprs:
-            body["slicing_exprs"] = self.slicing_exprs
-        if self.snapshot:
-            body["snapshot"] = self.snapshot
-        if self.table_name is not None:
-            body["table_name"] = self.table_name
-        if self.time_series:
-            body["time_series"] = self.time_series
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateMonitor:
         """Deserializes the UpdateMonitor from a dictionary."""
-        return cls(
-            baseline_table_name=d.get("baseline_table_name", None),
-            custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric),
-            dashboard_id=d.get("dashboard_id", None),
-            data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig),
-            inference_log=_from_dict(d, "inference_log", MonitorInferenceLog),
-            notifications=_from_dict(d, "notifications", MonitorNotifications),
-            output_schema_name=d.get("output_schema_name", None),
-            schedule=_from_dict(d, "schedule", MonitorCronSchedule),
-            slicing_exprs=d.get("slicing_exprs", None),
-            snapshot=_from_dict(d, "snapshot", MonitorSnapshot),
-            table_name=d.get("table_name", None),
-            time_series=_from_dict(d, "time_series", MonitorTimeSeries),
-        )
+        return cls(baseline_table_name=d.get('baseline_table_name', None), custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric), dashboard_id=d.get('dashboard_id', None), data_classification_config=_from_dict(d, 'data_classification_config', MonitorDataClassificationConfig), inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog), notifications=_from_dict(d, 'notifications', MonitorNotifications), output_schema_name=d.get('output_schema_name', None), schedule=_from_dict(d, 'schedule', MonitorCronSchedule), slicing_exprs=d.get('slicing_exprs', None), snapshot=_from_dict(d, 'snapshot', MonitorSnapshot), table_name=d.get('table_name', None), time_series=_from_dict(d, 'time_series', MonitorTimeSeries))
+    
+    
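
A sketch of assembling this request with placeholder table and schema names, assuming MonitorSnapshot remains a field-less config: switching a monitor to snapshot profiling and slicing by two columns:

    from databricks.sdk.service.catalog import MonitorSnapshot, UpdateMonitor

    req = UpdateMonitor(
        output_schema_name="main.monitoring",  # placeholder output schema
        table_name="main.default.trips",       # placeholder monitored table
        snapshot=MonitorSnapshot(),
        slicing_exprs=["region", "vendor_id"],
    )
    body = req.as_dict()
    assert body["slicing_exprs"] == ["region", "vendor_id"]
    assert body["snapshot"] == {}  # assumed: MonitorSnapshot serializes empty
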
self.time_series + if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name + if self.custom_metrics: body['custom_metrics'] = self.custom_metrics + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.data_classification_config: body['data_classification_config'] = self.data_classification_config + if self.inference_log: body['inference_log'] = self.inference_log + if self.notifications: body['notifications'] = self.notifications + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.schedule: body['schedule'] = self.schedule + if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs + if self.snapshot: body['snapshot'] = self.snapshot + if self.table_name is not None: body['table_name'] = self.table_name + if self.time_series: body['time_series'] = self.time_series return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateMonitor: """Deserializes the UpdateMonitor from a dictionary.""" - return cls( - baseline_table_name=d.get("baseline_table_name", None), - custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), - dashboard_id=d.get("dashboard_id", None), - data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), - inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), - notifications=_from_dict(d, "notifications", MonitorNotifications), - output_schema_name=d.get("output_schema_name", None), - schedule=_from_dict(d, "schedule", MonitorCronSchedule), - slicing_exprs=d.get("slicing_exprs", None), - snapshot=_from_dict(d, "snapshot", MonitorSnapshot), - table_name=d.get("table_name", None), - time_series=_from_dict(d, "time_series", MonitorTimeSeries), - ) + return cls(baseline_table_name=d.get('baseline_table_name', None), custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric), dashboard_id=d.get('dashboard_id', None), data_classification_config=_from_dict(d, 'data_classification_config', MonitorDataClassificationConfig), inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog), notifications=_from_dict(d, 'notifications', MonitorNotifications), output_schema_name=d.get('output_schema_name', None), schedule=_from_dict(d, 'schedule', MonitorCronSchedule), slicing_exprs=d.get('slicing_exprs', None), snapshot=_from_dict(d, 'snapshot', MonitorSnapshot), table_name=d.get('table_name', None), time_series=_from_dict(d, 'time_series', MonitorTimeSeries)) + + @dataclass class UpdatePermissions: changes: Optional[List[PermissionsChange]] = None """Array of permissions change objects.""" - + full_name: Optional[str] = None """Full name of securable.""" - - securable_type: Optional[SecurableType] = None + + securable_type: Optional[str] = None """Type of securable.""" - + def as_dict(self) -> dict: """Serializes the UpdatePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.changes: - body["changes"] = [v.as_dict() for v in self.changes] - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type.value + if self.changes: body['changes'] = [v.as_dict() for v in self.changes] + if self.full_name is not None: body['full_name'] = self.full_name + if self.securable_type is not None: body['securable_type'] = self.securable_type return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePermissions into a shallow dictionary of its 
immediate attributes.""" body = {} - if self.changes: - body["changes"] = self.changes - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + if self.changes: body['changes'] = self.changes + if self.full_name is not None: body['full_name'] = self.full_name + if self.securable_type is not None: body['securable_type'] = self.securable_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissions: """Deserializes the UpdatePermissions from a dictionary.""" - return cls( - changes=_repeated_dict(d, "changes", PermissionsChange), - full_name=d.get("full_name", None), - securable_type=_enum(d, "securable_type", SecurableType), - ) + return cls(changes=_repeated_dict(d, 'changes', PermissionsChange), full_name=d.get('full_name', None), securable_type=d.get('securable_type', None)) + + + + +@dataclass +class UpdatePermissionsResponse: + privilege_assignments: Optional[List[PrivilegeAssignment]] = None + """The privileges assigned to each principal""" + + def as_dict(self) -> dict: + """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissionsResponse: + """Deserializes the UpdatePermissionsResponse from a dictionary.""" + return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment)) + + @dataclass class UpdateRegisteredModelRequest: comment: Optional[str] = None """The comment attached to the registered model""" - + full_name: Optional[str] = None """The three-level (fully qualified) name of the registered model""" - + new_name: Optional[str] = None """New name for the registered model.""" - + owner: Optional[str] = None """The identifier of the user who owns the registered model""" - + def as_dict(self) -> dict: """Serializes the UpdateRegisteredModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner + if self.comment is not None: body['comment'] = self.comment + if self.full_name is not None: body['full_name'] = self.full_name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner + if self.comment is not None: body['comment'] = self.comment + if self.full_name is not None: body['full_name'] = self.full_name + if self.new_name is not None: body['new_name'] = 
self.new_name + if self.owner is not None: body['owner'] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRegisteredModelRequest: """Deserializes the UpdateRegisteredModelRequest from a dictionary.""" - return cls( - comment=d.get("comment", None), - full_name=d.get("full_name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - ) + return cls(comment=d.get('comment', None), full_name=d.get('full_name', None), new_name=d.get('new_name', None), owner=d.get('owner', None)) + + @dataclass @@ -9945,456 +7805,380 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() + + @dataclass class UpdateSchema: comment: Optional[str] = None """User-provided free-form text description.""" - + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - + """Whether predictive optimization should be enabled for this object and objects under it.""" + full_name: Optional[str] = None """Full name of the schema.""" - + new_name: Optional[str] = None """New name for the schema.""" - + owner: Optional[str] = None """Username of current owner of schema.""" - - properties: Optional[Dict[str, str]] = None + + properties: Optional[Dict[str,str]] = None """A map of key-value properties attached to the securable.""" - + def as_dict(self) -> dict: """Serializes the UpdateSchema into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties + if self.comment is not None: body['comment'] = self.comment + if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value + if self.full_name is not None: body['full_name'] = self.full_name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties return body def as_shallow_dict(self) -> dict: """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties + if self.comment is not None: body['comment'] = self.comment + if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization + if self.full_name is not None: body['full_name'] = self.full_name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.properties: body['properties'] = self.properties return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
UpdateSchema: """Deserializes the UpdateSchema from a dictionary.""" - return cls( - comment=d.get("comment", None), - enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - full_name=d.get("full_name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - properties=d.get("properties", None), - ) + return cls(comment=d.get('comment', None), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), full_name=d.get('full_name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), properties=d.get('properties', None)) + + @dataclass class UpdateStorageCredential: aws_iam_role: Optional[AwsIamRoleRequest] = None """The AWS IAM role configuration.""" - + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None """The Azure managed identity configuration.""" - + azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" - + cloudflare_api_token: Optional[CloudflareApiToken] = None """The Cloudflare API token configuration.""" - + comment: Optional[str] = None """Comment associated with the credential.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None """The Databricks managed GCP service account configuration.""" - + force: Optional[bool] = None """Force update even if there are dependent external locations or external tables.""" - + isolation_mode: Optional[IsolationMode] = None - + name: Optional[str] = None """Name of the storage credential.""" - + new_name: Optional[str] = None """New name for the storage credential.""" - + owner: Optional[str] = None """Username of current owner of credential.""" - + read_only: Optional[bool] = None """Whether the storage credential is only usable for read operations.""" - + skip_validation: Optional[bool] = None """Supplying true to this argument skips validation of the updated credential.""" - + def as_dict(self) -> dict: """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: 
body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() + if self.comment is not None: body['comment'] = self.comment + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body def as_shallow_dict(self) -> dict: """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token + if self.comment is not None: body['comment'] = self.comment + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.force is not None: body['force'] = self.force + if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.read_only is not None: body['read_only'] = self.read_only + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: """Deserializes the UpdateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", 
DatabricksGcpServiceAccountRequest - ), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) + return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityResponse), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountRequest), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None)) + + + + +@dataclass +class UpdateTableRequest: + """Update a table owner.""" + + full_name: Optional[str] = None + """Full name of the table.""" + + owner: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the UpdateTableRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.full_name is not None: body['full_name'] = self.full_name + if self.owner is not None: body['owner'] = self.owner + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateTableRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.full_name is not None: body['full_name'] = self.full_name + if self.owner is not None: body['owner'] = self.owner + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateTableRequest: + """Deserializes the UpdateTableRequest from a dictionary.""" + return cls(full_name=d.get('full_name', None), owner=d.get('owner', None)) + + @dataclass class UpdateVolumeRequestContent: comment: Optional[str] = None """The comment attached to the volume""" - + name: Optional[str] = None """The three-level (fully qualified) name of the volume""" - + new_name: Optional[str] = None """New name for the volume.""" - + owner: Optional[str] = None """The identifier of the user who owns the volume""" - + def as_dict(self) -> dict: """Serializes the UpdateVolumeRequestContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the UpdateVolumeRequestContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name 
is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateVolumeRequestContent: """Deserializes the UpdateVolumeRequestContent from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - ) + return cls(comment=d.get('comment', None), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None)) + + @dataclass class UpdateWorkspaceBindings: assign_workspaces: Optional[List[int]] = None """A list of workspace IDs.""" - + name: Optional[str] = None """The name of the catalog.""" - + unassign_workspaces: Optional[List[int]] = None """A list of workspace IDs.""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assign_workspaces: - body["assign_workspaces"] = [v for v in self.assign_workspaces] - if self.name is not None: - body["name"] = self.name - if self.unassign_workspaces: - body["unassign_workspaces"] = [v for v in self.unassign_workspaces] + if self.assign_workspaces: body['assign_workspaces'] = [v for v in self.assign_workspaces] + if self.name is not None: body['name'] = self.name + if self.unassign_workspaces: body['unassign_workspaces'] = [v for v in self.unassign_workspaces] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes.""" body = {} - if self.assign_workspaces: - body["assign_workspaces"] = self.assign_workspaces - if self.name is not None: - body["name"] = self.name - if self.unassign_workspaces: - body["unassign_workspaces"] = self.unassign_workspaces + if self.assign_workspaces: body['assign_workspaces'] = self.assign_workspaces + if self.name is not None: body['name'] = self.name + if self.unassign_workspaces: body['unassign_workspaces'] = self.unassign_workspaces return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindings: """Deserializes the UpdateWorkspaceBindings from a dictionary.""" - return cls( - assign_workspaces=d.get("assign_workspaces", None), - name=d.get("name", None), - unassign_workspaces=d.get("unassign_workspaces", None), - ) + return cls(assign_workspaces=d.get('assign_workspaces', None), name=d.get('name', None), unassign_workspaces=d.get('unassign_workspaces', None)) + + @dataclass class UpdateWorkspaceBindingsParameters: add: Optional[List[WorkspaceBinding]] = None """List of workspace bindings.""" - + remove: Optional[List[WorkspaceBinding]] = None """List of workspace bindings.""" - + securable_name: Optional[str] = None """The name of the securable.""" - + securable_type: Optional[str] = None """The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location).""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body.""" body = {} - if self.add: - body["add"] = [v.as_dict() for v in self.add] - if self.remove: - body["remove"] = [v.as_dict() for v in self.remove] - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + if self.add: body['add'] = [v.as_dict() for v in self.add] + if self.remove: body['remove'] = [v.as_dict() for v in 
self.remove] + if self.securable_name is not None: body['securable_name'] = self.securable_name + if self.securable_type is not None: body['securable_type'] = self.securable_type return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes.""" body = {} - if self.add: - body["add"] = self.add - if self.remove: - body["remove"] = self.remove - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + if self.add: body['add'] = self.add + if self.remove: body['remove'] = self.remove + if self.securable_name is not None: body['securable_name'] = self.securable_name + if self.securable_type is not None: body['securable_type'] = self.securable_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsParameters: """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary.""" - return cls( - add=_repeated_dict(d, "add", WorkspaceBinding), - remove=_repeated_dict(d, "remove", WorkspaceBinding), - securable_name=d.get("securable_name", None), - securable_type=d.get("securable_type", None), - ) + return cls(add=_repeated_dict(d, 'add', WorkspaceBinding), remove=_repeated_dict(d, 'remove', WorkspaceBinding), securable_name=d.get('securable_name', None), securable_type=d.get('securable_type', None)) + + @dataclass class UpdateWorkspaceBindingsResponse: """A list of workspace IDs that are bound to the securable""" - + bindings: Optional[List[WorkspaceBinding]] = None """List of workspace bindings.""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bindings: - body["bindings"] = [v.as_dict() for v in self.bindings] + if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bindings: - body["bindings"] = self.bindings + if self.bindings: body['bindings'] = self.bindings return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsResponse: """Deserializes the UpdateWorkspaceBindingsResponse from a dictionary.""" - return cls(bindings=_repeated_dict(d, "bindings", WorkspaceBinding)) + return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding)) + + @dataclass class ValidateCredentialRequest: """Next ID: 17""" - + aws_iam_role: Optional[AwsIamRole] = None """The AWS IAM role configuration""" - + azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" - + credential_name: Optional[str] = None """Required. The name of an existing credential or long-lived cloud credential to validate.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" - + external_location_name: Optional[str] = None """The name of an existing external location to validate. Only applicable for storage credentials (purpose is **STORAGE**.)""" - + purpose: Optional[CredentialPurpose] = None """The purpose of the credential. This should only be used when the credential is specified.""" - + read_only: Optional[bool] = None """Whether the credential is only usable for read operations. 
Only applicable for storage credentials (purpose is **STORAGE**.)""" - + url: Optional[str] = None """The external location url to validate. Only applicable when purpose is **STORAGE**.""" - + def as_dict(self) -> dict: """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.purpose is not None: - body["purpose"] = self.purpose.value - if self.read_only is not None: - body["read_only"] = self.read_only - if self.url is not None: - body["url"] = self.url + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() + if self.external_location_name is not None: body['external_location_name'] = self.external_location_name + if self.purpose is not None: body['purpose'] = self.purpose.value + if self.read_only is not None: body['read_only'] = self.read_only + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.purpose is not None: - body["purpose"] = self.purpose - if self.read_only is not None: - body["read_only"] = self.read_only - if self.url is not None: - body["url"] = self.url + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.credential_name is not None: body['credential_name'] = self.credential_name + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.external_location_name is not None: body['external_location_name'] = self.external_location_name + if self.purpose is not None: body['purpose'] = self.purpose + if self.read_only is not None: body['read_only'] = self.read_only + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateCredentialRequest: """Deserializes the ValidateCredentialRequest from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), - 
credential_name=d.get("credential_name", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), - external_location_name=d.get("external_location_name", None), - purpose=_enum(d, "purpose", CredentialPurpose), - read_only=d.get("read_only", None), - url=d.get("url", None), - ) + return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), credential_name=d.get('credential_name', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), external_location_name=d.get('external_location_name', None), purpose=_enum(d, 'purpose', CredentialPurpose), read_only=d.get('read_only', None), url=d.get('url', None)) + + @dataclass @@ -10402,885 +8186,855 @@ class ValidateCredentialResponse: is_dir: Optional[bool] = None """Whether the tested location is a directory in cloud storage. Only applicable when purpose is **STORAGE**.""" - + results: Optional[List[CredentialValidationResult]] = None """The results of the validation check.""" - + def as_dict(self) -> dict: """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_dir is not None: - body["isDir"] = self.is_dir - if self.results: - body["results"] = [v.as_dict() for v in self.results] + if self.is_dir is not None: body['isDir'] = self.is_dir + if self.results: body['results'] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ValidateCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_dir is not None: - body["isDir"] = self.is_dir - if self.results: - body["results"] = self.results + if self.is_dir is not None: body['isDir'] = self.is_dir + if self.results: body['results'] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateCredentialResponse: """Deserializes the ValidateCredentialResponse from a dictionary.""" - return cls(is_dir=d.get("isDir", None), results=_repeated_dict(d, "results", CredentialValidationResult)) + return cls(is_dir=d.get('isDir', None), results=_repeated_dict(d, 'results', CredentialValidationResult)) + + class ValidateCredentialResult(Enum): """An enum representing the result of the file operation""" - - FAIL = "FAIL" - PASS = "PASS" - SKIP = "SKIP" - + + FAIL = 'FAIL' + PASS = 'PASS' + SKIP = 'SKIP' @dataclass class ValidateStorageCredential: aws_iam_role: Optional[AwsIamRoleRequest] = None """The AWS IAM role configuration.""" - + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None """The Azure managed identity configuration.""" - + azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" - + cloudflare_api_token: Optional[CloudflareApiToken] = None """The Cloudflare API token configuration.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None """The Databricks created GCP service account configuration.""" - + external_location_name: Optional[str] = None """The name of an existing external location to validate.""" - + read_only: Optional[bool] = None """Whether the storage credential is only usable for read operations.""" - + storage_credential_name: Optional[str] = None """The name of the storage credential to validate.""" - + url: Optional[str] = None """The external location url to validate.""" -
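All of the request and response dataclasses in this diff follow one serialization contract: as_dict() recursively serializes nested dataclasses and emits enum members via .value, as_shallow_dict() leaves nested objects in place, and from_dict() inverts as_dict(). A minimal round-trip sketch of that contract, assuming these classes are importable from databricks.sdk.service.catalog (the role ARN, credential name, and URL below are illustrative placeholders):

from databricks.sdk.service.catalog import AwsIamRoleRequest, ValidateStorageCredential

# Build a validation request; only the fields that are set end up in the body.
req = ValidateStorageCredential(
    aws_iam_role=AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/uc-access"),
    storage_credential_name="my_credential",
    url="s3://example-bucket/example-prefix",
    read_only=True,
)

body = req.as_dict()  # nested dataclasses are serialized recursively
assert body["aws_iam_role"] == {"role_arn": "arn:aws:iam::123456789012:role/uc-access"}
assert "external_location_name" not in body  # unset fields are omitted

shallow = req.as_shallow_dict()  # nested objects stay as dataclass instances
assert isinstance(shallow["aws_iam_role"], AwsIamRoleRequest)

assert ValidateStorageCredential.from_dict(body).url == req.url  # round trip

Note that from_dict() inverts as_dict(), not as_shallow_dict(): the shallow form still holds dataclass instances, so it is not JSON-ready.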
+ def as_dict(self) -> dict: """Serializes the ValidateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if self.url is not None: - body["url"] = self.url + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() + if self.external_location_name is not None: body['external_location_name'] = self.external_location_name + if self.read_only is not None: body['read_only'] = self.read_only + if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if self.url is not None: - body["url"] = self.url + if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role + if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity + if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal + if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token + if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account + if self.external_location_name is not None: body['external_location_name'] = self.external_location_name + if self.read_only is not None: body['read_only'] = self.read_only + if self.storage_credential_name is not None: 
body['storage_credential_name'] = self.storage_credential_name + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateStorageCredential: """Deserializes the ValidateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - external_location_name=d.get("external_location_name", None), - read_only=d.get("read_only", None), - storage_credential_name=d.get("storage_credential_name", None), - url=d.get("url", None), - ) + return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityRequest), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountRequest), external_location_name=d.get('external_location_name', None), read_only=d.get('read_only', None), storage_credential_name=d.get('storage_credential_name', None), url=d.get('url', None)) + + @dataclass class ValidateStorageCredentialResponse: is_dir: Optional[bool] = None """Whether the tested location is a directory in cloud storage.""" - + results: Optional[List[ValidationResult]] = None """The results of the validation check.""" - + def as_dict(self) -> dict: """Serializes the ValidateStorageCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_dir is not None: - body["isDir"] = self.is_dir - if self.results: - body["results"] = [v.as_dict() for v in self.results] + if self.is_dir is not None: body['isDir'] = self.is_dir + if self.results: body['results'] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ValidateStorageCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_dir is not None: - body["isDir"] = self.is_dir - if self.results: - body["results"] = self.results + if self.is_dir is not None: body['isDir'] = self.is_dir + if self.results: body['results'] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateStorageCredentialResponse: """Deserializes the ValidateStorageCredentialResponse from a dictionary.""" - return cls(is_dir=d.get("isDir", None), results=_repeated_dict(d, "results", ValidationResult)) + return cls(is_dir=d.get('isDir', None), results=_repeated_dict(d, 'results', ValidationResult)) + + @dataclass class ValidationResult: message: Optional[str] = None """Error message would exist when the result does not equal to **PASS**.""" - + operation: Optional[ValidationResultOperation] = None """The operation tested.""" - + result: Optional[ValidationResultResult] = None """The results of the tested operation.""" - + def as_dict(self) -> dict: """Serializes the ValidationResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message - if 
self.operation is not None: - body["operation"] = self.operation.value - if self.result is not None: - body["result"] = self.result.value + if self.message is not None: body['message'] = self.message + if self.operation is not None: body['operation'] = self.operation.value + if self.result is not None: body['result'] = self.result.value return body def as_shallow_dict(self) -> dict: """Serializes the ValidationResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.operation is not None: - body["operation"] = self.operation - if self.result is not None: - body["result"] = self.result + if self.message is not None: body['message'] = self.message + if self.operation is not None: body['operation'] = self.operation + if self.result is not None: body['result'] = self.result return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidationResult: """Deserializes the ValidationResult from a dictionary.""" - return cls( - message=d.get("message", None), - operation=_enum(d, "operation", ValidationResultOperation), - result=_enum(d, "result", ValidationResultResult), - ) + return cls(message=d.get('message', None), operation=_enum(d, 'operation', ValidationResultOperation), result=_enum(d, 'result', ValidationResultResult)) + + class ValidationResultOperation(Enum): """The operation tested.""" - - DELETE = "DELETE" - LIST = "LIST" - PATH_EXISTS = "PATH_EXISTS" - READ = "READ" - WRITE = "WRITE" - + + DELETE = 'DELETE' + LIST = 'LIST' + PATH_EXISTS = 'PATH_EXISTS' + READ = 'READ' + WRITE = 'WRITE' class ValidationResultResult(Enum): """The results of the tested operation.""" - - FAIL = "FAIL" - PASS = "PASS" - SKIP = "SKIP" - + + FAIL = 'FAIL' + PASS = 'PASS' + SKIP = 'SKIP' @dataclass class VolumeInfo: access_point: Optional[str] = None """The AWS access point to use when accessing s3 for this external location.""" - + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """The name of the catalog where the schema and the volume are""" - + comment: Optional[str] = None """The comment attached to the volume""" - + created_at: Optional[int] = None - + created_by: Optional[str] = None """The identifier of the user who created the volume""" - + encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" - + full_name: Optional[str] = None """The three-level (fully qualified) name of the volume""" - + metastore_id: Optional[str] = None """The unique identifier of the metastore""" - + name: Optional[str] = None """The name of the volume""" - + owner: Optional[str] = None """The identifier of the user who owns the volume""" - + schema_name: Optional[str] = None """The name of the schema where the volume is""" - + storage_location: Optional[str] = None """The storage location on the cloud""" - + updated_at: Optional[int] = None - + updated_by: Optional[str] = None """The identifier of the user who updated the volume last time""" - + volume_id: Optional[str] = None """The unique identifier of the volume""" - + volume_type: Optional[VolumeType] = None """The type of the volume. An external volume is located in the specified external location.
A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. [Learn more] [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" - + def as_dict(self) -> dict: """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_point is not None: - body["access_point"] = self.access_point - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.volume_id is not None: - body["volume_id"] = self.volume_id - if self.volume_type is not None: - body["volume_type"] = self.volume_type.value + if self.access_point is not None: body['access_point'] = self.access_point + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.volume_id is not None: body['volume_id'] = self.volume_id + if self.volume_type is not None: body['volume_type'] = self.volume_type.value return body def as_shallow_dict(self) -> dict: """Serializes the VolumeInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_point is not None: - body["access_point"] = self.access_point - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.full_name is 
not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.volume_id is not None: - body["volume_id"] = self.volume_id - if self.volume_type is not None: - body["volume_type"] = self.volume_type + if self.access_point is not None: body['access_point'] = self.access_point + if self.browse_only is not None: body['browse_only'] = self.browse_only + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.encryption_details: body['encryption_details'] = self.encryption_details + if self.full_name is not None: body['full_name'] = self.full_name + if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.volume_id is not None: body['volume_id'] = self.volume_id + if self.volume_type is not None: body['volume_type'] = self.volume_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VolumeInfo: """Deserializes the VolumeInfo from a dictionary.""" - return cls( - access_point=d.get("access_point", None), - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - full_name=d.get("full_name", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - schema_name=d.get("schema_name", None), - storage_location=d.get("storage_location", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - volume_id=d.get("volume_id", None), - volume_type=_enum(d, "volume_type", VolumeType), - ) + return cls(access_point=d.get('access_point', None), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), volume_id=d.get('volume_id', None), volume_type=_enum(d, 'volume_type', VolumeType)) + + class VolumeType(Enum): """The type of the volume. 
An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. [Learn more] - + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" - - EXTERNAL = "EXTERNAL" - MANAGED = "MANAGED" - + + EXTERNAL = 'EXTERNAL' + MANAGED = 'MANAGED' @dataclass class WorkspaceBinding: workspace_id: int """Required""" - + binding_type: Optional[WorkspaceBindingBindingType] = None """One of READ_WRITE/READ_ONLY. Default is READ_WRITE.""" - + def as_dict(self) -> dict: """Serializes the WorkspaceBinding into a dictionary suitable for use as a JSON request body.""" body = {} - if self.binding_type is not None: - body["binding_type"] = self.binding_type.value - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.binding_type is not None: body['binding_type'] = self.binding_type.value + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceBinding into a shallow dictionary of its immediate attributes.""" body = {} - if self.binding_type is not None: - body["binding_type"] = self.binding_type - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.binding_type is not None: body['binding_type'] = self.binding_type + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceBinding: """Deserializes the WorkspaceBinding from a dictionary.""" - return cls( - binding_type=_enum(d, "binding_type", WorkspaceBindingBindingType), workspace_id=d.get("workspace_id", None) - ) + return cls(binding_type=_enum(d, 'binding_type', WorkspaceBindingBindingType), workspace_id=d.get('workspace_id', None)) + + class WorkspaceBindingBindingType(Enum): """Using `BINDING_TYPE_` prefix here to avoid conflict with `TableOperation` enum in `credentials_common.proto`.""" + + BINDING_TYPE_READ_ONLY = 'BINDING_TYPE_READ_ONLY' + BINDING_TYPE_READ_WRITE = 'BINDING_TYPE_READ_WRITE' - BINDING_TYPE_READ_ONLY = "BINDING_TYPE_READ_ONLY" - BINDING_TYPE_READ_WRITE = "BINDING_TYPE_READ_WRITE" class AccountMetastoreAssignmentsAPI: """These APIs manage metastore assignments to a workspace.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[CreateMetastoreAssignment] = None - ): - """Assigns a workspace to a metastore. + - Creates an assignment to a metastore for a workspace + + + + + def create(self + , workspace_id: int, metastore_id: str + , * + , metastore_assignment: Optional[CreateMetastoreAssignment] = None): + """Assigns a workspace to a metastore. + + Creates an assignment to a metastore for a workspace + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - - + + """ body = {} - if metastore_assignment is not None: - body["metastore_assignment"] = metastore_assignment.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", - body=body, - headers=headers, - ) - - def delete(self, workspace_id: int, metastore_id: str): - """Delete a metastore assignment. 
+ if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}', body=body + + , headers=headers + ) + - Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. + + + + def delete(self + , workspace_id: int, metastore_id: str + ): + """Delete a metastore assignment. + + Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", - headers=headers, - ) + + + - def get(self, workspace_id: int) -> AccountsMetastoreAssignment: + def get(self + , workspace_id: int + ) -> AccountsMetastoreAssignment: """Gets the metastore assignment for a workspace. - + Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mapping will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a 404 returned. - + :param workspace_id: int Workspace ID. - + :returns: :class:`AccountsMetastoreAssignment` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastore", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastore' + + , headers=headers + ) return AccountsMetastoreAssignment.from_dict(res) - def list(self, metastore_id: str) -> Iterator[int]: - """Get all workspaces assigned to a metastore. + + + + def list(self + , metastore_id: str + ) -> Iterator[int]: + """Get all workspaces assigned to a metastore. + Gets a list of all Databricks workspace IDs that have been assigned to a given metastore. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: Iterator over int """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/workspaces", headers=headers - ) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/workspaces' + + , headers=headers + ) parsed = ListAccountMetastoreAssignmentsResponse.from_dict(json).workspace_ids return parsed if parsed is not None else [] + - def update( - self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[UpdateMetastoreAssignment] = None - ): - """Updates a metastore assignment to a workspaces. + + + + def update(self + , workspace_id: int, metastore_id: str + , * + , metastore_assignment: Optional[UpdateMetastoreAssignment] = None): + """Updates a metastore assignment to a workspace. + Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. - + :param workspace_id: int Workspace ID.
:param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional) - - + + """ body = {} - if metastore_assignment is not None: - body["metastore_assignment"] = metastore_assignment.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", - body=body, - headers=headers, - ) - + if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}', body=body + + , headers=headers + ) + + + class AccountMetastoresAPI: """These APIs manage Unity Catalog metastores for an account. A metastore contains catalogs that can be associated with workspaces""" - + def __init__(self, api_client): self._api = api_client + - def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo: - """Create metastore. + - Creates a Unity Catalog metastore. + - :param metastore_info: :class:`CreateMetastore` (optional) + + + def create(self + + , * + , metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo: + """Create metastore. + + Creates a Unity Catalog metastore. + + :param metastore_info: :class:`CreateMetastore` (optional) + :returns: :class:`AccountsMetastoreInfo` """ body = {} - if metastore_info is not None: - body["metastore_info"] = metastore_info.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/metastores", body=body, headers=headers) + if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/metastores', body=body + + , headers=headers + ) return AccountsMetastoreInfo.from_dict(res) - def delete(self, metastore_id: str, *, force: Optional[bool] = None): - """Delete a metastore. + + + + def delete(self + , metastore_id: str + , * + , force: Optional[bool] = None): + """Delete a metastore. + Deletes a Unity Catalog metastore for an account, both specified by ID. - + :param metastore_id: str Unity Catalog metastore ID :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", - query=query, - headers=headers, - ) - - def get(self, metastore_id: str) -> AccountsMetastoreInfo: - """Get a metastore. + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}', query=query + + , headers=headers + ) + - Gets a Unity Catalog metastore from an account, both specified by ID. + + + + def get(self + , metastore_id: str + ) -> AccountsMetastoreInfo: + """Get a metastore. + + Gets a Unity Catalog metastore from an account, both specified by ID. 
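# Editor's illustration - a minimal sketch of the account-level metastore
# assignment API above, assuming the usual `metastore_assignments` accessor
# on AccountClient; all IDs and the catalog name are placeholders.
from databricks.sdk import AccountClient
from databricks.sdk.service import catalog

a = AccountClient()
metastore_id = "12345678-1234-1234-1234-123456789012"

# Assign a workspace to an existing metastore (returns nothing on success).
a.metastore_assignments.create(
    workspace_id=1234567890,
    metastore_id=metastore_id,
    metastore_assignment=catalog.CreateMetastoreAssignment(
        metastore_id=metastore_id, default_catalog_name="main"))

# Enumerate every workspace ID bound to that metastore.
for workspace_id in a.metastore_assignments.list(metastore_id=metastore_id):
    print(workspace_id)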
+        
         :param metastore_id: str
           Unity Catalog metastore ID
-
+        
         :returns: :class:`AccountsMetastoreInfo`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", headers=headers
-        )
+        
+        headers = {'Accept': 'application/json',}
+        
+        res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}'
+        
+        , headers=headers
+        )
         return AccountsMetastoreInfo.from_dict(res)

+    
+    
+    
     def list(self) -> Iterator[MetastoreInfo]:
         """Get all metastores associated with an account.
-
+        
         Gets all Unity Catalog metastores associated with an account specified by ID.
-
+        
         :returns: Iterator over :class:`MetastoreInfo`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        json = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/metastores", headers=headers)
+        
+        headers = {'Accept': 'application/json',}
+        
+        json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores'
+        , headers=headers
+        )
         parsed = ListMetastoresResponse.from_dict(json).metastores
         return parsed if parsed is not None else []
+    
-    def update(self, metastore_id: str, *, metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo:
-        """Update a metastore.
+    
+    
+    
+    def update(self
+        , metastore_id: str
+        , *
+        , metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo:
+        """Update a metastore.
+        
         Updates an existing Unity Catalog metastore.
-
+        
         :param metastore_id: str
           Unity Catalog metastore ID
         :param metastore_info: :class:`UpdateMetastore` (optional)
-
+        
         :returns: :class:`AccountsMetastoreInfo`
         """
         body = {}
-        if metastore_info is not None:
-            body["metastore_info"] = metastore_info.as_dict()
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "PUT", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", body=body, headers=headers
-        )
+        if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict()
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}', body=body
+        
+        , headers=headers
+        )
         return AccountsMetastoreInfo.from_dict(res)

-
+    
+    
 class AccountStorageCredentialsAPI:
     """These APIs manage storage credentials for a particular metastore."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+    
-    def create(
-        self, metastore_id: str, *, credential_info: Optional[CreateStorageCredential] = None
-    ) -> AccountsStorageCredentialInfo:
-        """Create a storage credential.
+    
-        Creates a new storage credential. The request object is specific to the cloud:
+    
+    
+    
+    
+    def create(self
+        , metastore_id: str
+        , *
+        , credential_info: Optional[CreateStorageCredential] = None) -> AccountsStorageCredentialInfo:
+        """Create a storage credential.
+        
+        Creates a new storage credential. The request object is specific to the cloud:
+        
         * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
         **GcpServiceAccountKey** for GCP credentials.
-
+        
         The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the
         metastore.
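# Editor's illustration - exercising the AccountMetastoresAPI above end to
# end; a sketch only, assuming the `metastores` accessor on AccountClient
# and placeholder name/region/owner values.
from databricks.sdk import AccountClient
from databricks.sdk.service import catalog

a = AccountClient()

created = a.metastores.create(
    metastore_info=catalog.CreateMetastore(name="primary-metastore",
                                           region="us-east-1"))
print(created.metastore_info.metastore_id)

# list() materializes the wrapped response into a plain iterator.
for m in a.metastores.list():
    print(m.name, m.metastore_id)

# Only the fields set on UpdateMetastore are sent in the PUT body.
a.metastores.update(
    metastore_id=created.metastore_info.metastore_id,
    metastore_info=catalog.UpdateMetastore(owner="data-platform-admins"))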
- + :param metastore_id: str Unity Catalog metastore ID :param credential_info: :class:`CreateStorageCredential` (optional) - + :returns: :class:`AccountsStorageCredentialInfo` """ body = {} - if credential_info is not None: - body["credential_info"] = credential_info.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials", - body=body, - headers=headers, - ) + if credential_info is not None: body['credential_info'] = credential_info.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials', body=body + + , headers=headers + ) return AccountsStorageCredentialInfo.from_dict(res) - def delete(self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None): - """Delete a storage credential. + + + + def delete(self + , metastore_id: str, storage_credential_name: str + , * + , force: Optional[bool] = None): + """Delete a storage credential. + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. :param force: bool (optional) Force deletion even if the Storage Credential is not empty. Default is false. - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}", - query=query, - headers=headers, - ) - - def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo: - """Gets the named storage credential. + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}', query=query + + , headers=headers + ) + + + + + + def get(self + , metastore_id: str, storage_credential_name: str + ) -> AccountsStorageCredentialInfo: + """Gets the named storage credential. + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have a level of privilege on the storage credential. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. - + :returns: :class:`AccountsStorageCredentialInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}' + + , headers=headers + ) return AccountsStorageCredentialInfo.from_dict(res) - def list(self, metastore_id: str) -> Iterator[StorageCredentialInfo]: - """Get all storage credentials assigned to a metastore. + + + + def list(self + , metastore_id: str + ) -> Iterator[StorageCredentialInfo]: + """Get all storage credentials assigned to a metastore. 
+        Gets a list of all storage credentials that have been assigned to the given metastore.
-
+        
         :param metastore_id: str
           Unity Catalog metastore ID
-
+        
         :returns: Iterator over :class:`StorageCredentialInfo`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        json = self._api.do(
-            "GET",
-            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials",
-            headers=headers,
-        )
+        
+        headers = {'Accept': 'application/json',}
+        
+        json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials'
+        
+        , headers=headers
+        )
         parsed = ListAccountStorageCredentialsResponse.from_dict(json).storage_credentials
         return parsed if parsed is not None else []
+    
-    def update(
-        self,
-        metastore_id: str,
-        storage_credential_name: str,
-        *,
-        credential_info: Optional[UpdateStorageCredential] = None,
-    ) -> AccountsStorageCredentialInfo:
-        """Updates a storage credential.
+    
+    
+    
+    def update(self
+        , metastore_id: str, storage_credential_name: str
+        , *
+        , credential_info: Optional[UpdateStorageCredential] = None) -> AccountsStorageCredentialInfo:
+        """Updates a storage credential.
+        
         Updates a storage credential on the metastore. The caller must be the owner of the storage
         credential. If the caller is a metastore admin, only the __owner__ credential can be changed.
-
+        
         :param metastore_id: str
           Unity Catalog metastore ID
         :param storage_credential_name: str
           Name of the storage credential.
         :param credential_info: :class:`UpdateStorageCredential` (optional)
-
+        
         :returns: :class:`AccountsStorageCredentialInfo`
         """
         body = {}
-        if credential_info is not None:
-            body["credential_info"] = credential_info.as_dict()
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "PUT",
-            f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}",
-            body=body,
-            headers=headers,
-        )
+        if credential_info is not None: body['credential_info'] = credential_info.as_dict()
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}', body=body
+        
+        , headers=headers
+        )
         return AccountsStorageCredentialInfo.from_dict(res)

-
+    
+    
 class ArtifactAllowlistsAPI:
     """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so
     that users can leverage these artifacts on compute configured with shared access mode."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+    
-    def get(self, artifact_type: ArtifactType) -> ArtifactAllowlistInfo:
-        """Get an artifact allowlist.
+    
+    
+    
+    
+    
+    
+    def get(self
+        , artifact_type: ArtifactType
+    ) -> ArtifactAllowlistInfo:
+        """Get an artifact allowlist.
+        
         Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have
         the **MANAGE ALLOWLIST** privilege on the metastore.
-
+        
         :param artifact_type: :class:`ArtifactType`
           The artifact type of the allowlist.
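# Editor's illustration - a sketch of the AccountStorageCredentialsAPI
# above, assuming the `storage_credentials` accessor on AccountClient; the
# role ARN and metastore ID are placeholders, and the IAM-role request
# class has been named AwsIamRoleRequest or AwsIamRole depending on SDK
# version.
from databricks.sdk import AccountClient
from databricks.sdk.service import catalog

a = AccountClient()
metastore_id = "12345678-1234-1234-1234-123456789012"

cred = a.storage_credentials.create(
    metastore_id=metastore_id,
    credential_info=catalog.CreateStorageCredential(
        name="main-storage-cred",
        aws_iam_role=catalog.AwsIamRoleRequest(
            role_arn="arn:aws:iam::123456789012:role/uc-access"),
        comment="Bucket access for the primary metastore"))

for sc in a.storage_credentials.list(metastore_id=metastore_id):
    print(sc.name)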
- + :returns: :class:`ArtifactAllowlistInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}' + + , headers=headers + ) return ArtifactAllowlistInfo.from_dict(res) - def update( - self, - artifact_type: ArtifactType, - artifact_matchers: List[ArtifactMatcher], - *, - created_at: Optional[int] = None, - created_by: Optional[str] = None, - metastore_id: Optional[str] = None, - ) -> ArtifactAllowlistInfo: - """Set an artifact allowlist. + + + + def update(self + , artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher] + , * + , created_at: Optional[int] = None, created_by: Optional[str] = None, metastore_id: Optional[str] = None) -> ArtifactAllowlistInfo: + """Set an artifact allowlist. + Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. - + :param artifact_type: :class:`ArtifactType` The artifact type of the allowlist. :param artifact_matchers: List[:class:`ArtifactMatcher`] @@ -11291,57 +9045,52 @@ def update( Username of the user who set the artifact allowlist. :param metastore_id: str (optional) Unique identifier of parent metastore. - + :returns: :class:`ArtifactAllowlistInfo` """ body = {} - if artifact_matchers is not None: - body["artifact_matchers"] = [v.as_dict() for v in artifact_matchers] - if created_at is not None: - body["created_at"] = created_at - if created_by is not None: - body["created_by"] = created_by - if metastore_id is not None: - body["metastore_id"] = metastore_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}", body=body, headers=headers - ) + if artifact_matchers is not None: body['artifact_matchers'] = [v.as_dict() for v in artifact_matchers] + if created_at is not None: body['created_at'] = created_at + if created_by is not None: body['created_by'] = created_by + if metastore_id is not None: body['metastore_id'] = metastore_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}', body=body + + , headers=headers + ) return ArtifactAllowlistInfo.from_dict(res) - + + class CatalogsAPI: """A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission. - + In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of the workspaces in a Databricks account. 
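# Editor's illustration - reading and replacing an artifact allowlist with
# the ArtifactAllowlistsAPI above; a sketch assuming the
# `artifact_allowlists` accessor on WorkspaceClient and the INIT_SCRIPT /
# PREFIX_MATCH enum members from this service module. The volume path is a
# placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

current = w.artifact_allowlists.get(
    artifact_type=catalog.ArtifactType.INIT_SCRIPT)

# update() replaces the whole allowlist, so resend existing matchers too.
w.artifact_allowlists.update(
    artifact_type=catalog.ArtifactType.INIT_SCRIPT,
    artifact_matchers=(current.artifact_matchers or []) + [
        catalog.ArtifactMatcher(artifact="/Volumes/main/default/scripts/",
                                match_type=catalog.MatchType.PREFIX_MATCH)
    ])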
Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - name: str, - *, - comment: Optional[str] = None, - connection_name: Optional[str] = None, - options: Optional[Dict[str, str]] = None, - properties: Optional[Dict[str, str]] = None, - provider_name: Optional[str] = None, - share_name: Optional[str] = None, - storage_root: Optional[str] = None, - ) -> CatalogInfo: - """Create a catalog. + + + + + + + def create(self + , name: str + , * + , comment: Optional[str] = None, connection_name: Optional[str] = None, options: Optional[Dict[str,str]] = None, properties: Optional[Dict[str,str]] = None, provider_name: Optional[str] = None, share_name: Optional[str] = None, storage_root: Optional[str] = None) -> CatalogInfo: + """Create a catalog. + Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. - + :param name: str Name of catalog. :param comment: str (optional) @@ -11354,102 +9103,110 @@ def create( A map of key-value properties attached to the securable. :param provider_name: str (optional) The name of delta sharing provider. - + A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server. :param share_name: str (optional) The name of the share under the share provider. :param storage_root: str (optional) Storage root URL for managed tables within catalog. - + :returns: :class:`CatalogInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if connection_name is not None: - body["connection_name"] = connection_name - if name is not None: - body["name"] = name - if options is not None: - body["options"] = options - if properties is not None: - body["properties"] = properties - if provider_name is not None: - body["provider_name"] = provider_name - if share_name is not None: - body["share_name"] = share_name - if storage_root is not None: - body["storage_root"] = storage_root - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/catalogs", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if connection_name is not None: body['connection_name'] = connection_name + if name is not None: body['name'] = name + if options is not None: body['options'] = options + if properties is not None: body['properties'] = properties + if provider_name is not None: body['provider_name'] = provider_name + if share_name is not None: body['share_name'] = share_name + if storage_root is not None: body['storage_root'] = storage_root + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/catalogs', body=body + + , headers=headers + ) return CatalogInfo.from_dict(res) - def delete(self, name: str, *, force: Optional[bool] = None): - """Delete a catalog. + + + + def delete(self + , name: str + , * + , force: Optional[bool] = None): + """Delete a catalog. + Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner of the catalog. - + :param name: str The name of the catalog. :param force: bool (optional) Force deletion even if the catalog is not empty. 
- - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/catalogs/{name}', query=query + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/catalogs/{name}", query=query, headers=headers) + + + - def get(self, name: str, *, include_browse: Optional[bool] = None) -> CatalogInfo: + def get(self + , name: str + , * + , include_browse: Optional[bool] = None) -> CatalogInfo: """Get a catalog. - + Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** privilege set for their account. - + :param name: str The name of the catalog. :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata for - + :returns: :class:`CatalogInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/catalogs/{name}", query=query, headers=headers) + if include_browse is not None: query['include_browse'] = include_browse + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/catalogs/{name}', query=query + + , headers=headers + ) return CatalogInfo.from_dict(res) - def list( - self, - *, - include_browse: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[CatalogInfo]: - """List catalogs. + + + + def list(self + + , * + , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CatalogInfo]: + """List catalogs. + Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata for @@ -11463,47 +9220,44 @@ def list( response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
- + :returns: Iterator over :class:`CatalogInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if include_browse is not None: query['include_browse'] = include_browse + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/catalogs", query=query, headers=headers) - if "catalogs" in json: - for v in json["catalogs"]: - yield CatalogInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - name: str, - *, - comment: Optional[str] = None, - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, - isolation_mode: Optional[CatalogIsolationMode] = None, - new_name: Optional[str] = None, - options: Optional[Dict[str, str]] = None, - owner: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - ) -> CatalogInfo: - """Update a catalog. + json = self._api.do('GET','/api/2.1/unity-catalog/catalogs', query=query + + , headers=headers + ) + if 'catalogs' in json: + for v in json['catalogs']: + yield CatalogInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , name: str + , * + , comment: Optional[str] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, isolation_mode: Optional[CatalogIsolationMode] = None, new_name: Optional[str] = None, options: Optional[Dict[str,str]] = None, owner: Optional[str] = None, properties: Optional[Dict[str,str]] = None) -> CatalogInfo: + """Update a catalog. + Updates the catalog that matches the supplied name. The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field of the catalog). - + :param name: str The name of the catalog. :param comment: str (optional) @@ -11520,63 +9274,59 @@ def update( Username of current owner of catalog. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. 
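# Editor's illustration - the catalog lifecycle against the CatalogsAPI
# above; a sketch assuming the `catalogs` accessor on WorkspaceClient, with
# placeholder names. list() pages transparently by replaying page_token
# until next_page_token is absent, as in the generator shown above.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

created = w.catalogs.create(name="sandbox",
                            comment="Scratch area for experiments")

for c in w.catalogs.list(include_browse=True, max_results=50):
    print(c.name, c.owner)

w.catalogs.update(name=created.name, comment="Team-wide scratch area")
w.catalogs.delete(name=created.name, force=True)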
- + :returns: :class:`CatalogInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = enable_predictive_optimization.value - if isolation_mode is not None: - body["isolation_mode"] = isolation_mode.value - if new_name is not None: - body["new_name"] = new_name - if options is not None: - body["options"] = options - if owner is not None: - body["owner"] = owner - if properties is not None: - body["properties"] = properties - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/catalogs/{name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if enable_predictive_optimization is not None: body['enable_predictive_optimization'] = enable_predictive_optimization.value + if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value + if new_name is not None: body['new_name'] = new_name + if options is not None: body['options'] = options + if owner is not None: body['owner'] = owner + if properties is not None: body['properties'] = properties + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/catalogs/{name}', body=body + + , headers=headers + ) return CatalogInfo.from_dict(res) - + + class ConnectionsAPI: """Connections allow for creating a connection to an external data source. - + A connection is an abstraction of an external data source that can be connected from Databricks Compute. Creating a connection object is the first step to managing external data sources within Unity Catalog, with the second step being creating a data object (catalog, schema, or table) using the connection. Data objects derived from a connection can be written to or read from similar to other Unity Catalog data objects based on cloud storage. Users may create different types of connections with each connection having a unique set of configuration options to support credential management and other settings.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - name: str, - connection_type: ConnectionType, - options: Dict[str, str], - *, - comment: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - read_only: Optional[bool] = None, - ) -> ConnectionInfo: - """Create a connection. + - Creates a new connection + + + + + def create(self + , name: str, connection_type: ConnectionType, options: Dict[str,str] + , * + , comment: Optional[str] = None, properties: Optional[Dict[str,str]] = None, read_only: Optional[bool] = None) -> ConnectionInfo: + """Create a connection. + + Creates a new connection + Creates a new connection to an external data source. It allows users to specify connection details and configurations for interaction with the external server. - + :param name: str Name of the connection. :param connection_type: :class:`ConnectionType` @@ -11586,73 +9336,89 @@ def create( :param comment: str (optional) User-provided free-form text description. :param properties: Dict[str,str] (optional) - An object containing map of key-value properties attached to the connection. + A map of key-value properties attached to the securable. :param read_only: bool (optional) If the connection is read only. 
-
+        
         :returns: :class:`ConnectionInfo`
         """
         body = {}
-        if comment is not None:
-            body["comment"] = comment
-        if connection_type is not None:
-            body["connection_type"] = connection_type.value
-        if name is not None:
-            body["name"] = name
-        if options is not None:
-            body["options"] = options
-        if properties is not None:
-            body["properties"] = properties
-        if read_only is not None:
-            body["read_only"] = read_only
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.1/unity-catalog/connections", body=body, headers=headers)
+        if comment is not None: body['comment'] = comment
+        if connection_type is not None: body['connection_type'] = connection_type.value
+        if name is not None: body['name'] = name
+        if options is not None: body['options'] = options
+        if properties is not None: body['properties'] = properties
+        if read_only is not None: body['read_only'] = read_only
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('POST','/api/2.1/unity-catalog/connections', body=body
+        
+        , headers=headers
+        )
         return ConnectionInfo.from_dict(res)

-    def delete(self, name: str):
-        """Delete a connection.
+    
+    
+    
+    def delete(self
+        , name: str
+    ):
+        """Delete a connection.
+        
         Deletes the connection that matches the supplied name.
-
+        
         :param name: str
           The name of the connection to be deleted.
-
-
+        
+        
         """
+        
+        headers = {'Accept': 'application/json',}
+        
+        self._api.do('DELETE',f'/api/2.1/unity-catalog/connections/{name}'
+        
+        , headers=headers
+        )
+        
-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("DELETE", f"/api/2.1/unity-catalog/connections/{name}", headers=headers)
+    
+    
+    
-    def get(self, name: str) -> ConnectionInfo:
+    def get(self
+        , name: str
+    ) -> ConnectionInfo:
         """Get a connection.
-
+        
         Gets a connection from its name.
-
+        
         :param name: str
           Name of the connection.
-
+        
         :returns: :class:`ConnectionInfo`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.1/unity-catalog/connections/{name}", headers=headers)
+        
+        headers = {'Accept': 'application/json',}
+        
+        res = self._api.do('GET',f'/api/2.1/unity-catalog/connections/{name}'
+        
+        , headers=headers
+        )
         return ConnectionInfo.from_dict(res)

-    def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ConnectionInfo]:
-        """List connections.
+    
+    
+    
+    def list(self
+    
+        , *
+        , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ConnectionInfo]:
+        """List connections.
+        
         List all connections.
-
+        
         :param max_results: int (optional)
           Maximum number of connections to return. - If not set, all connections are returned (not
           recommended). - when set to a value greater than 0, the page length is the minimum of this value and
@@ -11660,37 +9426,42 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] =
          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
         :param page_token: str (optional)
           Opaque pagination token to go to next page based on previous query.
- + :returns: Iterator over :class:`ConnectionInfo` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/connections", query=query, headers=headers) - if "connections" in json: - for v in json["connections"]: - yield ConnectionInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, name: str, options: Dict[str, str], *, new_name: Optional[str] = None, owner: Optional[str] = None - ) -> ConnectionInfo: - """Update a connection. + json = self._api.do('GET','/api/2.1/unity-catalog/connections', query=query + + , headers=headers + ) + if 'connections' in json: + for v in json['connections']: + yield ConnectionInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Updates the connection that matches the supplied name. + + + + def update(self + , name: str, options: Dict[str,str] + , * + , new_name: Optional[str] = None, owner: Optional[str] = None) -> ConnectionInfo: + """Update a connection. + + Updates the connection that matches the supplied name. + :param name: str Name of the connection. :param options: Dict[str,str] @@ -11699,58 +9470,55 @@ def update( New name for the connection. :param owner: str (optional) Username of current owner of the connection. - + :returns: :class:`ConnectionInfo` """ body = {} - if new_name is not None: - body["new_name"] = new_name - if options is not None: - body["options"] = options - if owner is not None: - body["owner"] = owner - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/connections/{name}", body=body, headers=headers) + if new_name is not None: body['new_name'] = new_name + if options is not None: body['options'] = options + if owner is not None: body['owner'] = owner + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/connections/{name}', body=body + + , headers=headers + ) return ConnectionInfo.from_dict(res) - + + class CredentialsAPI: """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant. Each credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. - + To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL` privilege. 
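# Editor's illustration - creating and rotating a connection with the
# ConnectionsAPI above; a sketch assuming the `connections` accessor on
# WorkspaceClient. The option keys shown are illustrative only: each
# ConnectionType expects its own option set.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

conn = w.connections.create(
    name="pg-orders",
    connection_type=catalog.ConnectionType.POSTGRESQL,
    options={"host": "db.example.com", "port": "5432",
             "user": "reporting", "password": "********"},
    comment="Read-only reporting database",
    read_only=True)

# update() replaces the options map wholesale, so resend every key.
w.connections.update(name=conn.name,
                     options={"host": "db2.example.com", "port": "5432",
                              "user": "reporting", "password": "********"})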
The user who creates the credential can delegate ownership to another user or group to manage permissions on it.""" - + def __init__(self, api_client): self._api = api_client + - def create_credential( - self, - name: str, - *, - aws_iam_role: Optional[AwsIamRole] = None, - azure_managed_identity: Optional[AzureManagedIdentity] = None, - azure_service_principal: Optional[AzureServicePrincipal] = None, - comment: Optional[str] = None, - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, - purpose: Optional[CredentialPurpose] = None, - read_only: Optional[bool] = None, - skip_validation: Optional[bool] = None, - ) -> CredentialInfo: - """Create a credential. + + + + + + + def create_credential(self + , name: str + , * + , aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, purpose: Optional[CredentialPurpose] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> CredentialInfo: + """Create a credential. + Creates a new credential. The type of credential to be created is determined by the **purpose** field, which should be either **SERVICE** or **STORAGE**. - + The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials. - + :param name: str The credential name. The name must be unique among storage and service credentials within the metastore. @@ -11771,130 +9539,135 @@ def create_credential( **STORAGE**. :param skip_validation: bool (optional) Optional. Supplying true to this argument skips validation of the created set of credentials. 
- + :returns: :class:`CredentialInfo` """ body = {} - if aws_iam_role is not None: - body["aws_iam_role"] = aws_iam_role.as_dict() - if azure_managed_identity is not None: - body["azure_managed_identity"] = azure_managed_identity.as_dict() - if azure_service_principal is not None: - body["azure_service_principal"] = azure_service_principal.as_dict() - if comment is not None: - body["comment"] = comment - if databricks_gcp_service_account is not None: - body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() - if name is not None: - body["name"] = name - if purpose is not None: - body["purpose"] = purpose.value - if read_only is not None: - body["read_only"] = read_only - if skip_validation is not None: - body["skip_validation"] = skip_validation - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/credentials", body=body, headers=headers) + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() + if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() + if comment is not None: body['comment'] = comment + if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() + if name is not None: body['name'] = name + if purpose is not None: body['purpose'] = purpose.value + if read_only is not None: body['read_only'] = read_only + if skip_validation is not None: body['skip_validation'] = skip_validation + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/credentials', body=body + + , headers=headers + ) return CredentialInfo.from_dict(res) - def delete_credential(self, name_arg: str, *, force: Optional[bool] = None): - """Delete a credential. + + + + def delete_credential(self + , name_arg: str + , * + , force: Optional[bool] = None): + """Delete a credential. + Deletes a service or storage credential from the metastore. The caller must be an owner of the credential. - + :param name_arg: str Name of the credential. :param force: bool (optional) Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**). - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.1/unity-catalog/credentials/{name_arg}", query=query, headers=headers) - - def generate_temporary_service_credential( - self, - credential_name: str, - *, - azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None, - gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None, - ) -> TemporaryCredentials: - """Generate a temporary service credential. 
+ if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query + + , headers=headers + ) + + + + + + def generate_temporary_service_credential(self + , credential_name: str + , * + , azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None, gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None) -> TemporaryCredentials: + """Generate a temporary service credential. + Returns a set of temporary credentials generated using the specified service credential. The caller must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. - + :param credential_name: str The name of the service credential used to generate a temporary credential :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) The Azure cloud options to customize the requested temporary credential :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) The GCP cloud options to customize the requested temporary credential - + :returns: :class:`TemporaryCredentials` """ body = {} - if azure_options is not None: - body["azure_options"] = azure_options.as_dict() - if credential_name is not None: - body["credential_name"] = credential_name - if gcp_options is not None: - body["gcp_options"] = gcp_options.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/temporary-service-credentials", body=body, headers=headers) + if azure_options is not None: body['azure_options'] = azure_options.as_dict() + if credential_name is not None: body['credential_name'] = credential_name + if gcp_options is not None: body['gcp_options'] = gcp_options.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/temporary-service-credentials', body=body + + , headers=headers + ) return TemporaryCredentials.from_dict(res) - def get_credential(self, name_arg: str) -> CredentialInfo: - """Get a credential. + + + + def get_credential(self + , name_arg: str + ) -> CredentialInfo: + """Get a credential. + Gets a service or storage credential from the metastore. The caller must be a metastore admin, the owner of the credential, or have any permission on the credential. - + :param name_arg: str Name of the credential. - + :returns: :class:`CredentialInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/credentials/{name_arg}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/credentials/{name_arg}' + + , headers=headers + ) return CredentialInfo.from_dict(res) - def list_credentials( - self, - *, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - purpose: Optional[CredentialPurpose] = None, - ) -> Iterator[CredentialInfo]: - """List credentials. + + + + def list_credentials(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None, purpose: Optional[CredentialPurpose] = None) -> Iterator[CredentialInfo]: + """List credentials. + Gets an array of credentials (as __CredentialInfo__ objects). - + The array is limited to only the credentials that the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. 
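# Editor's illustration - minting a service credential and exchanging it
# for short-lived cloud tokens via the CredentialsAPI above; a sketch
# assuming the `credentials` accessor on WorkspaceClient and a placeholder
# role ARN.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

cred = w.credentials.create_credential(
    name="etl-service-cred",
    purpose=catalog.CredentialPurpose.SERVICE,
    aws_iam_role=catalog.AwsIamRole(
        role_arn="arn:aws:iam::123456789012:role/etl-service"))

# Temporary credentials are short-lived; regenerate them as needed.
tmp = w.credentials.generate_temporary_service_credential(
    credential_name=cred.name)
print(tmp.expiration_time)

for c in w.credentials.list_credentials(
        purpose=catalog.CredentialPurpose.SERVICE):
    print(c.name)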
There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of credentials to return. - If not set, the default max page size is used. - When set to a value greater than 0, the page length is the minimum of this value and a server-configured @@ -11904,53 +9677,46 @@ def list_credentials( Opaque token to retrieve the next page of results. :param purpose: :class:`CredentialPurpose` (optional) Return only credentials for the specified purpose. - + :returns: Iterator over :class:`CredentialInfo` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - if purpose is not None: - query["purpose"] = purpose.value - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if purpose is not None: query['purpose'] = purpose.value + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/credentials", query=query, headers=headers) - if "credentials" in json: - for v in json["credentials"]: - yield CredentialInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_credential( - self, - name_arg: str, - *, - aws_iam_role: Optional[AwsIamRole] = None, - azure_managed_identity: Optional[AzureManagedIdentity] = None, - azure_service_principal: Optional[AzureServicePrincipal] = None, - comment: Optional[str] = None, - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, - force: Optional[bool] = None, - isolation_mode: Optional[IsolationMode] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - read_only: Optional[bool] = None, - skip_validation: Optional[bool] = None, - ) -> CredentialInfo: - """Update a credential. + json = self._api.do('GET','/api/2.1/unity-catalog/credentials', query=query + + , headers=headers + ) + if 'credentials' in json: + for v in json['credentials']: + yield CredentialInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Updates a service or storage credential on the metastore. + + + + def update_credential(self + , name_arg: str + , * + , aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> CredentialInfo: + """Update a credential. + + Updates a service or storage credential on the metastore. + The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. If the caller is a metastore admin, only the __owner__ field can be changed. - + :param name_arg: str Name of the credential. :param aws_iam_role: :class:`AwsIamRole` (optional) @@ -11977,68 +9743,53 @@ def update_credential( **STORAGE**. :param skip_validation: bool (optional) Supply true to this argument to skip validation of the updated credential. 
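# Editor's illustration - rotating the IAM role behind an existing
# credential with update_credential() above; a sketch assuming the
# `credentials` accessor on WorkspaceClient and placeholder names.
# skip_validation is left unset so the new role is checked before it
# replaces the old one.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

updated = w.credentials.update_credential(
    name_arg="etl-service-cred",
    aws_iam_role=catalog.AwsIamRole(
        role_arn="arn:aws:iam::123456789012:role/etl-service-v2"),
    comment="Rotated to the v2 role")
print(updated.aws_iam_role.role_arn)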
-
+        
         :returns: :class:`CredentialInfo`
         """
         body = {}
-        if aws_iam_role is not None:
-            body["aws_iam_role"] = aws_iam_role.as_dict()
-        if azure_managed_identity is not None:
-            body["azure_managed_identity"] = azure_managed_identity.as_dict()
-        if azure_service_principal is not None:
-            body["azure_service_principal"] = azure_service_principal.as_dict()
-        if comment is not None:
-            body["comment"] = comment
-        if databricks_gcp_service_account is not None:
-            body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict()
-        if force is not None:
-            body["force"] = force
-        if isolation_mode is not None:
-            body["isolation_mode"] = isolation_mode.value
-        if new_name is not None:
-            body["new_name"] = new_name
-        if owner is not None:
-            body["owner"] = owner
-        if read_only is not None:
-            body["read_only"] = read_only
-        if skip_validation is not None:
-            body["skip_validation"] = skip_validation
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("PATCH", f"/api/2.1/unity-catalog/credentials/{name_arg}", body=body, headers=headers)
+        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict()
+        if comment is not None: body['comment'] = comment
+        if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
+        if force is not None: body['force'] = force
+        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
+        if new_name is not None: body['new_name'] = new_name
+        if owner is not None: body['owner'] = owner
+        if read_only is not None: body['read_only'] = read_only
+        if skip_validation is not None: body['skip_validation'] = skip_validation
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('PATCH',f'/api/2.1/unity-catalog/credentials/{name_arg}', body=body
+        
+        , headers=headers
+        )
         return CredentialInfo.from_dict(res)

-    def validate_credential(
-        self,
-        *,
-        aws_iam_role: Optional[AwsIamRole] = None,
-        azure_managed_identity: Optional[AzureManagedIdentity] = None,
-        credential_name: Optional[str] = None,
-        databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
-        external_location_name: Optional[str] = None,
-        purpose: Optional[CredentialPurpose] = None,
-        read_only: Optional[bool] = None,
-        url: Optional[str] = None,
-    ) -> ValidateCredentialResponse:
-        """Validate a credential.
+    
+    
+    
+    def validate_credential(self
+    
+        , *
+        , aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, credential_name: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, external_location_name: Optional[str] = None, purpose: Optional[CredentialPurpose] = None, read_only: Optional[bool] = None, url: Optional[str] = None) -> ValidateCredentialResponse:
+        """Validate a credential.
+        
         Validates a credential.
+        
         For service credentials (purpose is **SERVICE**), either the __credential_name__ or the
         cloud-specific credential must be provided.
+        
         For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
         __url__ needs to be provided. If only one of them is provided, it will be used for validation.
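# Editor's illustration - validating a storage credential against a target
# URL with validate_credential() above; a sketch assuming the `credentials`
# accessor on WorkspaceClient and placeholder names. Per the docstring,
# when both url and external_location_name are given, url wins.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

result = w.credentials.validate_credential(
    credential_name="main-storage-cred",
    purpose=catalog.CredentialPurpose.STORAGE,
    url="s3://my-bucket/landing/",
    read_only=True)
print(result.is_dir, result.results)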
And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the credential owner or have the required permission on the metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). - + :param aws_iam_role: :class:`AwsIamRole` (optional) The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) @@ -12046,316 +9797,72 @@ def validate_credential( :param credential_name: str (optional) Required. The name of an existing credential or long-lived cloud credential to validate. :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional) - GCP long-lived credential. Databricks-created Google Cloud Storage service account. - :param external_location_name: str (optional) - The name of an existing external location to validate. Only applicable for storage credentials - (purpose is **STORAGE**.) - :param purpose: :class:`CredentialPurpose` (optional) - The purpose of the credential. This should only be used when the credential is specified. - :param read_only: bool (optional) - Whether the credential is only usable for read operations. Only applicable for storage credentials - (purpose is **STORAGE**.) - :param url: str (optional) - The external location url to validate. Only applicable when purpose is **STORAGE**. - - :returns: :class:`ValidateCredentialResponse` - """ - body = {} - if aws_iam_role is not None: - body["aws_iam_role"] = aws_iam_role.as_dict() - if azure_managed_identity is not None: - body["azure_managed_identity"] = azure_managed_identity.as_dict() - if credential_name is not None: - body["credential_name"] = credential_name - if databricks_gcp_service_account is not None: - body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() - if external_location_name is not None: - body["external_location_name"] = external_location_name - if purpose is not None: - body["purpose"] = purpose.value - if read_only is not None: - body["read_only"] = read_only - if url is not None: - body["url"] = url - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/validate-credentials", body=body, headers=headers) - return ValidateCredentialResponse.from_dict(res) - - -class DatabaseInstancesAPI: - """Database Instances provide access to a database via REST API or direct SQL.""" - - def __init__(self, api_client): - self._api = api_client - - def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog: - """Create a Database Catalog. - - :param catalog: :class:`DatabaseCatalog` - - :returns: :class:`DatabaseCatalog` - """ - body = catalog.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/database/catalogs", body=body, headers=headers) - return DatabaseCatalog.from_dict(res) - - def create_database_instance(self, database_instance: DatabaseInstance) -> DatabaseInstance: - """Create a Database Instance. - - :param database_instance: :class:`DatabaseInstance` - A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. 
- - :returns: :class:`DatabaseInstance` - """ - body = database_instance.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers) - return DatabaseInstance.from_dict(res) - - def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable: - """Create a Synced Database Table. - - :param synced_table: :class:`SyncedDatabaseTable` - Next field marker: 10 - - :returns: :class:`SyncedDatabaseTable` - """ - body = synced_table.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/database/synced_tables", body=body, headers=headers) - return SyncedDatabaseTable.from_dict(res) - - def delete_database_catalog(self, name: str): - """Delete a Database Catalog. - - :param name: str - - - """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/database/catalogs/{name}", headers=headers) - - def delete_database_instance(self, name: str, *, force: Optional[bool] = None, purge: Optional[bool] = None): - """Delete a Database Instance. - - :param name: str - Name of the instance to delete. - :param force: bool (optional) - By default, a instance cannot be deleted if it has descendant instances created via PITR. If this - flag is specified as true, all descendent instances will be deleted as well. - :param purge: bool (optional) - If false, the database instance is soft deleted. Soft deleted instances behave as if they are - deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by - calling the undelete API for a limited time. If true, the database instance is hard deleted and - cannot be undeleted. - - - """ - - query = {} - if force is not None: - query["force"] = force - if purge is not None: - query["purge"] = purge - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers) - - def delete_synced_database_table(self, name: str): - """Delete a Synced Database Table. - - :param name: str - - - """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers) - - def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance: - """Find a Database Instance by uid. - - :param uid: str (optional) - UID of the cluster to get. - - :returns: :class:`DatabaseInstance` - """ - - query = {} - if uid is not None: - query["uid"] = uid - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/database/instances:findByUid", query=query, headers=headers) - return DatabaseInstance.from_dict(res) - - def get_database_catalog(self, name: str) -> DatabaseCatalog: - """Get a Database Catalog. - - :param name: str - - :returns: :class:`DatabaseCatalog` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/database/catalogs/{name}", headers=headers) - return DatabaseCatalog.from_dict(res) - - def get_database_instance(self, name: str) -> DatabaseInstance: - """Get a Database Instance. - - :param name: str - Name of the cluster to get. 
- - :returns: :class:`DatabaseInstance` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers) - return DatabaseInstance.from_dict(res) - - def get_synced_database_table(self, name: str) -> SyncedDatabaseTable: - """Get a Synced Database Table. - - :param name: str - - :returns: :class:`SyncedDatabaseTable` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers) - return SyncedDatabaseTable.from_dict(res) - - def list_database_instances( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[DatabaseInstance]: - """List Database Instances. - - :param page_size: int (optional) - Upper bound for items returned. - :param page_token: str (optional) - Pagination token to go to the next page of Database Instances. Requests first page if absent. - - :returns: Iterator over :class:`DatabaseInstance` - """ - - query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - while True: - json = self._api.do("GET", "/api/2.0/database/instances", query=query, headers=headers) - if "database_instances" in json: - for v in json["database_instances"]: - yield DatabaseInstance.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_database_instance( - self, name: str, database_instance: DatabaseInstance, update_mask: str - ) -> DatabaseInstance: - """Update a Database Instance. - - :param name: str - The name of the instance. This is the unique identifier for the instance. - :param database_instance: :class:`DatabaseInstance` - A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. - :param update_mask: str - The list of fields to update. - - :returns: :class:`DatabaseInstance` + GCP long-lived credential. Databricks-created Google Cloud Storage service account. + :param external_location_name: str (optional) + The name of an existing external location to validate. Only applicable for storage credentials + (purpose is **STORAGE**.) + :param purpose: :class:`CredentialPurpose` (optional) + The purpose of the credential. This should only be used when the credential is specified. + :param read_only: bool (optional) + Whether the credential is only usable for read operations. Only applicable for storage credentials + (purpose is **STORAGE**.) + :param url: str (optional) + The external location url to validate. Only applicable when purpose is **STORAGE**. 
+ + :returns: :class:`ValidateCredentialResponse` """ - body = database_instance.as_dict() - query = {} - if update_mask is not None: - query["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers) - return DatabaseInstance.from_dict(res) - + body = {} + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() + if credential_name is not None: body['credential_name'] = credential_name + if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() + if external_location_name is not None: body['external_location_name'] = external_location_name + if purpose is not None: body['purpose'] = purpose.value + if read_only is not None: body['read_only'] = read_only + if url is not None: body['url'] = url + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/validate-credentials', body=body + + , headers=headers + ) + return ValidateCredentialResponse.from_dict(res) + + class ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path. Each external location is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. - + To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - name: str, - url: str, - credential_name: str, - *, - comment: Optional[str] = None, - enable_file_events: Optional[bool] = None, - encryption_details: Optional[EncryptionDetails] = None, - fallback: Optional[bool] = None, - file_event_queue: Optional[FileEventQueue] = None, - read_only: Optional[bool] = None, - skip_validation: Optional[bool] = None, - ) -> ExternalLocationInfo: - """Create an external location. + + + + + + + def create(self + , name: str, url: str, credential_name: str + , * + , comment: Optional[str] = None, enable_file_events: Optional[bool] = None, encryption_details: Optional[EncryptionDetails] = None, fallback: Optional[bool] = None, file_event_queue: Optional[FileEventQueue] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> ExternalLocationInfo: + """Create an external location. + Creates a new external location entry in the metastore. The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage credential. - + :param name: str Name of the external location. :param url: str @@ -12378,99 +9885,105 @@ def create( Indicates whether the external location is read-only. :param skip_validation: bool (optional) Skips validation of the storage credential associated with the external location. 
- + :returns: :class:`ExternalLocationInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if credential_name is not None: - body["credential_name"] = credential_name - if enable_file_events is not None: - body["enable_file_events"] = enable_file_events - if encryption_details is not None: - body["encryption_details"] = encryption_details.as_dict() - if fallback is not None: - body["fallback"] = fallback - if file_event_queue is not None: - body["file_event_queue"] = file_event_queue.as_dict() - if name is not None: - body["name"] = name - if read_only is not None: - body["read_only"] = read_only - if skip_validation is not None: - body["skip_validation"] = skip_validation - if url is not None: - body["url"] = url - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/external-locations", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if credential_name is not None: body['credential_name'] = credential_name + if enable_file_events is not None: body['enable_file_events'] = enable_file_events + if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict() + if fallback is not None: body['fallback'] = fallback + if file_event_queue is not None: body['file_event_queue'] = file_event_queue.as_dict() + if name is not None: body['name'] = name + if read_only is not None: body['read_only'] = read_only + if skip_validation is not None: body['skip_validation'] = skip_validation + if url is not None: body['url'] = url + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/external-locations', body=body + + , headers=headers + ) return ExternalLocationInfo.from_dict(res) - def delete(self, name: str, *, force: Optional[bool] = None): - """Delete an external location. + + + + def delete(self + , name: str + , * + , force: Optional[bool] = None): + """Delete an external location. + Deletes the specified external location from the metastore. The caller must be the owner of the external location. - + :param name: str Name of the external location. :param force: bool (optional) Force deletion even if there are dependent external tables or mounts. - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/external-locations/{name}', query=query + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/external-locations/{name}", query=query, headers=headers) + + + - def get(self, name: str, *, include_browse: Optional[bool] = None) -> ExternalLocationInfo: + def get(self + , name: str + , * + , include_browse: Optional[bool] = None) -> ExternalLocationInfo: """Get an external location. - + Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. - + :param name: str Name of the external location. 
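# --- Editorial usage sketch (not part of the patch) ---
# Creating and then force-deleting an external location with the parameters
# documented above; the location, bucket, and credential names are illustrative.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
loc = w.external_locations.create(
    name="sales_landing",
    url="s3://my-bucket/sales",
    credential_name="my-storage-cred",
    comment="Landing zone for sales data",
    read_only=True,
)
w.external_locations.delete(name=loc.name, force=True)
# --- end editorial sketch ---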
:param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for - + :returns: :class:`ExternalLocationInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/external-locations/{name}", query=query, headers=headers) + if include_browse is not None: query['include_browse'] = include_browse + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/external-locations/{name}', query=query + + , headers=headers + ) return ExternalLocationInfo.from_dict(res) - def list( - self, - *, - include_browse: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[ExternalLocationInfo]: - """List external locations. + + + + def list(self + + , * + , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExternalLocationInfo]: + """List external locations. + Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for @@ -12481,54 +9994,45 @@ def list( value (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ExternalLocationInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if include_browse is not None: query['include_browse'] = include_browse + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/external-locations", query=query, headers=headers) - if "external_locations" in json: - for v in json["external_locations"]: - yield ExternalLocationInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - name: str, - *, - comment: Optional[str] = None, - credential_name: Optional[str] = None, - enable_file_events: Optional[bool] = None, - encryption_details: Optional[EncryptionDetails] = None, - fallback: Optional[bool] = None, - file_event_queue: Optional[FileEventQueue] = None, - force: Optional[bool] = None, - isolation_mode: Optional[IsolationMode] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - read_only: Optional[bool] = None, - skip_validation: Optional[bool] = None, - url: Optional[str] = None, - ) -> ExternalLocationInfo: - """Update an external location. 
+ json = self._api.do('GET','/api/2.1/unity-catalog/external-locations', query=query + + , headers=headers + ) + if 'external_locations' in json: + for v in json['external_locations']: + yield ExternalLocationInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , name: str + , * + , comment: Optional[str] = None, credential_name: Optional[str] = None, enable_file_events: Optional[bool] = None, encryption_details: Optional[EncryptionDetails] = None, fallback: Optional[bool] = None, file_event_queue: Optional[FileEventQueue] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, url: Optional[str] = None) -> ExternalLocationInfo: + """Update an external location. + Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external location. - + :param name: str Name of the external location. :param comment: str (optional) @@ -12558,156 +10062,167 @@ def update( Skips validation of the storage credential associated with the external location. :param url: str (optional) Path URL of the external location. - + :returns: :class:`ExternalLocationInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if credential_name is not None: - body["credential_name"] = credential_name - if enable_file_events is not None: - body["enable_file_events"] = enable_file_events - if encryption_details is not None: - body["encryption_details"] = encryption_details.as_dict() - if fallback is not None: - body["fallback"] = fallback - if file_event_queue is not None: - body["file_event_queue"] = file_event_queue.as_dict() - if force is not None: - body["force"] = force - if isolation_mode is not None: - body["isolation_mode"] = isolation_mode.value - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - if read_only is not None: - body["read_only"] = read_only - if skip_validation is not None: - body["skip_validation"] = skip_validation - if url is not None: - body["url"] = url - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/external-locations/{name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if credential_name is not None: body['credential_name'] = credential_name + if enable_file_events is not None: body['enable_file_events'] = enable_file_events + if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict() + if fallback is not None: body['fallback'] = fallback + if file_event_queue is not None: body['file_event_queue'] = file_event_queue.as_dict() + if force is not None: body['force'] = force + if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if read_only is not None: body['read_only'] = read_only + if skip_validation is not None: body['skip_validation'] = skip_validation + if url is not None: body['url'] = url + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = 
self._api.do('PATCH',f'/api/2.1/unity-catalog/external-locations/{name}', body=body + + , headers=headers + ) return ExternalLocationInfo.from_dict(res) - + + class FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog. - + The function implementation can be any SQL expression or Query, and it can be invoked wherever a table reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it can be referenced with the form __catalog_name__.__schema_name__.__function_name__.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, function_info: CreateFunction) -> FunctionInfo: - """Create a function. + - **WARNING: This API is experimental and will change in future versions** + - Creates a new function + + + def create(self + , function_info: CreateFunction + ) -> FunctionInfo: + """Create a function. + + **WARNING: This API is experimental and will change in future versions** + + Creates a new function + The user must have the following permissions in order for the function to be created: - **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the function's parent schema - + :param function_info: :class:`CreateFunction` Partial __FunctionInfo__ specifying the function to be created. - + :returns: :class:`FunctionInfo` """ body = {} - if function_info is not None: - body["function_info"] = function_info.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/functions", body=body, headers=headers) + if function_info is not None: body['function_info'] = function_info.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/functions', body=body + + , headers=headers + ) return FunctionInfo.from_dict(res) - def delete(self, name: str, *, force: Optional[bool] = None): - """Delete a function. + + + + def delete(self + , name: str + , * + , force: Optional[bool] = None): + """Delete a function. + Deletes the function that matches the supplied name. For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog and the **USE_SCHEMA** privilege on its parent schema - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param force: bool (optional) Force deletion even if the function is not empty. - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/functions/{name}', query=query + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/functions/{name}", query=query, headers=headers) + + + - def get(self, name: str, *, include_browse: Optional[bool] = None) -> FunctionInfo: + def get(self + , name: str + , * + , include_browse: Optional[bool] = None) -> FunctionInfo: """Get a function. - + Gets a function from within a parent catalog and schema.
For the fetch to succeed, the user must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the **USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the function itself - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param include_browse: bool (optional) Whether to include functions in the response for which the principal can only access selective metadata for - + :returns: :class:`FunctionInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/functions/{name}", query=query, headers=headers) + if include_browse is not None: query['include_browse'] = include_browse + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/functions/{name}', query=query + + , headers=headers + ) return FunctionInfo.from_dict(res) - def list( - self, - catalog_name: str, - schema_name: str, - *, - include_browse: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[FunctionInfo]: - """List functions. + + + + def list(self + , catalog_name: str, schema_name: str + , * + , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FunctionInfo]: + """List functions. + List functions within the specified parent catalog and schema. If the user is a metastore admin, all functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for functions of interest. :param schema_name: str @@ -12722,383 +10237,490 @@ def list( (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
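# --- Editorial usage sketch (not part of the patch) ---
# The list() method above is a generator that follows next_page_token
# transparently; the catalog and schema names here are illustrative.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for fn in w.functions.list(catalog_name="main", schema_name="default", max_results=50):
    print(fn.full_name)
# --- end editorial sketch ---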
- + :returns: Iterator over :class:`FunctionInfo` """ - + query = {} - if catalog_name is not None: - query["catalog_name"] = catalog_name - if include_browse is not None: - query["include_browse"] = include_browse - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - if schema_name is not None: - query["schema_name"] = schema_name - headers = { - "Accept": "application/json", - } - + if catalog_name is not None: query['catalog_name'] = catalog_name + if include_browse is not None: query['include_browse'] = include_browse + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if schema_name is not None: query['schema_name'] = schema_name + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/functions", query=query, headers=headers) - if "functions" in json: - for v in json["functions"]: - yield FunctionInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, name: str, *, owner: Optional[str] = None) -> FunctionInfo: - """Update a function. + json = self._api.do('GET','/api/2.1/unity-catalog/functions', query=query + + , headers=headers + ) + if 'functions' in json: + for v in json['functions']: + yield FunctionInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , name: str + , * + , owner: Optional[str] = None) -> FunctionInfo: + """Update a function. + Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must be a member of the group that is the new function owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the **USE_SCHEMA** privilege on the function's parent schema. - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param owner: str (optional) Username of current owner of function. - + :returns: :class:`FunctionInfo` """ body = {} - if owner is not None: - body["owner"] = owner - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/functions/{name}", body=body, headers=headers) + if owner is not None: body['owner'] = owner + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/functions/{name}', body=body + + , headers=headers + ) return FunctionInfo.from_dict(res) - + + class GrantsAPI: """In Unity Catalog, data is secure by default. Initially, users have no access to data in a metastore. Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. - + Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. 
This means that granting a privilege on the catalog automatically grants the privilege to all current and future objects within the catalog. Similarly, privileges granted on a schema are inherited by all current and future objects within that schema.""" - + def __init__(self, api_client): self._api = api_client + - def get(self, securable_type: SecurableType, full_name: str, *, principal: Optional[str] = None) -> PermissionsList: - """Get permissions. + - Gets the permissions for a securable. + + + + - :param securable_type: :class:`SecurableType` + def get(self + , securable_type: str, full_name: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None, principal: Optional[str] = None) -> GetPermissionsResponse: + """Get permissions. + + Gets the permissions for a securable. Does not include inherited permissions. + + :param securable_type: str Type of securable. :param full_name: str Full name of securable. + :param max_results: int (optional) + Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment + present in a single page response is guaranteed to contain all the privileges granted on the + requested Securable for the respective principal. + + If not set, all the permissions are returned. If set to - lesser than 0: invalid parameter error - + 0: page length is set to a server configured value - lesser than 150 but greater than 0: invalid + parameter error (this is to ensure that server is able to return at least one complete + PrivilegeAssignment in a single page response) - greater than (or equal to) 150: page length is the + minimum of this value and a server configured value + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :param principal: str (optional) If provided, only the permissions for the specified principal (user or group) are returned. - - :returns: :class:`PermissionsList` + + :returns: :class:`GetPermissionsResponse` """ - + query = {} - if principal is not None: - query["principal"] = principal - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}", - query=query, - headers=headers, - ) - return PermissionsList.from_dict(res) - - def get_effective( - self, securable_type: SecurableType, full_name: str, *, principal: Optional[str] = None - ) -> EffectivePermissionsList: - """Get effective permissions. + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if principal is not None: query['principal'] = principal + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/permissions/{securable_type}/{full_name}', query=query + + , headers=headers + ) + return GetPermissionsResponse.from_dict(res) - Gets the effective permissions for a securable. + + + - :param securable_type: :class:`SecurableType` + def get_effective(self + , securable_type: str, full_name: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None, principal: Optional[str] = None) -> EffectivePermissionsList: + """Get effective permissions. + + Gets the effective permissions for a securable. Includes inherited permissions from any parent + securables. + + :param securable_type: str Type of securable. :param full_name: str Full name of securable. 
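# --- Editorial usage sketch (not part of the patch) ---
# Grant management with the Grants API shown here: securable_type is now a
# plain string and results are paginated via max_results/page_token. The
# companion update() call is documented just below; securable and principal
# names are illustrative.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import PermissionsChange, Privilege

w = WorkspaceClient()
current = w.grants.get(securable_type="table", full_name="main.default.sales")
w.grants.update(
    securable_type="table",
    full_name="main.default.sales",
    changes=[PermissionsChange(principal="data-engineers", add=[Privilege.SELECT])],
)
# --- end editorial sketch ---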
+ :param max_results: int (optional) + Specifies the maximum number of privileges to return (page length). Every + EffectivePrivilegeAssignment present in a single page response is guaranteed to contain all the + effective privileges granted on (or inherited by) the requested Securable for the respective + principal. + + If not set, all the effective permissions are returned. If set to - lesser than 0: invalid parameter + error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: + invalid parameter error (this is to ensure that server is able to return at least one complete + EffectivePrivilegeAssignment in a single page response) - greater than (or equal to) 150: page + length is the minimum of this value and a server configured value + :param page_token: str (optional) + Opaque token for the next page of results (pagination). :param principal: str (optional) If provided, only the effective permissions for the specified principal (user or group) are returned. - + :returns: :class:`EffectivePermissionsList` """ - + query = {} - if principal is not None: - query["principal"] = principal - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.1/unity-catalog/effective-permissions/{securable_type.value}/{full_name}", - query=query, - headers=headers, - ) + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if principal is not None: query['principal'] = principal + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/effective-permissions/{securable_type}/{full_name}', query=query + + , headers=headers + ) return EffectivePermissionsList.from_dict(res) - def update( - self, securable_type: SecurableType, full_name: str, *, changes: Optional[List[PermissionsChange]] = None - ) -> PermissionsList: - """Update permissions. + + + + def update(self + , securable_type: str, full_name: str + , * + , changes: Optional[List[PermissionsChange]] = None) -> UpdatePermissionsResponse: + """Update permissions. + Updates the permissions for a securable. - - :param securable_type: :class:`SecurableType` + + :param securable_type: str Type of securable. :param full_name: str Full name of securable. :param changes: List[:class:`PermissionsChange`] (optional) Array of permissions change objects. - - :returns: :class:`PermissionsList` + + :returns: :class:`UpdatePermissionsResponse` """ body = {} - if changes is not None: - body["changes"] = [v.as_dict() for v in changes] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.1/unity-catalog/permissions/{securable_type.value}/{full_name}", - body=body, - headers=headers, - ) - return PermissionsList.from_dict(res) - + if changes is not None: body['changes'] = [v.as_dict() for v in changes] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/permissions/{securable_type}/{full_name}', body=body + + , headers=headers + ) + return UpdatePermissionsResponse.from_dict(res) + + class MetastoresAPI: """A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. 
For a workspace to use Unity Catalog, it must have a Unity Catalog metastore attached. - + Each metastore is configured with a root storage location in a cloud storage account. This storage location is used for metadata and managed tables data. - + NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is available in a catalog named hive_metastore.""" - + def __init__(self, api_client): self._api = api_client + - def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str): - """Create an assignment. + + + + + + + def assign(self + , workspace_id: int, metastore_id: str, default_catalog_name: str + ): + """Create an assignment. + Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin. - + :param workspace_id: int A workspace ID. :param metastore_id: str The unique ID of the metastore. :param default_catalog_name: str - The name of the default catalog in the metastore. This field is depracted. Please use "Default + The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace. - - + + """ body = {} - if default_catalog_name is not None: - body["default_catalog_name"] = default_catalog_name - if metastore_id is not None: - body["metastore_id"] = metastore_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name + if metastore_id is not None: body['metastore_id'] = metastore_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore', body=body + + , headers=headers + ) + - self._api.do("PUT", f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore", body=body, headers=headers) + + + - def create(self, name: str, *, region: Optional[str] = None, storage_root: Optional[str] = None) -> MetastoreInfo: + def create(self + , name: str + , * + , region: Optional[str] = None, storage_root: Optional[str] = None) -> MetastoreInfo: """Create a metastore. - + Creates a new metastore based on a provided name and optional storage root path. By default (if the __owner__ field is not set), the owner of the new metastore is the user calling the __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is assigned to the System User instead. - + :param name: str The user-specified name of the metastore. :param region: str (optional) - Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted in - the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is omitted, - the region of the workspace receiving the request will be used. + Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). 
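# --- Editorial usage sketch (not part of the patch) ---
# Assigning a metastore to a workspace, per assign() above. The workspace ID
# and metastore UUID are illustrative placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.metastores.assign(
    workspace_id=1234567890,
    metastore_id="11111111-2222-3333-4444-555555555555",
    default_catalog_name="main",  # deprecated field; see the Default Namespace API
)
# --- end editorial sketch ---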
:param storage_root: str (optional) The storage root URL for metastore - + :returns: :class:`MetastoreInfo` """ body = {} - if name is not None: - body["name"] = name - if region is not None: - body["region"] = region - if storage_root is not None: - body["storage_root"] = storage_root - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/metastores", body=body, headers=headers) + if name is not None: body['name'] = name + if region is not None: body['region'] = region + if storage_root is not None: body['storage_root'] = storage_root + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/metastores', body=body + + , headers=headers + ) return MetastoreInfo.from_dict(res) + + + + def current(self) -> MetastoreAssignment: """Get metastore assignment for workspace. - + Gets the metastore assignment for the workspace being accessed. - + :returns: :class:`MetastoreAssignment` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.1/unity-catalog/current-metastore-assignment", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.1/unity-catalog/current-metastore-assignment' + , headers=headers + ) return MetastoreAssignment.from_dict(res) - def delete(self, id: str, *, force: Optional[bool] = None): - """Delete a metastore. + + + + def delete(self + , id: str + , * + , force: Optional[bool] = None): + """Delete a metastore. + Deletes a metastore. The caller must be a metastore admin. - + :param id: str Unique ID of the metastore. :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/metastores/{id}', query=query + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/metastores/{id}", query=query, headers=headers) + + + - def get(self, id: str) -> MetastoreInfo: + def get(self + , id: str + ) -> MetastoreInfo: """Get a metastore. - + Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info. - + :param id: str Unique ID of the metastore. - + :returns: :class:`MetastoreInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/metastores/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/metastores/{id}' + + , headers=headers + ) return MetastoreInfo.from_dict(res) - def list(self) -> Iterator[MetastoreInfo]: - """List metastores. + + + + def list(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[MetastoreInfo]: + """List metastores. + Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in the array. - + + :param max_results: int (optional) + Maximum number of metastores to return. 
- when set to a value greater than 0, the page length is the + minimum of this value and a server configured value; - when set to 0, the page length is set to a + server configured value (recommended); - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all the metastores are returned (not recommended). - Note: The number of + returned metastores might be less than the specified max_results size, even zero. The only + definitive indication that no further metastores can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + :returns: Iterator over :class:`MetastoreInfo` """ + + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + + while True: + json = self._api.do('GET','/api/2.1/unity-catalog/metastores', query=query + + , headers=headers + ) + if 'metastores' in json: + for v in json['metastores']: + yield MetastoreInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.1/unity-catalog/metastores", headers=headers) - parsed = ListMetastoresResponse.from_dict(json).metastores - return parsed if parsed is not None else [] + + + def summary(self) -> GetMetastoreSummaryResponse: """Get a metastore summary. - + Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID. - + :returns: :class:`GetMetastoreSummaryResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.1/unity-catalog/metastore_summary", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.1/unity-catalog/metastore_summary' + , headers=headers + ) return GetMetastoreSummaryResponse.from_dict(res) - def unassign(self, workspace_id: int, metastore_id: str): + + + + + def unassign(self + , workspace_id: int, metastore_id: str + ): """Delete an assignment. - + Deletes a metastore assignment. The caller must be an account administrator. - + :param workspace_id: int A workspace ID. :param metastore_id: str Query for the ID of the metastore to delete. - - + + """ - + query = {} - if metastore_id is not None: - query["metastore_id"] = metastore_id - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore", query=query, headers=headers - ) - - def update( - self, - id: str, - *, - delta_sharing_organization_name: Optional[str] = None, - delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None, - delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - privilege_model_version: Optional[str] = None, - storage_root_credential_id: Optional[str] = None, - ) -> MetastoreInfo: - """Update a metastore. 
+ if metastore_id is not None: query['metastore_id'] = metastore_id + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore', query=query + + , headers=headers + ) + + + + + + def update(self + , id: str + , * + , delta_sharing_organization_name: Optional[str] = None, delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None, delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None, new_name: Optional[str] = None, owner: Optional[str] = None, privilege_model_version: Optional[str] = None, storage_root_credential_id: Optional[str] = None) -> MetastoreInfo: + """Update a metastore. + Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ field is set to the empty string (**""**), the ownership is updated to the System User. - + :param id: str Unique ID of the metastore. :param delta_sharing_organization_name: str (optional) @@ -13106,7 +10728,7 @@ def update( Sharing as the official name. :param delta_sharing_recipient_token_lifetime_in_seconds: int (optional) The lifetime of delta sharing recipient token in seconds. - :param delta_sharing_scope: :class:`UpdateMetastoreDeltaSharingScope` (optional) + :param delta_sharing_scope: :class:`DeltaSharingScopeEnum` (optional) The scope of Delta Sharing enabled for the metastore. :param new_name: str (optional) New name for the metastore. @@ -13116,116 +10738,126 @@ def update( Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). :param storage_root_credential_id: str (optional) UUID of storage credential to access the metastore storage_root. - + :returns: :class:`MetastoreInfo` """ body = {} - if delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = delta_sharing_organization_name - if delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - delta_sharing_recipient_token_lifetime_in_seconds - ) - if delta_sharing_scope is not None: - body["delta_sharing_scope"] = delta_sharing_scope.value - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - if privilege_model_version is not None: - body["privilege_model_version"] = privilege_model_version - if storage_root_credential_id is not None: - body["storage_root_credential_id"] = storage_root_credential_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/metastores/{id}", body=body, headers=headers) + if delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = delta_sharing_organization_name + if delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = delta_sharing_recipient_token_lifetime_in_seconds + if delta_sharing_scope is not None: body['delta_sharing_scope'] = delta_sharing_scope.value + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if privilege_model_version is not None: body['privilege_model_version'] = privilege_model_version + if storage_root_credential_id is not None: body['storage_root_credential_id'] = storage_root_credential_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/metastores/{id}', body=body + + , headers=headers + ) return 
MetastoreInfo.from_dict(res) - def update_assignment( - self, workspace_id: int, *, default_catalog_name: Optional[str] = None, metastore_id: Optional[str] = None - ): - """Update an assignment. + + + + def update_assignment(self + , workspace_id: int + , * + , default_catalog_name: Optional[str] = None, metastore_id: Optional[str] = None): + """Update an assignment. + Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a Workspace admin. - + :param workspace_id: int A workspace ID. :param default_catalog_name: str (optional) - The name of the default catalog in the metastore. This field is depracted. Please use "Default + The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace. :param metastore_id: str (optional) The unique ID of the metastore. - - + + """ body = {} - if default_catalog_name is not None: - body["default_catalog_name"] = default_catalog_name - if metastore_id is not None: - body["metastore_id"] = metastore_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore", body=body, headers=headers) - + if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name + if metastore_id is not None: body['metastore_id'] = metastore_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore', body=body + + , headers=headers + ) + + + class ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more details, see the [registered models API docs](/api/workspace/registeredmodels).""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, full_name: str, version: int): - """Delete a Model Version. + + + + + + + def delete(self + , full_name: str, version: int + ): + """Delete a Model Version. + Deletes a model version from the specified registered model. Any aliases assigned to the model version will also be deleted. - + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
- + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}", headers=headers) + + + - def get( - self, - full_name: str, - version: int, - *, - include_aliases: Optional[bool] = None, - include_browse: Optional[bool] = None, - ) -> ModelVersionInfo: + def get(self + , full_name: str, version: int + , * + , include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None) -> ModelVersionInfo: """Get a Model Version. - + Get a model version. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int @@ -13235,77 +10867,79 @@ def get( :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - + :returns: :class:`ModelVersionInfo` """ - + query = {} - if include_aliases is not None: - query["include_aliases"] = include_aliases - if include_browse is not None: - query["include_browse"] = include_browse - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}", query=query, headers=headers - ) + if include_aliases is not None: query['include_aliases'] = include_aliases + if include_browse is not None: query['include_browse'] = include_browse + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}', query=query + + , headers=headers + ) return ModelVersionInfo.from_dict(res) - def get_by_alias(self, full_name: str, alias: str, *, include_aliases: Optional[bool] = None) -> ModelVersionInfo: - """Get Model Version By Alias. + + + + def get_by_alias(self + , full_name: str, alias: str + , * + , include_aliases: Optional[bool] = None) -> ModelVersionInfo: + """Get Model Version By Alias. + Get a model version by alias. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
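# --- Editorial usage sketch (not part of the patch) ---
# Resolving a model version through an alias, as documented above; the model
# and alias names are illustrative.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
mv = w.model_versions.get_by_alias(full_name="main.default.revenue_model", alias="champion")
print(mv.version)
# --- end editorial sketch ---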
- + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias :param include_aliases: bool (optional) Whether to include aliases associated with the model version in the response - + :returns: :class:`ModelVersionInfo` """ - + query = {} - if include_aliases is not None: - query["include_aliases"] = include_aliases - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}", query=query, headers=headers - ) + if include_aliases is not None: query['include_aliases'] = include_aliases + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', query=query + + , headers=headers + ) return ModelVersionInfo.from_dict(res) - def list( - self, - full_name: str, - *, - include_browse: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[ModelVersionInfo]: - """List Model Versions. + + + + def list(self + , full_name: str + , * + , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ModelVersionInfo]: + """List Model Versions. + List model versions. You can list model versions under a particular schema, or list all model versions in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the model versions. A regular user needs to be the owner or have the **EXECUTE** privilege on the parent registered model to receive the model versions in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. The elements in the response will not contain any aliases or tags. - + :param full_name: str The full three-level name of the registered model under which to list model versions :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for @@ -13319,232 +10953,267 @@ def list( value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ModelVersionInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if include_browse is not None: query['include_browse'] = include_browse + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", f"/api/2.1/unity-catalog/models/{full_name}/versions", query=query, headers=headers - ) - if "model_versions" in json: - for v in json["model_versions"]: - yield ModelVersionInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, full_name: str, version: int, *, comment: Optional[str] = None) -> ModelVersionInfo: - """Update a Model Version.
+ json = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}/versions', query=query + + , headers=headers + ) + if 'model_versions' in json: + for v in json['model_versions']: + yield ModelVersionInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Updates the specified model version. + + + + def update(self + , full_name: str, version: int + , * + , comment: Optional[str] = None) -> ModelVersionInfo: + """Update a Model Version. + + Updates the specified model version. + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the comment of the model version can be updated. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version :param comment: str (optional) The comment attached to the model version - + :returns: :class:`ModelVersionInfo` """ body = {} - if comment is not None: - body["comment"] = comment - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}", body=body, headers=headers - ) + if comment is not None: body['comment'] = comment + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}', body=body + + , headers=headers + ) return ModelVersionInfo.from_dict(res) - + + class OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" - + def __init__(self, api_client): self._api = api_client + - def wait_get_online_table_active( - self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[OnlineTable], None]] = None - ) -> OnlineTable: - deadline = time.time() + timeout.total_seconds() - target_states = (ProvisioningInfoState.ACTIVE,) - failure_states = (ProvisioningInfoState.FAILED,) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.unity_catalog_provisioning_state - status_message = f"current status: {status}" - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach ACTIVE, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def create(self, table: OnlineTable) -> Wait[OnlineTable]: - """Create an Online Table. + - Create a new Online Table. + + def wait_get_online_table_active(self, name: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[OnlineTable], None]] = None) -> OnlineTable: + deadline = time.time() + timeout.total_seconds() + target_states = (ProvisioningInfoState.ACTIVE, ) + failure_states = (ProvisioningInfoState.FAILED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.unity_catalog_provisioning_state + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach ACTIVE, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + def create(self + , table: OnlineTable + ) -> Wait[OnlineTable]: + """Create an Online Table. + + Create a new Online Table. + :param table: :class:`OnlineTable` Online Table information. - + :returns: Long-running operation waiter for :class:`OnlineTable`. See :method:wait_get_online_table_active for more details. """ body = table.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.0/online-tables", body=body, headers=headers) - return Wait( - self.wait_get_online_table_active, response=OnlineTable.from_dict(op_response), name=op_response["name"] - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.0/online-tables', body=body + + , headers=headers + ) + return Wait(self.wait_get_online_table_active + , response = OnlineTable.from_dict(op_response) + , name=op_response['name']) - def create_and_wait(self, table: OnlineTable, timeout=timedelta(minutes=20)) -> OnlineTable: + + def create_and_wait(self + , table: OnlineTable + , + timeout=timedelta(minutes=20)) -> OnlineTable: return self.create(table=table).result(timeout=timeout) + + + - def delete(self, name: str): + def delete(self + , name: str + ): """Delete an Online Table. - + Delete an online table. Warning: This will delete all the data in the online table. If the source Delta table was deleted or modified since this Online Table was created, this will lose the data forever! - + :param name: str Full three-part (catalog, schema, table) name of the table. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/online-tables/{name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/online-tables/{name}", headers=headers) + + + - def get(self, name: str) -> OnlineTable: + def get(self + , name: str + ) -> OnlineTable: """Get an Online Table. - + Get information about an existing online table and its status. - + :param name: str Full three-part (catalog, schema, table) name of the table. - + :returns: :class:`OnlineTable` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/online-tables/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/online-tables/{name}' + + , headers=headers + ) return OnlineTable.from_dict(res) - + + class QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics tables and a dashboard that you can use to monitor table health and set alerts. - + Most write operations require the user to be the owner of the table (or its parent schema or parent catalog). 
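# --- Editorial usage sketch (not part of the patch) ---
# Creating an online table and blocking until it reaches ACTIVE via the waiter
# defined above. The table names and the OnlineTableSpec fields used here are
# illustrative assumptions, not taken from the patch.
from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import OnlineTable, OnlineTableSpec

w = WorkspaceClient()
online = w.online_tables.create_and_wait(
    table=OnlineTable(
        name="main.default.sales_online",
        spec=OnlineTableSpec(
            source_table_full_name="main.default.sales",
            primary_key_columns=["id"],
        ),
    ),
    timeout=timedelta(minutes=20),
)
# --- end editorial sketch ---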
Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**).""" - + def __init__(self, api_client): self._api = api_client + - def cancel_refresh(self, table_name: str, refresh_id: str): - """Cancel refresh. + - Cancel an active monitor refresh for the given refresh ID. + + + + + def cancel_refresh(self + , table_name: str, refresh_id: str + ): + """Cancel refresh. + + Cancel an active monitor refresh for the given refresh ID. + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. - - + + """ - + headers = {} + + self._api.do('POST',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}/cancel' + + , headers=headers + ) + - self._api.do( - "POST", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}/cancel", headers=headers - ) - - def create( - self, - table_name: str, - assets_dir: str, - output_schema_name: str, - *, - baseline_table_name: Optional[str] = None, - custom_metrics: Optional[List[MonitorMetric]] = None, - data_classification_config: Optional[MonitorDataClassificationConfig] = None, - inference_log: Optional[MonitorInferenceLog] = None, - notifications: Optional[MonitorNotifications] = None, - schedule: Optional[MonitorCronSchedule] = None, - skip_builtin_dashboard: Optional[bool] = None, - slicing_exprs: Optional[List[str]] = None, - snapshot: Optional[MonitorSnapshot] = None, - time_series: Optional[MonitorTimeSeries] = None, - warehouse_id: Optional[str] = None, - ) -> MonitorInfo: - """Create a table monitor. + + + + def create(self + , table_name: str, assets_dir: str, output_schema_name: str + , * + , baseline_table_name: Optional[str] = None, custom_metrics: Optional[List[MonitorMetric]] = None, data_classification_config: Optional[MonitorDataClassificationConfig] = None, inference_log: Optional[MonitorInferenceLog] = None, notifications: Optional[MonitorNotifications] = None, schedule: Optional[MonitorCronSchedule] = None, skip_builtin_dashboard: Optional[bool] = None, slicing_exprs: Optional[List[str]] = None, snapshot: Optional[MonitorSnapshot] = None, time_series: Optional[MonitorTimeSeries] = None, warehouse_id: Optional[str] = None) -> MonitorInfo: + """Create a table monitor. + Creates a new monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Workspace assets, such as the dashboard, will be created in the workspace where this call was made. - + :param table_name: str Full name of the table. 
:param assets_dir: str @@ -13578,242 +11247,259 @@ def create( :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard creation. If not specified, the first running warehouse will be used. - + :returns: :class:`MonitorInfo` """ body = {} - if assets_dir is not None: - body["assets_dir"] = assets_dir - if baseline_table_name is not None: - body["baseline_table_name"] = baseline_table_name - if custom_metrics is not None: - body["custom_metrics"] = [v.as_dict() for v in custom_metrics] - if data_classification_config is not None: - body["data_classification_config"] = data_classification_config.as_dict() - if inference_log is not None: - body["inference_log"] = inference_log.as_dict() - if notifications is not None: - body["notifications"] = notifications.as_dict() - if output_schema_name is not None: - body["output_schema_name"] = output_schema_name - if schedule is not None: - body["schedule"] = schedule.as_dict() - if skip_builtin_dashboard is not None: - body["skip_builtin_dashboard"] = skip_builtin_dashboard - if slicing_exprs is not None: - body["slicing_exprs"] = [v for v in slicing_exprs] - if snapshot is not None: - body["snapshot"] = snapshot.as_dict() - if time_series is not None: - body["time_series"] = time_series.as_dict() - if warehouse_id is not None: - body["warehouse_id"] = warehouse_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", body=body, headers=headers) + if assets_dir is not None: body['assets_dir'] = assets_dir + if baseline_table_name is not None: body['baseline_table_name'] = baseline_table_name + if custom_metrics is not None: body['custom_metrics'] = [v.as_dict() for v in custom_metrics] + if data_classification_config is not None: body['data_classification_config'] = data_classification_config.as_dict() + if inference_log is not None: body['inference_log'] = inference_log.as_dict() + if notifications is not None: body['notifications'] = notifications.as_dict() + if output_schema_name is not None: body['output_schema_name'] = output_schema_name + if schedule is not None: body['schedule'] = schedule.as_dict() + if skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = skip_builtin_dashboard + if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs] + if snapshot is not None: body['snapshot'] = snapshot.as_dict() + if time_series is not None: body['time_series'] = time_series.as_dict() + if warehouse_id is not None: body['warehouse_id'] = warehouse_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.1/unity-catalog/tables/{table_name}/monitor', body=body + + , headers=headers + ) return MonitorInfo.from_dict(res) - def delete(self, table_name: str): - """Delete a table monitor. + + + + def delete(self + , table_name: str + ): + """Delete a table monitor. + Deletes a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created. 
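# A minimal usage sketch for create(), assuming a configured WorkspaceClient;
# the table, assets directory, and output schema are placeholders. Passing
# MonitorSnapshot() selects the snapshot profile type, which needs no further
# configuration.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import MonitorSnapshot

w = WorkspaceClient()
info = w.quality_monitors.create(
    table_name="main.default.transactions",
    assets_dir="/Workspace/Users/someone@example.com/monitoring",
    output_schema_name="main.monitoring",
    snapshot=MonitorSnapshot(),
)
print(info.status)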
- + Note that the metric tables and dashboard will not be deleted as part of this call; those assets must be manually cleaned up (if desired). - + :param table_name: str Full name of the table. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/tables/{table_name}/monitor' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", headers=headers) + + + - def get(self, table_name: str) -> MonitorInfo: + def get(self + , table_name: str + ) -> MonitorInfo: """Get a table monitor. - + Gets a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + The returned information includes configuration values, as well as information on assets created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different workspace than where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{table_name}/monitor' + + , headers=headers + ) return MonitorInfo.from_dict(res) - def get_refresh(self, table_name: str, refresh_id: str) -> MonitorRefreshInfo: - """Get refresh. + + + + def get_refresh(self + , table_name: str, refresh_id: str + ) -> MonitorRefreshInfo: + """Get refresh. + Gets info about a specific monitor refresh using the given refresh ID. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. - + :returns: :class:`MonitorRefreshInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}' + + , headers=headers + ) return MonitorRefreshInfo.from_dict(res) - def list_refreshes(self, table_name: str) -> MonitorRefreshListResponse: - """List refreshes. + + + + def list_refreshes(self + , table_name: str + ) -> MonitorRefreshListResponse: + """List refreshes. + Gets an array containing the history of the most recent refreshes (up to 25) for this table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. 
- + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorRefreshListResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes' + + , headers=headers + ) return MonitorRefreshListResponse.from_dict(res) - def regenerate_dashboard( - self, table_name: str, *, warehouse_id: Optional[str] = None - ) -> RegenerateDashboardResponse: - """Regenerate a monitoring dashboard. + + + + def regenerate_dashboard(self + , table_name: str + , * + , warehouse_id: Optional[str] = None) -> RegenerateDashboardResponse: + """Regenerate a monitoring dashboard. + Regenerates the monitoring dashboard for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + The call must be made from the workspace where the monitor was created. The dashboard will be regenerated in the assets directory that was specified when the monitor was created. - + :param table_name: str Full name of the table. :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first running warehouse will be used. - + :returns: :class:`RegenerateDashboardResponse` """ body = {} - if warehouse_id is not None: - body["warehouse_id"] = warehouse_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard", body=body, headers=headers - ) + if warehouse_id is not None: body['warehouse_id'] = warehouse_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard', body=body + + , headers=headers + ) return RegenerateDashboardResponse.from_dict(res) - def run_refresh(self, table_name: str) -> MonitorRefreshInfo: - """Queue a metric refresh for a monitor. + + + + def run_refresh(self + , table_name: str + ) -> MonitorRefreshInfo: + """Queue a metric refresh for a monitor. + Queues a metric refresh on the monitor for the specified table. The refresh will execute in the background. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. 
- + :returns: :class:`MonitorRefreshInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("POST", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('POST',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes' + + , headers=headers + ) return MonitorRefreshInfo.from_dict(res) - def update( - self, - table_name: str, - output_schema_name: str, - *, - baseline_table_name: Optional[str] = None, - custom_metrics: Optional[List[MonitorMetric]] = None, - dashboard_id: Optional[str] = None, - data_classification_config: Optional[MonitorDataClassificationConfig] = None, - inference_log: Optional[MonitorInferenceLog] = None, - notifications: Optional[MonitorNotifications] = None, - schedule: Optional[MonitorCronSchedule] = None, - slicing_exprs: Optional[List[str]] = None, - snapshot: Optional[MonitorSnapshot] = None, - time_series: Optional[MonitorTimeSeries] = None, - ) -> MonitorInfo: - """Update a table monitor. + + + + def update(self + , table_name: str, output_schema_name: str + , * + , baseline_table_name: Optional[str] = None, custom_metrics: Optional[List[MonitorMetric]] = None, dashboard_id: Optional[str] = None, data_classification_config: Optional[MonitorDataClassificationConfig] = None, inference_log: Optional[MonitorInferenceLog] = None, notifications: Optional[MonitorNotifications] = None, schedule: Optional[MonitorCronSchedule] = None, slicing_exprs: Optional[List[str]] = None, snapshot: Optional[MonitorSnapshot] = None, time_series: Optional[MonitorTimeSeries] = None) -> MonitorInfo: + """Update a table monitor. + Updates a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created, and the caller must be the original creator of the monitor. - + Certain configuration fields, such as output asset identifiers, cannot be updated. - + :param table_name: str Full name of the table. :param output_schema_name: str @@ -13843,56 +11529,46 @@ def update( Configuration for monitoring snapshot tables. :param time_series: :class:`MonitorTimeSeries` (optional) Configuration for monitoring time series tables. 
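# A minimal sketch of the refresh workflow above, assuming a configured
# WorkspaceClient and an existing monitor on the (placeholder) table. Note
# that get_refresh() and cancel_refresh() take the refresh ID as a string.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
table = "main.default.transactions"

refresh = w.quality_monitors.run_refresh(table_name=table)
state = w.quality_monitors.get_refresh(
    table_name=table, refresh_id=str(refresh.refresh_id)
).state
print(f"refresh {refresh.refresh_id}: {state}")

# list_refreshes() returns up to the 25 most recent refreshes.
for r in w.quality_monitors.list_refreshes(table_name=table).refreshes or []:
    print(r.refresh_id, r.state)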
- + :returns: :class:`MonitorInfo` """ body = {} - if baseline_table_name is not None: - body["baseline_table_name"] = baseline_table_name - if custom_metrics is not None: - body["custom_metrics"] = [v.as_dict() for v in custom_metrics] - if dashboard_id is not None: - body["dashboard_id"] = dashboard_id - if data_classification_config is not None: - body["data_classification_config"] = data_classification_config.as_dict() - if inference_log is not None: - body["inference_log"] = inference_log.as_dict() - if notifications is not None: - body["notifications"] = notifications.as_dict() - if output_schema_name is not None: - body["output_schema_name"] = output_schema_name - if schedule is not None: - body["schedule"] = schedule.as_dict() - if slicing_exprs is not None: - body["slicing_exprs"] = [v for v in slicing_exprs] - if snapshot is not None: - body["snapshot"] = snapshot.as_dict() - if time_series is not None: - body["time_series"] = time_series.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", body=body, headers=headers) + if baseline_table_name is not None: body['baseline_table_name'] = baseline_table_name + if custom_metrics is not None: body['custom_metrics'] = [v.as_dict() for v in custom_metrics] + if dashboard_id is not None: body['dashboard_id'] = dashboard_id + if data_classification_config is not None: body['data_classification_config'] = data_classification_config.as_dict() + if inference_log is not None: body['inference_log'] = inference_log.as_dict() + if notifications is not None: body['notifications'] = notifications.as_dict() + if output_schema_name is not None: body['output_schema_name'] = output_schema_name + if schedule is not None: body['schedule'] = schedule.as_dict() + if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs] + if snapshot is not None: body['snapshot'] = snapshot.as_dict() + if time_series is not None: body['time_series'] = time_series.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.1/unity-catalog/tables/{table_name}/monitor', body=body + + , headers=headers + ) return MonitorInfo.from_dict(res) - + + class RegisteredModelsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace. Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating new model versions currently requires use of the MLflow Python client. Once model versions are created, you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time serving using Databricks Model Serving. - + All operations on registered models and model versions require USE_CATALOG permissions on the enclosing catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional privileges are required for various operations: - + * To create a registered model, users must additionally have the CREATE_MODEL permission on the target schema. 
* To view registered model or model version metadata, model version data files, or invoke a model version, users must additionally have the EXECUTE permission on the registered model * To update @@ -13900,34 +11576,37 @@ class RegisteredModelsAPI: registered model * To update other registered model or model version metadata (comments, aliases), create a new model version, or update permissions on the registered model, users must be owners of the registered model. - + Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that specify a securable type, use "FUNCTION" as the securable type.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - catalog_name: str, - schema_name: str, - name: str, - *, - comment: Optional[str] = None, - storage_location: Optional[str] = None, - ) -> RegisteredModelInfo: - """Create a Registered Model. + - Creates a new registered model in Unity Catalog. + + + + + def create(self + , catalog_name: str, schema_name: str, name: str + , * + , comment: Optional[str] = None, storage_location: Optional[str] = None) -> RegisteredModelInfo: + """Create a Registered Model. + + Creates a new registered model in Unity Catalog. + File storage for model versions in the registered model will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For registered model creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema. - + :param catalog_name: str The name of the catalog where the schema and the registered model reside :param schema_name: str @@ -13938,79 +11617,99 @@ def create( The comment attached to the registered model :param storage_location: str (optional) The storage location on the cloud under which model version data files are stored
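# A minimal usage sketch for create(), assuming a configured WorkspaceClient
# and the privileges listed above; the catalog, schema, and model names are
# placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
model = w.registered_models.create(
    catalog_name="main",
    schema_name="default",
    name="churn_model",
    comment="created via the Python SDK",
)
print(model.full_name)  # e.g. "main.default.churn_model"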
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/models/{full_name}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/models/{full_name}", headers=headers) + + + - def delete_alias(self, full_name: str, alias: str): + def delete_alias(self + , full_name: str, alias: str + ): """Delete a Registered Model Alias. - + Deletes a registered model alias. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}", headers=headers) + + + - def get( - self, full_name: str, *, include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None - ) -> RegisteredModelInfo: + def get(self + , full_name: str + , * + , include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None) -> RegisteredModelInfo: """Get a Registered Model. - + Get a registered model. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param include_aliases: bool (optional) @@ -14018,44 +11717,42 @@ def get( :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for - + :returns: :class:`RegisteredModelInfo` """ - + query = {} - if include_aliases is not None: - query["include_aliases"] = include_aliases - if include_browse is not None: - query["include_browse"] = include_browse - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/models/{full_name}", query=query, headers=headers) + if include_aliases is not None: query['include_aliases'] = include_aliases + if include_browse is not None: query['include_browse'] = include_browse + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}', query=query + + , headers=headers + ) return RegisteredModelInfo.from_dict(res) - def list( - self, - *, - catalog_name: Optional[str] = None, - include_browse: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - schema_name: Optional[str] = None, - ) -> Iterator[RegisteredModelInfo]: - """List Registered Models. + + + + def list(self + + , * + , catalog_name: Optional[str] = None, include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None, schema_name: Optional[str] = None) -> Iterator[RegisteredModelInfo]: + """List Registered Models. 
+ List registered models. You can list registered models under a particular schema, or list all registered models in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the registered models. A regular user needs to be the owner or have the **EXECUTE** privilege on the registered model to receive the registered models in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. - + :param catalog_name: str (optional) The identifier of the catalog under which to list registered models. If specified, schema_name must be specified. @@ -14064,13 +11761,13 @@ def list( selective metadata for :param max_results: int (optional) Max number of registered models to return. - + If both catalog and schema are specified: - when max_results is not specified, the page length is set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); - when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when set to a value less than 0, an invalid parameter error is returned; - + If neither schema nor catalog is specified: - when max_results is not specified, the page length is set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); - @@ -14081,83 +11778,85 @@ def list( :param schema_name: str (optional) The identifier of the schema under which to list registered models. If specified, catalog_name must be specified. - + :returns: Iterator over :class:`RegisteredModelInfo` """ - + query = {} - if catalog_name is not None: - query["catalog_name"] = catalog_name - if include_browse is not None: - query["include_browse"] = include_browse - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - if schema_name is not None: - query["schema_name"] = schema_name - headers = { - "Accept": "application/json", - } - + if catalog_name is not None: query['catalog_name'] = catalog_name + if include_browse is not None: query['include_browse'] = include_browse + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if schema_name is not None: query['schema_name'] = schema_name + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/models", query=query, headers=headers) - if "registered_models" in json: - for v in json["registered_models"]: - yield RegisteredModelInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def set_alias(self, full_name: str, alias: str, version_num: int) -> RegisteredModelAlias: - """Set a Registered Model Alias.
+ json = self._api.do('GET','/api/2.1/unity-catalog/models', query=query + + , headers=headers + ) + if 'registered_models' in json: + for v in json['registered_models']: + yield RegisteredModelInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Set an alias on the specified registered model. + + + + def set_alias(self + , full_name: str, alias: str, version_num: int + ) -> RegisteredModelAlias: + """Set a Registered Model Alias. + + Set an alias on the specified registered model. + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the registered model :param alias: str The name of the alias :param version_num: int The version number of the model version to which the alias points - + :returns: :class:`RegisteredModelAlias` """ body = {} - if version_num is not None: - body["version_num"] = version_num - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}", body=body, headers=headers - ) + if version_num is not None: body['version_num'] = version_num + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', body=body + + , headers=headers + ) return RegisteredModelAlias.from_dict(res) - def update( - self, - full_name: str, - *, - comment: Optional[str] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - ) -> RegisteredModelInfo: - """Update a Registered Model. + + + + def update(self + , full_name: str + , * + , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None) -> RegisteredModelInfo: + """Update a Registered Model. + Updates the specified registered model. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the name, the owner or the comment of the registered model can be updated. - + :param full_name: str The three-level (fully qualified) name of the registered model :param comment: str (optional) @@ -14166,125 +11865,138 @@ def update( New name for the registered model. 
:param owner: str (optional) The identifier of the user who owns the registered model - + :returns: :class:`RegisteredModelInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/models/{full_name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/models/{full_name}', body=body + + , headers=headers + ) return RegisteredModelInfo.from_dict(res) - + + class ResourceQuotasAPI: """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and limits. For more information on resource quotas see the [Unity Catalog documentation]. - - [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas - """ - + + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas""" + def __init__(self, api_client): self._api = api_client + - def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse: - """Get information for a single resource quota. + + + + + + + def get_quota(self + , parent_securable_type: str, parent_full_name: str, quota_name: str + ) -> GetQuotaResponse: + """Get information for a single resource quota. + The GetQuota API returns usage information for a single resource quota, defined as a child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered asynchronously. The updated count might not be returned in the first call. - + :param parent_securable_type: str Securable type of the quota parent. :param parent_full_name: str Full name of the parent resource. Provide the metastore ID if the parent is a metastore. :param quota_name: str Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. - + :returns: :class:`GetQuotaResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}' + + , headers=headers + ) return GetQuotaResponse.from_dict(res) - def list_quotas( - self, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[QuotaInfo]: - """List all resource quotas under a metastore. + + + + def list_quotas(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[QuotaInfo]: + """List all resource quotas under a metastore. + ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the counts returned. 
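# A minimal sketch of both quota calls, assuming a configured WorkspaceClient.
# The parent names are placeholders, and "schema-quota" is an assumed example
# of the "<type>-quota" naming rule; real names are best read from the
# ListQuotas response.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# list_quotas() is a generator; it follows next_page_token internally (see
# the pagination loop in its body below).
for quota in w.resource_quotas.list_quotas():
    print(quota.quota_name, quota.quota_count, quota.quota_limit)

q = w.resource_quotas.get_quota(
    parent_securable_type="catalog",
    parent_full_name="main",
    quota_name="schema-quota",
)
print(q.quota_info)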
This API does not trigger a refresh of quota counts. - + :param max_results: int (optional) The number of quotas to return. :param page_token: str (optional) Opaque token for the next page of results. - + :returns: Iterator over :class:`QuotaInfo` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas", query=query, headers=headers - ) - if "quotas" in json: - for v in json["quotas"]: - yield QuotaInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - + json = self._api.do('GET','/api/2.1/unity-catalog/resource-quotas/all-resource-quotas', query=query + + , headers=headers + ) + if 'quotas' in json: + for v in json['quotas']: + yield QuotaInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + class SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema organizes tables, views and functions. To access (or list) a table or view in a schema, users must have the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT permission on the table or view.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - name: str, - catalog_name: str, - *, - comment: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - storage_root: Optional[str] = None, - ) -> SchemaInfo: - """Create a schema. + + + + + + + def create(self + , name: str, catalog_name: str + , * + , comment: Optional[str] = None, properties: Optional[Dict[str,str]] = None, storage_root: Optional[str] = None) -> SchemaInfo: + """Create a schema. + Creates a new schema for a catalog in the metastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. + :param name: str Name of schema, relative to parent catalog. :param catalog_name: str @@ -14295,91 +12007,101 @@ def create( A map of key-value properties attached to the securable. :param storage_root: str (optional) Storage root URL for managed tables within schema.
- + :returns: :class:`SchemaInfo` """ body = {} - if catalog_name is not None: - body["catalog_name"] = catalog_name - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if properties is not None: - body["properties"] = properties - if storage_root is not None: - body["storage_root"] = storage_root - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/schemas", body=body, headers=headers) + if catalog_name is not None: body['catalog_name'] = catalog_name + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if properties is not None: body['properties'] = properties + if storage_root is not None: body['storage_root'] = storage_root + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/schemas', body=body + + , headers=headers + ) return SchemaInfo.from_dict(res) - def delete(self, full_name: str, *, force: Optional[bool] = None): - """Delete a schema. + + + + def delete(self + , full_name: str + , * + , force: Optional[bool] = None): + """Delete a schema. + Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an owner of the parent catalog. - + :param full_name: str Full name of the schema. :param force: bool (optional) Force deletion even if the schema is not empty. - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/schemas/{full_name}', query=query + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/schemas/{full_name}", query=query, headers=headers) + + + - def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> SchemaInfo: + def get(self + , full_name: str + , * + , include_browse: Optional[bool] = None) -> SchemaInfo: """Get a schema. - + Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema. - + :param full_name: str Full name of the schema. :param include_browse: bool (optional) Whether to include schemas in the response for which the principal can only access selective metadata for - + :returns: :class:`SchemaInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/schemas/{full_name}", query=query, headers=headers) + if include_browse is not None: query['include_browse'] = include_browse + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/schemas/{full_name}', query=query + + , headers=headers + ) return SchemaInfo.from_dict(res) - def list( - self, - catalog_name: str, - *, - include_browse: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[SchemaInfo]: - """List schemas. + + + + def list(self + , catalog_name: str + , * + , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[SchemaInfo]: + """List schemas. + Gets an array of schemas for a catalog in the metastore. 
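# A minimal create/get/delete round trip for schemas, assuming a configured
# WorkspaceClient; the catalog and schema names are placeholders. force=True
# lets delete() remove a schema that is not empty.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
schema = w.schemas.create(name="sdk_demo", catalog_name="main")
fetched = w.schemas.get(full_name=schema.full_name)
print(fetched.full_name, fetched.owner)
w.schemas.delete(full_name=schema.full_name, force=True)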
If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Parent catalog for schemas of interest. :param include_browse: bool (optional) @@ -14392,117 +12114,109 @@ def list( (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`SchemaInfo` """ - + query = {} - if catalog_name is not None: - query["catalog_name"] = catalog_name - if include_browse is not None: - query["include_browse"] = include_browse - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if catalog_name is not None: query['catalog_name'] = catalog_name + if include_browse is not None: query['include_browse'] = include_browse + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/schemas", query=query, headers=headers) - if "schemas" in json: - for v in json["schemas"]: - yield SchemaInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - full_name: str, - *, - comment: Optional[str] = None, - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - properties: Optional[Dict[str, str]] = None, - ) -> SchemaInfo: - """Update a schema. + json = self._api.do('GET','/api/2.1/unity-catalog/schemas', query=query + + , headers=headers + ) + if 'schemas' in json: + for v in json['schemas']: + yield SchemaInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , full_name: str + , * + , comment: Optional[str] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, new_name: Optional[str] = None, owner: Optional[str] = None, properties: Optional[Dict[str,str]] = None) -> SchemaInfo: + """Update a schema. + Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** privilege on the parent catalog. - + :param full_name: str Full name of the schema. :param comment: str (optional) User-provided free-form text description. :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional) + Whether predictive optimization should be enabled for this object and objects under it. :param new_name: str (optional) New name for the schema. :param owner: str (optional) Username of current owner of schema. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. 
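# A minimal sketch for list() and update(), assuming a configured
# WorkspaceClient; names are placeholders. Only the fields passed in the
# PATCH body are changed.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for s in w.schemas.list(catalog_name="main"):
    print(s.full_name)

updated = w.schemas.update(
    full_name="main.sdk_demo",
    comment="refreshed comment",
    properties={"team": "data-platform"},
)
print(updated.properties)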
- + :returns: :class:`SchemaInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = enable_predictive_optimization.value - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - if properties is not None: - body["properties"] = properties - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/schemas/{full_name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if enable_predictive_optimization is not None: body['enable_predictive_optimization'] = enable_predictive_optimization.value + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if properties is not None: body['properties'] = properties + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/schemas/{full_name}', body=body + + , headers=headers + ) return SchemaInfo.from_dict(res) - + + class StorageCredentialsAPI: """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not have access to a storage credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. - + To create storage credentials, you must be a Databricks account admin. The account admin who creates the storage credential can delegate ownership to another user or group to manage permissions on it.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - name: str, - *, - aws_iam_role: Optional[AwsIamRoleRequest] = None, - azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, - azure_service_principal: Optional[AzureServicePrincipal] = None, - cloudflare_api_token: Optional[CloudflareApiToken] = None, - comment: Optional[str] = None, - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, - read_only: Optional[bool] = None, - skip_validation: Optional[bool] = None, - ) -> StorageCredentialInfo: - """Create a storage credential. + - Creates a new storage credential. + + + + + def create(self + , name: str + , * + , aws_iam_role: Optional[AwsIamRoleRequest] = None, azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, cloudflare_api_token: Optional[CloudflareApiToken] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> StorageCredentialInfo: + """Create a storage credential. + + Creates a new storage credential. + :param name: str The credential name. The name must be unique within the metastore. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -14521,88 +12235,99 @@ def create( Whether the storage credential is only usable for read operations. 
:param skip_validation: bool (optional) Supplying true to this argument skips validation of the created credential. - + :returns: :class:`StorageCredentialInfo` """ body = {} - if aws_iam_role is not None: - body["aws_iam_role"] = aws_iam_role.as_dict() - if azure_managed_identity is not None: - body["azure_managed_identity"] = azure_managed_identity.as_dict() - if azure_service_principal is not None: - body["azure_service_principal"] = azure_service_principal.as_dict() - if cloudflare_api_token is not None: - body["cloudflare_api_token"] = cloudflare_api_token.as_dict() - if comment is not None: - body["comment"] = comment - if databricks_gcp_service_account is not None: - body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() - if name is not None: - body["name"] = name - if read_only is not None: - body["read_only"] = read_only - if skip_validation is not None: - body["skip_validation"] = skip_validation - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/storage-credentials", body=body, headers=headers) + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() + if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() + if comment is not None: body['comment'] = comment + if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() + if name is not None: body['name'] = name + if read_only is not None: body['read_only'] = read_only + if skip_validation is not None: body['skip_validation'] = skip_validation + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/storage-credentials', body=body + + , headers=headers + ) return StorageCredentialInfo.from_dict(res) - def delete(self, name: str, *, force: Optional[bool] = None): - """Delete a credential. + + + + def delete(self + , name: str + , * + , force: Optional[bool] = None): + """Delete a credential. + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. - + :param name: str Name of the storage credential. :param force: bool (optional) Force deletion even if there are dependent external locations or external tables. - - + + """ - + query = {} - if force is not None: - query["force"] = force - headers = { - "Accept": "application/json", - } + if force is not None: query['force'] = force + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/storage-credentials/{name}', query=query + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/storage-credentials/{name}", query=query, headers=headers) + + + - def get(self, name: str) -> StorageCredentialInfo: + def get(self + , name: str + ) -> StorageCredentialInfo: """Get a credential. - + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. - + :param name: str Name of the storage credential. 
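# A minimal usage sketch for create() with an AWS IAM role, assuming a
# configured WorkspaceClient; the credential name and role ARN are
# placeholders. skip_validation=True skips the cloud round trip at creation.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRoleRequest

w = WorkspaceClient()
cred = w.storage_credentials.create(
    name="sdk-demo-credential",
    aws_iam_role=AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/demo"),
    comment="created via the Python SDK",
    skip_validation=True,
)
print(cred.id, cred.name)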
- + :returns: :class:`StorageCredentialInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/storage-credentials/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/storage-credentials/{name}' + + , headers=headers + ) return StorageCredentialInfo.from_dict(res) - def list( - self, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[StorageCredentialInfo]: - """List credentials. + + + + def list(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[StorageCredentialInfo]: + """List credentials. + Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of storage credentials to return. If not set, all the storage credentials are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of @@ -14611,51 +12336,42 @@ def list( returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`StorageCredentialInfo` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + if "max_results" not in query: query['max_results'] = 0 while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/storage-credentials", query=query, headers=headers) - if "storage_credentials" in json: - for v in json["storage_credentials"]: - yield StorageCredentialInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - name: str, - *, - aws_iam_role: Optional[AwsIamRoleRequest] = None, - azure_managed_identity: Optional[AzureManagedIdentityResponse] = None, - azure_service_principal: Optional[AzureServicePrincipal] = None, - cloudflare_api_token: Optional[CloudflareApiToken] = None, - comment: Optional[str] = None, - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, - force: Optional[bool] = None, - isolation_mode: Optional[IsolationMode] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - read_only: Optional[bool] = None, - skip_validation: Optional[bool] = None, - ) -> StorageCredentialInfo: - """Update a credential. + json = self._api.do('GET','/api/2.1/unity-catalog/storage-credentials', query=query + + , headers=headers + ) + if 'storage_credentials' in json: + for v in json['storage_credentials']: + yield StorageCredentialInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Updates a storage credential on the metastore. 
+ + + + def update(self + , name: str + , * + , aws_iam_role: Optional[AwsIamRoleRequest] = None, azure_managed_identity: Optional[AzureManagedIdentityResponse] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, cloudflare_api_token: Optional[CloudflareApiToken] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> StorageCredentialInfo: + """Update a credential. + + Updates a storage credential on the metastore. + :param name: str Name of the storage credential. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -14681,67 +12397,50 @@ def update( Whether the storage credential is only usable for read operations. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the updated credential. - + :returns: :class:`StorageCredentialInfo` """ body = {} - if aws_iam_role is not None: - body["aws_iam_role"] = aws_iam_role.as_dict() - if azure_managed_identity is not None: - body["azure_managed_identity"] = azure_managed_identity.as_dict() - if azure_service_principal is not None: - body["azure_service_principal"] = azure_service_principal.as_dict() - if cloudflare_api_token is not None: - body["cloudflare_api_token"] = cloudflare_api_token.as_dict() - if comment is not None: - body["comment"] = comment - if databricks_gcp_service_account is not None: - body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() - if force is not None: - body["force"] = force - if isolation_mode is not None: - body["isolation_mode"] = isolation_mode.value - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - if read_only is not None: - body["read_only"] = read_only - if skip_validation is not None: - body["skip_validation"] = skip_validation - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/storage-credentials/{name}", body=body, headers=headers) + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() + if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() + if comment is not None: body['comment'] = comment + if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() + if force is not None: body['force'] = force + if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if read_only is not None: body['read_only'] = read_only + if skip_validation is not None: body['skip_validation'] = skip_validation + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/storage-credentials/{name}', body=body + + , headers=headers + ) return StorageCredentialInfo.from_dict(res) - def validate( - self, - *, - aws_iam_role: Optional[AwsIamRoleRequest] = None, - 
azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, - azure_service_principal: Optional[AzureServicePrincipal] = None, - cloudflare_api_token: Optional[CloudflareApiToken] = None, - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, - external_location_name: Optional[str] = None, - read_only: Optional[bool] = None, - storage_credential_name: Optional[str] = None, - url: Optional[str] = None, - ) -> ValidateStorageCredentialResponse: - """Validate a storage credential. + + + + def validate(self + + , * + , aws_iam_role: Optional[AwsIamRoleRequest] = None, azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, cloudflare_api_token: Optional[CloudflareApiToken] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, external_location_name: Optional[str] = None, read_only: Optional[bool] = None, storage_credential_name: Optional[str] = None, url: Optional[str] = None) -> ValidateStorageCredentialResponse: + """Validate a storage credential. + Validates a storage credential. At least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used for validation. And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. - + Either the __storage_credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the storage credential owner or have the **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential. - + :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional) @@ -14760,104 +12459,113 @@ def validate( The name of the storage credential to validate. :param url: str (optional) The external location url to validate. 
- + :returns: :class:`ValidateStorageCredentialResponse` """ body = {} - if aws_iam_role is not None: - body["aws_iam_role"] = aws_iam_role.as_dict() - if azure_managed_identity is not None: - body["azure_managed_identity"] = azure_managed_identity.as_dict() - if azure_service_principal is not None: - body["azure_service_principal"] = azure_service_principal.as_dict() - if cloudflare_api_token is not None: - body["cloudflare_api_token"] = cloudflare_api_token.as_dict() - if databricks_gcp_service_account is not None: - body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() - if external_location_name is not None: - body["external_location_name"] = external_location_name - if read_only is not None: - body["read_only"] = read_only - if storage_credential_name is not None: - body["storage_credential_name"] = storage_credential_name - if url is not None: - body["url"] = url - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/validate-storage-credentials", body=body, headers=headers) + if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() + if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() + if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() + if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() + if external_location_name is not None: body['external_location_name'] = external_location_name + if read_only is not None: body['read_only'] = read_only + if storage_credential_name is not None: body['storage_credential_name'] = storage_credential_name + if url is not None: body['url'] = url + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/validate-storage-credentials', body=body + + , headers=headers + ) return ValidateStorageCredentialResponse.from_dict(res) - + + class SystemSchemasAPI: """A system schema is a schema that lives within the system catalog. A system schema may contain information about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.""" - + def __init__(self, api_client): self._api = api_client + - def disable(self, metastore_id: str, schema_name: str): - """Disable a system schema. + + + + + + + def disable(self + , metastore_id: str, schema_name: str + ): + """Disable a system schema. + Disables the system schema and removes it from the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. 
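For the validate endpoint implemented just above, a minimal sketch; the credential and bucket names are hypothetical placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Validate an existing credential against a candidate storage URL.
    result = w.storage_credentials.validate(
        storage_credential_name='my_credential',  # hypothetical credential name
        url='s3://my-bucket/landing',             # hypothetical storage URL
    )
    print(result.as_dict())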
- - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}", headers=headers - ) + + + - def enable(self, metastore_id: str, schema_name: str, *, catalog_name: Optional[str] = None): + def enable(self + , metastore_id: str, schema_name: str + , * + , catalog_name: Optional[str] = None): """Enable a system schema. - + Enables the system schema and adds it to the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. :param catalog_name: str (optional) - the catalog for which the system schema is to enabled in - - + the catalog in which the system schema is to be enabled + + """ body = {} - if catalog_name is not None: - body["catalog_name"] = catalog_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PUT", - f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}", - body=body, - headers=headers, - ) - - def list( - self, metastore_id: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[SystemSchemaInfo]: - """List system schemas. + if catalog_name is not None: body['catalog_name'] = catalog_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}', body=body + + , headers=headers + ) + + + + + + def list(self + , metastore_id: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[SystemSchemaInfo]: + """List system schemas. + Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The ID for the metastore in which the system schema resides. :param max_results: int (optional) @@ -14867,91 +12575,106 @@ def list( is returned; - If not set, all the schemas are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query.
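Taken together, the enable/list endpoints above support a small bootstrap flow. A sketch, assuming a current-metastore lookup via w.metastores.current(); the schema name 'billing' is illustrative:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    metastore_id = w.metastores.current().metastore_id
    # Enable one system schema, then enumerate what is now available.
    w.system_schemas.enable(metastore_id=metastore_id, schema_name='billing')
    for schema in w.system_schemas.list(metastore_id=metastore_id):
        print(schema.schema, schema.state)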
- + :returns: Iterator over :class:`SystemSchemaInfo` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas", query=query, headers=headers - ) - if "schemas" in json: - for v in json["schemas"]: - yield SystemSchemaInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - + json = self._api.do('GET',f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas', query=query + + , headers=headers + ) + if 'schemas' in json: + for v in json['schemas']: + yield SystemSchemaInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + class TableConstraintsAPI: """Primary key and foreign key constraints encode relationships between fields in tables. - + Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a primary key in another table. This primary key is the parent constraint of the foreign key and the table this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child constraint of its referenced primary key; the table of the foreign key is the child table of the primary key. - + You can declare primary keys and foreign keys as part of the table specification during table creation. You can also add or drop constraints on existing tables.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, full_name_arg: str, constraint: TableConstraint) -> TableConstraint: - """Create a table constraint. + - Creates a new table constraint. + + + + + def create(self + , full_name_arg: str, constraint: TableConstraint + ) -> TableConstraint: + """Create a table constraint. + + Creates a new table constraint. + For the table constraint creation to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the owner of the referenced parent table. - + :param full_name_arg: str The full name of the table referenced by the constraint. :param constraint: :class:`TableConstraint` A table constraint, as defined by *one* of the following fields being set: __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. 
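A sketch of creating the primary-key variant described above; the table and constraint names are placeholders, and the PrimaryKeyConstraint helper is assumed from this catalog module:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import PrimaryKeyConstraint, TableConstraint

    w = WorkspaceClient()
    # Attach an informational primary-key constraint to an existing table.
    w.table_constraints.create(
        full_name_arg='main.default.orders',  # placeholder three-level table name
        constraint=TableConstraint(
            primary_key_constraint=PrimaryKeyConstraint(name='orders_pk', child_columns=['order_id'])
        ),
    )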
- + :returns: :class:`TableConstraint` """ body = {} - if constraint is not None: - body["constraint"] = constraint.as_dict() - if full_name_arg is not None: - body["full_name_arg"] = full_name_arg - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/constraints", body=body, headers=headers) + if constraint is not None: body['constraint'] = constraint.as_dict() + if full_name_arg is not None: body['full_name_arg'] = full_name_arg + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/constraints', body=body + + , headers=headers + ) return TableConstraint.from_dict(res) - def delete(self, full_name: str, constraint_name: str, cascade: bool): - """Delete a table constraint. + + + + def delete(self + , full_name: str, constraint_name: str, cascade: bool + ): + """Delete a table constraint. + Deletes a table constraint. - + For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table. - if __cascade__ argument is **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG** privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner of the table. - + :param full_name: str Full name of the table referenced by the constraint. :param constraint_name: str @@ -14959,94 +12682,114 @@ def delete(self, full_name: str, constraint_name: str, cascade: bool): :param cascade: bool If true, try deleting all child constraints of the current constraint. If false, reject this operation if the current constraint has any child constraints. - - + + """ - + query = {} - if cascade is not None: - query["cascade"] = cascade - if constraint_name is not None: - query["constraint_name"] = constraint_name - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.1/unity-catalog/constraints/{full_name}", query=query, headers=headers) - + if cascade is not None: query['cascade'] = cascade + if constraint_name is not None: query['constraint_name'] = constraint_name + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/constraints/{full_name}', query=query + + , headers=headers + ) + + + class TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows of data. To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the USE_SCHEMA permission on its parent schema. - + A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table (rather than a managed or external table).""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, full_name: str): - """Delete a table. + + + + + + + def delete(self + , full_name: str + ): + """Delete a table. + Deletes a table from the specified parent catalog and schema. 
The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/tables/{full_name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.1/unity-catalog/tables/{full_name}", headers=headers) + + + - def exists(self, full_name: str) -> TableExistsResponse: + def exists(self + , full_name: str + ) -> TableExistsResponse: """Get boolean reflecting if table exists. - + Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent schema. - + :param full_name: str Full name of the table. - + :returns: :class:`TableExistsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{full_name}/exists", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{full_name}/exists' + + , headers=headers + ) return TableExistsResponse.from_dict(res) - def get( - self, - full_name: str, - *, - include_browse: Optional[bool] = None, - include_delta_metadata: Optional[bool] = None, - include_manifest_capabilities: Optional[bool] = None, - ) -> TableInfo: - """Get a table. + + + + def get(self + , full_name: str + , * + , include_browse: Optional[bool] = None, include_delta_metadata: Optional[bool] = None, include_manifest_capabilities: Optional[bool] = None) -> TableInfo: + """Get a table. + Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. - + :param full_name: str Full name of the table. :param include_browse: bool (optional) @@ -15056,46 +12799,38 @@ def get( Whether delta metadata should be included in the response. :param include_manifest_capabilities: bool (optional) Whether to include a manifest containing capabilities the table has. 
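The exists/get pair above composes naturally; a minimal sketch with a placeholder table name:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    full_name = 'main.default.orders'  # placeholder three-level name
    # Probe first, then fetch full metadata only when the table is present.
    if w.tables.exists(full_name=full_name).table_exists:
        info = w.tables.get(full_name=full_name, include_delta_metadata=True)
        print(info.table_type, info.storage_location)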
- + :returns: :class:`TableInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - if include_delta_metadata is not None: - query["include_delta_metadata"] = include_delta_metadata - if include_manifest_capabilities is not None: - query["include_manifest_capabilities"] = include_manifest_capabilities - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{full_name}", query=query, headers=headers) + if include_browse is not None: query['include_browse'] = include_browse + if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata + if include_manifest_capabilities is not None: query['include_manifest_capabilities'] = include_manifest_capabilities + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{full_name}', query=query + + , headers=headers + ) return TableInfo.from_dict(res) - def list( - self, - catalog_name: str, - schema_name: str, - *, - include_browse: Optional[bool] = None, - include_delta_metadata: Optional[bool] = None, - include_manifest_capabilities: Optional[bool] = None, - max_results: Optional[int] = None, - omit_columns: Optional[bool] = None, - omit_properties: Optional[bool] = None, - omit_username: Optional[bool] = None, - page_token: Optional[str] = None, - ) -> Iterator[TableInfo]: - """List tables. + + + + def list(self + , catalog_name: str, schema_name: str + , * + , include_browse: Optional[bool] = None, include_delta_metadata: Optional[bool] = None, include_manifest_capabilities: Optional[bool] = None, max_results: Optional[int] = None, omit_columns: Optional[bool] = None, omit_properties: Optional[bool] = None, omit_username: Optional[bool] = None, page_token: Optional[str] = None) -> Iterator[TableInfo]: + """List tables. + Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. :param schema_name: str @@ -15121,69 +12856,59 @@ def list( not. :param page_token: str (optional) Opaque token to send for the next page of results (pagination). 
- + :returns: Iterator over :class:`TableInfo` """ - + query = {} - if catalog_name is not None: - query["catalog_name"] = catalog_name - if include_browse is not None: - query["include_browse"] = include_browse - if include_delta_metadata is not None: - query["include_delta_metadata"] = include_delta_metadata - if include_manifest_capabilities is not None: - query["include_manifest_capabilities"] = include_manifest_capabilities - if max_results is not None: - query["max_results"] = max_results - if omit_columns is not None: - query["omit_columns"] = omit_columns - if omit_properties is not None: - query["omit_properties"] = omit_properties - if omit_username is not None: - query["omit_username"] = omit_username - if page_token is not None: - query["page_token"] = page_token - if schema_name is not None: - query["schema_name"] = schema_name - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if catalog_name is not None: query['catalog_name'] = catalog_name + if include_browse is not None: query['include_browse'] = include_browse + if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata + if include_manifest_capabilities is not None: query['include_manifest_capabilities'] = include_manifest_capabilities + if max_results is not None: query['max_results'] = max_results + if omit_columns is not None: query['omit_columns'] = omit_columns + if omit_properties is not None: query['omit_properties'] = omit_properties + if omit_username is not None: query['omit_username'] = omit_username + if page_token is not None: query['page_token'] = page_token + if schema_name is not None: query['schema_name'] = schema_name + headers = {'Accept': 'application/json',} + + + if "max_results" not in query: query['max_results'] = 0 while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/tables", query=query, headers=headers) - if "tables" in json: - for v in json["tables"]: - yield TableInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_summaries( - self, - catalog_name: str, - *, - include_manifest_capabilities: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - schema_name_pattern: Optional[str] = None, - table_name_pattern: Optional[str] = None, - ) -> Iterator[TableSummary]: - """List table summaries. + json = self._api.do('GET','/api/2.1/unity-catalog/tables', query=query + + , headers=headers + ) + if 'tables' in json: + for v in json['tables']: + yield TableInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def list_summaries(self + , catalog_name: str + , * + , include_manifest_capabilities: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None, schema_name_pattern: Optional[str] = None, table_name_pattern: Optional[str] = None) -> Iterator[TableSummary]: + """List table summaries. + Gets an array of summaries for tables for a schema and catalog within the metastore. 
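A sketch of the list endpoint above; the omit_* flags trim the payload when only names are needed, and catalog/schema names are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Pagination is handled by the iterator; columns are omitted to keep responses small.
    for t in w.tables.list(catalog_name='main', schema_name='default', omit_columns=True):
        print(t.full_name)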
The table summaries returned are either: - + * summaries for tables (within the current metastore and parent catalog and schema), when the user is a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the **USE_CATALOG** privilege on the parent catalog. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. :param include_manifest_capabilities: bool (optional) @@ -15200,63 +12925,67 @@ def list_summaries( A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty. :param table_name_pattern: str (optional) A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty. - + :returns: Iterator over :class:`TableSummary` """ - + query = {} - if catalog_name is not None: - query["catalog_name"] = catalog_name - if include_manifest_capabilities is not None: - query["include_manifest_capabilities"] = include_manifest_capabilities - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - if schema_name_pattern is not None: - query["schema_name_pattern"] = schema_name_pattern - if table_name_pattern is not None: - query["table_name_pattern"] = table_name_pattern - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if catalog_name is not None: query['catalog_name'] = catalog_name + if include_manifest_capabilities is not None: query['include_manifest_capabilities'] = include_manifest_capabilities + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if schema_name_pattern is not None: query['schema_name_pattern'] = schema_name_pattern + if table_name_pattern is not None: query['table_name_pattern'] = table_name_pattern + headers = {'Accept': 'application/json',} + + + if "max_results" not in query: query['max_results'] = 0 while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/table-summaries", query=query, headers=headers) - if "tables" in json: - for v in json["tables"]: - yield TableSummary.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, full_name: str, *, owner: Optional[str] = None): - """Update a table owner. + json = self._api.do('GET','/api/2.1/unity-catalog/table-summaries', query=query + + , headers=headers + ) + if 'tables' in json: + for v in json['tables']: + yield TableSummary.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , full_name: str + , * + , owner: Optional[str] = None): + """Update a table owner. + Change the owner of the table. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. 
:param owner: str (optional) - - + + """ body = {} - if owner is not None: - body["owner"] = owner - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.1/unity-catalog/tables/{full_name}", body=body, headers=headers) - + if owner is not None: body['owner'] = owner + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.1/unity-catalog/tables/{full_name}', body=body + + , headers=headers + ) + + + class TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locations where table data is stored in Databricks. These credentials are employed to provide secure and @@ -15270,42 +12999,50 @@ class TemporaryTableCredentialsAPI: by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for security reasons.""" - + def __init__(self, api_client): self._api = api_client + - def generate_temporary_table_credentials( - self, *, operation: Optional[TableOperation] = None, table_id: Optional[str] = None - ) -> GenerateTemporaryTableCredentialResponse: - """Generate a temporary table credential. + + + + + + + def generate_temporary_table_credentials(self + + , * + , operation: Optional[TableOperation] = None, table_id: Optional[str] = None) -> GenerateTemporaryTableCredentialResponse: + """Generate a temporary table credential. + Get a short-lived credential for directly accessing the table data on cloud storage. The metastore must have external_access_enabled flag set to true (default false). The caller must have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog owners. - + :param operation: :class:`TableOperation` (optional) The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is specified, the credentials returned will have write permissions, otherwise, it will be read only. :param table_id: str (optional) UUID of the table to read or write. - + :returns: :class:`GenerateTemporaryTableCredentialResponse` """ body = {} - if operation is not None: - body["operation"] = operation.value - if table_id is not None: - body["table_id"] = table_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/unity-catalog/temporary-table-credentials", body=body, headers=headers) + if operation is not None: body['operation'] = operation.value + if table_id is not None: body['table_id'] = table_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/unity-catalog/temporary-table-credentials', body=body + + , headers=headers + ) return GenerateTemporaryTableCredentialResponse.from_dict(res) - + + class VolumesAPI: """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.
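For the credential-vending endpoint above, a minimal sketch; it assumes external_access_enabled is set on the metastore, and the table name is a placeholder:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import TableOperation

    w = WorkspaceClient()
    table = w.tables.get(full_name='main.default.orders')  # placeholder table
    # Vend a short-lived, read-only credential for the table's storage location.
    creds = w.temporary_table_credentials.generate_temporary_table_credentials(
        operation=TableOperation.READ,
        table_id=table.table_id,
    )
    print(creds.expiration_time)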
Use cases include running machine learning on unstructured data such as image, audio, video, or PDF @@ -15313,38 +13050,40 @@ class VolumesAPI: that require access to the local file system on cluster machines, storing library and config files of arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or transforming and querying non-tabular data files in ETL.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - catalog_name: str, - schema_name: str, - name: str, - volume_type: VolumeType, - *, - comment: Optional[str] = None, - storage_location: Optional[str] = None, - ) -> VolumeInfo: - """Create a Volume. + - Creates a new volume. + + + + + def create(self + , catalog_name: str, schema_name: str, name: str, volume_type: VolumeType + , * + , comment: Optional[str] = None, storage_location: Optional[str] = None) -> VolumeInfo: + """Create a Volume. + + Creates a new volume. + The user could create either an external volume or a managed volume. An external volume will be created in the specified external location, while a managed volume will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For the volume creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have **CREATE VOLUME** privilege on the parent schema. - + For an external volume, the following conditions also need to be satisfied: - The caller must have **CREATE EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes existing in the specified storage location. - The specified storage location is not under the location of other tables, nor volumes, or catalogs or schemas. - + :param catalog_name: str The name of the catalog where the schema and the volume are :param schema_name: str @@ -15355,76 +13094,79 @@ def create( The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore.
[Learn more] - + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) The storage location on the cloud - + :returns: :class:`VolumeInfo` """ body = {} - if catalog_name is not None: - body["catalog_name"] = catalog_name - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if schema_name is not None: - body["schema_name"] = schema_name - if storage_location is not None: - body["storage_location"] = storage_location - if volume_type is not None: - body["volume_type"] = volume_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/volumes", body=body, headers=headers) + if catalog_name is not None: body['catalog_name'] = catalog_name + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if schema_name is not None: body['schema_name'] = schema_name + if storage_location is not None: body['storage_location'] = storage_location + if volume_type is not None: body['volume_type'] = volume_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/volumes', body=body + + , headers=headers + ) return VolumeInfo.from_dict(res) - def delete(self, name: str): - """Delete a Volume. + + + + def delete(self + , name: str + ): + """Delete a Volume. + Deletes a volume from the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param name: str The three-level (fully qualified) name of the volume - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/volumes/{name}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/volumes/{name}", headers=headers) - - def list( - self, - catalog_name: str, - schema_name: str, - *, - include_browse: Optional[bool] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[VolumeInfo]: - """List Volumes. + + + + def list(self + , catalog_name: str, schema_name: str + , * + , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[VolumeInfo]: + """List Volumes. + Gets an array of volumes for the current metastore under the parent catalog and schema. - + The returned volumes are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the volumes. A regular user needs to be the owner or have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str The identifier of the catalog :param schema_name: str @@ -15434,88 +13176,99 @@ def list( metadata for :param max_results: int (optional) Maximum number of volumes to return (page length). - + If not set, the page length is set to a server configured value (10000, as of 1/29/2024).
- when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter error is returned; - + Note: this parameter controls only the maximum number of volumes to return. The actual number of volumes returned in a page may be smaller than this value, including 0, even if there are more pages. :param page_token: str (optional) Opaque token returned by a previous request. It must be included in the request to retrieve the next page of results (pagination). - + :returns: Iterator over :class:`VolumeInfo` """ - + query = {} - if catalog_name is not None: - query["catalog_name"] = catalog_name - if include_browse is not None: - query["include_browse"] = include_browse - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - if schema_name is not None: - query["schema_name"] = schema_name - headers = { - "Accept": "application/json", - } - + if catalog_name is not None: query['catalog_name'] = catalog_name + if include_browse is not None: query['include_browse'] = include_browse + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if schema_name is not None: query['schema_name'] = schema_name + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/volumes", query=query, headers=headers) - if "volumes" in json: - for v in json["volumes"]: - yield VolumeInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def read(self, name: str, *, include_browse: Optional[bool] = None) -> VolumeInfo: - """Get a Volume. + json = self._api.do('GET','/api/2.1/unity-catalog/volumes', query=query + + , headers=headers + ) + if 'volumes' in json: + for v in json['volumes']: + yield VolumeInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Gets a volume from the metastore for a specific catalog and schema. + + + + def read(self + , name: str + , * + , include_browse: Optional[bool] = None) -> VolumeInfo: + """Get a Volume. + + Gets a volume from the metastore for a specific catalog and schema. + The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
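A sketch tying together the create/list calls above; catalog, schema, and volume names are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import VolumeType

    w = WorkspaceClient()
    # MANAGED: the storage location is derived from the parent schema/catalog/metastore.
    w.volumes.create(
        catalog_name='main',
        schema_name='default',
        name='raw_files',
        volume_type=VolumeType.MANAGED,
    )
    for v in w.volumes.list(catalog_name='main', schema_name='default'):
        print(v.full_name)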
- + :param name: str The three-level (fully qualified) name of the volume :param include_browse: bool (optional) Whether to include volumes in the response for which the principal can only access selective metadata for - + :returns: :class:`VolumeInfo` """ - + query = {} - if include_browse is not None: - query["include_browse"] = include_browse - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/volumes/{name}", query=query, headers=headers) + if include_browse is not None: query['include_browse'] = include_browse + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/volumes/{name}', query=query + + , headers=headers + ) return VolumeInfo.from_dict(res) - def update( - self, name: str, *, comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None - ) -> VolumeInfo: - """Update a Volume. + + + + def update(self + , name: str + , * + , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None) -> VolumeInfo: + """Update a Volume. + Updates the specified volume under the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the name, the owner or the comment of the volume could be updated. - + :param name: str The three-level (fully qualified) name of the volume :param comment: str (optional) @@ -15524,76 +13277,85 @@ def update( New name for the volume. :param owner: str (optional) The identifier of the user who owns the volume - + :returns: :class:`VolumeInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/volumes/{name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/volumes/{name}', body=body + + , headers=headers + ) return VolumeInfo.from_dict(res) - + + class WorkspaceBindingsAPI: """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable can be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list of workspaces. This API allows you to configure (bind) securables to workspaces. - + NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__. - + A securable's workspace bindings can be configured by a metastore admin or the owner of the securable. - + The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). 
- + Securable types that support binding: - catalog - storage_credential - credential - external_location""" - + def __init__(self, api_client): self._api = api_client + - def get(self, name: str) -> GetCatalogWorkspaceBindingsResponse: - """Get catalog workspace bindings. + + + + + + + def get(self + , name: str + ) -> GetCatalogWorkspaceBindingsResponse: + """Get catalog workspace bindings. + Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. - + :returns: :class:`GetCatalogWorkspaceBindingsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}' + + , headers=headers + ) return GetCatalogWorkspaceBindingsResponse.from_dict(res) - def get_bindings( - self, - securable_type: str, - securable_name: str, - *, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[WorkspaceBinding]: - """Get securable workspace bindings. + + + + def get_bindings(self + , securable_type: str, securable_name: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[WorkspaceBinding]: + """Get securable workspace bindings. + Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -15606,82 +13368,76 @@ def get_bindings( error is returned; - If not set, all the workspace bindings are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`WorkspaceBinding` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}", - query=query, - headers=headers, - ) - if "bindings" in json: - for v in json["bindings"]: - yield WorkspaceBinding.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - name: str, - *, - assign_workspaces: Optional[List[int]] = None, - unassign_workspaces: Optional[List[int]] = None, - ) -> UpdateCatalogWorkspaceBindingsResponse: - """Update catalog workspace bindings. + json = self._api.do('GET',f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', query=query + + , headers=headers + ) + if 'bindings' in json: + for v in json['bindings']: + yield WorkspaceBinding.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , name: str + , * + , assign_workspaces: Optional[List[int]] = None, unassign_workspaces: Optional[List[int]] = None) -> UpdateCatalogWorkspaceBindingsResponse: + """Update catalog workspace bindings. 
+ Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. :param assign_workspaces: List[int] (optional) A list of workspace IDs. :param unassign_workspaces: List[int] (optional) A list of workspace IDs. - + :returns: :class:`UpdateCatalogWorkspaceBindingsResponse` """ body = {} - if assign_workspaces is not None: - body["assign_workspaces"] = [v for v in assign_workspaces] - if unassign_workspaces is not None: - body["unassign_workspaces"] = [v for v in unassign_workspaces] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}", body=body, headers=headers - ) + if assign_workspaces is not None: body['assign_workspaces'] = [v for v in assign_workspaces] + if unassign_workspaces is not None: body['unassign_workspaces'] = [v for v in unassign_workspaces] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}', body=body + + , headers=headers + ) return UpdateCatalogWorkspaceBindingsResponse.from_dict(res) - def update_bindings( - self, - securable_type: str, - securable_name: str, - *, - add: Optional[List[WorkspaceBinding]] = None, - remove: Optional[List[WorkspaceBinding]] = None, - ) -> UpdateWorkspaceBindingsResponse: - """Update securable workspace bindings. + + + + def update_bindings(self + , securable_type: str, securable_name: str + , * + , add: Optional[List[WorkspaceBinding]] = None, remove: Optional[List[WorkspaceBinding]] = None) -> UpdateWorkspaceBindingsResponse: + """Update securable workspace bindings. + Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -15691,20 +13447,19 @@ def update_bindings( List of workspace bindings. :param remove: List[:class:`WorkspaceBinding`] (optional) List of workspace bindings. - + :returns: :class:`UpdateWorkspaceBindingsResponse` """ body = {} - if add is not None: - body["add"] = [v.as_dict() for v in add] - if remove is not None: - body["remove"] = [v.as_dict() for v in remove] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}", body=body, headers=headers - ) + if add is not None: body['add'] = [v.as_dict() for v in add] + if remove is not None: body['remove'] = [v.as_dict() for v in remove] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', body=body + + , headers=headers + ) return UpdateWorkspaceBindingsResponse.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index edf1cd253..7545e253b 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -1,151 +1,135 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
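Closing out catalog.py above, a sketch of the securable-binding flow via update_bindings; the workspace ID and catalog name are placeholders, and the binding-type enum value names are assumptions about this module:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import WorkspaceBinding, WorkspaceBindingBindingType

    w = WorkspaceClient()
    # Bind an ISOLATED catalog to one workspace in read-only mode.
    w.workspace_bindings.update_bindings(
        securable_type='catalog',
        securable_name='main',
        add=[WorkspaceBinding(
            workspace_id=1234567890,  # placeholder workspace ID
            binding_type=WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY,  # assumed enum value
        )],
    )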
from __future__ import annotations - -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _enum, _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -from databricks.sdk.service import catalog, jobs, settings, sharing +from databricks.sdk.service import catalog +from databricks.sdk.service import jobs +from databricks.sdk.service import settings +from databricks.sdk.service import sharing # all definitions in this file are in alphabetical order - @dataclass class CleanRoom: access_restricted: Optional[CleanRoomAccessRestricted] = None """Whether clean room access is restricted due to [CSP] [CSP]: https://docs.databricks.com/en/security/privacy/security-profile.html""" - + comment: Optional[str] = None - + created_at: Optional[int] = None """When the clean room was created, in epoch milliseconds.""" - + local_collaborator_alias: Optional[str] = None """The alias of the collaborator tied to the local clean room.""" - + name: Optional[str] = None """The name of the clean room. It should follow [UC securable naming requirements]. [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" - + output_catalog: Optional[CleanRoomOutputCatalog] = None """Output catalog of the clean room. It is an output only field. Output catalog is manipulated using the separate CreateCleanRoomOutputCatalog API.""" - + owner: Optional[str] = None """This is the Databricks username of the owner of the local clean room securable for permission management.""" - + remote_detailed_info: Optional[CleanRoomRemoteDetail] = None """Central clean room details. During creation, users need to specify cloud_vendor, region, and collaborators.global_metastore_id.
This field will not be filled in the ListCleanRooms call.""" - + status: Optional[CleanRoomStatusEnum] = None """Clean room status.""" - + updated_at: Optional[int] = None """When the clean room was last updated, in epoch milliseconds.""" - + def as_dict(self) -> dict: """Serializes the CleanRoom into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_restricted is not None: - body["access_restricted"] = self.access_restricted.value - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.local_collaborator_alias is not None: - body["local_collaborator_alias"] = self.local_collaborator_alias - if self.name is not None: - body["name"] = self.name - if self.output_catalog: - body["output_catalog"] = self.output_catalog.as_dict() - if self.owner is not None: - body["owner"] = self.owner - if self.remote_detailed_info: - body["remote_detailed_info"] = self.remote_detailed_info.as_dict() - if self.status is not None: - body["status"] = self.status.value - if self.updated_at is not None: - body["updated_at"] = self.updated_at + if self.access_restricted is not None: body['access_restricted'] = self.access_restricted.value + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.local_collaborator_alias is not None: body['local_collaborator_alias'] = self.local_collaborator_alias + if self.name is not None: body['name'] = self.name + if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict() + if self.owner is not None: body['owner'] = self.owner + if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict() + if self.status is not None: body['status'] = self.status.value + if self.updated_at is not None: body['updated_at'] = self.updated_at return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoom into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_restricted is not None: - body["access_restricted"] = self.access_restricted - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.local_collaborator_alias is not None: - body["local_collaborator_alias"] = self.local_collaborator_alias - if self.name is not None: - body["name"] = self.name - if self.output_catalog: - body["output_catalog"] = self.output_catalog - if self.owner is not None: - body["owner"] = self.owner - if self.remote_detailed_info: - body["remote_detailed_info"] = self.remote_detailed_info - if self.status is not None: - body["status"] = self.status - if self.updated_at is not None: - body["updated_at"] = self.updated_at + if self.access_restricted is not None: body['access_restricted'] = self.access_restricted + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.local_collaborator_alias is not None: body['local_collaborator_alias'] = self.local_collaborator_alias + if self.name is not None: body['name'] = self.name + if self.output_catalog: body['output_catalog'] = self.output_catalog + if self.owner is not None: body['owner'] = self.owner + if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info + if self.status is not None: body['status'] = self.status + if self.updated_at is not None: body['updated_at'] = 
self.updated_at return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoom: """Deserializes the CleanRoom from a dictionary.""" - return cls( - access_restricted=_enum(d, "access_restricted", CleanRoomAccessRestricted), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - local_collaborator_alias=d.get("local_collaborator_alias", None), - name=d.get("name", None), - output_catalog=_from_dict(d, "output_catalog", CleanRoomOutputCatalog), - owner=d.get("owner", None), - remote_detailed_info=_from_dict(d, "remote_detailed_info", CleanRoomRemoteDetail), - status=_enum(d, "status", CleanRoomStatusEnum), - updated_at=d.get("updated_at", None), - ) - + return cls(access_restricted=_enum(d, 'access_restricted', CleanRoomAccessRestricted), comment=d.get('comment', None), created_at=d.get('created_at', None), local_collaborator_alias=d.get('local_collaborator_alias', None), name=d.get('name', None), output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog), owner=d.get('owner', None), remote_detailed_info=_from_dict(d, 'remote_detailed_info', CleanRoomRemoteDetail), status=_enum(d, 'status', CleanRoomStatusEnum), updated_at=d.get('updated_at', None)) + -class CleanRoomAccessRestricted(Enum): - CSP_MISMATCH = "CSP_MISMATCH" - NO_RESTRICTION = "NO_RESTRICTION" +class CleanRoomAccessRestricted(Enum): + + + CSP_MISMATCH = 'CSP_MISMATCH' + NO_RESTRICTION = 'NO_RESTRICTION' @dataclass class CleanRoomAsset: """Metadata of the clean room asset""" - + added_at: Optional[int] = None """When the asset is added to the clean room, in epoch milliseconds.""" - + asset_type: Optional[CleanRoomAssetAssetType] = None """The type of the asset.""" - + + clean_room_name: Optional[str] = None + """The name of the clean room this asset belongs to. This is an output-only field to ensure proper + resource identification.""" + foreign_table: Optional[CleanRoomAssetForeignTable] = None """Foreign table details available to all collaborators of the clean room. Present if and only if **asset_type** is **FOREIGN_TABLE**""" - + foreign_table_local_details: Optional[CleanRoomAssetForeignTableLocalDetails] = None """Local details for a foreign that are only available to its owner. Present if and only if **asset_type** is **FOREIGN_TABLE**""" - + name: Optional[str] = None """A fully qualified name that uniquely identifies the asset within the clean room. This is also the name displayed in the clean room UI. @@ -154,153 +138,115 @@ class CleanRoomAsset: *shared_catalog*.*shared_schema*.*asset_name* For notebooks, the name is the notebook file name.""" - + notebook: Optional[CleanRoomAssetNotebook] = None """Notebook details available to all collaborators of the clean room. Present if and only if **asset_type** is **NOTEBOOK_FILE**""" - + owner_collaborator_alias: Optional[str] = None """The alias of the collaborator who owns this asset""" - + status: Optional[CleanRoomAssetStatusEnum] = None """Status of the asset""" - + table: Optional[CleanRoomAssetTable] = None """Table details available to all collaborators of the clean room. Present if and only if **asset_type** is **TABLE**""" - + table_local_details: Optional[CleanRoomAssetTableLocalDetails] = None """Local details for a table that are only available to its owner. Present if and only if **asset_type** is **TABLE**""" - + view: Optional[CleanRoomAssetView] = None """View details available to all collaborators of the clean room. 
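The serialization helpers on these dataclasses are symmetric; a round-trip sketch using only fields defined above (the raw values are placeholders, and the module path assumes an SDK build containing this patch):

    from databricks.sdk.service.cleanrooms import CleanRoomAsset, CleanRoomAssetAssetType

    raw = {
        'name': 'shared_catalog.shared_schema.orders',  # placeholder asset name
        'asset_type': 'TABLE',
        'clean_room_name': 'demo_room',  # placeholder clean room
    }
    asset = CleanRoomAsset.from_dict(raw)
    # from_dict maps the string onto the enum; as_dict re-emits plain JSON types.
    assert asset.asset_type is CleanRoomAssetAssetType.TABLE
    assert asset.as_dict()['clean_room_name'] == 'demo_room'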
Present if and only if **asset_type** is **VIEW**""" - + view_local_details: Optional[CleanRoomAssetViewLocalDetails] = None """Local details for a view that are only available to its owner. Present if and only if **asset_type** is **VIEW**""" - + volume_local_details: Optional[CleanRoomAssetVolumeLocalDetails] = None """Local details for a volume that are only available to its owner. Present if and only if **asset_type** is **VOLUME**""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAsset into a dictionary suitable for use as a JSON request body.""" body = {} - if self.added_at is not None: - body["added_at"] = self.added_at - if self.asset_type is not None: - body["asset_type"] = self.asset_type.value - if self.foreign_table: - body["foreign_table"] = self.foreign_table.as_dict() - if self.foreign_table_local_details: - body["foreign_table_local_details"] = self.foreign_table_local_details.as_dict() - if self.name is not None: - body["name"] = self.name - if self.notebook: - body["notebook"] = self.notebook.as_dict() - if self.owner_collaborator_alias is not None: - body["owner_collaborator_alias"] = self.owner_collaborator_alias - if self.status is not None: - body["status"] = self.status.value - if self.table: - body["table"] = self.table.as_dict() - if self.table_local_details: - body["table_local_details"] = self.table_local_details.as_dict() - if self.view: - body["view"] = self.view.as_dict() - if self.view_local_details: - body["view_local_details"] = self.view_local_details.as_dict() - if self.volume_local_details: - body["volume_local_details"] = self.volume_local_details.as_dict() + if self.added_at is not None: body['added_at'] = self.added_at + if self.asset_type is not None: body['asset_type'] = self.asset_type.value + if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name + if self.foreign_table: body['foreign_table'] = self.foreign_table.as_dict() + if self.foreign_table_local_details: body['foreign_table_local_details'] = self.foreign_table_local_details.as_dict() + if self.name is not None: body['name'] = self.name + if self.notebook: body['notebook'] = self.notebook.as_dict() + if self.owner_collaborator_alias is not None: body['owner_collaborator_alias'] = self.owner_collaborator_alias + if self.status is not None: body['status'] = self.status.value + if self.table: body['table'] = self.table.as_dict() + if self.table_local_details: body['table_local_details'] = self.table_local_details.as_dict() + if self.view: body['view'] = self.view.as_dict() + if self.view_local_details: body['view_local_details'] = self.view_local_details.as_dict() + if self.volume_local_details: body['volume_local_details'] = self.volume_local_details.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAsset into a shallow dictionary of its immediate attributes.""" body = {} - if self.added_at is not None: - body["added_at"] = self.added_at - if self.asset_type is not None: - body["asset_type"] = self.asset_type - if self.foreign_table: - body["foreign_table"] = self.foreign_table - if self.foreign_table_local_details: - body["foreign_table_local_details"] = self.foreign_table_local_details - if self.name is not None: - body["name"] = self.name - if self.notebook: - body["notebook"] = self.notebook - if self.owner_collaborator_alias is not None: - body["owner_collaborator_alias"] = self.owner_collaborator_alias - if self.status is not None: - body["status"] = self.status - if self.table: - body["table"] = self.table - if 
self.table_local_details: - body["table_local_details"] = self.table_local_details - if self.view: - body["view"] = self.view - if self.view_local_details: - body["view_local_details"] = self.view_local_details - if self.volume_local_details: - body["volume_local_details"] = self.volume_local_details + if self.added_at is not None: body['added_at'] = self.added_at + if self.asset_type is not None: body['asset_type'] = self.asset_type + if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name + if self.foreign_table: body['foreign_table'] = self.foreign_table + if self.foreign_table_local_details: body['foreign_table_local_details'] = self.foreign_table_local_details + if self.name is not None: body['name'] = self.name + if self.notebook: body['notebook'] = self.notebook + if self.owner_collaborator_alias is not None: body['owner_collaborator_alias'] = self.owner_collaborator_alias + if self.status is not None: body['status'] = self.status + if self.table: body['table'] = self.table + if self.table_local_details: body['table_local_details'] = self.table_local_details + if self.view: body['view'] = self.view + if self.view_local_details: body['view_local_details'] = self.view_local_details + if self.volume_local_details: body['volume_local_details'] = self.volume_local_details return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAsset: """Deserializes the CleanRoomAsset from a dictionary.""" - return cls( - added_at=d.get("added_at", None), - asset_type=_enum(d, "asset_type", CleanRoomAssetAssetType), - foreign_table=_from_dict(d, "foreign_table", CleanRoomAssetForeignTable), - foreign_table_local_details=_from_dict( - d, "foreign_table_local_details", CleanRoomAssetForeignTableLocalDetails - ), - name=d.get("name", None), - notebook=_from_dict(d, "notebook", CleanRoomAssetNotebook), - owner_collaborator_alias=d.get("owner_collaborator_alias", None), - status=_enum(d, "status", CleanRoomAssetStatusEnum), - table=_from_dict(d, "table", CleanRoomAssetTable), - table_local_details=_from_dict(d, "table_local_details", CleanRoomAssetTableLocalDetails), - view=_from_dict(d, "view", CleanRoomAssetView), - view_local_details=_from_dict(d, "view_local_details", CleanRoomAssetViewLocalDetails), - volume_local_details=_from_dict(d, "volume_local_details", CleanRoomAssetVolumeLocalDetails), - ) - + return cls(added_at=d.get('added_at', None), asset_type=_enum(d, 'asset_type', CleanRoomAssetAssetType), clean_room_name=d.get('clean_room_name', None), foreign_table=_from_dict(d, 'foreign_table', CleanRoomAssetForeignTable), foreign_table_local_details=_from_dict(d, 'foreign_table_local_details', CleanRoomAssetForeignTableLocalDetails), name=d.get('name', None), notebook=_from_dict(d, 'notebook', CleanRoomAssetNotebook), owner_collaborator_alias=d.get('owner_collaborator_alias', None), status=_enum(d, 'status', CleanRoomAssetStatusEnum), table=_from_dict(d, 'table', CleanRoomAssetTable), table_local_details=_from_dict(d, 'table_local_details', CleanRoomAssetTableLocalDetails), view=_from_dict(d, 'view', CleanRoomAssetView), view_local_details=_from_dict(d, 'view_local_details', CleanRoomAssetViewLocalDetails), volume_local_details=_from_dict(d, 'volume_local_details', CleanRoomAssetVolumeLocalDetails)) + -class CleanRoomAssetAssetType(Enum): - FOREIGN_TABLE = "FOREIGN_TABLE" - NOTEBOOK_FILE = "NOTEBOOK_FILE" - TABLE = "TABLE" - VIEW = "VIEW" - VOLUME = "VOLUME" +class CleanRoomAssetAssetType(Enum): + + + FOREIGN_TABLE = 'FOREIGN_TABLE' + NOTEBOOK_FILE = 
'NOTEBOOK_FILE' + TABLE = 'TABLE' + VIEW = 'VIEW' + VOLUME = 'VOLUME' @dataclass class CleanRoomAssetForeignTable: columns: Optional[List[catalog.ColumnInfo]] = None """The metadata information of the columns in the foreign table""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns + if self.columns: body['columns'] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetForeignTable: """Deserializes the CleanRoomAssetForeignTable from a dictionary.""" - return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo)) + return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) + + @dataclass @@ -308,117 +254,103 @@ class CleanRoomAssetForeignTableLocalDetails: local_name: Optional[str] = None """The fully qualified name of the foreign table in its owner's local metastore, in the format of *catalog*.*schema*.*foreign_table_name*""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTableLocalDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name + if self.local_name is not None: body['local_name'] = self.local_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTableLocalDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name + if self.local_name is not None: body['local_name'] = self.local_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetForeignTableLocalDetails: """Deserializes the CleanRoomAssetForeignTableLocalDetails from a dictionary.""" - return cls(local_name=d.get("local_name", None)) + return cls(local_name=d.get('local_name', None)) + + @dataclass class CleanRoomAssetNotebook: etag: Optional[str] = None """Server generated etag that represents the notebook version.""" - + notebook_content: Optional[str] = None """Base 64 representation of the notebook contents. 
This is the same format as returned by :method:workspace/export with the format of **HTML**.""" - + review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None """top-level status derived from all reviews""" - + reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing approvals or rejections""" - + runner_collaborator_aliases: Optional[List[str]] = None """collaborators that can run the notebook""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.notebook_content is not None: - body["notebook_content"] = self.notebook_content - if self.review_state is not None: - body["review_state"] = self.review_state.value - if self.reviews: - body["reviews"] = [v.as_dict() for v in self.reviews] - if self.runner_collaborator_aliases: - body["runner_collaborator_aliases"] = [v for v in self.runner_collaborator_aliases] + if self.etag is not None: body['etag'] = self.etag + if self.notebook_content is not None: body['notebook_content'] = self.notebook_content + if self.review_state is not None: body['review_state'] = self.review_state.value + if self.reviews: body['reviews'] = [v.as_dict() for v in self.reviews] + if self.runner_collaborator_aliases: body['runner_collaborator_aliases'] = [v for v in self.runner_collaborator_aliases] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetNotebook into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.notebook_content is not None: - body["notebook_content"] = self.notebook_content - if self.review_state is not None: - body["review_state"] = self.review_state - if self.reviews: - body["reviews"] = self.reviews - if self.runner_collaborator_aliases: - body["runner_collaborator_aliases"] = self.runner_collaborator_aliases + if self.etag is not None: body['etag'] = self.etag + if self.notebook_content is not None: body['notebook_content'] = self.notebook_content + if self.review_state is not None: body['review_state'] = self.review_state + if self.reviews: body['reviews'] = self.reviews + if self.runner_collaborator_aliases: body['runner_collaborator_aliases'] = self.runner_collaborator_aliases return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetNotebook: """Deserializes the CleanRoomAssetNotebook from a dictionary.""" - return cls( - etag=d.get("etag", None), - notebook_content=d.get("notebook_content", None), - review_state=_enum(d, "review_state", CleanRoomNotebookReviewNotebookReviewState), - reviews=_repeated_dict(d, "reviews", CleanRoomNotebookReview), - runner_collaborator_aliases=d.get("runner_collaborator_aliases", None), - ) - + return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None), review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState), reviews=_repeated_dict(d, 'reviews', CleanRoomNotebookReview), runner_collaborator_aliases=d.get('runner_collaborator_aliases', None)) + -class CleanRoomAssetStatusEnum(Enum): - ACTIVE = "ACTIVE" - PENDING = "PENDING" - PERMISSION_DENIED = "PERMISSION_DENIED" +class CleanRoomAssetStatusEnum(Enum): + + + ACTIVE = 'ACTIVE' + PENDING = 'PENDING' + PERMISSION_DENIED = 'PERMISSION_DENIED' @dataclass class CleanRoomAssetTable: columns: Optional[List[catalog.ColumnInfo]] = None """The metadata information of the columns in the table""" - + def 
as_dict(self) -> dict: """Serializes the CleanRoomAssetTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns + if self.columns: body['columns'] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetTable: """Deserializes the CleanRoomAssetTable from a dictionary.""" - return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo)) + return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) + + @dataclass @@ -426,57 +358,55 @@ class CleanRoomAssetTableLocalDetails: local_name: Optional[str] = None """The fully qualified name of the table in its owner's local metastore, in the format of *catalog*.*schema*.*table_name*""" - + partitions: Optional[List[sharing.Partition]] = None """Partition filtering specification for a shared table.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetTableLocalDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name - if self.partitions: - body["partitions"] = [v.as_dict() for v in self.partitions] + if self.local_name is not None: body['local_name'] = self.local_name + if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetTableLocalDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name - if self.partitions: - body["partitions"] = self.partitions + if self.local_name is not None: body['local_name'] = self.local_name + if self.partitions: body['partitions'] = self.partitions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetTableLocalDetails: """Deserializes the CleanRoomAssetTableLocalDetails from a dictionary.""" - return cls(local_name=d.get("local_name", None), partitions=_repeated_dict(d, "partitions", sharing.Partition)) + return cls(local_name=d.get('local_name', None), partitions=_repeated_dict(d, 'partitions', sharing.Partition)) + + @dataclass class CleanRoomAssetView: columns: Optional[List[catalog.ColumnInfo]] = None """The metadata information of the columns in the view""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetView into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetView into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns + if self.columns: body['columns'] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetView: """Deserializes the CleanRoomAssetView from a dictionary.""" - return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo)) + return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) + + @dataclass @@ -484,25 +414,25 @@ class CleanRoomAssetViewLocalDetails: local_name: Optional[str] = 
None """The fully qualified name of the view in its owner's local metastore, in the format of *catalog*.*schema*.*view_name*""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetViewLocalDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name + if self.local_name is not None: body['local_name'] = self.local_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetViewLocalDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name + if self.local_name is not None: body['local_name'] = self.local_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetViewLocalDetails: """Deserializes the CleanRoomAssetViewLocalDetails from a dictionary.""" - return cls(local_name=d.get("local_name", None)) + return cls(local_name=d.get('local_name', None)) + + @dataclass @@ -510,273 +440,216 @@ class CleanRoomAssetVolumeLocalDetails: local_name: Optional[str] = None """The fully qualified name of the volume in its owner's local metastore, in the format of *catalog*.*schema*.*volume_name*""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetVolumeLocalDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name + if self.local_name is not None: body['local_name'] = self.local_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetVolumeLocalDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.local_name is not None: - body["local_name"] = self.local_name + if self.local_name is not None: body['local_name'] = self.local_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetVolumeLocalDetails: """Deserializes the CleanRoomAssetVolumeLocalDetails from a dictionary.""" - return cls(local_name=d.get("local_name", None)) + return cls(local_name=d.get('local_name', None)) + + @dataclass class CleanRoomCollaborator: """Publicly visible clean room collaborator.""" - + collaborator_alias: str """Collaborator alias specified by the clean room creator. It is unique across all collaborators of this clean room, and used to derive multiple values internally such as catalog alias and clean room name for single metastore clean rooms. It should follow [UC securable naming requirements]. [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" - + display_name: Optional[str] = None """Generated display name for the collaborator. In the case of a single metastore clean room, it is the clean room name. For x-metastore clean rooms, it is the organization name of the metastore. It is not restricted to these values and could change in the future""" - + global_metastore_id: Optional[str] = None """The global Unity Catalog metastore id of the collaborator. The identifier is of format cloud:region:metastore-uuid.""" - + invite_recipient_email: Optional[str] = None """Email of the user who is receiving the clean room "invitation". It should be empty for the creator of the clean room, and non-empty for the invitees of the clean room. 
It is only returned in the output when the clean room creator calls GET""" - + invite_recipient_workspace_id: Optional[int] = None """Workspace ID of the user who is receiving the clean room "invitation". Must be specified if invite_recipient_email is specified. It should be empty when the collaborator is the creator of the clean room.""" - + organization_name: Optional[str] = None """[Organization name](:method:metastores/list#metastores-delta_sharing_organization_name) configured in the metastore""" - + def as_dict(self) -> dict: """Serializes the CleanRoomCollaborator into a dictionary suitable for use as a JSON request body.""" body = {} - if self.collaborator_alias is not None: - body["collaborator_alias"] = self.collaborator_alias - if self.display_name is not None: - body["display_name"] = self.display_name - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.invite_recipient_email is not None: - body["invite_recipient_email"] = self.invite_recipient_email - if self.invite_recipient_workspace_id is not None: - body["invite_recipient_workspace_id"] = self.invite_recipient_workspace_id - if self.organization_name is not None: - body["organization_name"] = self.organization_name + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.display_name is not None: body['display_name'] = self.display_name + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.invite_recipient_email is not None: body['invite_recipient_email'] = self.invite_recipient_email + if self.invite_recipient_workspace_id is not None: body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id + if self.organization_name is not None: body['organization_name'] = self.organization_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomCollaborator into a shallow dictionary of its immediate attributes.""" body = {} - if self.collaborator_alias is not None: - body["collaborator_alias"] = self.collaborator_alias - if self.display_name is not None: - body["display_name"] = self.display_name - if self.global_metastore_id is not None: - body["global_metastore_id"] = self.global_metastore_id - if self.invite_recipient_email is not None: - body["invite_recipient_email"] = self.invite_recipient_email - if self.invite_recipient_workspace_id is not None: - body["invite_recipient_workspace_id"] = self.invite_recipient_workspace_id - if self.organization_name is not None: - body["organization_name"] = self.organization_name + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.display_name is not None: body['display_name'] = self.display_name + if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id + if self.invite_recipient_email is not None: body['invite_recipient_email'] = self.invite_recipient_email + if self.invite_recipient_workspace_id is not None: body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id + if self.organization_name is not None: body['organization_name'] = self.organization_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomCollaborator: """Deserializes the CleanRoomCollaborator from a dictionary.""" - return cls( - collaborator_alias=d.get("collaborator_alias", None), - display_name=d.get("display_name", None), - global_metastore_id=d.get("global_metastore_id", None), -
invite_recipient_email=d.get("invite_recipient_email", None), - invite_recipient_workspace_id=d.get("invite_recipient_workspace_id", None), - organization_name=d.get("organization_name", None), - ) + return cls(collaborator_alias=d.get('collaborator_alias', None), display_name=d.get('display_name', None), global_metastore_id=d.get('global_metastore_id', None), invite_recipient_email=d.get('invite_recipient_email', None), invite_recipient_workspace_id=d.get('invite_recipient_workspace_id', None), organization_name=d.get('organization_name', None)) + + @dataclass class CleanRoomNotebookReview: comment: Optional[str] = None """review comment""" - + created_at_millis: Optional[int] = None """timestamp of when the review was submitted""" - + review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None """review outcome""" - + review_sub_reason: Optional[CleanRoomNotebookReviewNotebookReviewSubReason] = None """specified when the review was not explicitly made by a user""" - + reviewer_collaborator_alias: Optional[str] = None """collaborator alias of the reviewer""" - + def as_dict(self) -> dict: """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_at_millis is not None: - body["created_at_millis"] = self.created_at_millis - if self.review_state is not None: - body["review_state"] = self.review_state.value - if self.review_sub_reason is not None: - body["review_sub_reason"] = self.review_sub_reason.value - if self.reviewer_collaborator_alias is not None: - body["reviewer_collaborator_alias"] = self.reviewer_collaborator_alias + if self.comment is not None: body['comment'] = self.comment + if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis + if self.review_state is not None: body['review_state'] = self.review_state.value + if self.review_sub_reason is not None: body['review_sub_reason'] = self.review_sub_reason.value + if self.reviewer_collaborator_alias is not None: body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomNotebookReview into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_at_millis is not None: - body["created_at_millis"] = self.created_at_millis - if self.review_state is not None: - body["review_state"] = self.review_state - if self.review_sub_reason is not None: - body["review_sub_reason"] = self.review_sub_reason - if self.reviewer_collaborator_alias is not None: - body["reviewer_collaborator_alias"] = self.reviewer_collaborator_alias + if self.comment is not None: body['comment'] = self.comment + if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis + if self.review_state is not None: body['review_state'] = self.review_state + if self.review_sub_reason is not None: body['review_sub_reason'] = self.review_sub_reason + if self.reviewer_collaborator_alias is not None: body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomNotebookReview: """Deserializes the CleanRoomNotebookReview from a dictionary.""" - return cls( - comment=d.get("comment", None), - created_at_millis=d.get("created_at_millis", None), - review_state=_enum(d, "review_state", 
CleanRoomNotebookReviewNotebookReviewState), - review_sub_reason=_enum(d, "review_sub_reason", CleanRoomNotebookReviewNotebookReviewSubReason), - reviewer_collaborator_alias=d.get("reviewer_collaborator_alias", None), - ) - + return cls(comment=d.get('comment', None), created_at_millis=d.get('created_at_millis', None), review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState), review_sub_reason=_enum(d, 'review_sub_reason', CleanRoomNotebookReviewNotebookReviewSubReason), reviewer_collaborator_alias=d.get('reviewer_collaborator_alias', None)) + -class CleanRoomNotebookReviewNotebookReviewState(Enum): - APPROVED = "APPROVED" - PENDING = "PENDING" - REJECTED = "REJECTED" +class CleanRoomNotebookReviewNotebookReviewState(Enum): + + + APPROVED = 'APPROVED' + PENDING = 'PENDING' + REJECTED = 'REJECTED' class CleanRoomNotebookReviewNotebookReviewSubReason(Enum): - - AUTO_APPROVED = "AUTO_APPROVED" - BACKFILLED = "BACKFILLED" - + + + AUTO_APPROVED = 'AUTO_APPROVED' + BACKFILLED = 'BACKFILLED' @dataclass class CleanRoomNotebookTaskRun: """Stores information about a single task run.""" - + collaborator_job_run_info: Optional[CollaboratorJobRunInfo] = None """Job run info of the task in the runner's local workspace. This field is only included in the LIST API if the task was run within the same workspace the API is being called from. If the task run was in a different workspace under the same metastore, only the workspace_id is included.""" - + notebook_etag: Optional[str] = None """Etag of the notebook executed in this task run, used to identify the notebook version.""" - + notebook_job_run_state: Optional[jobs.CleanRoomTaskRunState] = None """State of the task run.""" - + notebook_name: Optional[str] = None """Asset name of the notebook executed in this task run.""" - + notebook_updated_at: Optional[int] = None """The timestamp of when the notebook was last updated.""" - + output_schema_expiration_time: Optional[int] = None """Expiration time of the output schema of the task run (if any), in epoch milliseconds.""" - + output_schema_name: Optional[str] = None """Name of the output schema associated with the clean rooms notebook task run.""" - + run_duration: Optional[int] = None """Duration of the task run, in milliseconds.""" - + start_time: Optional[int] = None """When the task run started, in epoch milliseconds.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomNotebookTaskRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.collaborator_job_run_info: - body["collaborator_job_run_info"] = self.collaborator_job_run_info.as_dict() - if self.notebook_etag is not None: 
body['notebook_etag'] = self.notebook_etag + if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state.as_dict() + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + if self.notebook_updated_at is not None: body['notebook_updated_at'] = self.notebook_updated_at + if self.output_schema_expiration_time is not None: body['output_schema_expiration_time'] = self.output_schema_expiration_time + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.start_time is not None: body['start_time'] = self.start_time return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomNotebookTaskRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.collaborator_job_run_info: - body["collaborator_job_run_info"] = self.collaborator_job_run_info - if self.notebook_etag is not None: - body["notebook_etag"] = self.notebook_etag - if self.notebook_job_run_state: - body["notebook_job_run_state"] = self.notebook_job_run_state - if self.notebook_name is not None: - body["notebook_name"] = self.notebook_name - if self.notebook_updated_at is not None: - body["notebook_updated_at"] = self.notebook_updated_at - if self.output_schema_expiration_time is not None: - body["output_schema_expiration_time"] = self.output_schema_expiration_time - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.run_duration is not None: - body["run_duration"] = self.run_duration - if self.start_time is not None: - body["start_time"] = self.start_time + if self.collaborator_job_run_info: body['collaborator_job_run_info'] = self.collaborator_job_run_info + if self.notebook_etag is not None: body['notebook_etag'] = self.notebook_etag + if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + if self.notebook_updated_at is not None: body['notebook_updated_at'] = self.notebook_updated_at + if self.output_schema_expiration_time is not None: body['output_schema_expiration_time'] = self.output_schema_expiration_time + if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.start_time is not None: body['start_time'] = self.start_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomNotebookTaskRun: """Deserializes the CleanRoomNotebookTaskRun from a dictionary.""" - return cls( - collaborator_job_run_info=_from_dict(d, "collaborator_job_run_info", CollaboratorJobRunInfo), - notebook_etag=d.get("notebook_etag", None), - notebook_job_run_state=_from_dict(d, "notebook_job_run_state", jobs.CleanRoomTaskRunState), - notebook_name=d.get("notebook_name", None), - notebook_updated_at=d.get("notebook_updated_at", None), - output_schema_expiration_time=d.get("output_schema_expiration_time", None), - output_schema_name=d.get("output_schema_name", None), - run_duration=d.get("run_duration", None), - start_time=d.get("start_time", None), - ) + return cls(collaborator_job_run_info=_from_dict(d, 'collaborator_job_run_info', CollaboratorJobRunInfo), notebook_etag=d.get('notebook_etag', None), notebook_job_run_state=_from_dict(d, 'notebook_job_run_state', jobs.CleanRoomTaskRunState), notebook_name=d.get('notebook_name', None), 
notebook_updated_at=d.get('notebook_updated_at', None), output_schema_expiration_time=d.get('output_schema_expiration_time', None), output_schema_name=d.get('output_schema_name', None), run_duration=d.get('run_duration', None), start_time=d.get('start_time', None)) + + @dataclass @@ -786,53 +659,48 @@ class CleanRoomOutputCatalog: field will always exist if status is CREATED. [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" - + status: Optional[CleanRoomOutputCatalogOutputCatalogStatus] = None - + def as_dict(self) -> dict: """Serializes the CleanRoomOutputCatalog into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.status is not None: - body["status"] = self.status.value + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomOutputCatalog into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.status is not None: - body["status"] = self.status + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomOutputCatalog: """Deserializes the CleanRoomOutputCatalog from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - status=_enum(d, "status", CleanRoomOutputCatalogOutputCatalogStatus), - ) - + return cls(catalog_name=d.get('catalog_name', None), status=_enum(d, 'status', CleanRoomOutputCatalogOutputCatalogStatus)) + -class CleanRoomOutputCatalogOutputCatalogStatus(Enum): - CREATED = "CREATED" - NOT_CREATED = "NOT_CREATED" - NOT_ELIGIBLE = "NOT_ELIGIBLE" +class CleanRoomOutputCatalogOutputCatalogStatus(Enum): + + + CREATED = 'CREATED' + NOT_CREATED = 'NOT_CREATED' + NOT_ELIGIBLE = 'NOT_ELIGIBLE' @dataclass class CleanRoomRemoteDetail: """Publicly visible central clean room details.""" - + central_clean_room_id: Optional[str] = None """Central clean room ID.""" - + cloud_vendor: Optional[str] = None """Cloud vendor (aws,azure,gcp) of the central clean room.""" - + collaborators: Optional[List[CleanRoomCollaborator]] = None """Collaborators in the central clean room. There should be one and only one collaborator in the list that satisfies the owner condition: @@ -840,204 +708,177 @@ class CleanRoomRemoteDetail: 1. It has the creator's global_metastore_id (determined by caller of CreateCleanRoom). 2. 
Its invite_recipient_email is empty.""" - + compliance_security_profile: Optional[ComplianceSecurityProfile] = None """The compliance security profile used to process regulated data following compliance standards.""" - + creator: Optional[CleanRoomCollaborator] = None """Collaborator who creates the clean room.""" - + egress_network_policy: Optional[settings.EgressNetworkPolicy] = None """Egress network policy to apply to the central clean room workspace.""" - + region: Optional[str] = None """Region of the central clean room.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomRemoteDetail into a dictionary suitable for use as a JSON request body.""" body = {} - if self.central_clean_room_id is not None: - body["central_clean_room_id"] = self.central_clean_room_id - if self.cloud_vendor is not None: - body["cloud_vendor"] = self.cloud_vendor - if self.collaborators: - body["collaborators"] = [v.as_dict() for v in self.collaborators] - if self.compliance_security_profile: - body["compliance_security_profile"] = self.compliance_security_profile.as_dict() - if self.creator: - body["creator"] = self.creator.as_dict() - if self.egress_network_policy: - body["egress_network_policy"] = self.egress_network_policy.as_dict() - if self.region is not None: - body["region"] = self.region + if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id + if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor + if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators] + if self.compliance_security_profile: body['compliance_security_profile'] = self.compliance_security_profile.as_dict() + if self.creator: body['creator'] = self.creator.as_dict() + if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy.as_dict() + if self.region is not None: body['region'] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomRemoteDetail into a shallow dictionary of its immediate attributes.""" body = {} - if self.central_clean_room_id is not None: - body["central_clean_room_id"] = self.central_clean_room_id - if self.cloud_vendor is not None: - body["cloud_vendor"] = self.cloud_vendor - if self.collaborators: - body["collaborators"] = self.collaborators - if self.compliance_security_profile: - body["compliance_security_profile"] = self.compliance_security_profile - if self.creator: - body["creator"] = self.creator - if self.egress_network_policy: - body["egress_network_policy"] = self.egress_network_policy - if self.region is not None: - body["region"] = self.region + if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id + if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor + if self.collaborators: body['collaborators'] = self.collaborators + if self.compliance_security_profile: body['compliance_security_profile'] = self.compliance_security_profile + if self.creator: body['creator'] = self.creator + if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy + if self.region is not None: body['region'] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomRemoteDetail: """Deserializes the CleanRoomRemoteDetail from a dictionary.""" - return cls( - central_clean_room_id=d.get("central_clean_room_id", None), - cloud_vendor=d.get("cloud_vendor", None), - collaborators=_repeated_dict(d, "collaborators", 
CleanRoomCollaborator), - compliance_security_profile=_from_dict(d, "compliance_security_profile", ComplianceSecurityProfile), - creator=_from_dict(d, "creator", CleanRoomCollaborator), - egress_network_policy=_from_dict(d, "egress_network_policy", settings.EgressNetworkPolicy), - region=d.get("region", None), - ) - + return cls(central_clean_room_id=d.get('central_clean_room_id', None), cloud_vendor=d.get('cloud_vendor', None), collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaborator), compliance_security_profile=_from_dict(d, 'compliance_security_profile', ComplianceSecurityProfile), creator=_from_dict(d, 'creator', CleanRoomCollaborator), egress_network_policy=_from_dict(d, 'egress_network_policy', settings.EgressNetworkPolicy), region=d.get('region', None)) + -class CleanRoomStatusEnum(Enum): - ACTIVE = "ACTIVE" - DELETED = "DELETED" - FAILED = "FAILED" - PROVISIONING = "PROVISIONING" +class CleanRoomStatusEnum(Enum): + + + ACTIVE = 'ACTIVE' + DELETED = 'DELETED' + FAILED = 'FAILED' + PROVISIONING = 'PROVISIONING' @dataclass class CollaboratorJobRunInfo: collaborator_alias: Optional[str] = None """Alias of the collaborator that triggered the task run.""" - + collaborator_job_id: Optional[int] = None """Job ID of the task run in the collaborator's workspace.""" - + collaborator_job_run_id: Optional[int] = None """Job run ID of the task run in the collaborator's workspace.""" - + collaborator_task_run_id: Optional[int] = None """Task run ID of the task run in the collaborator's workspace.""" - + collaborator_workspace_id: Optional[int] = None """ID of the collaborator's workspace that triggered the task run.""" - + def as_dict(self) -> dict: """Serializes the CollaboratorJobRunInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.collaborator_alias is not None: - body["collaborator_alias"] = self.collaborator_alias - if self.collaborator_job_id is not None: - body["collaborator_job_id"] = self.collaborator_job_id - if self.collaborator_job_run_id is not None: - body["collaborator_job_run_id"] = self.collaborator_job_run_id - if self.collaborator_task_run_id is not None: - body["collaborator_task_run_id"] = self.collaborator_task_run_id - if self.collaborator_workspace_id is not None: - body["collaborator_workspace_id"] = self.collaborator_workspace_id + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id + if self.collaborator_job_run_id is not None: body['collaborator_job_run_id'] = self.collaborator_job_run_id + if self.collaborator_task_run_id is not None: body['collaborator_task_run_id'] = self.collaborator_task_run_id + if self.collaborator_workspace_id is not None: body['collaborator_workspace_id'] = self.collaborator_workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the CollaboratorJobRunInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.collaborator_alias is not None: - body["collaborator_alias"] = self.collaborator_alias - if self.collaborator_job_id is not None: - body["collaborator_job_id"] = self.collaborator_job_id - if self.collaborator_job_run_id is not None: - body["collaborator_job_run_id"] = self.collaborator_job_run_id - if self.collaborator_task_run_id is not None: - body["collaborator_task_run_id"] = self.collaborator_task_run_id - if self.collaborator_workspace_id is not None: - body["collaborator_workspace_id"] = 
self.collaborator_workspace_id + if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias + if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id + if self.collaborator_job_run_id is not None: body['collaborator_job_run_id'] = self.collaborator_job_run_id + if self.collaborator_task_run_id is not None: body['collaborator_task_run_id'] = self.collaborator_task_run_id + if self.collaborator_workspace_id is not None: body['collaborator_workspace_id'] = self.collaborator_workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CollaboratorJobRunInfo: """Deserializes the CollaboratorJobRunInfo from a dictionary.""" - return cls( - collaborator_alias=d.get("collaborator_alias", None), - collaborator_job_id=d.get("collaborator_job_id", None), - collaborator_job_run_id=d.get("collaborator_job_run_id", None), - collaborator_task_run_id=d.get("collaborator_task_run_id", None), - collaborator_workspace_id=d.get("collaborator_workspace_id", None), - ) + return cls(collaborator_alias=d.get('collaborator_alias', None), collaborator_job_id=d.get('collaborator_job_id', None), collaborator_job_run_id=d.get('collaborator_job_run_id', None), collaborator_task_run_id=d.get('collaborator_task_run_id', None), collaborator_workspace_id=d.get('collaborator_workspace_id', None)) + + @dataclass class ComplianceSecurityProfile: """The compliance security profile used to process regulated data following compliance standards.""" - + compliance_standards: Optional[List[settings.ComplianceStandard]] = None """The list of compliance standards that the compliance security profile is configured to enforce.""" - + is_enabled: Optional[bool] = None """Whether the compliance security profile is enabled.""" - + def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.compliance_standards: - body["compliance_standards"] = [v.as_dict() for v in self.compliance_standards] - if self.is_enabled is not None: - body["is_enabled"] = self.is_enabled + if self.compliance_standards: body['compliance_standards'] = [v.as_dict() for v in self.compliance_standards] + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body def as_shallow_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.compliance_standards: - body["compliance_standards"] = self.compliance_standards - if self.is_enabled is not None: - body["is_enabled"] = self.is_enabled + if self.compliance_standards: body['compliance_standards'] = self.compliance_standards + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" - return cls( - compliance_standards=_repeated_dict(d, "compliance_standards", settings.ComplianceStandard), - is_enabled=d.get("is_enabled", None), - ) + return cls(compliance_standards=_repeated_dict(d, 'compliance_standards', settings.ComplianceStandard), is_enabled=d.get('is_enabled', None)) + + + + + + + + @dataclass class CreateCleanRoomOutputCatalogResponse: output_catalog: Optional[CleanRoomOutputCatalog] = None - + def as_dict(self) -> dict: """Serializes the CreateCleanRoomOutputCatalogResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if 
self.output_catalog: - body["output_catalog"] = self.output_catalog.as_dict() + if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateCleanRoomOutputCatalogResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.output_catalog: - body["output_catalog"] = self.output_catalog + if self.output_catalog: body['output_catalog'] = self.output_catalog return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCleanRoomOutputCatalogResponse: """Deserializes the CreateCleanRoomOutputCatalogResponse from a dictionary.""" - return cls(output_catalog=_from_dict(d, "output_catalog", CleanRoomOutputCatalog)) + return cls(output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog)) + + + + + + + + @dataclass class DeleteCleanRoomAssetResponse: """Response for delete clean room request. Using an empty message since the generic Empty proto does not extend UnshadedMessageMarker.""" - + def as_dict(self) -> dict: """Serializes the DeleteCleanRoomAssetResponse into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1052,6 +893,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCleanRoomAssetResponse: """Deserializes the DeleteCleanRoomAssetResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -1070,39 +916,51 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + + + + + + + @dataclass class ListCleanRoomAssetsResponse: assets: Optional[List[CleanRoomAsset]] = None """Assets in the clean room.""" - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListCleanRoomAssetsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assets: - body["assets"] = [v.as_dict() for v in self.assets] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.assets: body['assets'] = [v.as_dict() for v in self.assets] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListCleanRoomAssetsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.assets: - body["assets"] = self.assets - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.assets: body['assets'] = self.assets + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomAssetsResponse: """Deserializes the ListCleanRoomAssetsResponse from a dictionary.""" - return cls(assets=_repeated_dict(d, "assets", CleanRoomAsset), next_page_token=d.get("next_page_token", None)) + return cls(assets=_repeated_dict(d, 'assets', CleanRoomAsset), next_page_token=d.get('next_page_token', None)) + + + + +
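A minimal sketch of the next_page_token contract described above, driven by hand against the generated dataclass; fetch_page is a hypothetical callable that performs the HTTP GET and returns the decoded JSON dict, and the SDK's own list() wrappers further below implement the same loop:

from typing import Any, Callable, Dict, Iterator, Optional
from databricks.sdk.service.cleanrooms import CleanRoomAsset, ListCleanRoomAssetsResponse

def iter_assets(fetch_page: Callable[[Optional[str]], Dict[str, Any]]) -> Iterator[CleanRoomAsset]:
    token: Optional[str] = None
    while True:
        # fetch_page is a stand-in for the transport layer (hypothetical).
        page = ListCleanRoomAssetsResponse.from_dict(fetch_page(token))
        for asset in page.assets or []:
            yield asset
        token = page.next_page_token
        if not token:  # an absent or empty token marks the last page
            return

@dataclass @@ -1110,218 +968,248 @@ class ListCleanRoomNotebookTaskRunsResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages.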
page_token should be set to this value for the next request (for the next page of results).""" - + runs: Optional[List[CleanRoomNotebookTaskRun]] = None """Task runs in the clean room.""" - + def as_dict(self) -> dict: """Serializes the ListCleanRoomNotebookTaskRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.runs: - body["runs"] = [v.as_dict() for v in self.runs] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.runs: body['runs'] = [v.as_dict() for v in self.runs] return body def as_shallow_dict(self) -> dict: """Serializes the ListCleanRoomNotebookTaskRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.runs: - body["runs"] = self.runs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.runs: body['runs'] = self.runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomNotebookTaskRunsResponse: """Deserializes the ListCleanRoomNotebookTaskRunsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), runs=_repeated_dict(d, "runs", CleanRoomNotebookTaskRun) - ) + return cls(next_page_token=d.get('next_page_token', None), runs=_repeated_dict(d, 'runs', CleanRoomNotebookTaskRun)) + + + + + @dataclass class ListCleanRoomsResponse: clean_rooms: Optional[List[CleanRoom]] = None - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms: - body["clean_rooms"] = [v.as_dict() for v in self.clean_rooms] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListCleanRoomsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms: - body["clean_rooms"] = self.clean_rooms - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.clean_rooms: body['clean_rooms'] = self.clean_rooms + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomsResponse: """Deserializes the ListCleanRoomsResponse from a dictionary.""" - return cls( - clean_rooms=_repeated_dict(d, "clean_rooms", CleanRoom), next_page_token=d.get("next_page_token", None) - ) + return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoom), next_page_token=d.get('next_page_token', None)) + + + + +
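The dataclasses in this module share a uniform serialization contract: as_dict() drops unset optional fields and collapses enums to their string values, and from_dict() reverses it. A small round-trip sketch with made-up field values:

from databricks.sdk.service.cleanrooms import CleanRoom, CleanRoomStatusEnum

room = CleanRoom(name="demo_room", comment="example", status=CleanRoomStatusEnum.ACTIVE)
payload = room.as_dict()
# Unset optional fields are omitted and enums serialize to plain strings:
assert payload == {"comment": "example", "name": "demo_room", "status": "ACTIVE"}
# from_dict restores enum members from their string values:
assert CleanRoom.from_dict(payload).status is CleanRoomStatusEnum.ACTIVE

@dataclass class UpdateCleanRoomRequest: clean_room: Optional[CleanRoom] = None - + name: Optional[str] = None """Name of the clean room.""" - + def as_dict(self) -> dict: """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_room: - body["clean_room"] = self.clean_room.as_dict() - if self.name is 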
not None: - body["name"] = self.name + if self.clean_room: body['clean_room'] = self.clean_room.as_dict() + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_room: - body["clean_room"] = self.clean_room - if self.name is not None: - body["name"] = self.name + if self.clean_room: body['clean_room'] = self.clean_room + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCleanRoomRequest: """Deserializes the UpdateCleanRoomRequest from a dictionary.""" - return cls(clean_room=_from_dict(d, "clean_room", CleanRoom), name=d.get("name", None)) + return cls(clean_room=_from_dict(d, 'clean_room', CleanRoom), name=d.get('name', None)) + + + + class CleanRoomAssetsAPI: """Clean room assets are data and code objects — tables, volumes, and notebooks that are shared with the clean room.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset: - """Create an asset. + + + + + + + def create(self + , clean_room_name: str, asset: CleanRoomAsset + ) -> CleanRoomAsset: + """Create an asset. + Create a clean room asset — share an asset like a notebook or table into the clean room. For each UC asset that is added through this method, the clean room owner must also have enough privilege on the asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to access the asset. Typically, you should use a group as the clean room owner. - + :param clean_room_name: str Name of the clean room. :param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` """ body = asset.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/clean-rooms/{clean_room_name}/assets", body=body, headers=headers) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/clean-rooms/{clean_room_name}/assets', body=body + + , headers=headers + ) return CleanRoomAsset.from_dict(res) - def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str): - """Delete an asset. + + + + def delete(self + , clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str + ): + """Delete an asset. + Delete a clean room asset - unshare/remove the asset from the clean room - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. - :param asset_full_name: str + :param name: str The fully qualified name of the asset, it is the same as the name field in CleanRoomAsset. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}", - headers=headers, - ) + + + 
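A usage sketch for the delete call above, which this patch changes to take name instead of asset_full_name. It assumes a configured WorkspaceClient that exposes this service as clean_room_assets; the clean room and notebook names are illustrative:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoomAssetAssetType

w = WorkspaceClient()
# Unshare a notebook; the enum's .value is interpolated into the URL path.
w.clean_room_assets.delete(
    clean_room_name="demo_room",       # illustrative
    asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE,
    name="demo_notebook",              # illustrative
)

- def get(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) -> CleanRoomAsset: + def get(self + , clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str + ) -> CleanRoomAsset: """Get an asset. 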
- + Get the details of a clean room asset by its type and full name. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. - :param asset_full_name: str + :param name: str The fully qualified name of the asset, it is the same as the name field in CleanRoomAsset. - + :returns: :class:`CleanRoomAsset` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}' + + , headers=headers + ) return CleanRoomAsset.from_dict(res) - def list(self, clean_room_name: str, *, page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]: - """List assets. + + + + def list(self + , clean_room_name: str + , * + , page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]: + """List assets. + :param clean_room_name: str Name of the clean room. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomAsset` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", f"/api/2.0/clean-rooms/{clean_room_name}/assets", query=query, headers=headers) - if "assets" in json: - for v in json["assets"]: - yield CleanRoomAsset.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset - ) -> CleanRoomAsset: - """Update an asset. + json = self._api.do('GET',f'/api/2.0/clean-rooms/{clean_room_name}/assets', query=query + + , headers=headers + ) + if 'assets' in json: + for v in json['assets']: + yield CleanRoomAsset.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset + ) -> CleanRoomAsset: + """Update an asset. + Update a clean room asset. For example, updating the content of a notebook; changing the shared partitions of a table; etc. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` @@ -1329,49 +1217,50 @@ def update( :param name: str A fully qualified name that uniquely identifies the asset within the clean room. This is also the name displayed in the clean room UI. - + For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - + For notebooks, the name is the notebook file name.
:param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` """ body = asset.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}", - body=body, - headers=headers, - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}', body=body + + , headers=headers + ) return CleanRoomAsset.from_dict(res) - + + class CleanRoomTaskRunsAPI: """Clean room task runs are the executions of notebooks in a clean room.""" - + def __init__(self, api_client): self._api = api_client + - def list( - self, - clean_room_name: str, - *, - notebook_name: Optional[str] = None, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[CleanRoomNotebookTaskRun]: - """List notebook task runs. + - List all the historical notebook task runs in a clean room. + + + + + def list(self + , clean_room_name: str + , * + , notebook_name: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CleanRoomNotebookTaskRun]: + """List notebook task runs. + + List all the historical notebook task runs in a clean room. + :param clean_room_name: str Name of the clean room. :param notebook_name: str (optional) @@ -1380,176 +1269,225 @@ def list( The maximum number of task runs to return. Currently ignored - all runs will be returned. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomNotebookTaskRun` """ - + query = {} - if notebook_name is not None: - query["notebook_name"] = notebook_name - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if notebook_name is not None: query['notebook_name'] = notebook_name + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", f"/api/2.0/clean-rooms/{clean_room_name}/runs", query=query, headers=headers) - if "runs" in json: - for v in json["runs"]: - yield CleanRoomNotebookTaskRun.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - + json = self._api.do('GET',f'/api/2.0/clean-rooms/{clean_room_name}/runs', query=query + + , headers=headers + ) + if 'runs' in json: + for v in json['runs']: + yield CleanRoomNotebookTaskRun.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + class CleanRoomsAPI: """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, clean_room: CleanRoom) -> CleanRoom: - """Create a clean room. + + + + + + + def create(self + , clean_room: CleanRoom + ) -> CleanRoom: + """Create a clean room. + Create a new clean room with the specified collaborators. 
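A minimal usage sketch for the asset methods above (illustrative only, not part of the generated module; the `WorkspaceClient` accessor name and the clean room / notebook names are assumptions):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoomAsset, CleanRoomAssetAssetType

w = WorkspaceClient()

# create() serializes the asset with as_dict() and POSTs it, as shown above.
created = w.clean_room_assets.create(
    clean_room_name="demo_clean_room",  # hypothetical clean room
    asset=CleanRoomAsset(
        name="analysis_notebook",
        asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE,
    ),
)

# list() hides the next_page_token loop behind a plain iterator.
for asset in w.clean_room_assets.list(clean_room_name="demo_clean_room"):
    print(asset.name)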
This method is asynchronous; the returned name field inside the clean_room field can be used to poll the clean room status, using the :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean room will be usable once it enters an ACTIVE state. - + The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - + :param clean_room: :class:`CleanRoom` - + :returns: :class:`CleanRoom` """ body = clean_room.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/clean-rooms", body=body, headers=headers) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/clean-rooms', body=body + + , headers=headers + ) return CleanRoom.from_dict(res) - def create_output_catalog( - self, clean_room_name: str, output_catalog: CleanRoomOutputCatalog - ) -> CreateCleanRoomOutputCatalogResponse: - """Create an output catalog. + + + + def create_output_catalog(self + , clean_room_name: str, output_catalog: CleanRoomOutputCatalog + ) -> CreateCleanRoomOutputCatalogResponse: + """Create an output catalog. + Create the output catalog of the clean room. - + :param clean_room_name: str Name of the clean room. :param output_catalog: :class:`CleanRoomOutputCatalog` - + :returns: :class:`CreateCleanRoomOutputCatalogResponse` """ body = output_catalog.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/clean-rooms/{clean_room_name}/output-catalogs", body=body, headers=headers - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/clean-rooms/{clean_room_name}/output-catalogs', body=body + + , headers=headers + ) return CreateCleanRoomOutputCatalogResponse.from_dict(res) - def delete(self, name: str): - """Delete a clean room. + + + + def delete(self + , name: str + ): + """Delete a clean room. + Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other collaborators have not deleted the clean room, they will still have the clean room in their metastore, but it will be in a DELETED state and no operations other than deletion can be performed on it. - + :param name: str Name of the clean room. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/clean-rooms/{name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/clean-rooms/{name}", headers=headers) + + + - def get(self, name: str) -> CleanRoom: + def get(self + , name: str + ) -> CleanRoom: """Get a clean room. - + Get the details of a clean room given its name. - + :param name: str - + :returns: :class:`CleanRoom` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/clean-rooms/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/clean-rooms/{name}' + + , headers=headers + ) return CleanRoom.from_dict(res) - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CleanRoom]: - """List clean rooms. 
+ + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CleanRoom]: + """List clean rooms. + Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are returned. - + :param page_size: int (optional) Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoom` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/clean-rooms", query=query, headers=headers) - if "clean_rooms" in json: - for v in json["clean_rooms"]: - yield CleanRoom.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom: - """Update a clean room. + json = self._api.do('GET','/api/2.0/clean-rooms', query=query + + , headers=headers + ) + if 'clean_rooms' in json: + for v in json['clean_rooms']: + yield CleanRoom.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , name: str + , * + , clean_room: Optional[CleanRoom] = None) -> CleanRoom: + """Update a clean room. + Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** privilege, or be metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + :param name: str Name of the clean room. :param clean_room: :class:`CleanRoom` (optional) - + :returns: :class:`CleanRoom` """ body = {} - if clean_room is not None: - body["clean_room"] = clean_room.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/clean-rooms/{name}", body=body, headers=headers) + if clean_room is not None: body['clean_room'] = clean_room.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/clean-rooms/{name}', body=body + + , headers=headers + ) return CleanRoom.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index aa35234aa..550174964 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -1,29 +1,31 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
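Both the cleanrooms module above and the compute module that starts here share the same generated serialization contract: `as_dict()` emits a JSON-ready request body and drops unset fields, `as_shallow_dict()` leaves nested objects unconverted, and `from_dict()` reverses the mapping. A small round-trip sketch under those assumptions (illustrative values):

from databricks.sdk.service.cleanrooms import ListCleanRoomsResponse

payload = {"next_page_token": "tok-123"}  # hypothetical wire response
resp = ListCleanRoomsResponse.from_dict(payload)
assert resp.next_page_token == "tok-123"
# Unset and empty fields are omitted, so only populated keys survive the round trip.
assert resp.as_dict() == {"next_page_token": "tok-123"}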
from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AddInstanceProfile: instance_profile_arn: str """The AWS ARN of the instance profile to register with Databricks. This field is required.""" - + iam_role_arn: Optional[str] = None """The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile @@ -32,55 +34,44 @@ class AddInstanceProfile: Otherwise, this field is optional. [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html""" - + is_meta_instance_profile: Optional[bool] = None """Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains a meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. This field is optional, the default value is `false`.""" - + skip_validation: Optional[bool] = None """By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation fails with an error message that does not indicate an IAM related permission issue, (e.g.
“Your requested instance type is not supported in your requested availability zone”), you can pass this flag to skip the validation and forcibly add the instance profile.""" - + def as_dict(self) -> dict: """Serializes the AddInstanceProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body def as_shallow_dict(self) -> dict: """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile + if self.skip_validation is not None: body['skip_validation'] = self.skip_validation return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AddInstanceProfile: """Deserializes the AddInstanceProfile from a dictionary.""" - return cls( - iam_role_arn=d.get("iam_role_arn", None), - instance_profile_arn=d.get("instance_profile_arn", None), - is_meta_instance_profile=d.get("is_meta_instance_profile", None), - skip_validation=d.get("skip_validation", None), - ) + return cls(iam_role_arn=d.get('iam_role_arn', None), instance_profile_arn=d.get('instance_profile_arn', None), is_meta_instance_profile=d.get('is_meta_instance_profile', None), skip_validation=d.get('skip_validation', None)) + + @dataclass @@ -99,34 +90,36 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AddResponse: """Deserializes the AddResponse from a dictionary.""" return cls() + + @dataclass class Adlsgen2Info: """A storage location in Adls Gen2""" - + destination: str """abfss destination, e.g. 
`abfss://@.dfs.core.windows.net/`.""" - + def as_dict(self) -> dict: """Serializes the Adlsgen2Info into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: - body["destination"] = self.destination + if self.destination is not None: body['destination'] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the Adlsgen2Info into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: - body["destination"] = self.destination + if self.destination is not None: body['destination'] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Adlsgen2Info: """Deserializes the Adlsgen2Info from a dictionary.""" - return cls(destination=d.get("destination", None)) + return cls(destination=d.get('destination', None)) + + @dataclass @@ -134,44 +127,42 @@ class AutoScale: max_workers: Optional[int] = None """The maximum number of workers to which the cluster can scale up when overloaded. Note that `max_workers` must be strictly greater than `min_workers`.""" - + min_workers: Optional[int] = None """The minimum number of workers to which the cluster can scale down when underutilized. It is also the initial number of workers the cluster will have after creation.""" - + def as_dict(self) -> dict: """Serializes the AutoScale into a dictionary suitable for use as a JSON request body.""" body = {} - if self.max_workers is not None: - body["max_workers"] = self.max_workers - if self.min_workers is not None: - body["min_workers"] = self.min_workers + if self.max_workers is not None: body['max_workers'] = self.max_workers + if self.min_workers is not None: body['min_workers'] = self.min_workers return body def as_shallow_dict(self) -> dict: """Serializes the AutoScale into a shallow dictionary of its immediate attributes.""" body = {} - if self.max_workers is not None: - body["max_workers"] = self.max_workers - if self.min_workers is not None: - body["min_workers"] = self.min_workers + if self.max_workers is not None: body['max_workers'] = self.max_workers + if self.min_workers is not None: body['min_workers'] = self.min_workers return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoScale: """Deserializes the AutoScale from a dictionary.""" - return cls(max_workers=d.get("max_workers", None), min_workers=d.get("min_workers", None)) + return cls(max_workers=d.get('max_workers', None), min_workers=d.get('min_workers', None)) + + @dataclass class AwsAttributes: """Attributes set during cluster creation which are related to Amazon Web Services.""" - + availability: Optional[AwsAvailability] = None """Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - + ebs_volume_count: Optional[int] = None """The number of volumes launched for each instance. Users can choose up to 10 volumes. This feature is only enabled for supported node types. Legacy node types cannot specify custom EBS @@ -188,23 +179,23 @@ class AwsAttributes: Please note that if EBS volumes are specified, then the Spark configuration `spark.local.dir` will be overridden.""" - + ebs_volume_iops: Optional[int] = None """If using gp3 volumes, what IOPS to use for the disk. 
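As the `AutoScale` docstrings above state, `max_workers` must be strictly greater than `min_workers`, and `min_workers` is also the cluster's initial size after creation. A quick illustrative check (not generated code):

from databricks.sdk.service.compute import AutoScale

scale = AutoScale(min_workers=2, max_workers=8)  # 2 workers at creation, up to 8 under load
assert scale.as_dict() == {"max_workers": 8, "min_workers": 2}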
If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.""" - + ebs_volume_size: Optional[int] = None """The size of each EBS volume (in GiB) launched for each instance. For general purpose SSD, this value must be within the range 100 - 4096. For throughput optimized HDD, this value must be within the range 500 - 4096.""" - + ebs_volume_throughput: Optional[int] = None """If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.""" - + ebs_volume_type: Optional[EbsVolumeType] = None """The type of EBS volumes that will be launched with this cluster.""" - + first_on_demand: Optional[int] = None """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. If this value is greater than 0, the cluster driver node in particular will be placed on an on-demand @@ -213,7 +204,7 @@ class AwsAttributes: `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed on `availability` instances. Note that this value does not affect cluster size and cannot currently be mutated over the lifetime of a cluster.""" - + instance_profile_arn: Optional[str] = None """Nodes for this cluster will only be placed on AWS instances with this instance profile. If omitted, nodes will be placed on instances without an IAM instance profile. The instance @@ -221,7 +212,7 @@ class AwsAttributes: administrator. This feature may only be available to certain customer plans.""" - + spot_bid_price_percent: Optional[int] = None """The bid price for AWS spot instances, as a percentage of the corresponding instance type's on-demand price. For example, if this field is set to 50, and the cluster needs a new @@ -230,7 +221,7 @@ class AwsAttributes: `r3.xlarge` instances. If not specified, the default value is 100. When spot instances are requested for this cluster, only spot instances whose bid price percentage matches this field will be considered. Note that, for safety, we enforce this field to be no more than 10000.""" - + zone_id: Optional[str] = None """Identifier for the availability zone/datacenter in which the cluster resides. This string will be of a form like "us-west-2a".
The provided availability zone must be in the same region as the @@ -242,92 +233,62 @@ class AwsAttributes: The list of available zones as well as the default value can be found by using the `List Zones` method.""" - + def as_dict(self) -> dict: """Serializes the AwsAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: - body["availability"] = self.availability.value - if self.ebs_volume_count is not None: - body["ebs_volume_count"] = self.ebs_volume_count - if self.ebs_volume_iops is not None: - body["ebs_volume_iops"] = self.ebs_volume_iops - if self.ebs_volume_size is not None: - body["ebs_volume_size"] = self.ebs_volume_size - if self.ebs_volume_throughput is not None: - body["ebs_volume_throughput"] = self.ebs_volume_throughput - if self.ebs_volume_type is not None: - body["ebs_volume_type"] = self.ebs_volume_type.value - if self.first_on_demand is not None: - body["first_on_demand"] = self.first_on_demand - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.spot_bid_price_percent is not None: - body["spot_bid_price_percent"] = self.spot_bid_price_percent - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.availability is not None: body['availability'] = self.availability.value + if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count + if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops + if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size + if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput + if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value + if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.zone_id is not None: body['zone_id'] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the AwsAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: - body["availability"] = self.availability - if self.ebs_volume_count is not None: - body["ebs_volume_count"] = self.ebs_volume_count - if self.ebs_volume_iops is not None: - body["ebs_volume_iops"] = self.ebs_volume_iops - if self.ebs_volume_size is not None: - body["ebs_volume_size"] = self.ebs_volume_size - if self.ebs_volume_throughput is not None: - body["ebs_volume_throughput"] = self.ebs_volume_throughput - if self.ebs_volume_type is not None: - body["ebs_volume_type"] = self.ebs_volume_type - if self.first_on_demand is not None: - body["first_on_demand"] = self.first_on_demand - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.spot_bid_price_percent is not None: - body["spot_bid_price_percent"] = self.spot_bid_price_percent - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.availability is not None: body['availability'] = self.availability + if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count + if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops + if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size + 
if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput + if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type + if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.zone_id is not None: body['zone_id'] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsAttributes: """Deserializes the AwsAttributes from a dictionary.""" - return cls( - availability=_enum(d, "availability", AwsAvailability), - ebs_volume_count=d.get("ebs_volume_count", None), - ebs_volume_iops=d.get("ebs_volume_iops", None), - ebs_volume_size=d.get("ebs_volume_size", None), - ebs_volume_throughput=d.get("ebs_volume_throughput", None), - ebs_volume_type=_enum(d, "ebs_volume_type", EbsVolumeType), - first_on_demand=d.get("first_on_demand", None), - instance_profile_arn=d.get("instance_profile_arn", None), - spot_bid_price_percent=d.get("spot_bid_price_percent", None), - zone_id=d.get("zone_id", None), - ) + return cls(availability=_enum(d, 'availability', AwsAvailability), ebs_volume_count=d.get('ebs_volume_count', None), ebs_volume_iops=d.get('ebs_volume_iops', None), ebs_volume_size=d.get('ebs_volume_size', None), ebs_volume_throughput=d.get('ebs_volume_throughput', None), ebs_volume_type=_enum(d, 'ebs_volume_type', EbsVolumeType), first_on_demand=d.get('first_on_demand', None), instance_profile_arn=d.get('instance_profile_arn', None), spot_bid_price_percent=d.get('spot_bid_price_percent', None), zone_id=d.get('zone_id', None)) + + class AwsAvailability(Enum): """Availability type used for all subsequent nodes past the `first_on_demand` ones. - + Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - - ON_DEMAND = "ON_DEMAND" - SPOT = "SPOT" - SPOT_WITH_FALLBACK = "SPOT_WITH_FALLBACK" - + + ON_DEMAND = 'ON_DEMAND' + SPOT = 'SPOT' + SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK' @dataclass class AzureAttributes: """Attributes set during cluster creation which are related to Microsoft Azure.""" - + availability: Optional[AzureAvailability] = None """Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - + first_on_demand: Optional[int] = None """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This value should be greater than 0, to make sure the cluster driver node is placed on an on-demand @@ -336,100 +297,80 @@ class AzureAttributes: `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed on `availability` instances. Note that this value does not affect cluster size and cannot currently be mutated over the lifetime of a cluster.""" - + log_analytics_info: Optional[LogAnalyticsInfo] = None """Defines values necessary to configure and run Azure Log Analytics agent""" - + spot_bid_max_price: Optional[float] = None """The max bid price to be used for Azure spot instances. The Max price for the bid cannot be higher than the on-demand price of the instance. If not specified, the default value is -1, which specifies that the instance cannot be evicted on the basis of price, and only on the basis of availability. 
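To make the `AwsAttributes` fields above concrete, here is a sketch of a spot-with-fallback configuration (illustrative values only; the ranges and defaults are the ones quoted in the docstrings):

from databricks.sdk.service.compute import AwsAttributes, AwsAvailability, EbsVolumeType

aws = AwsAttributes(
    availability=AwsAvailability.SPOT_WITH_FALLBACK,
    first_on_demand=1,           # keep the driver on an on-demand instance
    spot_bid_price_percent=100,  # bid at 100% of the on-demand price (the default)
    ebs_volume_type=EbsVolumeType.GENERAL_PURPOSE_SSD,
    ebs_volume_count=1,
    ebs_volume_size=100,         # GiB; general purpose SSD allows 100 - 4096
)
body = aws.as_dict()  # enum members serialize by value, e.g. 'SPOT_WITH_FALLBACK'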
Further, the value should be > 0 or -1.""" - + def as_dict(self) -> dict: """Serializes the AzureAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: - body["availability"] = self.availability.value - if self.first_on_demand is not None: - body["first_on_demand"] = self.first_on_demand - if self.log_analytics_info: - body["log_analytics_info"] = self.log_analytics_info.as_dict() - if self.spot_bid_max_price is not None: - body["spot_bid_max_price"] = self.spot_bid_max_price + if self.availability is not None: body['availability'] = self.availability.value + if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand + if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info.as_dict() + if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price return body def as_shallow_dict(self) -> dict: """Serializes the AzureAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: - body["availability"] = self.availability - if self.first_on_demand is not None: - body["first_on_demand"] = self.first_on_demand - if self.log_analytics_info: - body["log_analytics_info"] = self.log_analytics_info - if self.spot_bid_max_price is not None: - body["spot_bid_max_price"] = self.spot_bid_max_price + if self.availability is not None: body['availability'] = self.availability + if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand + if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info + if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureAttributes: """Deserializes the AzureAttributes from a dictionary.""" - return cls( - availability=_enum(d, "availability", AzureAvailability), - first_on_demand=d.get("first_on_demand", None), - log_analytics_info=_from_dict(d, "log_analytics_info", LogAnalyticsInfo), - spot_bid_max_price=d.get("spot_bid_max_price", None), - ) + return cls(availability=_enum(d, 'availability', AzureAvailability), first_on_demand=d.get('first_on_demand', None), log_analytics_info=_from_dict(d, 'log_analytics_info', LogAnalyticsInfo), spot_bid_max_price=d.get('spot_bid_max_price', None)) + + class AzureAvailability(Enum): """Availability type used for all subsequent nodes past the `first_on_demand` ones.
Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - - ON_DEMAND_AZURE = "ON_DEMAND_AZURE" - SPOT_AZURE = "SPOT_AZURE" - SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE" - + + ON_DEMAND_AZURE = 'ON_DEMAND_AZURE' + SPOT_AZURE = 'SPOT_AZURE' + SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE' @dataclass class CancelCommand: cluster_id: Optional[str] = None - + command_id: Optional[str] = None - + context_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CancelCommand into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command_id is not None: - body["commandId"] = self.command_id - if self.context_id is not None: - body["contextId"] = self.context_id + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.command_id is not None: body['commandId'] = self.command_id + if self.context_id is not None: body['contextId'] = self.context_id return body def as_shallow_dict(self) -> dict: """Serializes the CancelCommand into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command_id is not None: - body["commandId"] = self.command_id - if self.context_id is not None: - body["contextId"] = self.context_id + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.command_id is not None: body['commandId'] = self.command_id + if self.context_id is not None: body['contextId'] = self.context_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CancelCommand: """Deserializes the CancelCommand from a dictionary.""" - return cls( - cluster_id=d.get("clusterId", None), - command_id=d.get("commandId", None), - context_id=d.get("contextId", None), - ) + return cls(cluster_id=d.get('clusterId', None), command_id=d.get('commandId', None), context_id=d.get('contextId', None)) + + @dataclass @@ -448,37 +389,37 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelResponse: """Deserializes the CancelResponse from a dictionary.""" return cls() + + @dataclass class ChangeClusterOwner: cluster_id: str - + owner_username: str """New owner of the cluster_id after this RPC.""" - + def as_dict(self) -> dict: """Serializes the ChangeClusterOwner into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.owner_username is not None: - body["owner_username"] = self.owner_username + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.owner_username is not None: body['owner_username'] = self.owner_username return body def as_shallow_dict(self) -> dict: """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.owner_username is not None: - body["owner_username"] = self.owner_username + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.owner_username is not None: body['owner_username'] = self.owner_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ChangeClusterOwner: """Deserializes the ChangeClusterOwner from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), owner_username=d.get("owner_username", None)) + return cls(cluster_id=d.get('cluster_id', 
None), owner_username=d.get('owner_username', None)) + + @dataclass @@ -497,242 +438,217 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ChangeClusterOwnerResponse: """Deserializes the ChangeClusterOwnerResponse from a dictionary.""" return cls() + + @dataclass class ClientsTypes: jobs: Optional[bool] = None """With jobs set, the cluster can be used for jobs""" - + notebooks: Optional[bool] = None """With notebooks set, this cluster can be used for notebooks""" - + def as_dict(self) -> dict: """Serializes the ClientsTypes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.jobs is not None: - body["jobs"] = self.jobs - if self.notebooks is not None: - body["notebooks"] = self.notebooks + if self.jobs is not None: body['jobs'] = self.jobs + if self.notebooks is not None: body['notebooks'] = self.notebooks return body def as_shallow_dict(self) -> dict: """Serializes the ClientsTypes into a shallow dictionary of its immediate attributes.""" body = {} - if self.jobs is not None: - body["jobs"] = self.jobs - if self.notebooks is not None: - body["notebooks"] = self.notebooks + if self.jobs is not None: body['jobs'] = self.jobs + if self.notebooks is not None: body['notebooks'] = self.notebooks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClientsTypes: """Deserializes the ClientsTypes from a dictionary.""" - return cls(jobs=d.get("jobs", None), notebooks=d.get("notebooks", None)) + return cls(jobs=d.get('jobs', None), notebooks=d.get('notebooks', None)) + + @dataclass class CloneCluster: source_cluster_id: str """The cluster that is being cloned.""" - + def as_dict(self) -> dict: """Serializes the CloneCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.source_cluster_id is not None: - body["source_cluster_id"] = self.source_cluster_id + if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the CloneCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.source_cluster_id is not None: - body["source_cluster_id"] = self.source_cluster_id + if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CloneCluster: """Deserializes the CloneCluster from a dictionary.""" - return cls(source_cluster_id=d.get("source_cluster_id", None)) + return cls(source_cluster_id=d.get('source_cluster_id', None)) + + @dataclass class CloudProviderNodeInfo: status: Optional[List[CloudProviderNodeStatus]] = None """Status as reported by the cloud provider""" - + def as_dict(self) -> dict: """Serializes the CloudProviderNodeInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.status: - body["status"] = [v.value for v in self.status] + if self.status: body['status'] = [v.value for v in self.status] return body def as_shallow_dict(self) -> dict: """Serializes the CloudProviderNodeInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.status: - body["status"] = self.status + if self.status: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CloudProviderNodeInfo: """Deserializes the CloudProviderNodeInfo from a dictionary.""" - return cls(status=_repeated_enum(d, "status", CloudProviderNodeStatus)) - + return cls(status=_repeated_enum(d, 'status', CloudProviderNodeStatus)) + -class 
CloudProviderNodeStatus(Enum): - NOT_AVAILABLE_IN_REGION = "NotAvailableInRegion" - NOT_ENABLED_ON_SUBSCRIPTION = "NotEnabledOnSubscription" +class CloudProviderNodeStatus(Enum): + + + NOT_AVAILABLE_IN_REGION = 'NotAvailableInRegion' + NOT_ENABLED_ON_SUBSCRIPTION = 'NotEnabledOnSubscription' @dataclass class ClusterAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[ClusterPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ClusterAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAccessControlRequest: """Deserializes the ClusterAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", ClusterPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class ClusterAccessControlResponse: all_permissions: Optional[List[ClusterPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ClusterAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - 
body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAccessControlResponse: """Deserializes the ClusterAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", ClusterPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', ClusterPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class ClusterAttributes: """Common set of attributes set during cluster creation. These attributes cannot be changed over the lifetime of a cluster.""" - + spark_version: str """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. 
If not specified at cluster creation, a set of default values will be used.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - + cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. For job clusters, the cluster name is automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -740,7 +656,7 @@ class ClusterAttributes: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. @@ -766,14 +682,14 @@ class ClusterAttributes: concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - + docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. @@ -781,33 +697,33 @@ class ClusterAttributes: This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space. This feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable LUKS on cluster VMs' local disks""" - + gcp_attributes: Optional[GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. 
If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + is_single_node: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`""" - + kind: Optional[Kind] = None """The kind of compute described by this compute specification. @@ -822,16 +738,16 @@ class ClusterAttributes: By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. [simple form]: https://docs.databricks.com/compute/simple-form.html""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + runtime_engine: Optional[RuntimeEngine] = None """Determines the cluster's runtime engine, either standard or Photon. @@ -840,16 +756,16 @@ class ClusterAttributes: If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used.""" - + single_user_name: Optional[str] = None """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str, str]] = None + + spark_conf: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. Users can also pass in a string of extra JVM options to the driver and the executors via `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - - spark_env_vars: Optional[Dict[str, str]] = None + + spark_env_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -861,276 +777,188 @@ class ClusterAttributes: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + use_ml_runtime: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. 
`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" - + workload_type: Optional[WorkloadType] = None """Cluster Attributes showing for clusters workload types.""" - + def as_dict(self) -> dict: """Serializes the ClusterAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind.value - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() + if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value + if self.docker_image: body['docker_image'] = self.docker_image.as_dict() + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id + if 
self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind.value + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode - if self.docker_image: - body["docker_image"] = self.docker_image - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = 
self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type + if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAttributes: """Deserializes the ClusterAttributes from a dictionary.""" - return cls( - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - cluster_name=d.get("cluster_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - 
instance_pool_id=d.get("instance_pool_id", None), - is_single_node=d.get("is_single_node", None), - kind=_enum(d, "kind", Kind), - node_type_id=d.get("node_type_id", None), - policy_id=d.get("policy_id", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - ssh_public_keys=d.get("ssh_public_keys", None), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), - ) + return cls(autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) + + @dataclass class ClusterCompliance: cluster_id: str """Canonical unique identifier for a cluster.""" - + is_compliant: Optional[bool] = None """Whether this cluster is in compliance with the latest version of its policy.""" - - violations: Optional[Dict[str, str]] = None + + violations: Optional[Dict[str,str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. 
The values indicate an error message describing the policy validation error.""" - + def as_dict(self) -> dict: """Serializes the ClusterCompliance into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.violations: - body["violations"] = self.violations + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations return body def as_shallow_dict(self) -> dict: """Serializes the ClusterCompliance into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.violations: - body["violations"] = self.violations + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterCompliance: """Deserializes the ClusterCompliance from a dictionary.""" - return cls( - cluster_id=d.get("cluster_id", None), - is_compliant=d.get("is_compliant", None), - violations=d.get("violations", None), - ) + return cls(cluster_id=d.get('cluster_id', None), is_compliant=d.get('is_compliant', None), violations=d.get('violations', None)) + + @dataclass class ClusterDetails: """Describes all of the metadata about a single Spark cluster in Databricks.""" - + autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + cluster_cores: Optional[float] = None """Number of CPU cores available for this cluster. Note that this can be fractional, e.g. 7.5 cores, since certain node types are configured to share cores between Spark nodes on the same instance.""" - + cluster_id: Optional[str] = None """Canonical identifier for the cluster. This id is retained during cluster restarts and resizes, while each new cluster has a globally unique id.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. 
The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - + cluster_log_status: Optional[LogSyncStatus] = None """Cluster log delivery status.""" - + cluster_memory_mb: Optional[int] = None """Total amount of cluster memory, in megabytes""" - + cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. For job clusters, the cluster name is automatically set based on the job and job run IDs.""" - + cluster_source: Optional[ClusterSource] = None """Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.""" - + creator_user_name: Optional[str] = None """Creator user name. The field won't be included in the response if the user has already been deleted.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -1138,7 +966,7 @@ class ClusterDetails: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. @@ -1164,8 +992,8 @@ class ClusterDetails: concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode has neither UC nor passthrough enabled.""" - - default_tags: Optional[Dict[str, str]] = None + + default_tags: Optional[Dict[str,str]] = None """Tags that are added by Databricks regardless of any `custom_tags`, including: - Vendor: Databricks @@ -1177,18 +1005,18 @@ class ClusterDetails: - ClusterId: - Name: """ - + docker_image: Optional[DockerImage] = None """Custom Docker image (BYOC).""" - + driver: Optional[SparkNode] = None """Node on which the Spark driver resides. The driver node contains the Spark master and the Databricks application that manages the per-notebook Spark REPLs.""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the driver of the cluster belongs. The cluster uses the instance pool with ID `instance_pool_id` for the driver if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. @@ -1196,40 +1024,40 @@ class ClusterDetails: This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.
This feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable LUKS on cluster VMs' local disks""" - + executors: Optional[List[SparkNode]] = None """Nodes on which the Spark executors reside.""" - + gcp_attributes: Optional[GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + is_single_node: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`""" - + jdbc_port: Optional[int] = None """Port on which the Spark JDBC server is listening in the driver node. No service will be listening on this port in executor nodes.""" - + kind: Optional[Kind] = None """The kind of compute described by this compute specification. @@ -1244,19 +1072,19 @@ class ClusterDetails: By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. [simple form]: https://docs.databricks.com/compute/simple-form.html""" - + last_restarted_time: Optional[int] = None """The timestamp when the cluster was last started or restarted.""" - + last_state_loss_time: Optional[int] = None """Time when the cluster driver last lost its state (due to a restart or driver failure).""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. @@ -1266,10 +1094,10 @@ class ClusterDetails: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + runtime_engine: Optional[RuntimeEngine] = None """Determines the cluster's runtime engine, either standard or Photon. @@ -1278,21 +1106,21 @@ class ClusterDetails: If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used.""" - + single_user_name: Optional[str] = None """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str, str]] = None + + spark_conf: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs.
Users can also pass in a string of extra JVM options to the driver and the executors via `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - + spark_context_id: Optional[int] = None """A canonical SparkContext identifier. This value *does* change when the Spark driver restarts. The pair `(cluster_id, spark_context_id)` is a globally unique identifier over all Spark contexts.""" - - spark_env_vars: Optional[Dict[str, str]] = None + + spark_env_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -1304,913 +1132,658 @@ class ClusterDetails: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + spark_version: Optional[str] = None """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + spec: Optional[ClusterSpec] = None """The spec contains a snapshot of the latest user specified settings that were used to create/edit the cluster. Note: not included in the response of the ListClusters API.""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + start_time: Optional[int] = None """Time (in epoch milliseconds) when the cluster creation request was received (when the cluster entered a `PENDING` state).""" - + state: Optional[State] = None """Current state of the cluster.""" - + state_message: Optional[str] = None """A message associated with the most recent state transition (e.g., the reason why the cluster entered a `TERMINATED` state).""" - + terminated_time: Optional[int] = None """Time (in epoch milliseconds) when the cluster was terminated, if applicable.""" - + termination_reason: Optional[TerminationReason] = None """Information about why the cluster was terminated. This field only appears when the cluster is in a `TERMINATING` or `TERMINATED` state.""" - + use_ml_runtime: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. 
`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" - + workload_type: Optional[WorkloadType] = None """Cluster Attributes showing for clusters workload types.""" - + def as_dict(self) -> dict: """Serializes the ClusterDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.cluster_cores is not None: - body["cluster_cores"] = self.cluster_cores - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_log_status: - body["cluster_log_status"] = self.cluster_log_status.as_dict() - if self.cluster_memory_mb is not None: - body["cluster_memory_mb"] = self.cluster_memory_mb - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.cluster_source is not None: - body["cluster_source"] = self.cluster_source.value - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.default_tags: - body["default_tags"] = self.default_tags - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver: - body["driver"] = self.driver.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.executors: - body["executors"] = [v.as_dict() for v in self.executors] - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.jdbc_port is not None: - body["jdbc_port"] = self.jdbc_port - if self.kind is not None: - body["kind"] = self.kind.value - if self.last_restarted_time is not None: - body["last_restarted_time"] = self.last_restarted_time - if self.last_state_loss_time is not None: - body["last_state_loss_time"] = self.last_state_loss_time - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_context_id is not None: - 
body["spark_context_id"] = self.spark_context_id - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.spec: - body["spec"] = self.spec.as_dict() - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state is not None: - body["state"] = self.state.value - if self.state_message is not None: - body["state_message"] = self.state_message - if self.terminated_time is not None: - body["terminated_time"] = self.terminated_time - if self.termination_reason: - body["termination_reason"] = self.termination_reason.as_dict() - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() + if self.autoscale: body['autoscale'] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() + if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status.as_dict() + if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.cluster_source is not None: body['cluster_source'] = self.cluster_source.value + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value + if self.default_tags: body['default_tags'] = self.default_tags + if self.docker_image: body['docker_image'] = self.docker_image.as_dict() + if self.driver: body['driver'] = self.driver.as_dict() + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.executors: body['executors'] = [v.as_dict() for v in self.executors] + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port + if self.kind is not None: body['kind'] = self.kind.value + if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time + if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + 
if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.spec: body['spec'] = self.spec.as_dict() + if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.start_time is not None: body['start_time'] = self.start_time + if self.state is not None: body['state'] = self.state.value + if self.state_message is not None: body['state_message'] = self.state_message + if self.terminated_time is not None: body['terminated_time'] = self.terminated_time + if self.termination_reason: body['termination_reason'] = self.termination_reason.as_dict() + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.autoscale: - body["autoscale"] = self.autoscale - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.cluster_cores is not None: - body["cluster_cores"] = self.cluster_cores - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf - if self.cluster_log_status: - body["cluster_log_status"] = self.cluster_log_status - if self.cluster_memory_mb is not None: - body["cluster_memory_mb"] = self.cluster_memory_mb - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.cluster_source is not None: - body["cluster_source"] = self.cluster_source - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode - if self.default_tags: - body["default_tags"] = self.default_tags - if self.docker_image: - body["docker_image"] = self.docker_image - if self.driver: - body["driver"] = self.driver - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.executors: - body["executors"] = self.executors - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not 
None: - body["is_single_node"] = self.is_single_node - if self.jdbc_port is not None: - body["jdbc_port"] = self.jdbc_port - if self.kind is not None: - body["kind"] = self.kind - if self.last_restarted_time is not None: - body["last_restarted_time"] = self.last_restarted_time - if self.last_state_loss_time is not None: - body["last_state_loss_time"] = self.last_state_loss_time - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_context_id is not None: - body["spark_context_id"] = self.spark_context_id - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.spec: - body["spec"] = self.spec - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state is not None: - body["state"] = self.state - if self.state_message is not None: - body["state_message"] = self.state_message - if self.terminated_time is not None: - body["terminated_time"] = self.terminated_time - if self.termination_reason: - body["termination_reason"] = self.termination_reason - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type + if self.autoscale: body['autoscale'] = self.autoscale + if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status + if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.cluster_source is not None: body['cluster_source'] = self.cluster_source + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.default_tags: body['default_tags'] = self.default_tags + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver: body['driver'] = self.driver + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.executors: body['executors'] = self.executors + if 
self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port + if self.kind is not None: body['kind'] = self.kind + if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time + if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.spec: body['spec'] = self.spec + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.start_time is not None: body['start_time'] = self.start_time + if self.state is not None: body['state'] = self.state + if self.state_message is not None: body['state_message'] = self.state_message + if self.terminated_time is not None: body['terminated_time'] = self.terminated_time + if self.termination_reason: body['termination_reason'] = self.termination_reason + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterDetails: """Deserializes the ClusterDetails from a dictionary.""" - return cls( - autoscale=_from_dict(d, "autoscale", AutoScale), - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - cluster_cores=d.get("cluster_cores", None), - cluster_id=d.get("cluster_id", None), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - cluster_log_status=_from_dict(d, "cluster_log_status", LogSyncStatus), - cluster_memory_mb=d.get("cluster_memory_mb", None), - cluster_name=d.get("cluster_name", None), - cluster_source=_enum(d, "cluster_source", ClusterSource), - creator_user_name=d.get("creator_user_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), - default_tags=d.get("default_tags", None), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver=_from_dict(d, "driver", SparkNode), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - executors=_repeated_dict(d, "executors", SparkNode), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", 
None), - is_single_node=d.get("is_single_node", None), - jdbc_port=d.get("jdbc_port", None), - kind=_enum(d, "kind", Kind), - last_restarted_time=d.get("last_restarted_time", None), - last_state_loss_time=d.get("last_state_loss_time", None), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - spark_context_id=d.get("spark_context_id", None), - spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - spec=_from_dict(d, "spec", ClusterSpec), - ssh_public_keys=d.get("ssh_public_keys", None), - start_time=d.get("start_time", None), - state=_enum(d, "state", State), - state_message=d.get("state_message", None), - terminated_time=d.get("terminated_time", None), - termination_reason=_from_dict(d, "termination_reason", TerminationReason), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), - ) + return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_cores=d.get('cluster_cores', None), cluster_id=d.get('cluster_id', None), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_log_status=_from_dict(d, 'cluster_log_status', LogSyncStatus), cluster_memory_mb=d.get('cluster_memory_mb', None), cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), creator_user_name=d.get('creator_user_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), default_tags=d.get('default_tags', None), docker_image=_from_dict(d, 'docker_image', DockerImage), driver=_from_dict(d, 'driver', SparkNode), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), executors=_repeated_dict(d, 'executors', SparkNode), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), jdbc_port=d.get('jdbc_port', None), kind=_enum(d, 'kind', Kind), last_restarted_time=d.get('last_restarted_time', None), last_state_loss_time=d.get('last_state_loss_time', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_context_id=d.get('spark_context_id', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), spec=_from_dict(d, 'spec', ClusterSpec), ssh_public_keys=d.get('ssh_public_keys', None), start_time=d.get('start_time', None), state=_enum(d, 'state', State), state_message=d.get('state_message', None), terminated_time=d.get('terminated_time', None), termination_reason=_from_dict(d, 'termination_reason', TerminationReason), use_ml_runtime=d.get('use_ml_runtime', None), 
workload_type=_from_dict(d, 'workload_type', WorkloadType)) + + @dataclass class ClusterEvent: cluster_id: str - + data_plane_event_details: Optional[DataPlaneEventDetails] = None - + details: Optional[EventDetails] = None - + timestamp: Optional[int] = None """The timestamp when the event occurred, stored as the number of milliseconds since the Unix epoch. If not provided, this will be assigned by the Timeline service.""" - + type: Optional[EventType] = None - + def as_dict(self) -> dict: """Serializes the ClusterEvent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.data_plane_event_details: - body["data_plane_event_details"] = self.data_plane_event_details.as_dict() - if self.details: - body["details"] = self.details.as_dict() - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.type is not None: - body["type"] = self.type.value + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details.as_dict() + if self.details: body['details'] = self.details.as_dict() + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.type is not None: body['type'] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the ClusterEvent into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.data_plane_event_details: - body["data_plane_event_details"] = self.data_plane_event_details - if self.details: - body["details"] = self.details - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.type is not None: - body["type"] = self.type + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details + if self.details: body['details'] = self.details + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterEvent: """Deserializes the ClusterEvent from a dictionary.""" - return cls( - cluster_id=d.get("cluster_id", None), - data_plane_event_details=_from_dict(d, "data_plane_event_details", DataPlaneEventDetails), - details=_from_dict(d, "details", EventDetails), - timestamp=d.get("timestamp", None), - type=_enum(d, "type", EventType), - ) + return cls(cluster_id=d.get('cluster_id', None), data_plane_event_details=_from_dict(d, 'data_plane_event_details', DataPlaneEventDetails), details=_from_dict(d, 'details', EventDetails), timestamp=d.get('timestamp', None), type=_enum(d, 'type', EventType)) + + @dataclass class ClusterLibraryStatuses: cluster_id: Optional[str] = None """Unique identifier for the cluster.""" - + library_statuses: Optional[List[LibraryFullStatus]] = None """Status of all libraries on the cluster.""" - + def as_dict(self) -> dict: """Serializes the ClusterLibraryStatuses into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.library_statuses: - body["library_statuses"] = [v.as_dict() for v in self.library_statuses] + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in 
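`ClusterEvent.from_dict` above applies the same convention to inbound payloads, mapping raw strings onto enum members via `_enum`. A small sketch, assuming the module imports as `databricks.sdk.service.compute`; the cluster ID and timestamp are hypothetical:

from databricks.sdk.service.compute import ClusterEvent, EventType

event = ClusterEvent.from_dict({
    "cluster_id": "0123-456789-abcdef",  # hypothetical cluster ID
    "type": "RUNNING",
    "timestamp": 1717579981000,          # hypothetical epoch-millisecond value
})
assert event.type is EventType.RUNNING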
self.library_statuses] return body def as_shallow_dict(self) -> dict: """Serializes the ClusterLibraryStatuses into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.library_statuses: - body["library_statuses"] = self.library_statuses + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.library_statuses: body['library_statuses'] = self.library_statuses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterLibraryStatuses: """Deserializes the ClusterLibraryStatuses from a dictionary.""" - return cls( - cluster_id=d.get("cluster_id", None), - library_statuses=_repeated_dict(d, "library_statuses", LibraryFullStatus), - ) + return cls(cluster_id=d.get('cluster_id', None), library_statuses=_repeated_dict(d, 'library_statuses', LibraryFullStatus)) + + @dataclass class ClusterLogConf: """Cluster log delivery config""" - + dbfs: Optional[DbfsStorageInfo] = None """A destination must be provided, e.g. `{ "dbfs" : { "destination" : "dbfs:/home/cluster_log" } }`""" - + s3: Optional[S3StorageInfo] = None """A destination and either the region or the endpoint must be provided, e.g. `{ "s3": { "destination" : "s3://cluster_log_bucket/prefix", "region" : "us-west-2" } }` The cluster's IAM role is used to access S3; make sure the IAM role set in `instance_profile_arn` has permission to write data to the S3 destination.""" - + volumes: Optional[VolumesStorageInfo] = None """A destination must be provided, e.g. `{ "volumes": { "destination": "/Volumes/catalog/schema/volume/cluster_log" } }`""" - + def as_dict(self) -> dict: """Serializes the ClusterLogConf into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbfs: - body["dbfs"] = self.dbfs.as_dict() - if self.s3: - body["s3"] = self.s3.as_dict() - if self.volumes: - body["volumes"] = self.volumes.as_dict() + if self.dbfs: body['dbfs'] = self.dbfs.as_dict() + if self.s3: body['s3'] = self.s3.as_dict() + if self.volumes: body['volumes'] = self.volumes.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterLogConf into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbfs: - body["dbfs"] = self.dbfs - if self.s3: - body["s3"] = self.s3 - if self.volumes: - body["volumes"] = self.volumes + if self.dbfs: body['dbfs'] = self.dbfs + if self.s3: body['s3'] = self.s3 + if self.volumes: body['volumes'] = self.volumes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterLogConf: """Deserializes the ClusterLogConf from a dictionary.""" - return cls( - dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), - s3=_from_dict(d, "s3", S3StorageInfo), - volumes=_from_dict(d, "volumes", VolumesStorageInfo), - ) + return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo), volumes=_from_dict(d, 'volumes', VolumesStorageInfo)) + + @dataclass class ClusterPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[ClusterPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ClusterPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: -
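Per the `ClusterLogConf` docstrings above, only one of the three destinations should be set per cluster. A minimal construction sketch, with a hypothetical Unity Catalog volume path:

from databricks.sdk.service.compute import ClusterLogConf, VolumesStorageInfo

conf = ClusterLogConf(volumes=VolumesStorageInfo(destination="/Volumes/main/default/logs"))  # hypothetical path
print(conf.as_dict())  # {'volumes': {'destination': '/Volumes/main/default/logs'}}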
body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ClusterPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterPermission: """Deserializes the ClusterPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", ClusterPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel)) + + class ClusterPermissionLevel(Enum): """Permission level""" - - CAN_ATTACH_TO = "CAN_ATTACH_TO" - CAN_MANAGE = "CAN_MANAGE" - CAN_RESTART = "CAN_RESTART" - + + CAN_ATTACH_TO = 'CAN_ATTACH_TO' + CAN_MANAGE = 'CAN_MANAGE' + CAN_RESTART = 'CAN_RESTART' @dataclass class ClusterPermissions: access_control_list: Optional[List[ClusterAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ClusterPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the ClusterPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissions: """Deserializes the ClusterPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return 
cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class ClusterPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[ClusterPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ClusterPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ClusterPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsDescription: """Deserializes the ClusterPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", ClusterPermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel)) + + @dataclass class ClusterPermissionsRequest: access_control_list: Optional[List[ClusterAccessControlRequest]] = None - + cluster_id: Optional[str] = None """The cluster for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the ClusterPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsRequest: """Deserializes the ClusterPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlRequest), - cluster_id=d.get("cluster_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterAccessControlRequest), cluster_id=d.get('cluster_id', None)) + + @dataclass class ClusterPolicyAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: 
 
 @dataclass
 class ClusterPolicyAccessControlRequest:
     group_name: Optional[str] = None
     """name of the group"""
-
+
     permission_level: Optional[ClusterPolicyPermissionLevel] = None
     """Permission level"""
-
+
     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level.value
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyAccessControlRequest:
         """Deserializes the ClusterPolicyAccessControlRequest from a dictionary."""
-        return cls(
-            group_name=d.get("group_name", None),
-            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
-            service_principal_name=d.get("service_principal_name", None),
-            user_name=d.get("user_name", None),
-        )
+        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
 
 @dataclass
 class ClusterPolicyAccessControlResponse:
     all_permissions: Optional[List[ClusterPolicyPermission]] = None
     """All permissions."""
-
+
     display_name: Optional[str] = None
     """Display name of the user or service principal."""
-
+
     group_name: Optional[str] = None
     """name of the group"""
-
+
     service_principal_name: Optional[str] = None
     """Name of the service principal."""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions:
-            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions:
-            body["all_permissions"] = self.all_permissions
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyAccessControlResponse:
         """Deserializes the ClusterPolicyAccessControlResponse from a dictionary."""
-        return cls(
-            all_permissions=_repeated_dict(d, "all_permissions", ClusterPolicyPermission),
-            display_name=d.get("display_name", None),
-            group_name=d.get("group_name", None),
-            service_principal_name=d.get("service_principal_name", None),
-            user_name=d.get("user_name", None),
-        )
+        return cls(all_permissions=_repeated_dict(d, 'all_permissions', ClusterPolicyPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
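
Note that these `as_dict` bodies only emit keys that are actually set, so an access-control entry names exactly the one principal you populate; unset fields are omitted rather than serialized as null. For example (values invented):

    from databricks.sdk.service.compute import (ClusterPolicyAccessControlRequest,
                                                ClusterPolicyPermissionLevel)

    acl = ClusterPolicyAccessControlRequest(group_name='data-engineers',
                                            permission_level=ClusterPolicyPermissionLevel.CAN_USE)
    assert acl.as_dict() == {'group_name': 'data-engineers', 'permission_level': 'CAN_USE'}
    # user_name and service_principal_name never appear because they are None
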
body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ClusterPolicyAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyAccessControlResponse: """Deserializes the ClusterPolicyAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", ClusterPolicyPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', ClusterPolicyPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class ClusterPolicyPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[ClusterPolicyPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ClusterPolicyPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ClusterPolicyPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + 
 
 class ClusterPolicyPermissionLevel(Enum):
     """Permission level"""
-
-    CAN_USE = "CAN_USE"
-
+
+    CAN_USE = 'CAN_USE'
 
 @dataclass
 class ClusterPolicyPermissions:
     access_control_list: Optional[List[ClusterPolicyAccessControlResponse]] = None
-
+
     object_id: Optional[str] = None
-
+
     object_type: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None:
-            body["object_id"] = self.object_id
-        if self.object_type is not None:
-            body["object_type"] = self.object_type
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.object_id is not None:
-            body["object_id"] = self.object_id
-        if self.object_type is not None:
-            body["object_type"] = self.object_type
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissions:
         """Deserializes the ClusterPolicyPermissions from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlResponse),
-            object_id=d.get("object_id", None),
-            object_type=d.get("object_type", None),
-        )
+        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterPolicyAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
+
+
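
Going the other direction, `from_dict` is what the client runs on a raw API response; the private `_repeated_dict` and `_enum` helpers imported at the top of this module map nested lists and enum strings back onto dataclasses. A sketch against a hand-written payload shaped like a permissions GET response (all values invented, and this assumes the helpers' usual semantics):

    from databricks.sdk.service.compute import ClusterPolicyPermissions

    payload = {
        'object_id': '/cluster-policies/ABC123',
        'object_type': 'cluster-policy',
        'access_control_list': [{'group_name': 'admins',
                                 'all_permissions': [{'permission_level': 'CAN_USE'}]}],
    }
    perms = ClusterPolicyPermissions.from_dict(payload)
    assert perms.access_control_list[0].all_permissions[0].permission_level.value == 'CAN_USE'
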
 
 @dataclass
 class ClusterPolicyPermissionsDescription:
     description: Optional[str] = None
-
+
     permission_level: Optional[ClusterPolicyPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level.value
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsDescription:
         """Deserializes the ClusterPolicyPermissionsDescription from a dictionary."""
-        return cls(
-            description=d.get("description", None),
-            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
-        )
+        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
+
+
 
 @dataclass
 class ClusterPolicyPermissionsRequest:
     access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None
-
+
     cluster_policy_id: Optional[str] = None
     """The cluster policy for which to get or manage permissions."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.cluster_policy_id is not None:
-            body["cluster_policy_id"] = self.cluster_policy_id
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.cluster_policy_id is not None:
-            body["cluster_policy_id"] = self.cluster_policy_id
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsRequest:
         """Deserializes the ClusterPolicyPermissionsRequest from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlRequest),
-            cluster_policy_id=d.get("cluster_policy_id", None),
-        )
+        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterPolicyAccessControlRequest), cluster_policy_id=d.get('cluster_policy_id', None))
+
+
 
 @dataclass
 class ClusterSettingsChange:
     """Represents a change to the cluster settings required for the cluster to become compliant with
     its policy."""
-
+
     field: Optional[str] = None
     """The field where this change would be made."""
-
+
     new_value: Optional[str] = None
     """The new value of this field after enforcing policy compliance (either a number, a boolean, or
     a string) converted to a string. This is intended to be read by a human. The typed new value of
     this field can be retrieved by reading the settings field in the API response."""
-
+
     previous_value: Optional[str] = None
     """The previous value of this field before enforcing policy compliance (either a number, a
     boolean, or a string) converted to a string. This is intended to be read by a human. The type
     of the field can be retrieved by reading the settings field in the API response."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.field is not None:
-            body["field"] = self.field
-        if self.new_value is not None:
-            body["new_value"] = self.new_value
-        if self.previous_value is not None:
-            body["previous_value"] = self.previous_value
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSettingsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.field is not None:
-            body["field"] = self.field
-        if self.new_value is not None:
-            body["new_value"] = self.new_value
-        if self.previous_value is not None:
-            body["previous_value"] = self.previous_value
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterSettingsChange:
         """Deserializes the ClusterSettingsChange from a dictionary."""
-        return cls(
-            field=d.get("field", None), new_value=d.get("new_value", None), previous_value=d.get("previous_value", None)
-        )
+        return cls(field=d.get('field', None), new_value=d.get('new_value', None), previous_value=d.get('previous_value', None))
+
+
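
`ClusterSettingsChange` appears in policy-compliance responses; `previous_value` and `new_value` are stringified for human display, and the typed values must be read from the response's `settings` field instead. A rendering sketch (payload invented):

    from databricks.sdk.service.compute import ClusterSettingsChange

    change = ClusterSettingsChange.from_dict(
        {'field': 'autotermination_minutes', 'previous_value': '0', 'new_value': '60'})
    print(f'{change.field}: {change.previous_value} -> {change.new_value}')
    # autotermination_minutes: 0 -> 60
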
 
 @dataclass
@@ -2218,7 +1791,7 @@ class ClusterSize:
     autoscale: Optional[AutoScale] = None
     """Parameters needed in order to automatically scale clusters up and down based on load.
 
     Note: autoscaling works best with DB runtime versions 3.0 or later."""
-
+
     num_workers: Optional[int] = None
     """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
     `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -2228,84 +1801,81 @@ class ClusterSize:
     from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
     workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as
     the new nodes are provisioned."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterSize into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.autoscale:
-            body["autoscale"] = self.autoscale.as_dict()
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
+        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSize into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale:
-            body["autoscale"] = self.autoscale
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterSize:
         """Deserializes the ClusterSize from a dictionary."""
-        return cls(autoscale=_from_dict(d, "autoscale", AutoScale), num_workers=d.get("num_workers", None))
+        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), num_workers=d.get('num_workers', None))
+
+
 
 class ClusterSource(Enum):
     """Determines whether the cluster was created by a user through the UI, created by the Databricks
     Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only."""
-
-    API = "API"
-    JOB = "JOB"
-    MODELS = "MODELS"
-    PIPELINE = "PIPELINE"
-    PIPELINE_MAINTENANCE = "PIPELINE_MAINTENANCE"
-    SQL = "SQL"
-    UI = "UI"
-
+
+    API = 'API'
+    JOB = 'JOB'
+    MODELS = 'MODELS'
+    PIPELINE = 'PIPELINE'
+    PIPELINE_MAINTENANCE = 'PIPELINE_MAINTENANCE'
+    SQL = 'SQL'
+    UI = 'UI'
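
A `ClusterSize` is either a fixed worker count or an autoscale range, and the two serialize differently, as in this sketch (sizes invented):

    from databricks.sdk.service.compute import AutoScale, ClusterSize

    fixed = ClusterSize(num_workers=8)
    ranged = ClusterSize(autoscale=AutoScale(min_workers=2, max_workers=10))
    assert fixed.as_dict() == {'num_workers': 8}
    assert ranged.as_dict() == {'autoscale': {'max_workers': 10, 'min_workers': 2}}
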
 
 @dataclass
 class ClusterSpec:
     """Contains a snapshot of the latest user specified settings that were used to create/edit the
     cluster."""
-
+
     apply_policy_default_values: Optional[bool] = None
     """When set to true, fixed and default values from the policy will be used for fields that are
     omitted. When set to false, only fixed values from the policy will be applied."""
-
+
     autoscale: Optional[AutoScale] = None
     """Parameters needed in order to automatically scale clusters up and down based on load.
 
     Note: autoscaling works best with DB runtime versions 3.0 or later."""
-
+
     autotermination_minutes: Optional[int] = None
     """Automatically terminates the cluster after it is inactive for this time in minutes. If not set,
     this cluster will not be automatically terminated. If specified, the threshold must be between
     10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic
     termination."""
-
+
     aws_attributes: Optional[AwsAttributes] = None
     """Attributes related to clusters running on Amazon Web Services. If not specified at cluster
     creation, a set of default values will be used."""
-
+
     azure_attributes: Optional[AzureAttributes] = None
     """Attributes related to clusters running on Microsoft Azure. If not specified at cluster
     creation, a set of default values will be used."""
-
+
     cluster_log_conf: Optional[ClusterLogConf] = None
     """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
     destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
     specified for one cluster. If the conf is given, the logs will be delivered to the destination
     every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
     destination of executor logs is `$destination/$clusterId/executor`."""
-
+
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
     creation, the cluster name will be an empty string. For job clusters, the cluster name is
     automatically set based on the job and job run IDs."""
-
-    custom_tags: Optional[Dict[str, str]] = None
+
+    custom_tags: Optional[Dict[str,str]] = None
     """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
     instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
@@ -2313,7 +1883,7 @@ class ClusterSpec:
 
     - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster
     tags"""
-
+
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a
     cluster.
@@ -2339,14 +1909,14 @@ class ClusterSpec:
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
     Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
     that doesn’t have UC nor passthrough enabled."""
-
+
     docker_image: Optional[DockerImage] = None
     """Custom docker image BYOC"""
-
+
     driver_instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster
     uses the instance pool with id (instance_pool_id) if the driver pool is not assigned."""
-
+
     driver_node_type_id: Optional[str] = None
     """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
     type will be set as the same value as `node_type_id` defined above.
@@ -2354,33 +1924,33 @@ class ClusterSpec:
     This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both
     driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id
     and node_type_id take precedence."""
-
+
     enable_elastic_disk: Optional[bool] = None
     """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
     space when its Spark workers are running low on disk space. This feature requires specific AWS
     permissions to function correctly - refer to the User Guide for more details."""
-
+
     enable_local_disk_encryption: Optional[bool] = None
     """Whether to enable LUKS on cluster VMs' local disks"""
-
+
     gcp_attributes: Optional[GcpAttributes] = None
     """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
     creation, a set of default values will be used."""
-
+
     init_scripts: Optional[List[InitScriptInfo]] = None
     """The configuration for storing init scripts. Any number of destinations can be specified. The
     scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified,
     init script logs are sent to `//init_scripts`."""
-
+
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
-
+
     is_single_node: Optional[bool] = None
     """This field can only be used when `kind = CLASSIC_PREVIEW`.
 
     When set to true, Databricks will automatically set single node related `custom_tags`,
     `spark_conf`, and `num_workers`"""
-
+
     kind: Optional[Kind] = None
     """The kind of compute described by this compute specification.
@@ -2395,13 +1965,13 @@ class ClusterSpec:
     By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
 
     [simple form]: https://docs.databricks.com/compute/simple-form.html"""
-
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
     compute intensive workloads. A list of available node types can be retrieved by using the
     :method:clusters/listNodeTypes API call."""
-
+
     num_workers: Optional[int] = None
     """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
     `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -2411,10 +1981,10 @@ class ClusterSpec:
     from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
     workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as
     the new nodes are provisioned."""
-
+
     policy_id: Optional[str] = None
     """The ID of the cluster policy used to create the cluster if applicable."""
-
+
     runtime_engine: Optional[RuntimeEngine] = None
     """Determines the cluster's runtime engine, either standard or Photon.
@@ -2423,16 +1993,16 @@ class ClusterSpec:
 
     If left unspecified, the runtime engine defaults to standard unless the spark_version contains
     -photon-, in which case Photon will be used."""
-
+
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
-
-    spark_conf: Optional[Dict[str, str]] = None
+
+    spark_conf: Optional[Dict[str,str]] = None
     """An object containing a set of optional, user-specified Spark configuration key-value pairs.
     Users can also pass in a string of extra JVM options to the driver and the executors via
     `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
-
-    spark_env_vars: Optional[Dict[str, str]] = None
+
+    spark_env_vars: Optional[Dict[str,str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs.
     Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export
     X='Y'`) while launching the driver and workers.
@@ -2444,325 +2014,226 @@ class ClusterSpec:
     Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
     "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
     -Dspark.shuffle.service.enabled=true"}`"""
-
+
     spark_version: Optional[str] = None
     """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions
     can be retrieved by using the :method:clusters/sparkVersions API call."""
-
+
     ssh_public_keys: Optional[List[str]] = None
     """SSH public key contents that will be added to each Spark node in this cluster. The
     corresponding private keys can be used to login with the user name `ubuntu` on port `2200`.
     Up to 10 keys can be specified."""
-
+
     use_ml_runtime: Optional[bool] = None
     """This field can only be used when `kind = CLASSIC_PREVIEW`.
 
     `effective_spark_version` is determined by `spark_version` (DBR release), this field
     `use_ml_runtime`, and whether `node_type_id` is gpu node or not."""
-
+
     workload_type: Optional[WorkloadType] = None
     """Cluster Attributes showing for clusters workload types."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_policy_default_values is not None:
-            body["apply_policy_default_values"] = self.apply_policy_default_values
-        if self.autoscale:
-            body["autoscale"] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None:
-            body["autotermination_minutes"] = self.autotermination_minutes
-        if self.aws_attributes:
-            body["aws_attributes"] = self.aws_attributes.as_dict()
-        if self.azure_attributes:
-            body["azure_attributes"] = self.azure_attributes.as_dict()
-        if self.cluster_log_conf:
-            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None:
-            body["cluster_name"] = self.cluster_name
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.data_security_mode is not None:
-            body["data_security_mode"] = self.data_security_mode.value
-        if self.docker_image:
-            body["docker_image"] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None:
-            body["driver_instance_pool_id"] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None:
-            body["driver_node_type_id"] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None:
-            body["enable_elastic_disk"] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
-        if self.gcp_attributes:
-            body["gcp_attributes"] = self.gcp_attributes.as_dict()
-        if self.init_scripts:
-            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
-        if self.is_single_node is not None:
-            body["is_single_node"] = self.is_single_node
-        if self.kind is not None:
-            body["kind"] = self.kind.value
-        if self.node_type_id is not None:
-            body["node_type_id"] = self.node_type_id
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
-        if self.runtime_engine is not None:
-            body["runtime_engine"] = self.runtime_engine.value
-        if self.single_user_name is not None:
-            body["single_user_name"] = self.single_user_name
-        if self.spark_conf:
-            body["spark_conf"] = self.spark_conf
-        if self.spark_env_vars:
-            body["spark_env_vars"] = self.spark_env_vars
-        if self.spark_version is not None:
-            body["spark_version"] = self.spark_version
-        if self.ssh_public_keys:
-            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None:
-            body["use_ml_runtime"] = self.use_ml_runtime
-        if self.workload_type:
-            body["workload_type"] = self.workload_type.as_dict()
+        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
+        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_policy_default_values is not None:
-            body["apply_policy_default_values"] = self.apply_policy_default_values
-        if self.autoscale:
-            body["autoscale"] = self.autoscale
-        if self.autotermination_minutes is not None:
-            body["autotermination_minutes"] = self.autotermination_minutes
-        if self.aws_attributes:
-            body["aws_attributes"] = self.aws_attributes
-        if self.azure_attributes:
-            body["azure_attributes"] = self.azure_attributes
-        if self.cluster_log_conf:
-            body["cluster_log_conf"] = self.cluster_log_conf
-        if self.cluster_name is not None:
-            body["cluster_name"] = self.cluster_name
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.data_security_mode is not None:
-            body["data_security_mode"] = self.data_security_mode
-        if self.docker_image:
-            body["docker_image"] = self.docker_image
-        if self.driver_instance_pool_id is not None:
-            body["driver_instance_pool_id"] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None:
-            body["driver_node_type_id"] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None:
-            body["enable_elastic_disk"] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
-        if self.gcp_attributes:
-            body["gcp_attributes"] = self.gcp_attributes
-        if self.init_scripts:
-            body["init_scripts"] = self.init_scripts
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
-        if self.is_single_node is not None:
-            body["is_single_node"] = self.is_single_node
-        if self.kind is not None:
-            body["kind"] = self.kind
-        if self.node_type_id is not None:
-            body["node_type_id"] = self.node_type_id
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
-        if self.runtime_engine is not None:
-            body["runtime_engine"] = self.runtime_engine
-        if self.single_user_name is not None:
-            body["single_user_name"] = self.single_user_name
-        if self.spark_conf:
-            body["spark_conf"] = self.spark_conf
-        if self.spark_env_vars:
-            body["spark_env_vars"] = self.spark_env_vars
-        if self.spark_version is not None:
-            body["spark_version"] = self.spark_version
-        if self.ssh_public_keys:
-            body["ssh_public_keys"] = self.ssh_public_keys
-        if self.use_ml_runtime is not None:
-            body["use_ml_runtime"] = self.use_ml_runtime
-        if self.workload_type:
-            body["workload_type"] = self.workload_type
+        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
         return body
 
     @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ClusterSpec:
         """Deserializes the ClusterSpec from a dictionary."""
-        return cls(
-            apply_policy_default_values=d.get("apply_policy_default_values", None),
-            autoscale=_from_dict(d, "autoscale", AutoScale),
-            autotermination_minutes=d.get("autotermination_minutes", None),
-            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
-            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
-            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
-            cluster_name=d.get("cluster_name", None),
-            custom_tags=d.get("custom_tags", None),
-            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
-            docker_image=_from_dict(d, "docker_image", DockerImage),
-            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
-            driver_node_type_id=d.get("driver_node_type_id", None),
-            enable_elastic_disk=d.get("enable_elastic_disk", None),
-            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
-            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
-            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
-            instance_pool_id=d.get("instance_pool_id", None),
-            is_single_node=d.get("is_single_node", None),
-            kind=_enum(d, "kind", Kind),
-            node_type_id=d.get("node_type_id", None),
-            num_workers=d.get("num_workers", None),
-            policy_id=d.get("policy_id", None),
-            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
-            single_user_name=d.get("single_user_name", None),
-            spark_conf=d.get("spark_conf", None),
-            spark_env_vars=d.get("spark_env_vars", None),
-            spark_version=d.get("spark_version", None),
-            ssh_public_keys=d.get("ssh_public_keys", None),
-            use_ml_runtime=d.get("use_ml_runtime", None),
-            workload_type=_from_dict(d, "workload_type", WorkloadType),
-        )
+        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+
+
+
+
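
`ClusterSpec` carries the full create/edit-time shape of a cluster, but a usable spec only needs a few of these optional fields. An illustrative construction (the DBR version and node type are example values, not recommendations):

    from databricks.sdk.service.compute import AutoScale, ClusterSpec, RuntimeEngine

    spec = ClusterSpec(
        spark_version='15.4.x-scala2.12',   # example DBR release
        node_type_id='i3.xlarge',           # example AWS node type
        autoscale=AutoScale(min_workers=1, max_workers=4),
        autotermination_minutes=60,
        custom_tags={'team': 'data-platform'},
        runtime_engine=RuntimeEngine.PHOTON,
    )
    body = spec.as_dict()  # enums are flattened: body['runtime_engine'] == 'PHOTON'
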
None """Executable code""" - + context_id: Optional[str] = None """Running context id""" - + language: Optional[Language] = None - + def as_dict(self) -> dict: """Serializes the Command into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command is not None: - body["command"] = self.command - if self.context_id is not None: - body["contextId"] = self.context_id - if self.language is not None: - body["language"] = self.language.value + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.command is not None: body['command'] = self.command + if self.context_id is not None: body['contextId'] = self.context_id + if self.language is not None: body['language'] = self.language.value return body def as_shallow_dict(self) -> dict: """Serializes the Command into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command is not None: - body["command"] = self.command - if self.context_id is not None: - body["contextId"] = self.context_id - if self.language is not None: - body["language"] = self.language + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.command is not None: body['command'] = self.command + if self.context_id is not None: body['contextId'] = self.context_id + if self.language is not None: body['language'] = self.language return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Command: """Deserializes the Command from a dictionary.""" - return cls( - cluster_id=d.get("clusterId", None), - command=d.get("command", None), - context_id=d.get("contextId", None), - language=_enum(d, "language", Language), - ) + return cls(cluster_id=d.get('clusterId', None), command=d.get('command', None), context_id=d.get('contextId', None), language=_enum(d, 'language', Language)) + + class CommandStatus(Enum): + + + CANCELLED = 'Cancelled' + CANCELLING = 'Cancelling' + ERROR = 'Error' + FINISHED = 'Finished' + QUEUED = 'Queued' + RUNNING = 'Running' + - CANCELLED = "Cancelled" - CANCELLING = "Cancelling" - ERROR = "Error" - FINISHED = "Finished" - QUEUED = "Queued" - RUNNING = "Running" @dataclass class CommandStatusResponse: id: Optional[str] = None - + results: Optional[Results] = None - + status: Optional[CommandStatus] = None - + def as_dict(self) -> dict: """Serializes the CommandStatusResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.results: - body["results"] = self.results.as_dict() - if self.status is not None: - body["status"] = self.status.value + if self.id is not None: body['id'] = self.id + if self.results: body['results'] = self.results.as_dict() + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the CommandStatusResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.results: - body["results"] = self.results - if self.status is not None: - body["status"] = self.status + if self.id is not None: body['id'] = self.id + if self.results: body['results'] = self.results + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CommandStatusResponse: """Deserializes the CommandStatusResponse from a dictionary.""" - return cls( - 
id=d.get("id", None), results=_from_dict(d, "results", Results), status=_enum(d, "status", CommandStatus) - ) + return cls(id=d.get('id', None), results=_from_dict(d, 'results', Results), status=_enum(d, 'status', CommandStatus)) + + class ContextStatus(Enum): + + + ERROR = 'Error' + PENDING = 'Pending' + RUNNING = 'Running' + - ERROR = "Error" - PENDING = "Pending" - RUNNING = "Running" @dataclass class ContextStatusResponse: id: Optional[str] = None - + status: Optional[ContextStatus] = None - + def as_dict(self) -> dict: """Serializes the ContextStatusResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.status is not None: - body["status"] = self.status.value + if self.id is not None: body['id'] = self.id + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the ContextStatusResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.status is not None: - body["status"] = self.status + if self.id is not None: body['id'] = self.id + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ContextStatusResponse: """Deserializes the ContextStatusResponse from a dictionary.""" - return cls(id=d.get("id", None), status=_enum(d, "status", ContextStatus)) + return cls(id=d.get('id', None), status=_enum(d, 'status', ContextStatus)) + + @dataclass @@ -2770,46 +2241,46 @@ class CreateCluster: spark_version: str """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + apply_policy_default_values: Optional[bool] = None """When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.""" - + autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + clone_from: Optional[CloneCluster] = None """When specified, this clones libraries from a source cluster during the creation of a new cluster.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. 
     The destination of driver logs is `$destination/$clusterId/driver`, while the destination of
     executor logs is `$destination/$clusterId/executor`."""
-
+
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
     creation, the cluster name will be an empty string. For job clusters, the cluster name is
     automatically set based on the job and job run IDs."""
-
-    custom_tags: Optional[Dict[str, str]] = None
+
+    custom_tags: Optional[Dict[str,str]] = None
     """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
     instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
@@ -2817,7 +2288,7 @@ class CreateCluster:
 
     - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster
     tags"""
-
+
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a
     cluster.
@@ -2843,14 +2314,14 @@ class CreateCluster:
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
     Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
     that doesn’t have UC nor passthrough enabled."""
-
+
     docker_image: Optional[DockerImage] = None
     """Custom docker image BYOC"""
-
+
     driver_instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster
     uses the instance pool with id (instance_pool_id) if the driver pool is not assigned."""
-
+
     driver_node_type_id: Optional[str] = None
     """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
     type will be set as the same value as `node_type_id` defined above.
@@ -2858,33 +2329,33 @@ class CreateCluster:
     This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both
     driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id
     and node_type_id take precedence."""
-
+
     enable_elastic_disk: Optional[bool] = None
     """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
     space when its Spark workers are running low on disk space. This feature requires specific AWS
     permissions to function correctly - refer to the User Guide for more details."""
-
+
     enable_local_disk_encryption: Optional[bool] = None
     """Whether to enable LUKS on cluster VMs' local disks"""
-
+
     gcp_attributes: Optional[GcpAttributes] = None
     """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
     creation, a set of default values will be used."""
-
+
     init_scripts: Optional[List[InitScriptInfo]] = None
     """The configuration for storing init scripts. Any number of destinations can be specified. The
     scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified,
     init script logs are sent to `//init_scripts`."""
-
+
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
-
+
     is_single_node: Optional[bool] = None
     """This field can only be used when `kind = CLASSIC_PREVIEW`.
 
     When set to true, Databricks will automatically set single node related `custom_tags`,
     `spark_conf`, and `num_workers`"""
-
+
     kind: Optional[Kind] = None
     """The kind of compute described by this compute specification.
@@ -2899,13 +2370,13 @@ class CreateCluster:
     By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.
 
     [simple form]: https://docs.databricks.com/compute/simple-form.html"""
-
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
     compute intensive workloads. A list of available node types can be retrieved by using the
     :method:clusters/listNodeTypes API call."""
-
+
     num_workers: Optional[int] = None
     """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
     `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -2915,10 +2386,10 @@ class CreateCluster:
     from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
     workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as
     the new nodes are provisioned."""
-
+
     policy_id: Optional[str] = None
     """The ID of the cluster policy used to create the cluster if applicable."""
-
+
     runtime_engine: Optional[RuntimeEngine] = None
     """Determines the cluster's runtime engine, either standard or Photon.
@@ -2927,16 +2398,16 @@ class CreateCluster:
 
     If left unspecified, the runtime engine defaults to standard unless the spark_version contains
     -photon-, in which case Photon will be used."""
-
+
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
-
-    spark_conf: Optional[Dict[str, str]] = None
+
+    spark_conf: Optional[Dict[str,str]] = None
     """An object containing a set of optional, user-specified Spark configuration key-value pairs.
     Users can also pass in a string of extra JVM options to the driver and the executors via
     `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
-
-    spark_env_vars: Optional[Dict[str, str]] = None
+
+    spark_env_vars: Optional[Dict[str,str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs.
     Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export
     X='Y'`) while launching the driver and workers.
@@ -2948,246 +2419,152 @@ class CreateCluster:
     Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
     "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
     -Dspark.shuffle.service.enabled=true"}`"""
-
+
     ssh_public_keys: Optional[List[str]] = None
     """SSH public key contents that will be added to each Spark node in this cluster. The
     corresponding private keys can be used to login with the user name `ubuntu` on port `2200`.
     Up to 10 keys can be specified."""
-
+
     use_ml_runtime: Optional[bool] = None
     """This field can only be used when `kind = CLASSIC_PREVIEW`.
 
     `effective_spark_version` is determined by `spark_version` (DBR release), this field
     `use_ml_runtime`, and whether `node_type_id` is gpu node or not."""
-
+
     workload_type: Optional[WorkloadType] = None
     """Cluster Attributes showing for clusters workload types."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_policy_default_values is not None:
-            body["apply_policy_default_values"] = self.apply_policy_default_values
-        if self.autoscale:
-            body["autoscale"] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None:
-            body["autotermination_minutes"] = self.autotermination_minutes
-        if self.aws_attributes:
-            body["aws_attributes"] = self.aws_attributes.as_dict()
-        if self.azure_attributes:
-            body["azure_attributes"] = self.azure_attributes.as_dict()
-        if self.clone_from:
-            body["clone_from"] = self.clone_from.as_dict()
-        if self.cluster_log_conf:
-            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None:
-            body["cluster_name"] = self.cluster_name
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.data_security_mode is not None:
-            body["data_security_mode"] = self.data_security_mode.value
-        if self.docker_image:
-            body["docker_image"] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None:
-            body["driver_instance_pool_id"] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None:
-            body["driver_node_type_id"] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None:
-            body["enable_elastic_disk"] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
-        if self.gcp_attributes:
-            body["gcp_attributes"] = self.gcp_attributes.as_dict()
-        if self.init_scripts:
-            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
-        if self.is_single_node is not None:
-            body["is_single_node"] = self.is_single_node
-        if self.kind is not None:
-            body["kind"] = self.kind.value
-        if self.node_type_id is not None:
-            body["node_type_id"] = self.node_type_id
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
-        if self.runtime_engine is not None:
-            body["runtime_engine"] = self.runtime_engine.value
-        if self.single_user_name is not None:
-            body["single_user_name"] = self.single_user_name
-        if self.spark_conf:
-            body["spark_conf"] = self.spark_conf
-        if self.spark_env_vars:
-            body["spark_env_vars"] = self.spark_env_vars
-        if self.spark_version is not None:
-            body["spark_version"] = self.spark_version
-        if self.ssh_public_keys:
-            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None:
-            body["use_ml_runtime"] = self.use_ml_runtime
-        if self.workload_type:
-            body["workload_type"] = self.workload_type.as_dict()
+        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
+        if self.clone_from: body['clone_from'] = self.clone_from.as_dict()
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
+        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_policy_default_values is not None:
-            body["apply_policy_default_values"] = self.apply_policy_default_values
-        if self.autoscale:
-            body["autoscale"] = self.autoscale
-        if self.autotermination_minutes is not None:
-            body["autotermination_minutes"] = self.autotermination_minutes
-        if self.aws_attributes:
-            body["aws_attributes"] = self.aws_attributes
-        if self.azure_attributes:
-            body["azure_attributes"] = self.azure_attributes
-        if self.clone_from:
-            body["clone_from"] = self.clone_from
-        if self.cluster_log_conf:
-            body["cluster_log_conf"] = self.cluster_log_conf
-        if self.cluster_name is not None:
-            body["cluster_name"] = self.cluster_name
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.data_security_mode is not None:
-            body["data_security_mode"] = self.data_security_mode
-        if self.docker_image:
-            body["docker_image"] = self.docker_image
-        if self.driver_instance_pool_id is not None:
-            body["driver_instance_pool_id"] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None:
-            body["driver_node_type_id"] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None:
-            body["enable_elastic_disk"] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
-        if self.gcp_attributes:
-            body["gcp_attributes"] = self.gcp_attributes
-        if self.init_scripts:
-            body["init_scripts"] = self.init_scripts
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
-        if self.is_single_node is not None:
-            body["is_single_node"] = self.is_single_node
-        if self.kind is not None:
-            body["kind"] = self.kind
-        if self.node_type_id is not None:
-            body["node_type_id"] = self.node_type_id
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
-        if self.runtime_engine is not None:
-            body["runtime_engine"] = self.runtime_engine
-        if self.single_user_name is not None:
-            body["single_user_name"] = self.single_user_name
-        if self.spark_conf:
-            body["spark_conf"] = self.spark_conf
-        if self.spark_env_vars:
-            body["spark_env_vars"] = self.spark_env_vars
-        if self.spark_version is not None:
-            body["spark_version"] = self.spark_version
-        if self.ssh_public_keys:
-            body["ssh_public_keys"] = self.ssh_public_keys
-        if self.use_ml_runtime is not None:
-            body["use_ml_runtime"] = self.use_ml_runtime
-        if self.workload_type:
-            body["workload_type"] = self.workload_type
+        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.clone_from: body['clone_from'] = self.clone_from
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] =
self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCluster: """Deserializes the CreateCluster from a dictionary.""" - return cls( - apply_policy_default_values=d.get("apply_policy_default_values", None), - autoscale=_from_dict(d, "autoscale", AutoScale), - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - clone_from=_from_dict(d, "clone_from", CloneCluster), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - cluster_name=d.get("cluster_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", None), - is_single_node=d.get("is_single_node", None), - kind=_enum(d, "kind", Kind), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - ssh_public_keys=d.get("ssh_public_keys", None), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), - ) + return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), clone_from=_from_dict(d, 'clone_from', CloneCluster), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), 
spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) + + @dataclass class CreateClusterResponse: cluster_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateClusterResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateClusterResponse: """Deserializes the CreateClusterResponse from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) + return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass class CreateContext: cluster_id: Optional[str] = None """Running cluster id""" - + language: Optional[Language] = None - + def as_dict(self) -> dict: """Serializes the CreateContext into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.language is not None: - body["language"] = self.language.value + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.language is not None: body['language'] = self.language.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateContext into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.language is not None: - body["language"] = self.language + if self.cluster_id is not None: body['clusterId'] = self.cluster_id + if self.language is not None: body['language'] = self.language return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateContext: """Deserializes the CreateContext from a dictionary.""" - return cls(cluster_id=d.get("clusterId", None), language=_enum(d, "language", Language)) + return cls(cluster_id=d.get('clusterId', None), language=_enum(d, 'language', Language)) + + @dataclass @@ -3195,168 +2572,130 @@ class CreateInstancePool: instance_pool_name: str """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters.""" - + node_type_id: str """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + aws_attributes: Optional[InstancePoolAwsAttributes] = None """Attributes related to instance pools running on Amazon Web Services. If not specified at pool creation, a set of default values will be used.""" - + azure_attributes: Optional[InstancePoolAzureAttributes] = None """Attributes related to instance pools running on Azure. If not specified at pool creation, a set of default values will be used.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for pool resources. 
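The regenerated `as_dict`/`from_dict` pairs above are behavior-preserving; only their formatting changed. A minimal sketch of the round-trip they implement, using classes from this hunk (the literal field values are illustrative, not taken from the spec):

    from databricks.sdk.service.compute import AutoScale, CreateCluster

    req = CreateCluster(
        spark_version="15.4.x-scala2.12",  # illustrative DBR version string
        node_type_id="i3.xlarge",          # illustrative node type
        autoscale=AutoScale(min_workers=1, max_workers=4),
    )
    body = req.as_dict()                   # nested dataclasses serialize recursively
    assert body["autoscale"] == {"min_workers": 1, "max_workers": 4}
    assert CreateCluster.from_dict(body).node_type_id == "i3.xlarge"

Fields left as None are omitted from the request body, which is why every serializer line guards with `is not None` (scalars) or truthiness (nested objects and collections).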
 
 @dataclass
@@ -3195,168 +2572,130 @@ class CreateInstancePool:
     instance_pool_name: str
     """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
     characters."""
-
+
     node_type_id: str
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
     compute intensive workloads. A list of available node types can be retrieved by using the
     :method:clusters/listNodeTypes API call."""
-
+
     aws_attributes: Optional[InstancePoolAwsAttributes] = None
     """Attributes related to instance pools running on Amazon Web Services. If not specified at pool
     creation, a set of default values will be used."""
-
+
     azure_attributes: Optional[InstancePoolAzureAttributes] = None
     """Attributes related to instance pools running on Azure. If not specified at pool creation, a set
     of default values will be used."""
-
-    custom_tags: Optional[Dict[str, str]] = None
+
+    custom_tags: Optional[Dict[str,str]] = None
     """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances
     and EBS volumes) with these tags in addition to `default_tags`. Notes:

     - Currently, Databricks allows at most 45 custom tags"""
-
+
     disk_spec: Optional[DiskSpec] = None
     """Defines the specification of the disks that will be attached to all spark containers."""
-
+
     enable_elastic_disk: Optional[bool] = None
     """Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire
     additional disk space when their Spark workers are running low on disk space. In AWS, this feature
     requires specific AWS permissions to function correctly - refer to the User Guide for more
     details."""
-
+
     gcp_attributes: Optional[InstancePoolGcpAttributes] = None
     """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool
     creation, a set of default values will be used."""
-
+
     idle_instance_autotermination_minutes: Optional[int] = None
     """Automatically terminates the extra instances in the pool cache after they are inactive for this
     time in minutes if min_idle_instances requirement is already met. If not set, the extra pool
     instances will be automatically terminated after a default timeout. If specified, the threshold
     must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle
     instances from the cache if the minimum cache size can still be met."""
-
+
     max_capacity: Optional[int] = None
     """Maximum number of outstanding instances to keep in the pool, including both instances used by
     clusters and idle instances. Clusters that require further instance provisioning will fail during
     upsize requests."""
-
+
     min_idle_instances: Optional[int] = None
     """Minimum number of idle instances to keep in the instance pool"""
-
+
     preloaded_docker_images: Optional[List[DockerImage]] = None
     """Custom Docker Image BYOC"""
-
+
     preloaded_spark_versions: Optional[List[str]] = None
     """A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters
     started with the preloaded Spark version will start faster. A list of available Spark versions can
     be retrieved by using the :method:clusters/sparkVersions API call."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateInstancePool into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_attributes:
-            body["aws_attributes"] = self.aws_attributes.as_dict()
-        if self.azure_attributes:
-            body["azure_attributes"] = self.azure_attributes.as_dict()
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.disk_spec:
-            body["disk_spec"] = self.disk_spec.as_dict()
-        if self.enable_elastic_disk is not None:
-            body["enable_elastic_disk"] = self.enable_elastic_disk
-        if self.gcp_attributes:
-            body["gcp_attributes"] = self.gcp_attributes.as_dict()
-        if self.idle_instance_autotermination_minutes is not None:
-            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_name is not None:
-            body["instance_pool_name"] = self.instance_pool_name
-        if self.max_capacity is not None:
-            body["max_capacity"] = self.max_capacity
-        if self.min_idle_instances is not None:
-            body["min_idle_instances"] = self.min_idle_instances
-        if self.node_type_id is not None:
-            body["node_type_id"] = self.node_type_id
-        if self.preloaded_docker_images:
-            body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images]
-        if self.preloaded_spark_versions:
-            body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions]
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict()
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images]
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_attributes:
-            body["aws_attributes"] = self.aws_attributes
-        if self.azure_attributes:
-            body["azure_attributes"] = self.azure_attributes
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.disk_spec:
-            body["disk_spec"] = self.disk_spec
-        if self.enable_elastic_disk is not None:
-            body["enable_elastic_disk"] = self.enable_elastic_disk
-        if self.gcp_attributes:
-            body["gcp_attributes"] = self.gcp_attributes
-        if self.idle_instance_autotermination_minutes is not None:
-            body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes
-        if self.instance_pool_name is not None:
-            body["instance_pool_name"] = self.instance_pool_name
-        if self.max_capacity is not None:
-            body["max_capacity"] = self.max_capacity
-        if self.min_idle_instances is not None:
-            body["min_idle_instances"] = self.min_idle_instances
-        if self.node_type_id is not None:
-            body["node_type_id"] = self.node_type_id
-        if self.preloaded_docker_images:
-            body["preloaded_docker_images"] = self.preloaded_docker_images
-        if self.preloaded_spark_versions:
-            body["preloaded_spark_versions"] = self.preloaded_spark_versions
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePool:
         """Deserializes the CreateInstancePool from a dictionary."""
-        return cls(
-            aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes),
-            azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes),
-            custom_tags=d.get("custom_tags", None),
-            disk_spec=_from_dict(d, "disk_spec", DiskSpec),
-            enable_elastic_disk=d.get("enable_elastic_disk", None),
-            gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes),
-            idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None),
-            instance_pool_name=d.get("instance_pool_name", None),
-            max_capacity=d.get("max_capacity", None),
-            min_idle_instances=d.get("min_idle_instances", None),
-            node_type_id=d.get("node_type_id", None),
-            preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage),
-            preloaded_spark_versions=d.get("preloaded_spark_versions", None),
-        )
+        return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None))
+
+
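The pool request follows the same serializer pattern; a short sketch with illustrative values (the pool name and node type are placeholders, not from the spec):

    from databricks.sdk.service.compute import CreateInstancePool

    pool = CreateInstancePool(
        instance_pool_name="dev-pool",             # must be unique, 1-100 characters
        node_type_id="i3.xlarge",                  # illustrative node type
        min_idle_instances=1,
        idle_instance_autotermination_minutes=30,  # 0 would evict idle instances immediately
    )
    # Dataclass equality makes the serialize/deserialize round-trip easy to verify.
    assert CreateInstancePool.from_dict(pool.as_dict()) == pool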
 
 @dataclass
 class CreateInstancePoolResponse:
     instance_pool_id: Optional[str] = None
     """The ID of the created instance pool."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateInstancePoolResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePoolResponse:
         """Deserializes the CreateInstancePoolResponse from a dictionary."""
-        return cls(instance_pool_id=d.get("instance_pool_id", None))
+        return cls(instance_pool_id=d.get('instance_pool_id', None))
+
+


 @dataclass
@@ -3365,22 +2704,22 @@ class CreatePolicy:
     """Policy definition document expressed in [Databricks Cluster Policy Definition Language].

     [Databricks Cluster Policy Definition Language]:
     https://docs.databricks.com/administration-guide/clusters/policy-definition.html"""
-
+
     description: Optional[str] = None
     """Additional human-readable description of the cluster policy."""
-
+
     libraries: Optional[List[Library]] = None
     """A list of libraries to be installed on the next cluster restart that uses this policy. The
     maximum number of libraries is 500."""
-
+
     max_clusters_per_user: Optional[int] = None
     """Max number of clusters per user that can be active using this policy. If not present, there is
     no max limit."""
-
+
     name: Optional[str] = None
     """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and
     100 characters."""
-
+
     policy_family_definition_overrides: Optional[str] = None
     """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
     document must be passed as a string and cannot be embedded in the requests.
@@ -3389,138 +2728,118 @@ class CreatePolicy:
     rules specified here are merged into the inherited policy definition.

     [Databricks Policy Definition Language]:
     https://docs.databricks.com/administration-guide/clusters/policy-definition.html"""
-
+
     policy_family_id: Optional[str] = None
     """ID of the policy family. The cluster policy's policy definition inherits the policy family's
     policy definition. Cannot be used with `definition`. Use `policy_family_definition_overrides`
     instead to customize the policy definition."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreatePolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.definition is not None:
-            body["definition"] = self.definition
-        if self.description is not None:
-            body["description"] = self.description
-        if self.libraries:
-            body["libraries"] = [v.as_dict() for v in self.libraries]
-        if self.max_clusters_per_user is not None:
-            body["max_clusters_per_user"] = self.max_clusters_per_user
-        if self.name is not None:
-            body["name"] = self.name
-        if self.policy_family_definition_overrides is not None:
-            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None:
-            body["policy_family_id"] = self.policy_family_id
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.definition is not None:
-            body["definition"] = self.definition
-        if self.description is not None:
-            body["description"] = self.description
-        if self.libraries:
-            body["libraries"] = self.libraries
-        if self.max_clusters_per_user is not None:
-            body["max_clusters_per_user"] = self.max_clusters_per_user
-        if self.name is not None:
-            body["name"] = self.name
-        if self.policy_family_definition_overrides is not None:
-            body["policy_family_definition_overrides"] = self.policy_family_definition_overrides
-        if self.policy_family_id is not None:
-            body["policy_family_id"] = self.policy_family_id
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreatePolicy:
         """Deserializes the CreatePolicy from a dictionary."""
-        return cls(
-            definition=d.get("definition", None),
-            description=d.get("description", None),
-            libraries=_repeated_dict(d, "libraries", Library),
-            max_clusters_per_user=d.get("max_clusters_per_user", None),
-            name=d.get("name", None),
-            policy_family_definition_overrides=d.get("policy_family_definition_overrides", None),
-            policy_family_id=d.get("policy_family_id", None),
-        )
+        return cls(definition=d.get('definition', None), description=d.get('description', None), libraries=_repeated_dict(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None))
+
+


 @dataclass
 class CreatePolicyResponse:
     policy_id: Optional[str] = None
     """Canonical unique identifier for the cluster policy."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreatePolicyResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreatePolicyResponse:
         """Deserializes the CreatePolicyResponse from a dictionary."""
-        return cls(policy_id=d.get("policy_id", None))
+        return cls(policy_id=d.get('policy_id', None))
+
+


 @dataclass
 class CreateResponse:
     script_id: Optional[str] = None
     """The global init script ID."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.script_id is not None:
-            body["script_id"] = self.script_id
+        if self.script_id is not None: body['script_id'] = self.script_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.script_id is not None:
-            body["script_id"] = self.script_id
+        if self.script_id is not None: body['script_id'] = self.script_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
-        return cls(script_id=d.get("script_id", None))
+        return cls(script_id=d.get('script_id', None))
+
+


 @dataclass
 class Created:
     id: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the Created into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
+        if self.id is not None: body['id'] = self.id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Created into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
+        if self.id is not None: body['id'] = self.id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Created:
         """Deserializes the Created from a dictionary."""
-        return cls(id=d.get("id", None))
+        return cls(id=d.get('id', None))
+
+
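As the `CreatePolicy` docstrings above note, `definition` and `policy_family_id` are mutually exclusive. A sketch of the two valid request shapes; the family ID and the policy rules are illustrative placeholders, not values confirmed by this spec:

    import json
    from databricks.sdk.service.compute import CreatePolicy

    # Standalone policy: an inline definition document, passed as a JSON string.
    inline = CreatePolicy(
        name="single-node-only",
        definition=json.dumps({"num_workers": {"type": "fixed", "value": 0}}),
    )

    # Family-based policy: inherit a family definition, then merge in overrides.
    inherited = CreatePolicy(
        name="team-default",
        policy_family_id="personal-vm",  # illustrative family ID
        policy_family_definition_overrides=json.dumps(
            {"autotermination_minutes": {"type": "fixed", "value": 60}}
        ),
    )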
+ """The value of the tag.""" - - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala - (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24)""" - def as_dict(self) -> dict: """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomPolicyTag: """Deserializes the CustomPolicyTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class DataPlaneEventDetails: event_type: Optional[DataPlaneEventDetailsEventType] = None - + executor_failures: Optional[int] = None - + host_id: Optional[str] = None - + timestamp: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the DataPlaneEventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.event_type is not None: - body["event_type"] = self.event_type.value - if self.executor_failures is not None: - body["executor_failures"] = self.executor_failures - if self.host_id is not None: - body["host_id"] = self.host_id - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.event_type is not None: body['event_type'] = self.event_type.value + if self.executor_failures is not None: body['executor_failures'] = self.executor_failures + if self.host_id is not None: body['host_id'] = self.host_id + if self.timestamp is not None: body['timestamp'] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.event_type is not None: - body["event_type"] = self.event_type - if self.executor_failures is not None: - body["executor_failures"] = self.executor_failures - if self.host_id is not None: - body["host_id"] = self.host_id - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.event_type is not None: body['event_type'] = self.event_type + if self.executor_failures is not None: body['executor_failures'] = self.executor_failures + if self.host_id is not None: body['host_id'] = self.host_id + if self.timestamp is not None: body['timestamp'] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataPlaneEventDetails: """Deserializes the DataPlaneEventDetails from a dictionary.""" - return cls( - event_type=_enum(d, "event_type", DataPlaneEventDetailsEventType), - executor_failures=d.get("executor_failures", None), - host_id=d.get("host_id", None), - timestamp=d.get("timestamp", None), - ) - + return cls(event_type=_enum(d, 'event_type', DataPlaneEventDetailsEventType), executor_failures=d.get('executor_failures', None), host_id=d.get('host_id', None), timestamp=d.get('timestamp', 
None)) + -class DataPlaneEventDetailsEventType(Enum): - NODE_BLACKLISTED = "NODE_BLACKLISTED" - NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" +class DataPlaneEventDetailsEventType(Enum): + + + NODE_BLACKLISTED = 'NODE_BLACKLISTED' + NODE_EXCLUDED_DECOMMISSIONED = 'NODE_EXCLUDED_DECOMMISSIONED' class DataSecurityMode(Enum): """Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in @@ -3632,78 +2932,77 @@ class DataSecurityMode(Enum): users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - - DATA_SECURITY_MODE_AUTO = "DATA_SECURITY_MODE_AUTO" - DATA_SECURITY_MODE_DEDICATED = "DATA_SECURITY_MODE_DEDICATED" - DATA_SECURITY_MODE_STANDARD = "DATA_SECURITY_MODE_STANDARD" - LEGACY_PASSTHROUGH = "LEGACY_PASSTHROUGH" - LEGACY_SINGLE_USER = "LEGACY_SINGLE_USER" - LEGACY_SINGLE_USER_STANDARD = "LEGACY_SINGLE_USER_STANDARD" - LEGACY_TABLE_ACL = "LEGACY_TABLE_ACL" - NONE = "NONE" - SINGLE_USER = "SINGLE_USER" - USER_ISOLATION = "USER_ISOLATION" - + + DATA_SECURITY_MODE_AUTO = 'DATA_SECURITY_MODE_AUTO' + DATA_SECURITY_MODE_DEDICATED = 'DATA_SECURITY_MODE_DEDICATED' + DATA_SECURITY_MODE_STANDARD = 'DATA_SECURITY_MODE_STANDARD' + LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH' + LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER' + LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD' + LEGACY_TABLE_ACL = 'LEGACY_TABLE_ACL' + NONE = 'NONE' + SINGLE_USER = 'SINGLE_USER' + USER_ISOLATION = 'USER_ISOLATION' @dataclass class DbfsStorageInfo: """A storage location in DBFS""" - + destination: str """dbfs destination, e.g. 
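Enum-typed fields round-trip through their string values: `as_dict` emits `.value`, and `from_dict` parses the string back into the member. A sketch using `DataSecurityMode` from the enum above (the version string and user name are illustrative):

    from databricks.sdk.service.compute import CreateCluster, DataSecurityMode

    req = CreateCluster(
        spark_version="15.4.x-scala2.12",           # illustrative
        data_security_mode=DataSecurityMode.SINGLE_USER,
        single_user_name="someone@example.com",     # pairs with SINGLE_USER
    )
    body = req.as_dict()
    assert body["data_security_mode"] == "SINGLE_USER"   # the .value string, not the member
    assert CreateCluster.from_dict(body).data_security_mode is DataSecurityMode.SINGLE_USER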
 
 @dataclass
 class DbfsStorageInfo:
     """A storage location in DBFS"""
-
+
     destination: str
     """dbfs destination, e.g. `dbfs:/my/path`"""
-
+
     def as_dict(self) -> dict:
         """Serializes the DbfsStorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo:
         """Deserializes the DbfsStorageInfo from a dictionary."""
-        return cls(destination=d.get("destination", None))
+        return cls(destination=d.get('destination', None))
+
+


 @dataclass
 class DeleteCluster:
     cluster_id: str
     """The cluster to be terminated."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DeleteCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DeleteCluster:
         """Deserializes the DeleteCluster from a dictionary."""
-        return cls(cluster_id=d.get("cluster_id", None))
+        return cls(cluster_id=d.get('cluster_id', None))
+
+


 @dataclass
@@ -3722,31 +3021,36 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteClusterResponse:
         """Deserializes the DeleteClusterResponse from a dictionary."""
         return cls()
+
+
+
+
+
 
 @dataclass
 class DeleteInstancePool:
     instance_pool_id: str
     """The instance pool to be terminated."""
-
+
    def as_dict(self) -> dict:
         """Serializes the DeleteInstancePool into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DeleteInstancePool into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePool:
         """Deserializes the DeleteInstancePool from a dictionary."""
-        return cls(instance_pool_id=d.get("instance_pool_id", None))
+        return cls(instance_pool_id=d.get('instance_pool_id', None))
+
+


 @dataclass
@@ -3765,31 +3069,33 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePoolResponse:
         """Deserializes the DeleteInstancePoolResponse from a dictionary."""
         return cls()
+
+
 
 @dataclass
 class DeletePolicy:
     policy_id: str
     """The ID of the policy to delete."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DeletePolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DeletePolicy:
         """Deserializes the DeletePolicy from a dictionary."""
-        return cls(policy_id=d.get("policy_id", None))
+        return cls(policy_id=d.get('policy_id', None))
+
+


 @dataclass
@@ -3808,6 +3114,8 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse:
         """Deserializes the DeletePolicyResponse from a dictionary."""
         return cls()
+
+
 
 @dataclass
@@ -3826,36 +3134,36 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
         return cls()
+
+
 
 @dataclass
 class DestroyContext:
     cluster_id: str
-
+
     context_id: str
-
+
     def as_dict(self) -> dict:
         """Serializes the DestroyContext into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None:
-            body["clusterId"] = self.cluster_id
-        if self.context_id is not None:
-            body["contextId"] = self.context_id
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.context_id is not None: body['contextId'] = self.context_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DestroyContext into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None:
-            body["clusterId"] = self.cluster_id
-        if self.context_id is not None:
-            body["contextId"] = self.context_id
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.context_id is not None: body['contextId'] = self.context_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DestroyContext:
         """Deserializes the DestroyContext from a dictionary."""
-        return cls(cluster_id=d.get("clusterId", None), context_id=d.get("contextId", None))
+        return cls(cluster_id=d.get('clusterId', None), context_id=d.get('contextId', None))
+
+


 @dataclass
@@ -3874,6 +3182,8 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DestroyResponse:
         """Deserializes the DestroyResponse from a dictionary."""
         return cls()
+
+
 
 @dataclass
@@ -3881,7 +3191,7 @@ class DiskSpec:
     """Describes the disks that are launched for each instance in the spark cluster. For example, if
     the cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then
     Databricks will launch a total of 6 disks, 100 GiB each, for this cluster."""
-
+
     disk_count: Optional[int] = None
     """The number of disks launched for each instance: - This feature is only enabled for supported
     node types. - Users can choose up to the limit of the disks supported by the node type. - For
@@ -3896,9 +3206,9 @@ class DiskSpec:
 
     Disks will be mounted at: - For AWS: `/ebs0`, `/ebs1`, and etc. - For Azure: `/remote_volume0`,
     `/remote_volume1`, and etc."""
-
+
     disk_iops: Optional[int] = None
-
+
     disk_size: Optional[int] = None
     """The size of each disk (in GiB) launched for each instance. Values must fall into the supported
     range for a particular instance type.
@@ -3906,225 +3216,199 @@ class DiskSpec:
 
     For AWS: - General Purpose SSD: 100 - 4096 GiB - Throughput Optimized HDD: 500 - 4096 GiB
 
     For Azure: - Premium LRS (SSD): 1 - 1023 GiB - Standard LRS (HDD): 1 - 1023 GiB"""
-
+
     disk_throughput: Optional[int] = None
-
+
     disk_type: Optional[DiskType] = None
     """The type of disks that will be launched with this cluster."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DiskSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.disk_count is not None:
-            body["disk_count"] = self.disk_count
-        if self.disk_iops is not None:
-            body["disk_iops"] = self.disk_iops
-        if self.disk_size is not None:
-            body["disk_size"] = self.disk_size
-        if self.disk_throughput is not None:
-            body["disk_throughput"] = self.disk_throughput
-        if self.disk_type:
-            body["disk_type"] = self.disk_type.as_dict()
+        if self.disk_count is not None: body['disk_count'] = self.disk_count
+        if self.disk_iops is not None: body['disk_iops'] = self.disk_iops
+        if self.disk_size is not None: body['disk_size'] = self.disk_size
+        if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput
+        if self.disk_type: body['disk_type'] = self.disk_type.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DiskSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.disk_count is not None:
-            body["disk_count"] = self.disk_count
-        if self.disk_iops is not None:
-            body["disk_iops"] = self.disk_iops
-        if self.disk_size is not None:
-            body["disk_size"] = self.disk_size
-        if self.disk_throughput is not None:
-            body["disk_throughput"] = self.disk_throughput
-        if self.disk_type:
-            body["disk_type"] = self.disk_type
+        if self.disk_count is not None: body['disk_count'] = self.disk_count
+        if self.disk_iops is not None: body['disk_iops'] = self.disk_iops
+        if self.disk_size is not None: body['disk_size'] = self.disk_size
+        if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput
+        if self.disk_type: body['disk_type'] = self.disk_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DiskSpec:
         """Deserializes the DiskSpec from a dictionary."""
-        return cls(
-            disk_count=d.get("disk_count", None),
-            disk_iops=d.get("disk_iops", None),
-            disk_size=d.get("disk_size", None),
-            disk_throughput=d.get("disk_throughput", None),
-            disk_type=_from_dict(d, "disk_type", DiskType),
-        )
+        return cls(disk_count=d.get('disk_count', None), disk_iops=d.get('disk_iops', None), disk_size=d.get('disk_size', None), disk_throughput=d.get('disk_throughput', None), disk_type=_from_dict(d, 'disk_type', DiskType))
+
+
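A sketch of a disk specification that stays within the AWS general-purpose SSD range documented in the `disk_size` docstring above (the counts are illustrative):

    from databricks.sdk.service.compute import DiskSpec, DiskType, DiskTypeEbsVolumeType

    spec = DiskSpec(
        disk_count=2,    # subject to the node type's supported disk limit
        disk_size=100,   # GiB; 100 - 4096 for general purpose SSD on AWS
        disk_type=DiskType(ebs_volume_type=DiskTypeEbsVolumeType.GENERAL_PURPOSE_SSD),
    )
    # The nested DiskType serializes to its enum's string value.
    assert spec.as_dict()["disk_type"] == {"ebs_volume_type": "GENERAL_PURPOSE_SSD"}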
 
 @dataclass
 class DiskType:
     """Describes the disk type."""
-
+
     azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None
     """All Azure Disk types that Databricks supports. See
     https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks"""
-
+
     ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None
     """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for
     details."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DiskType into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.azure_disk_volume_type is not None:
-            body["azure_disk_volume_type"] = self.azure_disk_volume_type.value
-        if self.ebs_volume_type is not None:
-            body["ebs_volume_type"] = self.ebs_volume_type.value
+        if self.azure_disk_volume_type is not None: body['azure_disk_volume_type'] = self.azure_disk_volume_type.value
+        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DiskType into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.azure_disk_volume_type is not None:
-            body["azure_disk_volume_type"] = self.azure_disk_volume_type
-        if self.ebs_volume_type is not None:
-            body["ebs_volume_type"] = self.ebs_volume_type
+        if self.azure_disk_volume_type is not None: body['azure_disk_volume_type'] = self.azure_disk_volume_type
+        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DiskType:
         """Deserializes the DiskType from a dictionary."""
-        return cls(
-            azure_disk_volume_type=_enum(d, "azure_disk_volume_type", DiskTypeAzureDiskVolumeType),
-            ebs_volume_type=_enum(d, "ebs_volume_type", DiskTypeEbsVolumeType),
-        )
+        return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType), ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType))
+
+
 
 class DiskTypeAzureDiskVolumeType(Enum):
     """All Azure Disk types that Databricks supports. See
     https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks"""
-
-    PREMIUM_LRS = "PREMIUM_LRS"
-    STANDARD_LRS = "STANDARD_LRS"
-
+
+    PREMIUM_LRS = 'PREMIUM_LRS'
+    STANDARD_LRS = 'STANDARD_LRS'
 
 class DiskTypeEbsVolumeType(Enum):
     """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for
     details."""
-
-    GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD"
-    THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD"
-
+
+    GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
+    THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
 
 @dataclass
 class DockerBasicAuth:
     password: Optional[str] = None
     """Password of the user"""
-
+
     username: Optional[str] = None
     """Name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the DockerBasicAuth into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.password is not None:
-            body["password"] = self.password
-        if self.username is not None:
-            body["username"] = self.username
+        if self.password is not None: body['password'] = self.password
+        if self.username is not None: body['username'] = self.username
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.password is not None:
-            body["password"] = self.password
-        if self.username is not None:
-            body["username"] = self.username
+        if self.password is not None: body['password'] = self.password
+        if self.username is not None: body['username'] = self.username
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DockerBasicAuth:
         """Deserializes the DockerBasicAuth from a dictionary."""
-        return cls(password=d.get("password", None), username=d.get("username", None))
+        return cls(password=d.get('password', None), username=d.get('username', None))
+
+


 @dataclass
 class DockerImage:
     basic_auth: Optional[DockerBasicAuth] = None
     """Basic auth with username and password"""
-
+
     url: Optional[str] = None
     """URL of the docker image."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DockerImage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.basic_auth:
-            body["basic_auth"] = self.basic_auth.as_dict()
-        if self.url is not None:
-            body["url"] = self.url
+        if self.basic_auth: body['basic_auth'] = self.basic_auth.as_dict()
+        if self.url is not None: body['url'] = self.url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DockerImage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.basic_auth:
-            body["basic_auth"] = self.basic_auth
-        if self.url is not None:
-            body["url"] = self.url
+        if self.basic_auth: body['basic_auth'] = self.basic_auth
+        if self.url is not None: body['url'] = self.url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DockerImage:
         """Deserializes the DockerImage from a dictionary."""
-        return cls(basic_auth=_from_dict(d, "basic_auth", DockerBasicAuth), url=d.get("url", None))
+        return cls(basic_auth=_from_dict(d, 'basic_auth', DockerBasicAuth), url=d.get('url', None))
+
+
 
 class EbsVolumeType(Enum):
     """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for
     details."""
-
-    GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD"
-    THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD"
-
+
+    GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
+    THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
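Custom container images pair `DockerImage` with optional `DockerBasicAuth`; a short sketch (the registry URL and credentials are placeholders, not real values):

    from databricks.sdk.service.compute import DockerBasicAuth, DockerImage

    image = DockerImage(
        url="my-registry.example.com/runtime:latest",  # placeholder image URL
        basic_auth=DockerBasicAuth(username="token", password="<secret>"),
    )
    # as_dict() nests the auth block; note the password travels in the request body.
    assert image.as_dict()["basic_auth"]["username"] == "token"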
When set to false, only fixed values from the policy will be applied.""" - + autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - + cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. For job clusters, the cluster name is automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -4132,7 +3416,7 @@ class EditCluster: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. @@ -4158,14 +3442,14 @@ class EditCluster: concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - + docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. @@ -4173,33 +3457,33 @@ class EditCluster: This field, along with node_type_id, should not be set if virtual_cluster_size is set. 
- + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space. This feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable LUKS on cluster VMs' local disks""" - + gcp_attributes: Optional[GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `$destination/$clusterId/init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + is_single_node: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`""" - + kind: Optional[Kind] = None """The kind of compute described by this compute specification. @@ -4214,13 +3498,13 @@ class EditCluster: By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. [simple form]: https://docs.databricks.com/compute/simple-form.html""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. @@ -4230,10 +3514,10 @@ class EditCluster: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + runtime_engine: Optional[RuntimeEngine] = None """Determines the cluster's runtime engine, either standard or Photon. @@ -4242,16 +3526,16 @@ class EditCluster: If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used.""" - + single_user_name: Optional[str] = None """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str, str]] = None + + spark_conf: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. Users can also pass in a string of extra JVM options to the driver and the executors via `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
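For instance, a typical `spark_conf` payload might look like the following (a sketch; the keys are standard Spark settings and the values are illustrative, not defaults):

spark_conf = {
    "spark.speculation": "true",                       # standard Spark setting, example value
    "spark.executor.extraJavaOptions": "-verbose:gc",  # the JVM-options passthrough described above
}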
- - spark_env_vars: Optional[Dict[str, str]] = None + + spark_env_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -4263,191 +3547,99 @@ class EditCluster: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to log in with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + use_ml_runtime: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is a GPU node or not.""" - + workload_type: Optional[WorkloadType] = None """Cluster attributes showing the workload types that the cluster supports.""" - + def as_dict(self) -> dict: """Serializes the EditCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind.value - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] =
self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() + if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value + if self.docker_image: body['docker_image'] = self.docker_image.as_dict() + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind.value + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EditCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = 
self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode - if self.docker_image: - body["docker_image"] = self.docker_image - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type + if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale + if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode + if self.docker_image: body['docker_image'] = self.docker_image + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = 
self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.is_single_node is not None: body['is_single_node'] = self.is_single_node + if self.kind is not None: body['kind'] = self.kind + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime + if self.workload_type: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditCluster: """Deserializes the EditCluster from a dictionary.""" - return cls( - apply_policy_default_values=d.get("apply_policy_default_values", None), - autoscale=_from_dict(d, "autoscale", AutoScale), - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - cluster_id=d.get("cluster_id", None), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - cluster_name=d.get("cluster_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", None), - is_single_node=d.get("is_single_node", None), - kind=_enum(d, "kind", Kind), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - ssh_public_keys=d.get("ssh_public_keys", None), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), - ) + return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), 
azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_id=d.get('cluster_id', None), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) + + @dataclass @@ -4466,94 +3658,76 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditClusterResponse: """Deserializes the EditClusterResponse from a dictionary.""" return cls() + + @dataclass class EditInstancePool: instance_pool_id: str """Instance pool ID""" - + instance_pool_name: str """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters.""" - + node_type_id: str """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags""" - + idle_instance_autotermination_minutes: Optional[int] = None """Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances will be automatically terminated after a default timeout. If specified, the threshold must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances from the cache as long as the minimum cache size can still be satisfied.""" - + max_capacity: Optional[int] = None """Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances. Clusters that require further instance provisioning will fail during upsize requests.""" - + min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool"""
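As a usage sketch for this dataclass (a minimal example, not part of the generated module; the pool ID, name, node type, and sizes are illustrative values):

from databricks.sdk.service.compute import EditInstancePool

pool = EditInstancePool(
    instance_pool_id="pool-0123456789abcdef",  # hypothetical pool ID
    instance_pool_name="shared-dev-pool",
    node_type_id="i3.xlarge",
    min_idle_instances=1,
    max_capacity=20,
    idle_instance_autotermination_minutes=30,
)
# The generated dataclasses round-trip cleanly through their dict form.
round_trip = EditInstancePool.from_dict(pool.as_dict())
assert round_trip == pool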
- + def as_dict(self) -> dict: """Serializes the EditInstancePool into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id return body def as_shallow_dict(self) -> dict: """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditInstancePool: """Deserializes the EditInstancePool from a dictionary.""" - return cls( - custom_tags=d.get("custom_tags", None), - idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), - instance_pool_id=d.get("instance_pool_id", None), - instance_pool_name=d.get("instance_pool_name", None), - max_capacity=d.get("max_capacity", None), -
min_idle_instances=d.get("min_idle_instances", None), - node_type_id=d.get("node_type_id", None), - ) + return cls(custom_tags=d.get('custom_tags', None), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None)) + + @dataclass @@ -4572,33 +3746,35 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditInstancePoolResponse: """Deserializes the EditInstancePoolResponse from a dictionary.""" return cls() + + @dataclass class EditPolicy: policy_id: str """The ID of the policy to update.""" - + definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + description: Optional[str] = None """Additional human-readable description of the cluster policy.""" - + libraries: Optional[List[Library]] = None """A list of libraries to be installed on the next cluster restart that uses this policy. The maximum number of libraries is 500.""" - + max_clusters_per_user: Optional[int] = None """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" - + name: Optional[str] = None """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 characters.""" - + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -4607,69 +3783,46 @@ class EditPolicy: rules specified here are merged into the inherited policy definition. [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + policy_family_id: Optional[str] = None """ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. Cannot be used with `definition`. 
Use `policy_family_definition_overrides` instead to customize the policy definition.""" - + def as_dict(self) -> dict: """Serializes the EditPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name is not None: body['name'] = self.name + if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.policy_id is not None: body['policy_id'] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the EditPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = self.libraries - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.libraries: body['libraries'] = self.libraries + if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name is not None: body['name'] = self.name + if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.policy_id is not None: body['policy_id'] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditPolicy: """Deserializes the EditPolicy from a dictionary.""" - return cls( - definition=d.get("definition", None), - description=d.get("description", None), - libraries=_repeated_dict(d, "libraries", Library), - max_clusters_per_user=d.get("max_clusters_per_user", None), - name=d.get("name", None), - policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), - 
policy_family_id=d.get("policy_family_id", None), - policy_id=d.get("policy_id", None), - ) + return cls(definition=d.get('definition', None), description=d.get('description', None), libraries=_repeated_dict(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None), policy_id=d.get('policy_id', None)) + + @dataclass @@ -4688,6 +3841,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditPolicyResponse: """Deserializes the EditPolicyResponse from a dictionary.""" return cls() + + @dataclass @@ -4706,39 +3861,39 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditResponse: """Deserializes the EditResponse from a dictionary.""" return cls() + + @dataclass class EnforceClusterComplianceRequest: cluster_id: str """The ID of the cluster you want to enforce policy compliance on.""" - + validate_only: Optional[bool] = None """If set, previews the changes that would be made to a cluster to enforce compliance but does not update the cluster.""" - + def as_dict(self) -> dict: """Serializes the EnforceClusterComplianceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.validate_only is not None: body['validate_only'] = self.validate_only return body def as_shallow_dict(self) -> dict: """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.validate_only is not None: body['validate_only'] = self.validate_only return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforceClusterComplianceRequest: """Deserializes the EnforceClusterComplianceRequest from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), validate_only=d.get("validate_only", None)) + return cls(cluster_id=d.get('cluster_id', None), validate_only=d.get('validate_only', None)) + + @dataclass @@ -4746,33 +3901,31 @@ class EnforceClusterComplianceResponse: changes: Optional[List[ClusterSettingsChange]] = None """A list of changes that have been made to the cluster settings for the cluster to become compliant with its policy.""" - + has_changes: Optional[bool] = None """Whether any changes have been made to the cluster settings for the cluster to become compliant with its policy.""" - + def as_dict(self) -> dict: """Serializes the EnforceClusterComplianceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.changes: - body["changes"] = [v.as_dict() for v in self.changes] - if self.has_changes is not None: - body["has_changes"] = self.has_changes + if self.changes: body['changes'] = [v.as_dict() for v in self.changes] + if self.has_changes is not None: body['has_changes'] = self.has_changes return body def as_shallow_dict(self) -> dict: """Serializes the EnforceClusterComplianceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.changes: - body["changes"] = self.changes - if self.has_changes is not None: - body["has_changes"] = self.has_changes + if self.changes: body['changes'] = self.changes + if self.has_changes is not None: body['has_changes'] = self.has_changes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforceClusterComplianceResponse: """Deserializes the EnforceClusterComplianceResponse from a dictionary.""" - return cls(changes=_repeated_dict(d, "changes", ClusterSettingsChange), has_changes=d.get("has_changes", None)) + return cls(changes=_repeated_dict(d, 'changes', ClusterSettingsChange), has_changes=d.get('has_changes', None)) + +
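These request/response types back the cluster policy-compliance endpoints. A minimal sketch of a dry run through `WorkspaceClient` (the cluster ID is illustrative, and workspace authentication is assumed to be configured in the environment):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.policy_compliance_for_clusters.enforce_compliance(
    cluster_id="1234-567890-abcdef12",  # hypothetical cluster ID
    validate_only=True,                 # preview the changes without applying them
)
if resp.has_changes:
    for change in resp.changes or []:
        print(change.field, change.previous_value, change.new_value)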
body["changes"] = self.changes - if self.has_changes is not None: - body["has_changes"] = self.has_changes + if self.changes: body['changes'] = self.changes + if self.has_changes is not None: body['has_changes'] = self.has_changes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforceClusterComplianceResponse: """Deserializes the EnforceClusterComplianceResponse from a dictionary.""" - return cls(changes=_repeated_dict(d, "changes", ClusterSettingsChange), has_changes=d.get("has_changes", None)) + return cls(changes=_repeated_dict(d, 'changes', ClusterSettingsChange), has_changes=d.get('has_changes', None)) + + @dataclass @@ -4780,66 +3933,49 @@ class Environment: """The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal environment spec, only pip dependencies are supported.""" - - client: str - """Client version used by the environment The client is the user-facing environment of the runtime. - Each client comes with a specific set of pre-installed libraries. The version is a string, - consisting of the major client version.""" - + + client: Optional[str] = None + """Use `environment_version` instead.""" + dependencies: Optional[List[str]] = None """List of pip dependencies, as supported by the version of pip in this environment. Each - dependency is a pip requirement file line - https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be - , , (WSFS or Volumes in - Databricks), E.g. dependencies: ["foo==0.0.1", "-r - /Workspace/test/requirements.txt"]""" - + dependency is a valid pip requirements file line per + https://pip.pypa.io/en/stable/reference/requirements-file-format/. Allowed dependencies include + a requirement specifier, an archive URL, a local project path (such as WSFS or UC Volumes in + Databricks), or a VCS project URL.""" + environment_version: Optional[str] = None - """We renamed `client` to `environment_version` in notebook exports. This field is meant solely so - that imported notebooks with `environment_version` can be deserialized correctly, in a - backwards-compatible way (i.e. if `client` is specified instead of `environment_version`, it - will be deserialized correctly). Do NOT use this field for any other purpose, e.g. notebook - storage. This field is not yet exposed to customers (e.g. in the jobs API).""" - + """Required. Environment version used by the environment. Each version comes with a specific Python + version and a set of Python packages. The version is a string, consisting of an integer.""" + jar_dependencies: Optional[List[str]] = None """List of jar dependencies, should be string representing volume paths. 
@dataclass @@ -4847,339 +3983,265 @@ class EventDetails: attributes: Optional[ClusterAttributes] = None """* For created clusters, the attributes of the cluster. * For edited clusters, the new attributes of the cluster.""" - + cause: Optional[EventDetailsCause] = None """The cause of a change in target size.""" - + cluster_size: Optional[ClusterSize] = None """The actual cluster size that was set in the cluster creation or edit.""" - + current_num_vcpus: Optional[int] = None """The current number of vCPUs in the cluster.""" - + current_num_workers: Optional[int] = None """The current number of nodes in the cluster.""" - + did_not_expand_reason: Optional[str] = None - + disk_size: Optional[int] = None """Current disk size in bytes""" - + driver_state_message: Optional[str] = None """More details about the change in driver's state""" - + enable_termination_for_node_blocklisted: Optional[bool] = None """Whether or not a blocklisted node should be terminated.
For ClusterEventType NODE_BLACKLISTED.""" - + free_space: Optional[int] = None - + init_scripts: Optional[InitScriptEventDetails] = None """List of global and cluster init scripts associated with this cluster event.""" - + instance_id: Optional[str] = None """Instance Id where the event originated from""" - + job_run_name: Optional[str] = None """Unique identifier of the specific job run associated with this cluster event * For clusters created for jobs, this will be the same as the cluster name""" - + previous_attributes: Optional[ClusterAttributes] = None """The cluster attributes before a cluster was edited.""" - + previous_cluster_size: Optional[ClusterSize] = None """The size of the cluster before an edit or resize.""" - + previous_disk_size: Optional[int] = None """Previous disk size in bytes""" - + reason: Optional[TerminationReason] = None """A termination reason: * On a TERMINATED event, this is the reason of the termination. * On a RESIZE_COMPLETE event, this indicates the reason that we failed to acquire some nodes.""" - + target_num_vcpus: Optional[int] = None """The targeted number of vCPUs in the cluster.""" - + target_num_workers: Optional[int] = None """The targeted number of nodes in the cluster.""" - + user: Optional[str] = None """The user that caused the event to occur. (Empty if it was done by the control plane.)""" - + def as_dict(self) -> dict: """Serializes the EventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attributes: - body["attributes"] = self.attributes.as_dict() - if self.cause is not None: - body["cause"] = self.cause.value - if self.cluster_size: - body["cluster_size"] = self.cluster_size.as_dict() - if self.current_num_vcpus is not None: - body["current_num_vcpus"] = self.current_num_vcpus - if self.current_num_workers is not None: - body["current_num_workers"] = self.current_num_workers - if self.did_not_expand_reason is not None: - body["did_not_expand_reason"] = self.did_not_expand_reason - if self.disk_size is not None: - body["disk_size"] = self.disk_size - if self.driver_state_message is not None: - body["driver_state_message"] = self.driver_state_message - if self.enable_termination_for_node_blocklisted is not None: - body["enable_termination_for_node_blocklisted"] = self.enable_termination_for_node_blocklisted - if self.free_space is not None: - body["free_space"] = self.free_space - if self.init_scripts: - body["init_scripts"] = self.init_scripts.as_dict() - if self.instance_id is not None: - body["instance_id"] = self.instance_id - if self.job_run_name is not None: - body["job_run_name"] = self.job_run_name - if self.previous_attributes: - body["previous_attributes"] = self.previous_attributes.as_dict() - if self.previous_cluster_size: - body["previous_cluster_size"] = self.previous_cluster_size.as_dict() - if self.previous_disk_size is not None: - body["previous_disk_size"] = self.previous_disk_size - if self.reason: - body["reason"] = self.reason.as_dict() - if self.target_num_vcpus is not None: - body["target_num_vcpus"] = self.target_num_vcpus - if self.target_num_workers is not None: - body["target_num_workers"] = self.target_num_workers - if self.user is not None: - body["user"] = self.user + if self.attributes: body['attributes'] = self.attributes.as_dict() + if self.cause is not None: body['cause'] = self.cause.value + if self.cluster_size: body['cluster_size'] = self.cluster_size.as_dict() + if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus + if 
self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers + if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason + if self.disk_size is not None: body['disk_size'] = self.disk_size + if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message + if self.enable_termination_for_node_blocklisted is not None: body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted + if self.free_space is not None: body['free_space'] = self.free_space + if self.init_scripts: body['init_scripts'] = self.init_scripts.as_dict() + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.job_run_name is not None: body['job_run_name'] = self.job_run_name + if self.previous_attributes: body['previous_attributes'] = self.previous_attributes.as_dict() + if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size.as_dict() + if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size + if self.reason: body['reason'] = self.reason.as_dict() + if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus + if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers + if self.user is not None: body['user'] = self.user return body def as_shallow_dict(self) -> dict: """Serializes the EventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.attributes: - body["attributes"] = self.attributes - if self.cause is not None: - body["cause"] = self.cause - if self.cluster_size: - body["cluster_size"] = self.cluster_size - if self.current_num_vcpus is not None: - body["current_num_vcpus"] = self.current_num_vcpus - if self.current_num_workers is not None: - body["current_num_workers"] = self.current_num_workers - if self.did_not_expand_reason is not None: - body["did_not_expand_reason"] = self.did_not_expand_reason - if self.disk_size is not None: - body["disk_size"] = self.disk_size - if self.driver_state_message is not None: - body["driver_state_message"] = self.driver_state_message - if self.enable_termination_for_node_blocklisted is not None: - body["enable_termination_for_node_blocklisted"] = self.enable_termination_for_node_blocklisted - if self.free_space is not None: - body["free_space"] = self.free_space - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_id is not None: - body["instance_id"] = self.instance_id - if self.job_run_name is not None: - body["job_run_name"] = self.job_run_name - if self.previous_attributes: - body["previous_attributes"] = self.previous_attributes - if self.previous_cluster_size: - body["previous_cluster_size"] = self.previous_cluster_size - if self.previous_disk_size is not None: - body["previous_disk_size"] = self.previous_disk_size - if self.reason: - body["reason"] = self.reason - if self.target_num_vcpus is not None: - body["target_num_vcpus"] = self.target_num_vcpus - if self.target_num_workers is not None: - body["target_num_workers"] = self.target_num_workers - if self.user is not None: - body["user"] = self.user + if self.attributes: body['attributes'] = self.attributes + if self.cause is not None: body['cause'] = self.cause + if self.cluster_size: body['cluster_size'] = self.cluster_size + if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus + if self.current_num_workers is not None: 
body['current_num_workers'] = self.current_num_workers + if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason + if self.disk_size is not None: body['disk_size'] = self.disk_size + if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message + if self.enable_termination_for_node_blocklisted is not None: body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted + if self.free_space is not None: body['free_space'] = self.free_space + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.job_run_name is not None: body['job_run_name'] = self.job_run_name + if self.previous_attributes: body['previous_attributes'] = self.previous_attributes + if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size + if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size + if self.reason: body['reason'] = self.reason + if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus + if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers + if self.user is not None: body['user'] = self.user return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EventDetails: """Deserializes the EventDetails from a dictionary.""" - return cls( - attributes=_from_dict(d, "attributes", ClusterAttributes), - cause=_enum(d, "cause", EventDetailsCause), - cluster_size=_from_dict(d, "cluster_size", ClusterSize), - current_num_vcpus=d.get("current_num_vcpus", None), - current_num_workers=d.get("current_num_workers", None), - did_not_expand_reason=d.get("did_not_expand_reason", None), - disk_size=d.get("disk_size", None), - driver_state_message=d.get("driver_state_message", None), - enable_termination_for_node_blocklisted=d.get("enable_termination_for_node_blocklisted", None), - free_space=d.get("free_space", None), - init_scripts=_from_dict(d, "init_scripts", InitScriptEventDetails), - instance_id=d.get("instance_id", None), - job_run_name=d.get("job_run_name", None), - previous_attributes=_from_dict(d, "previous_attributes", ClusterAttributes), - previous_cluster_size=_from_dict(d, "previous_cluster_size", ClusterSize), - previous_disk_size=d.get("previous_disk_size", None), - reason=_from_dict(d, "reason", TerminationReason), - target_num_vcpus=d.get("target_num_vcpus", None), - target_num_workers=d.get("target_num_workers", None), - user=d.get("user", None), - ) + return cls(attributes=_from_dict(d, 'attributes', ClusterAttributes), cause=_enum(d, 'cause', EventDetailsCause), cluster_size=_from_dict(d, 'cluster_size', ClusterSize), current_num_vcpus=d.get('current_num_vcpus', None), current_num_workers=d.get('current_num_workers', None), did_not_expand_reason=d.get('did_not_expand_reason', None), disk_size=d.get('disk_size', None), driver_state_message=d.get('driver_state_message', None), enable_termination_for_node_blocklisted=d.get('enable_termination_for_node_blocklisted', None), free_space=d.get('free_space', None), init_scripts=_from_dict(d, 'init_scripts', InitScriptEventDetails), instance_id=d.get('instance_id', None), job_run_name=d.get('job_run_name', None), previous_attributes=_from_dict(d, 'previous_attributes', ClusterAttributes), previous_cluster_size=_from_dict(d, 'previous_cluster_size', ClusterSize), previous_disk_size=d.get('previous_disk_size', None), 
reason=_from_dict(d, 'reason', TerminationReason), target_num_vcpus=d.get('target_num_vcpus', None), target_num_workers=d.get('target_num_workers', None), user=d.get('user', None)) + + class EventDetailsCause(Enum): """The cause of a change in target size.""" - - AUTORECOVERY = "AUTORECOVERY" - AUTOSCALE = "AUTOSCALE" - REPLACE_BAD_NODES = "REPLACE_BAD_NODES" - USER_REQUEST = "USER_REQUEST" - + + AUTORECOVERY = 'AUTORECOVERY' + AUTOSCALE = 'AUTOSCALE' + REPLACE_BAD_NODES = 'REPLACE_BAD_NODES' + USER_REQUEST = 'USER_REQUEST' class EventType(Enum): - - ADD_NODES_FAILED = "ADD_NODES_FAILED" - AUTOMATIC_CLUSTER_UPDATE = "AUTOMATIC_CLUSTER_UPDATE" - AUTOSCALING_BACKOFF = "AUTOSCALING_BACKOFF" - AUTOSCALING_FAILED = "AUTOSCALING_FAILED" - AUTOSCALING_STATS_REPORT = "AUTOSCALING_STATS_REPORT" - CREATING = "CREATING" - DBFS_DOWN = "DBFS_DOWN" - DID_NOT_EXPAND_DISK = "DID_NOT_EXPAND_DISK" - DRIVER_HEALTHY = "DRIVER_HEALTHY" - DRIVER_NOT_RESPONDING = "DRIVER_NOT_RESPONDING" - DRIVER_UNAVAILABLE = "DRIVER_UNAVAILABLE" - EDITED = "EDITED" - EXPANDED_DISK = "EXPANDED_DISK" - FAILED_TO_EXPAND_DISK = "FAILED_TO_EXPAND_DISK" - INIT_SCRIPTS_FINISHED = "INIT_SCRIPTS_FINISHED" - INIT_SCRIPTS_STARTED = "INIT_SCRIPTS_STARTED" - METASTORE_DOWN = "METASTORE_DOWN" - NODES_LOST = "NODES_LOST" - NODE_BLACKLISTED = "NODE_BLACKLISTED" - NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" - PINNED = "PINNED" - RESIZING = "RESIZING" - RESTARTING = "RESTARTING" - RUNNING = "RUNNING" - SPARK_EXCEPTION = "SPARK_EXCEPTION" - STARTING = "STARTING" - TERMINATING = "TERMINATING" - UNPINNED = "UNPINNED" - UPSIZE_COMPLETED = "UPSIZE_COMPLETED" - + + + ADD_NODES_FAILED = 'ADD_NODES_FAILED' + AUTOMATIC_CLUSTER_UPDATE = 'AUTOMATIC_CLUSTER_UPDATE' + AUTOSCALING_BACKOFF = 'AUTOSCALING_BACKOFF' + AUTOSCALING_FAILED = 'AUTOSCALING_FAILED' + AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT' + CLUSTER_MIGRATED = 'CLUSTER_MIGRATED' + CREATING = 'CREATING' + DBFS_DOWN = 'DBFS_DOWN' + DID_NOT_EXPAND_DISK = 'DID_NOT_EXPAND_DISK' + DRIVER_HEALTHY = 'DRIVER_HEALTHY' + DRIVER_NOT_RESPONDING = 'DRIVER_NOT_RESPONDING' + DRIVER_UNAVAILABLE = 'DRIVER_UNAVAILABLE' + EDITED = 'EDITED' + EXPANDED_DISK = 'EXPANDED_DISK' + FAILED_TO_EXPAND_DISK = 'FAILED_TO_EXPAND_DISK' + INIT_SCRIPTS_FINISHED = 'INIT_SCRIPTS_FINISHED' + INIT_SCRIPTS_STARTED = 'INIT_SCRIPTS_STARTED' + METASTORE_DOWN = 'METASTORE_DOWN' + NODES_LOST = 'NODES_LOST' + NODE_BLACKLISTED = 'NODE_BLACKLISTED' + NODE_EXCLUDED_DECOMMISSIONED = 'NODE_EXCLUDED_DECOMMISSIONED' + PINNED = 'PINNED' + RESIZING = 'RESIZING' + RESTARTING = 'RESTARTING' + RUNNING = 'RUNNING' + SPARK_EXCEPTION = 'SPARK_EXCEPTION' + STARTING = 'STARTING' + TERMINATING = 'TERMINATING' + UNPINNED = 'UNPINNED' + UPSIZE_COMPLETED = 'UPSIZE_COMPLETED' @dataclass class GcpAttributes: """Attributes set during cluster creation which are related to GCP.""" - + availability: Optional[GcpAvailability] = None """This field determines whether the spark executors will be scheduled to run on preemptible VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.""" - + boot_disk_size: Optional[int] = None """Boot disk size in GB""" - + google_service_account: Optional[str] = None """If provided, the cluster will impersonate the google service account when accessing gcloud services (like GCS). 
The google service account must have previously been added to the Databricks environment by an account administrator.""" - + local_ssd_count: Optional[int] = None """If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached. Each local SSD is 375GB in size. Refer to [GCP documentation] for the supported number of local SSDs for each instance type. [GCP documentation]: https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds""" - + use_preemptible_executors: Optional[bool] = None """This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon to be deprecated, use the 'availability' field instead.""" - + zone_id: Optional[str] = None """Identifier for the availability zone in which the cluster resides. This can be one of the following: - "HA" => High availability, spread nodes across availability zones for a Databricks deployment region [default]. - "AUTO" => Databricks picks an availability zone to schedule the cluster on. - A GCP availability zone => Pick One of the available zones for (machine type + region) from https://cloud.google.com/compute/docs/regions-zones.""" - + def as_dict(self) -> dict: """Serializes the GcpAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: - body["availability"] = self.availability.value - if self.boot_disk_size is not None: - body["boot_disk_size"] = self.boot_disk_size - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.local_ssd_count is not None: - body["local_ssd_count"] = self.local_ssd_count - if self.use_preemptible_executors is not None: - body["use_preemptible_executors"] = self.use_preemptible_executors - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.availability is not None: body['availability'] = self.availability.value + if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size + if self.google_service_account is not None: body['google_service_account'] = self.google_service_account + if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count + if self.use_preemptible_executors is not None: body['use_preemptible_executors'] = self.use_preemptible_executors + if self.zone_id is not None: body['zone_id'] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the GcpAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: - body["availability"] = self.availability - if self.boot_disk_size is not None: - body["boot_disk_size"] = self.boot_disk_size - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.local_ssd_count is not None: - body["local_ssd_count"] = self.local_ssd_count - if self.use_preemptible_executors is not None: - body["use_preemptible_executors"] = self.use_preemptible_executors - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.availability is not None: body['availability'] = self.availability + if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size + if self.google_service_account is not None: body['google_service_account'] = self.google_service_account + if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count + if 
self.use_preemptible_executors is not None: body['use_preemptible_executors'] = self.use_preemptible_executors + if self.zone_id is not None: body['zone_id'] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpAttributes: """Deserializes the GcpAttributes from a dictionary.""" - return cls( - availability=_enum(d, "availability", GcpAvailability), - boot_disk_size=d.get("boot_disk_size", None), - google_service_account=d.get("google_service_account", None), - local_ssd_count=d.get("local_ssd_count", None), - use_preemptible_executors=d.get("use_preemptible_executors", None), - zone_id=d.get("zone_id", None), - ) + return cls(availability=_enum(d, 'availability', GcpAvailability), boot_disk_size=d.get('boot_disk_size', None), google_service_account=d.get('google_service_account', None), local_ssd_count=d.get('local_ssd_count', None), use_preemptible_executors=d.get('use_preemptible_executors', None), zone_id=d.get('zone_id', None)) + + class GcpAvailability(Enum): """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.""" - - ON_DEMAND_GCP = "ON_DEMAND_GCP" - PREEMPTIBLE_GCP = "PREEMPTIBLE_GCP" - PREEMPTIBLE_WITH_FALLBACK_GCP = "PREEMPTIBLE_WITH_FALLBACK_GCP" - + + ON_DEMAND_GCP = 'ON_DEMAND_GCP' + PREEMPTIBLE_GCP = 'PREEMPTIBLE_GCP' + PREEMPTIBLE_WITH_FALLBACK_GCP = 'PREEMPTIBLE_WITH_FALLBACK_GCP' @dataclass class GcsStorageInfo: """A storage location in Google Cloud Platform's GCS""" - + destination: str """GCS destination/URI, e.g. `gs://my-bucket/some-prefix`""" - + def as_dict(self) -> dict: """Serializes the GcsStorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: - body["destination"] = self.destination + if self.destination is not None: body['destination'] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the GcsStorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: - body["destination"] = self.destination + if self.destination is not None: body['destination'] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcsStorageInfo: """Deserializes the GcsStorageInfo from a dictionary.""" - return cls(destination=d.get("destination", None)) + return cls(destination=d.get('destination', None)) + + + + + @dataclass @@ -5187,280 +4249,259 @@ class GetClusterComplianceResponse: is_compliant: Optional[bool] = None """Whether the cluster is compliant with its policy or not. Clusters could be out of compliance if the policy was updated after the cluster was last edited.""" - - violations: Optional[Dict[str, str]] = None + + violations: Optional[Dict[str,str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. 
The values indicate an error message describing the policy validation error.""" - + def as_dict(self) -> dict: """Serializes the GetClusterComplianceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.violations: - body["violations"] = self.violations + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations return body def as_shallow_dict(self) -> dict: """Serializes the GetClusterComplianceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.violations: - body["violations"] = self.violations + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetClusterComplianceResponse: """Deserializes the GetClusterComplianceResponse from a dictionary.""" - return cls(is_compliant=d.get("is_compliant", None), violations=d.get("violations", None)) + return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None)) + + + + + @dataclass class GetClusterPermissionLevelsResponse: permission_levels: Optional[List[ClusterPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetClusterPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetClusterPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetClusterPermissionLevelsResponse: """Deserializes the GetClusterPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", ClusterPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', ClusterPermissionsDescription)) + + + + + + + + @dataclass class GetClusterPolicyPermissionLevelsResponse: permission_levels: Optional[List[ClusterPolicyPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetClusterPolicyPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetClusterPolicyPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetClusterPolicyPermissionLevelsResponse: 
"""Deserializes the GetClusterPolicyPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", ClusterPolicyPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', ClusterPolicyPermissionsDescription)) + + + + + + + + + + + @dataclass class GetEvents: cluster_id: str """The ID of the cluster to retrieve events about.""" - + end_time: Optional[int] = None """The end time in epoch milliseconds. If empty, returns events up to the current time.""" - + event_types: Optional[List[EventType]] = None """An optional set of event types to filter on. If empty, all event types are returned.""" - + limit: Optional[int] = None """Deprecated: use page_token in combination with page_size instead. The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed value is 500.""" - + offset: Optional[int] = None """Deprecated: use page_token in combination with page_size instead. The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results are requested in descending order, the end_time field is required.""" - + order: Optional[GetEventsOrder] = None """The order to list events in; either "ASC" or "DESC". Defaults to "DESC".""" - + page_size: Optional[int] = None """The maximum number of events to include in a page of events. The server may further constrain the maximum number of results returned in a single page. If the page_size is empty or 0, the server will decide the number of results to be returned. The field has to be in the range [0,500]. If the value is outside the range, the server enforces 0 or 500.""" - + page_token: Optional[str] = None """Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of events respectively. If page_token is empty, the first page is returned.""" - + start_time: Optional[int] = None """The start time in epoch milliseconds. 
If empty, returns events starting from the beginning of time.""" - + def as_dict(self) -> dict: """Serializes the GetEvents into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.end_time is not None: - body["end_time"] = self.end_time - if self.event_types: - body["event_types"] = [v.value for v in self.event_types] - if self.limit is not None: - body["limit"] = self.limit - if self.offset is not None: - body["offset"] = self.offset - if self.order is not None: - body["order"] = self.order.value - if self.page_size is not None: - body["page_size"] = self.page_size - if self.page_token is not None: - body["page_token"] = self.page_token - if self.start_time is not None: - body["start_time"] = self.start_time + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.end_time is not None: body['end_time'] = self.end_time + if self.event_types: body['event_types'] = [v.value for v in self.event_types] + if self.limit is not None: body['limit'] = self.limit + if self.offset is not None: body['offset'] = self.offset + if self.order is not None: body['order'] = self.order.value + if self.page_size is not None: body['page_size'] = self.page_size + if self.page_token is not None: body['page_token'] = self.page_token + if self.start_time is not None: body['start_time'] = self.start_time return body def as_shallow_dict(self) -> dict: """Serializes the GetEvents into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.end_time is not None: - body["end_time"] = self.end_time - if self.event_types: - body["event_types"] = self.event_types - if self.limit is not None: - body["limit"] = self.limit - if self.offset is not None: - body["offset"] = self.offset - if self.order is not None: - body["order"] = self.order - if self.page_size is not None: - body["page_size"] = self.page_size - if self.page_token is not None: - body["page_token"] = self.page_token - if self.start_time is not None: - body["start_time"] = self.start_time + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.end_time is not None: body['end_time'] = self.end_time + if self.event_types: body['event_types'] = self.event_types + if self.limit is not None: body['limit'] = self.limit + if self.offset is not None: body['offset'] = self.offset + if self.order is not None: body['order'] = self.order + if self.page_size is not None: body['page_size'] = self.page_size + if self.page_token is not None: body['page_token'] = self.page_token + if self.start_time is not None: body['start_time'] = self.start_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetEvents: """Deserializes the GetEvents from a dictionary.""" - return cls( - cluster_id=d.get("cluster_id", None), - end_time=d.get("end_time", None), - event_types=_repeated_enum(d, "event_types", EventType), - limit=d.get("limit", None), - offset=d.get("offset", None), - order=_enum(d, "order", GetEventsOrder), - page_size=d.get("page_size", None), - page_token=d.get("page_token", None), - start_time=d.get("start_time", None), - ) - + return cls(cluster_id=d.get('cluster_id', None), end_time=d.get('end_time', None), event_types=_repeated_enum(d, 'event_types', EventType), limit=d.get('limit', None), offset=d.get('offset', None), order=_enum(d, 'order', GetEventsOrder), page_size=d.get('page_size', None), 
page_token=d.get('page_token', None), start_time=d.get('start_time', None)) + -class GetEventsOrder(Enum): - ASC = "ASC" - DESC = "DESC" +class GetEventsOrder(Enum): + + + ASC = 'ASC' + DESC = 'DESC' @dataclass class GetEventsResponse: events: Optional[List[ClusterEvent]] = None - + next_page: Optional[GetEvents] = None """Deprecated: use next_page_token or prev_page_token instead. The parameters required to retrieve the next page of events. Omitted if there are no more events to read.""" - + next_page_token: Optional[str] = None """This field represents the pagination token to retrieve the next page of results. If the value is "", it means no further results for the request.""" - + prev_page_token: Optional[str] = None """This field represents the pagination token to retrieve the previous page of results. If the value is "", it means no further results for the request.""" - + total_count: Optional[int] = None """Deprecated: Returns 0 when request uses page_token. Will start returning zero when request uses offset/limit soon. The total number of events filtered by the start_time, end_time, and event_types.""" - + def as_dict(self) -> dict: """Serializes the GetEventsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.events: - body["events"] = [v.as_dict() for v in self.events] - if self.next_page: - body["next_page"] = self.next_page.as_dict() - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token - if self.total_count is not None: - body["total_count"] = self.total_count + if self.events: body['events'] = [v.as_dict() for v in self.events] + if self.next_page: body['next_page'] = self.next_page.as_dict() + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.total_count is not None: body['total_count'] = self.total_count return body def as_shallow_dict(self) -> dict: """Serializes the GetEventsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.events: - body["events"] = self.events - if self.next_page: - body["next_page"] = self.next_page - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token - if self.total_count is not None: - body["total_count"] = self.total_count + if self.events: body['events'] = self.events + if self.next_page: body['next_page'] = self.next_page + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.total_count is not None: body['total_count'] = self.total_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetEventsResponse: """Deserializes the GetEventsResponse from a dictionary.""" - return cls( - events=_repeated_dict(d, "events", ClusterEvent), - next_page=_from_dict(d, "next_page", GetEvents), - next_page_token=d.get("next_page_token", None), - prev_page_token=d.get("prev_page_token", None), - total_count=d.get("total_count", None), - ) + return cls(events=_repeated_dict(d, 'events', ClusterEvent), next_page=_from_dict(d, 'next_page', GetEvents), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None), total_count=d.get('total_count', None)) 
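A minimal sketch (illustrative only, not part of the generated patch) of how the GetEvents/GetEventsResponse pair above round-trips on the wire: as_dict() stores enum members by their string .value, and from_dict() rebuilds them via _enum/_repeated_enum. It assumes these classes are importable from databricks.sdk.service.compute; the cluster ID is hypothetical.

from databricks.sdk.service.compute import EventType, GetEvents, GetEventsOrder

req = GetEvents(
    cluster_id='0123-456789-abcdefgh',  # hypothetical ID, for illustration only
    event_types=[EventType.STARTING, EventType.TERMINATING],
    order=GetEventsOrder.DESC,
    page_size=50,
)
body = req.as_dict()
assert body['order'] == 'DESC'  # as_dict() serializes the enum's .value
assert body['event_types'] == ['STARTING', 'TERMINATING']
restored = GetEvents.from_dict(body)  # _enum/_repeated_enum restore the members
assert restored.order is GetEventsOrder.DESC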
+ + + + + @dataclass class GetInstancePool: instance_pool_id: str """Canonical unique identifier for the pool.""" - + aws_attributes: Optional[InstancePoolAwsAttributes] = None """Attributes related to instance pools running on Amazon Web Services. If not specified at pool creation, a set of default values will be used.""" - + azure_attributes: Optional[InstancePoolAzureAttributes] = None """Attributes related to instance pools running on Azure. If not specified at pool creation, a set of default values will be used.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags""" - - default_tags: Optional[Dict[str, str]] = None + + default_tags: Optional[Dict[str,str]] = None """Tags that are added by Databricks regardless of any ``custom_tags``, including: - Vendor: Databricks @@ -5470,230 +4511,189 @@ class GetInstancePool: - InstancePoolName: - InstancePoolId: """ - + disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire additional disk space when their Spark workers are running low on disk space. In AWS, this feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + gcp_attributes: Optional[InstancePoolGcpAttributes] = None """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool creation, a set of default values will be used.""" - + idle_instance_autotermination_minutes: Optional[int] = None """Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances will be automatically terminated after a default timeout. If specified, the threshold must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances from the cache if the min cache size can still be satisfied.""" - + instance_pool_name: Optional[str] = None """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters.""" - + max_capacity: Optional[int] = None """Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances. Clusters that require further instance provisioning will fail during upsize requests.""" - + min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + preloaded_docker_images: Optional[List[DockerImage]] = None """Custom Docker Image BYOC""" - + preloaded_spark_versions: Optional[List[str]] = None """A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster.
A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + state: Optional[InstancePoolState] = None """Current state of the instance pool.""" - + stats: Optional[InstancePoolStats] = None """Usage statistics about the instance pool.""" - + status: Optional[InstancePoolStatus] = None """Status of failed pending instances in the pool.""" - + def as_dict(self) -> dict: """Serializes the GetInstancePool into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.default_tags: - body["default_tags"] = self.default_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec.as_dict() - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions] - if self.state is not None: - body["state"] = self.state.value - if self.stats: - body["stats"] = self.stats.as_dict() - if self.status: - body["status"] = self.status.as_dict() + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + if self.state is not None: body['state'] = self.state.value + if self.stats: body['stats'] = 
self.stats.as_dict() + if self.status: body['status'] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetInstancePool into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.default_tags: - body["default_tags"] = self.default_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = self.preloaded_docker_images - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = self.preloaded_spark_versions - if self.state is not None: - body["state"] = self.state - if self.stats: - body["stats"] = self.stats - if self.status: - body["status"] = self.status + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions + if self.state is not None: body['state'] = self.state + if self.stats: body['stats'] = self.stats + if self.status: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: """Deserializes the GetInstancePool from a dictionary.""" - return cls( - aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes), - custom_tags=d.get("custom_tags", None), - default_tags=d.get("default_tags", None), - disk_spec=_from_dict(d, 
"disk_spec", DiskSpec), - enable_elastic_disk=d.get("enable_elastic_disk", None), - gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), - idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), - instance_pool_id=d.get("instance_pool_id", None), - instance_pool_name=d.get("instance_pool_name", None), - max_capacity=d.get("max_capacity", None), - min_idle_instances=d.get("min_idle_instances", None), - node_type_id=d.get("node_type_id", None), - preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), - preloaded_spark_versions=d.get("preloaded_spark_versions", None), - state=_enum(d, "state", InstancePoolState), - stats=_from_dict(d, "stats", InstancePoolStats), - status=_from_dict(d, "status", InstancePoolStatus), - ) + return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), default_tags=d.get('default_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None), state=_enum(d, 'state', InstancePoolState), stats=_from_dict(d, 'stats', InstancePoolStats), status=_from_dict(d, 'status', InstancePoolStatus)) + + + + + @dataclass class GetInstancePoolPermissionLevelsResponse: permission_levels: Optional[List[InstancePoolPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetInstancePoolPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetInstancePoolPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetInstancePoolPermissionLevelsResponse: """Deserializes the GetInstancePoolPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", InstancePoolPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', InstancePoolPermissionsDescription)) + + + + + + + + + + + @dataclass class GetSparkVersionsResponse: versions: Optional[List[SparkVersion]] = None """All the available Spark versions.""" - + def as_dict(self) -> dict: """Serializes the GetSparkVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.versions: - body["versions"] = [v.as_dict() for v in self.versions] + if self.versions: 
body['versions'] = [v.as_dict() for v in self.versions] return body def as_shallow_dict(self) -> dict: """Serializes the GetSparkVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.versions: - body["versions"] = self.versions + if self.versions: body['versions'] = self.versions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetSparkVersionsResponse: """Deserializes the GetSparkVersionsResponse from a dictionary.""" - return cls(versions=_repeated_dict(d, "versions", SparkVersion)) + return cls(versions=_repeated_dict(d, 'versions', SparkVersion)) + + @dataclass class GlobalInitScriptCreateRequest: name: str """The name of the script""" - + script: str """The Base64-encoded content of the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. The script runs only if enabled.""" - + position: Optional[int] = None """The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. @@ -5704,231 +4704,171 @@ class GlobalInitScriptCreateRequest: 0, 1, and 2. Any position of (3) or greater puts the script in the last position. If an explicit position value conflicts with an existing script value, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1.""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptCreateRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptCreateRequest: """Deserializes the GlobalInitScriptCreateRequest from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script=d.get("script", None), - ) + return cls(enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None)) + + @dataclass class GlobalInitScriptDetails: created_at: Optional[int] = None """Time when the script was created, represented as a Unix timestamp in milliseconds.""" - + created_by: Optional[str] = None """The username of the user who created the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. 
The script runs only if enabled.""" - + name: Optional[str] = None """The name of the script""" - + position: Optional[int] = None """The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.""" - + script_id: Optional[str] = None """The global init script ID.""" - + updated_at: Optional[int] = None """Time when the script was updated, represented as a Unix timestamp in milliseconds.""" - + updated_by: Optional[str] = None """The username of the user who last updated the script""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script_id is not None: - body["script_id"] = self.script_id - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script_id is not None: body['script_id'] = self.script_id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script_id is not None: - body["script_id"] = self.script_id - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script_id is not None: body['script_id'] = self.script_id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptDetails: """Deserializes the GlobalInitScriptDetails from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script_id=d.get("script_id", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(created_at=d.get('created_at', None), 
created_by=d.get('created_by', None), enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script_id=d.get('script_id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + @dataclass class GlobalInitScriptDetailsWithContent: created_at: Optional[int] = None """Time when the script was created, represented as a Unix timestamp in milliseconds.""" - + created_by: Optional[str] = None """The username of the user who created the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. The script runs only if enabled.""" - + name: Optional[str] = None """The name of the script""" - + position: Optional[int] = None """The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.""" - + script: Optional[str] = None """The Base64-encoded content of the script.""" - + script_id: Optional[str] = None """The global init script ID.""" - + updated_at: Optional[int] = None """Time when the script was updated, represented as a Unix timestamp in milliseconds.""" - + updated_by: Optional[str] = None """The username of the user who last updated the script""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptDetailsWithContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script + if self.script_id is not None: body['script_id'] = self.script_id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptDetailsWithContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = 
self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script + if self.script_id is not None: body['script_id'] = self.script_id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptDetailsWithContent: """Deserializes the GlobalInitScriptDetailsWithContent from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script=d.get("script", None), - script_id=d.get("script_id", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None), script_id=d.get('script_id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + @dataclass class GlobalInitScriptUpdateRequest: name: str """The name of the script""" - + script: str """The Base64-encoded content of the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. The script runs only if enabled.""" - + position: Optional[int] = None """The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. To move the script to run first, set its position to 0. @@ -5939,193 +4879,150 @@ class GlobalInitScriptUpdateRequest: If an explicit position value conflicts with an existing script, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1.""" - + script_id: Optional[str] = None """The ID of the global init script.""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptUpdateRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = self.script + if self.script_id is not None: body['script_id'] = self.script_id return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id + if self.enabled is not None: body['enabled'] = self.enabled + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.script is not None: body['script'] = 
self.script + if self.script_id is not None: body['script_id'] = self.script_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptUpdateRequest: """Deserializes the GlobalInitScriptUpdateRequest from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script=d.get("script", None), - script_id=d.get("script_id", None), - ) + return cls(enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None), script_id=d.get('script_id', None)) + + @dataclass class InitScriptEventDetails: cluster: Optional[List[InitScriptInfoAndExecutionDetails]] = None """The cluster scoped init scripts associated with this cluster event.""" - + global_: Optional[List[InitScriptInfoAndExecutionDetails]] = None """The global init scripts associated with this cluster event.""" - + reported_for_node: Optional[str] = None """The private IP of the node we are reporting init script execution details for (we will select the execution details from only one node rather than reporting the execution details from every node to keep these event details small). This should only be defined for the INIT_SCRIPTS_FINISHED event""" - + def as_dict(self) -> dict: """Serializes the InitScriptEventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster: - body["cluster"] = [v.as_dict() for v in self.cluster] - if self.global_: - body["global"] = [v.as_dict() for v in self.global_] - if self.reported_for_node is not None: - body["reported_for_node"] = self.reported_for_node + if self.cluster: body['cluster'] = [v.as_dict() for v in self.cluster] + if self.global_: body['global'] = [v.as_dict() for v in self.global_] + if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node return body def as_shallow_dict(self) -> dict: """Serializes the InitScriptEventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster: - body["cluster"] = self.cluster - if self.global_: - body["global"] = self.global_ - if self.reported_for_node is not None: - body["reported_for_node"] = self.reported_for_node + if self.cluster: body['cluster'] = self.cluster + if self.global_: body['global'] = self.global_ + if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InitScriptEventDetails: """Deserializes the InitScriptEventDetails from a dictionary.""" - return cls( - cluster=_repeated_dict(d, "cluster", InitScriptInfoAndExecutionDetails), - global_=_repeated_dict(d, "global", InitScriptInfoAndExecutionDetails), - reported_for_node=d.get("reported_for_node", None), - ) + return cls(cluster=_repeated_dict(d, 'cluster', InitScriptInfoAndExecutionDetails), global_=_repeated_dict(d, 'global', InitScriptInfoAndExecutionDetails), reported_for_node=d.get('reported_for_node', None)) + + class InitScriptExecutionDetailsInitScriptExecutionStatus(Enum): """Result of attempted script execution""" - - FAILED_EXECUTION = "FAILED_EXECUTION" - FAILED_FETCH = "FAILED_FETCH" - FUSE_MOUNT_FAILED = "FUSE_MOUNT_FAILED" - NOT_EXECUTED = "NOT_EXECUTED" - SKIPPED = "SKIPPED" - SUCCEEDED = "SUCCEEDED" - UNKNOWN = "UNKNOWN" - + + FAILED_EXECUTION = 'FAILED_EXECUTION' + FAILED_FETCH = 'FAILED_FETCH' + FUSE_MOUNT_FAILED = 'FUSE_MOUNT_FAILED' + NOT_EXECUTED = 'NOT_EXECUTED' + SKIPPED = 'SKIPPED' + SUCCEEDED = 'SUCCEEDED' + 
UNKNOWN = 'UNKNOWN' @dataclass class InitScriptInfo: """Config for an individual init script Next ID: 11""" - + abfss: Optional[Adlsgen2Info] = None """destination needs to be provided, e.g. `abfss://@.dfs.core.windows.net/`""" - + dbfs: Optional[DbfsStorageInfo] = None """destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`""" - + file: Optional[LocalFileInfo] = None """destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } }`""" - + gcs: Optional[GcsStorageInfo] = None """destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }`""" - + s3: Optional[S3StorageInfo] = None """destination and either the region or endpoint need to be provided. e.g. `{ \"s3\": { \"destination\": \"s3://cluster_log_bucket/prefix\", \"region\": \"us-west-2\" } }` Cluster iam role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to write data to the s3 destination.""" - + volumes: Optional[VolumesStorageInfo] = None """destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : \"/Volumes/my-init.sh\" } }`""" - + workspace: Optional[WorkspaceStorageInfo] = None """destination needs to be provided, e.g. `{ "workspace": { "destination": "/cluster-init-scripts/setup-datadog.sh" } }`""" - + def as_dict(self) -> dict: """Serializes the InitScriptInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.abfss: - body["abfss"] = self.abfss.as_dict() - if self.dbfs: - body["dbfs"] = self.dbfs.as_dict() - if self.file: - body["file"] = self.file.as_dict() - if self.gcs: - body["gcs"] = self.gcs.as_dict() - if self.s3: - body["s3"] = self.s3.as_dict() - if self.volumes: - body["volumes"] = self.volumes.as_dict() - if self.workspace: - body["workspace"] = self.workspace.as_dict() + if self.abfss: body['abfss'] = self.abfss.as_dict() + if self.dbfs: body['dbfs'] = self.dbfs.as_dict() + if self.file: body['file'] = self.file.as_dict() + if self.gcs: body['gcs'] = self.gcs.as_dict() + if self.s3: body['s3'] = self.s3.as_dict() + if self.volumes: body['volumes'] = self.volumes.as_dict() + if self.workspace: body['workspace'] = self.workspace.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the InitScriptInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.abfss: - body["abfss"] = self.abfss - if self.dbfs: - body["dbfs"] = self.dbfs - if self.file: - body["file"] = self.file - if self.gcs: - body["gcs"] = self.gcs - if self.s3: - body["s3"] = self.s3 - if self.volumes: - body["volumes"] = self.volumes - if self.workspace: - body["workspace"] = self.workspace + if self.abfss: body['abfss'] = self.abfss + if self.dbfs: body['dbfs'] = self.dbfs + if self.file: body['file'] = self.file + if self.gcs: body['gcs'] = self.gcs + if self.s3: body['s3'] = self.s3 + if self.volumes: body['volumes'] = self.volumes + if self.workspace: body['workspace'] = self.workspace return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfo: """Deserializes the InitScriptInfo from a dictionary.""" - return cls( - abfss=_from_dict(d, "abfss", Adlsgen2Info), - dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), - file=_from_dict(d, "file", LocalFileInfo), - gcs=_from_dict(d, "gcs", GcsStorageInfo), - s3=_from_dict(d, "s3", S3StorageInfo), - volumes=_from_dict(d, "volumes", VolumesStorageInfo), - workspace=_from_dict(d, "workspace", WorkspaceStorageInfo), - ) + return 
cls(abfss=_from_dict(d, 'abfss', Adlsgen2Info), dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), file=_from_dict(d, 'file', LocalFileInfo), gcs=_from_dict(d, 'gcs', GcsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo), volumes=_from_dict(d, 'volumes', VolumesStorageInfo), workspace=_from_dict(d, 'workspace', WorkspaceStorageInfo)) + + @dataclass @@ -6133,139 +5030,108 @@ class InitScriptInfoAndExecutionDetails: abfss: Optional[Adlsgen2Info] = None """destination needs to be provided, e.g. `abfss://@.dfs.core.windows.net/`""" - + dbfs: Optional[DbfsStorageInfo] = None """destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`""" - + error_message: Optional[str] = None """Additional details regarding errors (such as a file not found message if the status is FAILED_FETCH). This field should only be used to provide *additional* information to the status field, not duplicate it.""" - + execution_duration_seconds: Optional[int] = None """The duration of the script execution in seconds""" - + file: Optional[LocalFileInfo] = None """destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } }`""" - + gcs: Optional[GcsStorageInfo] = None """destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }`""" - + s3: Optional[S3StorageInfo] = None """destination and either the region or endpoint need to be provided. e.g. `{ \"s3\": { \"destination\": \"s3://cluster_log_bucket/prefix\", \"region\": \"us-west-2\" } }` Cluster iam role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to write data to the s3 destination.""" - + status: Optional[InitScriptExecutionDetailsInitScriptExecutionStatus] = None """The current status of the script""" - + volumes: Optional[VolumesStorageInfo] = None """destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : \"/Volumes/my-init.sh\" } }`""" - + workspace: Optional[WorkspaceStorageInfo] = None """destination needs to be provided, e.g. 
`{ "workspace": { "destination": "/cluster-init-scripts/setup-datadog.sh" } }`""" - + def as_dict(self) -> dict: """Serializes the InitScriptInfoAndExecutionDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.abfss: - body["abfss"] = self.abfss.as_dict() - if self.dbfs: - body["dbfs"] = self.dbfs.as_dict() - if self.error_message is not None: - body["error_message"] = self.error_message - if self.execution_duration_seconds is not None: - body["execution_duration_seconds"] = self.execution_duration_seconds - if self.file: - body["file"] = self.file.as_dict() - if self.gcs: - body["gcs"] = self.gcs.as_dict() - if self.s3: - body["s3"] = self.s3.as_dict() - if self.status is not None: - body["status"] = self.status.value - if self.volumes: - body["volumes"] = self.volumes.as_dict() - if self.workspace: - body["workspace"] = self.workspace.as_dict() + if self.abfss: body['abfss'] = self.abfss.as_dict() + if self.dbfs: body['dbfs'] = self.dbfs.as_dict() + if self.error_message is not None: body['error_message'] = self.error_message + if self.execution_duration_seconds is not None: body['execution_duration_seconds'] = self.execution_duration_seconds + if self.file: body['file'] = self.file.as_dict() + if self.gcs: body['gcs'] = self.gcs.as_dict() + if self.s3: body['s3'] = self.s3.as_dict() + if self.status is not None: body['status'] = self.status.value + if self.volumes: body['volumes'] = self.volumes.as_dict() + if self.workspace: body['workspace'] = self.workspace.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the InitScriptInfoAndExecutionDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.abfss: - body["abfss"] = self.abfss - if self.dbfs: - body["dbfs"] = self.dbfs - if self.error_message is not None: - body["error_message"] = self.error_message - if self.execution_duration_seconds is not None: - body["execution_duration_seconds"] = self.execution_duration_seconds - if self.file: - body["file"] = self.file - if self.gcs: - body["gcs"] = self.gcs - if self.s3: - body["s3"] = self.s3 - if self.status is not None: - body["status"] = self.status - if self.volumes: - body["volumes"] = self.volumes - if self.workspace: - body["workspace"] = self.workspace + if self.abfss: body['abfss'] = self.abfss + if self.dbfs: body['dbfs'] = self.dbfs + if self.error_message is not None: body['error_message'] = self.error_message + if self.execution_duration_seconds is not None: body['execution_duration_seconds'] = self.execution_duration_seconds + if self.file: body['file'] = self.file + if self.gcs: body['gcs'] = self.gcs + if self.s3: body['s3'] = self.s3 + if self.status is not None: body['status'] = self.status + if self.volumes: body['volumes'] = self.volumes + if self.workspace: body['workspace'] = self.workspace return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfoAndExecutionDetails: """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary.""" - return cls( - abfss=_from_dict(d, "abfss", Adlsgen2Info), - dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), - error_message=d.get("error_message", None), - execution_duration_seconds=d.get("execution_duration_seconds", None), - file=_from_dict(d, "file", LocalFileInfo), - gcs=_from_dict(d, "gcs", GcsStorageInfo), - s3=_from_dict(d, "s3", S3StorageInfo), - status=_enum(d, "status", InitScriptExecutionDetailsInitScriptExecutionStatus), - volumes=_from_dict(d, "volumes", VolumesStorageInfo), - workspace=_from_dict(d, 
"workspace", WorkspaceStorageInfo), - ) + return cls(abfss=_from_dict(d, 'abfss', Adlsgen2Info), dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), error_message=d.get('error_message', None), execution_duration_seconds=d.get('execution_duration_seconds', None), file=_from_dict(d, 'file', LocalFileInfo), gcs=_from_dict(d, 'gcs', GcsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo), status=_enum(d, 'status', InitScriptExecutionDetailsInitScriptExecutionStatus), volumes=_from_dict(d, 'volumes', VolumesStorageInfo), workspace=_from_dict(d, 'workspace', WorkspaceStorageInfo)) + + @dataclass class InstallLibraries: cluster_id: str """Unique identifier for the cluster on which to install these libraries.""" - + libraries: List[Library] """The libraries to install.""" - + def as_dict(self) -> dict: """Serializes the InstallLibraries into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] return body def as_shallow_dict(self) -> dict: """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = self.libraries + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = self.libraries return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstallLibraries: """Deserializes the InstallLibraries from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) + return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated_dict(d, 'libraries', Library)) + + @dataclass @@ -6284,116 +5150,93 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> InstallLibrariesResponse: """Deserializes the InstallLibrariesResponse from a dictionary.""" return cls() + + @dataclass class InstancePoolAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[InstancePoolPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAccessControlRequest: """Deserializes the InstancePoolAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class InstancePoolAccessControlResponse: all_permissions: Optional[List[InstancePoolPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + 
if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAccessControlResponse: """Deserializes the InstancePoolAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", InstancePoolPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', InstancePoolPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass @@ -6401,18 +5244,18 @@ class InstancePoolAndStats: aws_attributes: Optional[InstancePoolAwsAttributes] = None """Attributes related to instance pools running on Amazon Web Services. If not specified at pool creation, a set of default values will be used.""" - + azure_attributes: Optional[InstancePoolAzureAttributes] = None """Attributes related to instance pools running on Azure. If not specified at pool creation, a set of default values will be used.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags""" - - default_tags: Optional[Dict[str, str]] = None + + default_tags: Optional[Dict[str,str]] = None """Tags that are added by Databricks regardless of any ``custom_tags``, including: - Vendor: Databricks @@ -6422,179 +5265,126 @@ class InstancePoolAndStats: - InstancePoolName: - InstancePoolId: """ - + disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space. In AWS, this feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + gcp_attributes: Optional[InstancePoolGcpAttributes] = None """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool creation, a set of default values will be used.""" - + idle_instance_autotermination_minutes: Optional[int] = None """Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances will be automatically terminated after a default timeout. If specified, the threshold must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances from the cache if min cache size could still hold.""" - + instance_pool_id: Optional[str] = None """Canonical unique identifier for the pool.""" - + instance_pool_name: Optional[str] = None """Pool name requested by the user. Pool name must be unique. 
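A sketch of granting pool access with the access-control request above, assuming the instance_pools.set_permissions endpoint generated elsewhere in this patch; the pool id and group name are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.compute import (
        InstancePoolAccessControlRequest,
        InstancePoolPermissionLevel,
    )

    w = WorkspaceClient()
    # Grants CAN_ATTACH_TO on the pool to one group.
    w.instance_pools.set_permissions(
        instance_pool_id="pool-0123",  # placeholder
        access_control_list=[
            InstancePoolAccessControlRequest(
                group_name="data-engineers",
                permission_level=InstancePoolPermissionLevel.CAN_ATTACH_TO,
            )
        ],
    )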
Length must be between 1 and 100 characters.""" - + max_capacity: Optional[int] = None """Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances. Clusters that require further instance provisioning will fail during upsize requests.""" - + min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + preloaded_docker_images: Optional[List[DockerImage]] = None """Custom Docker Image BYOC""" - + preloaded_spark_versions: Optional[List[str]] = None """A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + state: Optional[InstancePoolState] = None """Current state of the instance pool.""" - + stats: Optional[InstancePoolStats] = None """Usage statistics about the instance pool.""" - + status: Optional[InstancePoolStatus] = None """Status of failed pending instances in the pool.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAndStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.default_tags: - body["default_tags"] = self.default_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec.as_dict() - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions] - if self.state is not None: - body["state"] = self.state.value - if self.stats: - body["stats"] = self.stats.as_dict() - if self.status: - body["status"] = self.status.as_dict() + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() 
+ if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + if self.state is not None: body['state'] = self.state.value + if self.stats: body['stats'] = self.stats.as_dict() + if self.status: body['status'] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAndStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.default_tags: - body["default_tags"] = self.default_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = self.preloaded_docker_images - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = self.preloaded_spark_versions - if self.state is not None: - body["state"] = self.state - if self.stats: - body["stats"] = self.stats - if self.status: - body["status"] = self.status + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.default_tags: body['default_tags'] = self.default_tags + if self.disk_spec: body['disk_spec'] = self.disk_spec + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if 
self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name + if self.max_capacity is not None: body['max_capacity'] = self.max_capacity + if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images + if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions + if self.state is not None: body['state'] = self.state + if self.stats: body['stats'] = self.stats + if self.status: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: """Deserializes the InstancePoolAndStats from a dictionary.""" - return cls( - aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes), - custom_tags=d.get("custom_tags", None), - default_tags=d.get("default_tags", None), - disk_spec=_from_dict(d, "disk_spec", DiskSpec), - enable_elastic_disk=d.get("enable_elastic_disk", None), - gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), - idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), - instance_pool_id=d.get("instance_pool_id", None), - instance_pool_name=d.get("instance_pool_name", None), - max_capacity=d.get("max_capacity", None), - min_idle_instances=d.get("min_idle_instances", None), - node_type_id=d.get("node_type_id", None), - preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), - preloaded_spark_versions=d.get("preloaded_spark_versions", None), - state=_enum(d, "state", InstancePoolState), - stats=_from_dict(d, "stats", InstancePoolStats), - status=_from_dict(d, "status", InstancePoolStatus), - ) + return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), default_tags=d.get('default_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None), state=_enum(d, 'state', InstancePoolState), stats=_from_dict(d, 'stats', InstancePoolStats), status=_from_dict(d, 'status', InstancePoolStatus)) + + @dataclass class InstancePoolAwsAttributes: """Attributes set during instance pool creation which are related to Amazon Web Services.""" - + availability: Optional[InstancePoolAwsAttributesAvailability] = None """Availability type used for the spot nodes.""" - + spot_bid_price_percent: Optional[int] = None """Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance type's on-demand price. 
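As a sketch, creating a pool of the kind InstancePoolAndStats describes, assuming the usual instance_pools.create endpoint; the node type and sizing values are illustrative only:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    pool = w.instance_pools.create(
        instance_pool_name="shared-pool",
        node_type_id="i3.xlarge",  # illustrative AWS node type
        min_idle_instances=1,
        max_capacity=10,
        idle_instance_autotermination_minutes=15,
    )
    print(pool.instance_pool_id)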
For example, if this field is set to 50, and the cluster needs a new @@ -6603,7 +5393,7 @@ class InstancePoolAwsAttributes: `r3.xlarge` instances. If not specified, the default value is 100. When spot instances are requested for this cluster, only spot instances whose bid price percentage matches this field will be considered. Note that, for safety, we enforce this field to be no more than 10000.""" - + zone_id: Optional[str] = None """Identifier for the availability zone/datacenter in which the cluster resides. This string will be of a form like "us-west-2a". The provided availability zone must be in the same region as the @@ -6611,108 +5401,93 @@ class InstancePoolAwsAttributes: deployment resides in the "us-east-1" region. This is an optional field at cluster creation, and if not specified, a default zone will be used. The list of available zones as well as the default value can be found by using the `List Zones` method.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAwsAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: - body["availability"] = self.availability.value - if self.spot_bid_price_percent is not None: - body["spot_bid_price_percent"] = self.spot_bid_price_percent - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.availability is not None: body['availability'] = self.availability.value + if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.zone_id is not None: body['zone_id'] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAwsAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: - body["availability"] = self.availability - if self.spot_bid_price_percent is not None: - body["spot_bid_price_percent"] = self.spot_bid_price_percent - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.availability is not None: body['availability'] = self.availability + if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent + if self.zone_id is not None: body['zone_id'] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAwsAttributes: """Deserializes the InstancePoolAwsAttributes from a dictionary.""" - return cls( - availability=_enum(d, "availability", InstancePoolAwsAttributesAvailability), - spot_bid_price_percent=d.get("spot_bid_price_percent", None), - zone_id=d.get("zone_id", None), - ) + return cls(availability=_enum(d, 'availability', InstancePoolAwsAttributesAvailability), spot_bid_price_percent=d.get('spot_bid_price_percent', None), zone_id=d.get('zone_id', None)) + + class InstancePoolAwsAttributesAvailability(Enum): """The set of AWS availability types supported when setting up nodes for a cluster.""" - - ON_DEMAND = "ON_DEMAND" - SPOT = "SPOT" - + + ON_DEMAND = 'ON_DEMAND' + SPOT = 'SPOT' @dataclass class InstancePoolAzureAttributes: """Attributes set during instance pool creation which are related to Azure.""" - + availability: Optional[InstancePoolAzureAttributesAvailability] = None """Availability type used for the spot nodes.""" - + spot_bid_max_price: Optional[float] = None """With variable pricing, you have option to set a max price, in US dollars (USD) For example, the value 2 would be a max price of $2.00 USD per hour. If you set the max price to be -1, the VM won't be evicted based on price. 
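A small self-contained check of the AWS-attributes serializers above; the values are illustrative:

    from databricks.sdk.service.compute import (
        InstancePoolAwsAttributes,
        InstancePoolAwsAttributesAvailability,
    )

    # Spot nodes bidding at 60% of the on-demand price, pinned to one zone.
    aws = InstancePoolAwsAttributes(
        availability=InstancePoolAwsAttributesAvailability.SPOT,
        spot_bid_price_percent=60,
        zone_id="us-west-2a",
    )
    assert aws.as_dict() == {
        "availability": "SPOT",
        "spot_bid_price_percent": 60,
        "zone_id": "us-west-2a",
    }
    # from_dict inverts as_dict, so the round trip is lossless.
    assert InstancePoolAwsAttributes.from_dict(aws.as_dict()) == aws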
The price for the VM will be the current price for spot or the price for a standard VM, which ever is less, as long as there is capacity and quota available.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAzureAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: - body["availability"] = self.availability.value - if self.spot_bid_max_price is not None: - body["spot_bid_max_price"] = self.spot_bid_max_price + if self.availability is not None: body['availability'] = self.availability.value + if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAzureAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: - body["availability"] = self.availability - if self.spot_bid_max_price is not None: - body["spot_bid_max_price"] = self.spot_bid_max_price + if self.availability is not None: body['availability'] = self.availability + if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAzureAttributes: """Deserializes the InstancePoolAzureAttributes from a dictionary.""" - return cls( - availability=_enum(d, "availability", InstancePoolAzureAttributesAvailability), - spot_bid_max_price=d.get("spot_bid_max_price", None), - ) + return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability), spot_bid_max_price=d.get('spot_bid_max_price', None)) + + class InstancePoolAzureAttributesAvailability(Enum): """The set of Azure availability types supported when setting up nodes for a cluster.""" - - ON_DEMAND_AZURE = "ON_DEMAND_AZURE" - SPOT_AZURE = "SPOT_AZURE" - + + ON_DEMAND_AZURE = 'ON_DEMAND_AZURE' + SPOT_AZURE = 'SPOT_AZURE' @dataclass class InstancePoolGcpAttributes: """Attributes set during instance pool creation which are related to GCP.""" - + gcp_availability: Optional[GcpAvailability] = None """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.""" - + local_ssd_count: Optional[int] = None """If provided, each node in the instance pool will have this number of local SSDs attached. Each local SSD is 375GB in size. Refer to [GCP documentation] for the supported number of local SSDs for each instance type. [GCP documentation]: https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds""" - + zone_id: Optional[str] = None """Identifier for the availability zone/datacenter in which the cluster resides. This string will be of a form like "us-west1-a". The provided availability zone must be in the same region as the @@ -6726,255 +5501,208 @@ class InstancePoolGcpAttributes: https://cloud.google.com/compute/docs/regions-zones (e.g. "us-west1-a"). 
If empty, Databricks picks an availability zone to schedule the cluster on.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolGcpAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gcp_availability is not None: - body["gcp_availability"] = self.gcp_availability.value - if self.local_ssd_count is not None: - body["local_ssd_count"] = self.local_ssd_count - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability.value + if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count + if self.zone_id is not None: body['zone_id'] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolGcpAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.gcp_availability is not None: - body["gcp_availability"] = self.gcp_availability - if self.local_ssd_count is not None: - body["local_ssd_count"] = self.local_ssd_count - if self.zone_id is not None: - body["zone_id"] = self.zone_id + if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability + if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count + if self.zone_id is not None: body['zone_id'] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolGcpAttributes: """Deserializes the InstancePoolGcpAttributes from a dictionary.""" - return cls( - gcp_availability=_enum(d, "gcp_availability", GcpAvailability), - local_ssd_count=d.get("local_ssd_count", None), - zone_id=d.get("zone_id", None), - ) + return cls(gcp_availability=_enum(d, 'gcp_availability', GcpAvailability), local_ssd_count=d.get('local_ssd_count', None), zone_id=d.get('zone_id', None)) + + @dataclass class InstancePoolPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[InstancePoolPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the InstancePoolPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermission: """Deserializes the InstancePoolPermission from a 
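The GCP variant round-trips the same way; a quick sketch using only fields shown above:

    from databricks.sdk.service.compute import InstancePoolGcpAttributes

    attrs = InstancePoolGcpAttributes(local_ssd_count=1, zone_id="us-west1-a")
    # Absent optional keys come back as None-valued fields, so equality holds.
    assert InstancePoolGcpAttributes.from_dict(attrs.as_dict()) == attrs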
dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) + + class InstancePoolPermissionLevel(Enum): """Permission level""" - - CAN_ATTACH_TO = "CAN_ATTACH_TO" - CAN_MANAGE = "CAN_MANAGE" - + + CAN_ATTACH_TO = 'CAN_ATTACH_TO' + CAN_MANAGE = 'CAN_MANAGE' @dataclass class InstancePoolPermissions: access_control_list: Optional[List[InstancePoolAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the InstancePoolPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissions: """Deserializes the InstancePoolPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', InstancePoolAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class InstancePoolPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[InstancePoolPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the InstancePoolPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is 
not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsDescription: """Deserializes the InstancePoolPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) + + @dataclass class InstancePoolPermissionsRequest: access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - + instance_pool_id: Optional[str] = None """The instance pool for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsRequest: """Deserializes the InstancePoolPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlRequest), - instance_pool_id=d.get("instance_pool_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', InstancePoolAccessControlRequest), instance_pool_id=d.get('instance_pool_id', None)) + + class InstancePoolState(Enum): """The state of a Cluster. 
The current allowable state transitions are as follows: - + - ``ACTIVE`` -> ``STOPPED`` - ``ACTIVE`` -> ``DELETED`` - ``STOPPED`` -> ``ACTIVE`` - ``STOPPED`` -> ``DELETED``""" - - ACTIVE = "ACTIVE" - DELETED = "DELETED" - STOPPED = "STOPPED" - + + ACTIVE = 'ACTIVE' + DELETED = 'DELETED' + STOPPED = 'STOPPED' @dataclass class InstancePoolStats: idle_count: Optional[int] = None """Number of active instances in the pool that are NOT part of a cluster.""" - + pending_idle_count: Optional[int] = None """Number of pending instances in the pool that are NOT part of a cluster.""" - + pending_used_count: Optional[int] = None """Number of pending instances in the pool that are part of a cluster.""" - + used_count: Optional[int] = None """Number of active instances in the pool that are part of a cluster.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.idle_count is not None: - body["idle_count"] = self.idle_count - if self.pending_idle_count is not None: - body["pending_idle_count"] = self.pending_idle_count - if self.pending_used_count is not None: - body["pending_used_count"] = self.pending_used_count - if self.used_count is not None: - body["used_count"] = self.used_count + if self.idle_count is not None: body['idle_count'] = self.idle_count + if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count + if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count + if self.used_count is not None: body['used_count'] = self.used_count return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.idle_count is not None: - body["idle_count"] = self.idle_count - if self.pending_idle_count is not None: - body["pending_idle_count"] = self.pending_idle_count - if self.pending_used_count is not None: - body["pending_used_count"] = self.pending_used_count - if self.used_count is not None: - body["used_count"] = self.used_count + if self.idle_count is not None: body['idle_count'] = self.idle_count + if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count + if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count + if self.used_count is not None: body['used_count'] = self.used_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolStats: """Deserializes the InstancePoolStats from a dictionary.""" - return cls( - idle_count=d.get("idle_count", None), - pending_idle_count=d.get("pending_idle_count", None), - pending_used_count=d.get("pending_used_count", None), - used_count=d.get("used_count", None), - ) + return cls(idle_count=d.get('idle_count', None), pending_idle_count=d.get('pending_idle_count', None), pending_used_count=d.get('pending_used_count', None), used_count=d.get('used_count', None)) + + @dataclass @@ -6983,32 +5711,32 @@ class InstancePoolStatus: """List of error messages for the failed pending instances. The pending_instance_errors follows FIFO with maximum length of the min_idle of the pool. 
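Reading the stats above off a live pool, assuming the instance_pools.get endpoint; the pool id is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    pool = w.instance_pools.get(instance_pool_id="pool-0123")  # placeholder
    if pool.stats:
        # idle_count + used_count covers all active instances in the pool.
        print(pool.stats.idle_count, pool.stats.used_count)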
The pending_instance_errors is emptied once the number of exiting available instances reaches the min_idle of the pool.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pending_instance_errors: - body["pending_instance_errors"] = [v.as_dict() for v in self.pending_instance_errors] + if self.pending_instance_errors: body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors] return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.pending_instance_errors: - body["pending_instance_errors"] = self.pending_instance_errors + if self.pending_instance_errors: body['pending_instance_errors'] = self.pending_instance_errors return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolStatus: """Deserializes the InstancePoolStatus from a dictionary.""" - return cls(pending_instance_errors=_repeated_dict(d, "pending_instance_errors", PendingInstanceError)) + return cls(pending_instance_errors=_repeated_dict(d, 'pending_instance_errors', PendingInstanceError)) + + @dataclass class InstanceProfile: instance_profile_arn: str """The AWS ARN of the instance profile to register with Databricks. This field is required.""" - + iam_role_arn: Optional[str] = None """The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile @@ -7017,589 +5745,526 @@ class InstanceProfile: Otherwise, this field is optional. [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html""" - + is_meta_instance_profile: Optional[bool] = None """Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains an meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. 
This field is optional, the default value is `false`.""" - + def as_dict(self) -> dict: """Serializes the InstanceProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile + if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile return body def as_shallow_dict(self) -> dict: """Serializes the InstanceProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile + if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstanceProfile: """Deserializes the InstanceProfile from a dictionary.""" - return cls( - iam_role_arn=d.get("iam_role_arn", None), - instance_profile_arn=d.get("instance_profile_arn", None), - is_meta_instance_profile=d.get("is_meta_instance_profile", None), - ) + return cls(iam_role_arn=d.get('iam_role_arn', None), instance_profile_arn=d.get('instance_profile_arn', None), is_meta_instance_profile=d.get('is_meta_instance_profile', None)) + + class Kind(Enum): """The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html""" - - CLASSIC_PREVIEW = "CLASSIC_PREVIEW" - + + CLASSIC_PREVIEW = 'CLASSIC_PREVIEW' class Language(Enum): - - PYTHON = "python" - SCALA = "scala" - SQL = "sql" - + + + PYTHON = 'python' + SCALA = 'scala' + SQL = 'sql' @dataclass class Library: cran: Optional[RCranLibrary] = None """Specification of a CRAN library to be installed as part of the library""" - + egg: Optional[str] = None """Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.""" - + jar: Optional[str] = None """URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. 
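Registering an instance profile described by the dataclass above, assuming the instance_profiles.add endpoint; the ARN is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # iam_role_arn is only needed when the role and profile names differ
    # (see the field docstring above).
    w.instance_profiles.add(
        instance_profile_arn="arn:aws:iam::123456789012:instance-profile/demo",
        is_meta_instance_profile=False,
    )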
For example: `{ "jar": "/Workspace/path/to/library.jar" }`, `{ "jar" : "/Volumes/path/to/library.jar" }` or `{ "jar": "s3://my-bucket/library.jar" }`. If S3 is used, please make sure the cluster has read access on the library. You may need to launch the cluster with an IAM role to access the S3 URI.""" - + maven: Optional[MavenLibrary] = None """Specification of a maven library to be installed. For example: `{ "coordinates": "org.jsoup:jsoup:1.7.2" }`""" - + pypi: Optional[PythonPyPiLibrary] = None """Specification of a PyPi library to be installed. For example: `{ "package": "simplejson" }`""" - + requirements: Optional[str] = None """URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes paths are supported. For example: `{ "requirements": "/Workspace/path/to/requirements.txt" }` or `{ "requirements" : "/Volumes/path/to/requirements.txt" }`""" - + whl: Optional[str] = None """URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: `{ "whl": "/Workspace/path/to/library.whl" }`, `{ "whl" : "/Volumes/path/to/library.whl" }` or `{ "whl": "s3://my-bucket/library.whl" }`. If S3 is used, please make sure the cluster has read access on the library. You may need to launch the cluster with an IAM role to access the S3 URI.""" - + def as_dict(self) -> dict: """Serializes the Library into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cran: - body["cran"] = self.cran.as_dict() - if self.egg is not None: - body["egg"] = self.egg - if self.jar is not None: - body["jar"] = self.jar - if self.maven: - body["maven"] = self.maven.as_dict() - if self.pypi: - body["pypi"] = self.pypi.as_dict() - if self.requirements is not None: - body["requirements"] = self.requirements - if self.whl is not None: - body["whl"] = self.whl + if self.cran: body['cran'] = self.cran.as_dict() + if self.egg is not None: body['egg'] = self.egg + if self.jar is not None: body['jar'] = self.jar + if self.maven: body['maven'] = self.maven.as_dict() + if self.pypi: body['pypi'] = self.pypi.as_dict() + if self.requirements is not None: body['requirements'] = self.requirements + if self.whl is not None: body['whl'] = self.whl return body def as_shallow_dict(self) -> dict: """Serializes the Library into a shallow dictionary of its immediate attributes.""" body = {} - if self.cran: - body["cran"] = self.cran - if self.egg is not None: - body["egg"] = self.egg - if self.jar is not None: - body["jar"] = self.jar - if self.maven: - body["maven"] = self.maven - if self.pypi: - body["pypi"] = self.pypi - if self.requirements is not None: - body["requirements"] = self.requirements - if self.whl is not None: - body["whl"] = self.whl + if self.cran: body['cran'] = self.cran + if self.egg is not None: body['egg'] = self.egg + if self.jar is not None: body['jar'] = self.jar + if self.maven: body['maven'] = self.maven + if self.pypi: body['pypi'] = self.pypi + if self.requirements is not None: body['requirements'] = self.requirements + if self.whl is not None: body['whl'] = self.whl return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Library: """Deserializes the Library from a dictionary.""" - return cls( - cran=_from_dict(d, "cran", RCranLibrary), - egg=d.get("egg", None), - jar=d.get("jar", None), - maven=_from_dict(d, "maven", MavenLibrary), - pypi=_from_dict(d, "pypi", PythonPyPiLibrary), - requirements=d.get("requirements", None), - whl=d.get("whl", None), - ) + return 
cls(cran=_from_dict(d, 'cran', RCranLibrary), egg=d.get('egg', None), jar=d.get('jar', None), maven=_from_dict(d, 'maven', MavenLibrary), pypi=_from_dict(d, 'pypi', PythonPyPiLibrary), requirements=d.get('requirements', None), whl=d.get('whl', None)) + + @dataclass class LibraryFullStatus: """The status of the library on a specific cluster.""" - + is_library_for_all_clusters: Optional[bool] = None """Whether the library was set to be installed on all clusters via the libraries UI.""" - + library: Optional[Library] = None """Unique identifier for the library.""" - + messages: Optional[List[str]] = None """All the info and warning messages that have occurred so far for this library.""" - + status: Optional[LibraryInstallStatus] = None """Status of installing the library on the cluster.""" - + def as_dict(self) -> dict: """Serializes the LibraryFullStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_library_for_all_clusters is not None: - body["is_library_for_all_clusters"] = self.is_library_for_all_clusters - if self.library: - body["library"] = self.library.as_dict() - if self.messages: - body["messages"] = [v for v in self.messages] - if self.status is not None: - body["status"] = self.status.value + if self.is_library_for_all_clusters is not None: body['is_library_for_all_clusters'] = self.is_library_for_all_clusters + if self.library: body['library'] = self.library.as_dict() + if self.messages: body['messages'] = [v for v in self.messages] + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the LibraryFullStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_library_for_all_clusters is not None: - body["is_library_for_all_clusters"] = self.is_library_for_all_clusters - if self.library: - body["library"] = self.library - if self.messages: - body["messages"] = self.messages - if self.status is not None: - body["status"] = self.status + if self.is_library_for_all_clusters is not None: body['is_library_for_all_clusters'] = self.is_library_for_all_clusters + if self.library: body['library'] = self.library + if self.messages: body['messages'] = self.messages + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LibraryFullStatus: """Deserializes the LibraryFullStatus from a dictionary.""" - return cls( - is_library_for_all_clusters=d.get("is_library_for_all_clusters", None), - library=_from_dict(d, "library", Library), - messages=d.get("messages", None), - status=_enum(d, "status", LibraryInstallStatus), - ) + return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None), library=_from_dict(d, 'library', Library), messages=d.get('messages', None), status=_enum(d, 'status', LibraryInstallStatus)) + + class LibraryInstallStatus(Enum): """The status of a library on a specific cluster.""" - - FAILED = "FAILED" - INSTALLED = "INSTALLED" - INSTALLING = "INSTALLING" - PENDING = "PENDING" - RESOLVING = "RESOLVING" - RESTORED = "RESTORED" - SKIPPED = "SKIPPED" - UNINSTALL_ON_RESTART = "UNINSTALL_ON_RESTART" - + + FAILED = 'FAILED' + INSTALLED = 'INSTALLED' + INSTALLING = 'INSTALLING' + PENDING = 'PENDING' + RESOLVING = 'RESOLVING' + RESTORED = 'RESTORED' + SKIPPED = 'SKIPPED' + UNINSTALL_ON_RESTART = 'UNINSTALL_ON_RESTART' @dataclass class ListAllClusterLibraryStatusesResponse: statuses: Optional[List[ClusterLibraryStatuses]] = None """A list of cluster statuses.""" - 
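A sketch of consuming LibraryFullStatus values, assuming libraries.cluster_status yields one per installed library as in recent SDK releases; the cluster id is a placeholder:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.compute import LibraryInstallStatus

    w = WorkspaceClient()
    for full_status in w.libraries.cluster_status(cluster_id="0123-456789-abcdefgh"):
        # Surface any failed installs along with their accumulated messages.
        if full_status.status == LibraryInstallStatus.FAILED:
            print(full_status.library, full_status.messages)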
+ def as_dict(self) -> dict: """Serializes the ListAllClusterLibraryStatusesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.statuses: - body["statuses"] = [v.as_dict() for v in self.statuses] + if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses] return body def as_shallow_dict(self) -> dict: """Serializes the ListAllClusterLibraryStatusesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.statuses: - body["statuses"] = self.statuses + if self.statuses: body['statuses'] = self.statuses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAllClusterLibraryStatusesResponse: """Deserializes the ListAllClusterLibraryStatusesResponse from a dictionary.""" - return cls(statuses=_repeated_dict(d, "statuses", ClusterLibraryStatuses)) + return cls(statuses=_repeated_dict(d, 'statuses', ClusterLibraryStatuses)) + + @dataclass class ListAvailableZonesResponse: default_zone: Optional[str] = None """The availability zone if no ``zone_id`` is provided in the cluster creation request.""" - + zones: Optional[List[str]] = None """The list of available zones (e.g., ['us-west-2c', 'us-east-2']).""" - + def as_dict(self) -> dict: """Serializes the ListAvailableZonesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_zone is not None: - body["default_zone"] = self.default_zone - if self.zones: - body["zones"] = [v for v in self.zones] + if self.default_zone is not None: body['default_zone'] = self.default_zone + if self.zones: body['zones'] = [v for v in self.zones] return body def as_shallow_dict(self) -> dict: """Serializes the ListAvailableZonesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_zone is not None: - body["default_zone"] = self.default_zone - if self.zones: - body["zones"] = self.zones + if self.default_zone is not None: body['default_zone'] = self.default_zone + if self.zones: body['zones'] = self.zones return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAvailableZonesResponse: """Deserializes the ListAvailableZonesResponse from a dictionary.""" - return cls(default_zone=d.get("default_zone", None), zones=d.get("zones", None)) + return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None)) + + + + + @dataclass class ListClusterCompliancesResponse: clusters: Optional[List[ClusterCompliance]] = None """A list of clusters and their policy compliance statuses.""" - + next_page_token: Optional[str] = None """This field represents the pagination token to retrieve the next page of results. If the value is "", it means no further results for the request.""" - + prev_page_token: Optional[str] = None """This field represents the pagination token to retrieve the previous page of results. 
If the value is "", it means no further results for the request.""" - + def as_dict(self) -> dict: """Serializes the ListClusterCompliancesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clusters: - body["clusters"] = [v.as_dict() for v in self.clusters] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token + if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListClusterCompliancesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.clusters: - body["clusters"] = self.clusters - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token + if self.clusters: body['clusters'] = self.clusters + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListClusterCompliancesResponse: """Deserializes the ListClusterCompliancesResponse from a dictionary.""" - return cls( - clusters=_repeated_dict(d, "clusters", ClusterCompliance), - next_page_token=d.get("next_page_token", None), - prev_page_token=d.get("prev_page_token", None), - ) + return cls(clusters=_repeated_dict(d, 'clusters', ClusterCompliance), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) + + + + + @dataclass class ListClustersFilterBy: cluster_sources: Optional[List[ClusterSource]] = None """The source of cluster creation.""" - + cluster_states: Optional[List[State]] = None """The current state of the clusters.""" - + is_pinned: Optional[bool] = None """Whether the clusters are pinned or not.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + def as_dict(self) -> dict: """Serializes the ListClustersFilterBy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_sources: - body["cluster_sources"] = [v.value for v in self.cluster_sources] - if self.cluster_states: - body["cluster_states"] = [v.value for v in self.cluster_states] - if self.is_pinned is not None: - body["is_pinned"] = self.is_pinned - if self.policy_id is not None: - body["policy_id"] = self.policy_id + if self.cluster_sources: body['cluster_sources'] = [v.value for v in self.cluster_sources] + if self.cluster_states: body['cluster_states'] = [v.value for v in self.cluster_states] + if self.is_pinned is not None: body['is_pinned'] = self.is_pinned + if self.policy_id is not None: body['policy_id'] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the ListClustersFilterBy into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_sources: - body["cluster_sources"] = self.cluster_sources - if self.cluster_states: - body["cluster_states"] = self.cluster_states - if self.is_pinned is not None: - body["is_pinned"] = self.is_pinned - if self.policy_id is not None: - body["policy_id"] = self.policy_id + if self.cluster_sources: 
 @dataclass
 class ListClustersFilterBy:
     cluster_sources: Optional[List[ClusterSource]] = None
     """The source of cluster creation."""
-
+    
     cluster_states: Optional[List[State]] = None
     """The current state of the clusters."""
-
+    
     is_pinned: Optional[bool] = None
     """Whether the clusters are pinned or not."""
-
+    
     policy_id: Optional[str] = None
     """The ID of the cluster policy used to create the cluster if applicable."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListClustersFilterBy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_sources:
-            body["cluster_sources"] = [v.value for v in self.cluster_sources]
-        if self.cluster_states:
-            body["cluster_states"] = [v.value for v in self.cluster_states]
-        if self.is_pinned is not None:
-            body["is_pinned"] = self.is_pinned
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
+        if self.cluster_sources: body['cluster_sources'] = [v.value for v in self.cluster_sources]
+        if self.cluster_states: body['cluster_states'] = [v.value for v in self.cluster_states]
+        if self.is_pinned is not None: body['is_pinned'] = self.is_pinned
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListClustersFilterBy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_sources:
-            body["cluster_sources"] = self.cluster_sources
-        if self.cluster_states:
-            body["cluster_states"] = self.cluster_states
-        if self.is_pinned is not None:
-            body["is_pinned"] = self.is_pinned
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
+        if self.cluster_sources: body['cluster_sources'] = self.cluster_sources
+        if self.cluster_states: body['cluster_states'] = self.cluster_states
+        if self.is_pinned is not None: body['is_pinned'] = self.is_pinned
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListClustersFilterBy:
         """Deserializes the ListClustersFilterBy from a dictionary."""
-        return cls(
-            cluster_sources=_repeated_enum(d, "cluster_sources", ClusterSource),
-            cluster_states=_repeated_enum(d, "cluster_states", State),
-            is_pinned=d.get("is_pinned", None),
-            policy_id=d.get("policy_id", None),
-        )
+        return cls(cluster_sources=_repeated_enum(d, 'cluster_sources', ClusterSource), cluster_states=_repeated_enum(d, 'cluster_states', State), is_pinned=d.get('is_pinned', None), policy_id=d.get('policy_id', None))
+
+
+
+

 @dataclass
 class ListClustersResponse:
     clusters: Optional[List[ClusterDetails]] = None
-
+    
     next_page_token: Optional[str] = None
     """This field represents the pagination token to retrieve the next page of results. If the value
     is "", it means no further results for the request."""
-
+    
     prev_page_token: Optional[str] = None
     """This field represents the pagination token to retrieve the previous page of results. If the
     value is "", it means no further results for the request."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListClustersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clusters:
-            body["clusters"] = [v.as_dict() for v in self.clusters]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.prev_page_token is not None:
-            body["prev_page_token"] = self.prev_page_token
+        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListClustersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clusters:
-            body["clusters"] = self.clusters
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.prev_page_token is not None:
-            body["prev_page_token"] = self.prev_page_token
+        if self.clusters: body['clusters'] = self.clusters
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListClustersResponse:
         """Deserializes the ListClustersResponse from a dictionary."""
-        return cls(
-            clusters=_repeated_dict(d, "clusters", ClusterDetails),
-            next_page_token=d.get("next_page_token", None),
-            prev_page_token=d.get("prev_page_token", None),
-        )
+        return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None))
+
+

 @dataclass
 class ListClustersSortBy:
     direction: Optional[ListClustersSortByDirection] = None
     """The direction to sort by."""
-
+    
     field: Optional[ListClustersSortByField] = None
     """The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest
     precedence: cluster state, pinned or unpinned, then cluster name."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListClustersSortBy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.direction is not None:
-            body["direction"] = self.direction.value
-        if self.field is not None:
-            body["field"] = self.field.value
+        if self.direction is not None: body['direction'] = self.direction.value
+        if self.field is not None: body['field'] = self.field.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListClustersSortBy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.direction is not None:
-            body["direction"] = self.direction
-        if self.field is not None:
-            body["field"] = self.field
+        if self.direction is not None: body['direction'] = self.direction
+        if self.field is not None: body['field'] = self.field
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListClustersSortBy:
         """Deserializes the ListClustersSortBy from a dictionary."""
-        return cls(
-            direction=_enum(d, "direction", ListClustersSortByDirection),
-            field=_enum(d, "field", ListClustersSortByField),
-        )
-
+        return cls(direction=_enum(d, 'direction', ListClustersSortByDirection), field=_enum(d, 'field', ListClustersSortByField))
+

-class ListClustersSortByDirection(Enum):
-    ASC = "ASC"
-    DESC = "DESC"
+class ListClustersSortByDirection(Enum):
+
+
+    ASC = 'ASC'
+    DESC = 'DESC'


 class ListClustersSortByField(Enum):
-
-    CLUSTER_NAME = "CLUSTER_NAME"
-    DEFAULT = "DEFAULT"
-
+
+
+    CLUSTER_NAME = 'CLUSTER_NAME'
+    DEFAULT = 'DEFAULT'

 @dataclass
 class ListGlobalInitScriptsResponse:
     scripts: Optional[List[GlobalInitScriptDetails]] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListGlobalInitScriptsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.scripts:
-            body["scripts"] = [v.as_dict() for v in self.scripts]
+        if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListGlobalInitScriptsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.scripts:
-            body["scripts"] = self.scripts
+        if self.scripts: body['scripts'] = self.scripts
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListGlobalInitScriptsResponse:
         """Deserializes the ListGlobalInitScriptsResponse from a dictionary."""
-        return cls(scripts=_repeated_dict(d, "scripts", GlobalInitScriptDetails))
+        return cls(scripts=_repeated_dict(d, 'scripts', GlobalInitScriptDetails))
+
+

 @dataclass
 class ListInstancePools:
     instance_pools: Optional[List[InstancePoolAndStats]] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListInstancePools into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_pools:
-            body["instance_pools"] = [v.as_dict() for v in self.instance_pools]
+        if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListInstancePools into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_pools:
-            body["instance_pools"] = self.instance_pools
+        if self.instance_pools: body['instance_pools'] = self.instance_pools
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListInstancePools:
         """Deserializes the ListInstancePools from a dictionary."""
-        return cls(instance_pools=_repeated_dict(d, "instance_pools", InstancePoolAndStats))
+        return cls(instance_pools=_repeated_dict(d, 'instance_pools', InstancePoolAndStats))
+
+

 @dataclass
 class ListInstanceProfilesResponse:
     instance_profiles: Optional[List[InstanceProfile]] = None
     """A list of instance profiles that the user can access."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListInstanceProfilesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_profiles:
-            body["instance_profiles"] = [v.as_dict() for v in self.instance_profiles]
+        if self.instance_profiles: body['instance_profiles'] = [v.as_dict() for v in self.instance_profiles]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListInstanceProfilesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_profiles:
-            body["instance_profiles"] = self.instance_profiles
+        if self.instance_profiles: body['instance_profiles'] = self.instance_profiles
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListInstanceProfilesResponse:
         """Deserializes the ListInstanceProfilesResponse from a dictionary."""
-        return cls(instance_profiles=_repeated_dict(d, "instance_profiles", InstanceProfile))
+        return cls(instance_profiles=_repeated_dict(d, 'instance_profiles', InstanceProfile))
+
+

 @dataclass
 class ListNodeTypesResponse:
     node_types: Optional[List[NodeType]] = None
     """The list of available Spark node types."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListNodeTypesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.node_types:
-            body["node_types"] = [v.as_dict() for v in self.node_types]
+        if self.node_types: body['node_types'] = [v.as_dict() for v in self.node_types]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListNodeTypesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.node_types:
-            body["node_types"] = self.node_types
+        if self.node_types: body['node_types'] = self.node_types
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListNodeTypesResponse:
         """Deserializes the ListNodeTypesResponse from a dictionary."""
-        return cls(node_types=_repeated_dict(d, "node_types", NodeType))
+        return cls(node_types=_repeated_dict(d, 'node_types', NodeType))
+
+

 @dataclass
 class ListPoliciesResponse:
     policies: Optional[List[Policy]] = None
     """List of policies."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListPoliciesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.policies:
-            body["policies"] = [v.as_dict() for v in self.policies]
+        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.policies:
-            body["policies"] = self.policies
+        if self.policies: body['policies'] = self.policies
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListPoliciesResponse:
         """Deserializes the ListPoliciesResponse from a dictionary."""
-        return cls(policies=_repeated_dict(d, "policies", Policy))
+        return cls(policies=_repeated_dict(d, 'policies', Policy))
+
+
+
+

 @dataclass
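[Editorial sketch, not part of the patch] The next_page_token convention documented below is shared by the paginated responses in this file ("" means no further results). A sketch of the token walk; fetch_page is a hypothetical stand-in for whichever API method returns a ListPolicyFamiliesResponse:

    # Hypothetical pagination helper: an absent or empty token ends the walk.
    def iter_policy_families(fetch_page):
        token = None
        while True:
            resp = fetch_page(page_token=token)
            for family in resp.policy_families or []:
                yield family
            token = resp.next_page_token
            if not token:
                break
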
@@ -7607,189 +6272,171 @@ class ListPolicyFamiliesResponse:
     next_page_token: Optional[str] = None
     """A token that can be used to get the next page of results. If not present, there are no more
     results to show."""
-
+    
     policy_families: Optional[List[PolicyFamily]] = None
     """List of policy families."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListPolicyFamiliesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.policy_families:
-            body["policy_families"] = [v.as_dict() for v in self.policy_families]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListPolicyFamiliesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.policy_families:
-            body["policy_families"] = self.policy_families
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policy_families: body['policy_families'] = self.policy_families
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListPolicyFamiliesResponse:
         """Deserializes the ListPolicyFamiliesResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None),
-            policy_families=_repeated_dict(d, "policy_families", PolicyFamily),
-        )
-
+        return cls(next_page_token=d.get('next_page_token', None), policy_families=_repeated_dict(d, 'policy_families', PolicyFamily))
+

-class ListSortColumn(Enum):
-    POLICY_CREATION_TIME = "POLICY_CREATION_TIME"
-    POLICY_NAME = "POLICY_NAME"
+class ListSortColumn(Enum):
+
+
+    POLICY_CREATION_TIME = 'POLICY_CREATION_TIME'
+    POLICY_NAME = 'POLICY_NAME'


 class ListSortOrder(Enum):
-
-    ASC = "ASC"
-    DESC = "DESC"
-
+
+
+    ASC = 'ASC'
+    DESC = 'DESC'

 @dataclass
 class LocalFileInfo:
     destination: str
     """local file destination, e.g. `file:/my/local/file.sh`"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the LocalFileInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LocalFileInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LocalFileInfo:
         """Deserializes the LocalFileInfo from a dictionary."""
-        return cls(destination=d.get("destination", None))
+        return cls(destination=d.get('destination', None))
+
+

 @dataclass
 class LogAnalyticsInfo:
     log_analytics_primary_key: Optional[str] = None
-
+    
     log_analytics_workspace_id: Optional[str] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the LogAnalyticsInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.log_analytics_primary_key is not None:
-            body["log_analytics_primary_key"] = self.log_analytics_primary_key
-        if self.log_analytics_workspace_id is not None:
-            body["log_analytics_workspace_id"] = self.log_analytics_workspace_id
+        if self.log_analytics_primary_key is not None: body['log_analytics_primary_key'] = self.log_analytics_primary_key
+        if self.log_analytics_workspace_id is not None: body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LogAnalyticsInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.log_analytics_primary_key is not None:
-            body["log_analytics_primary_key"] = self.log_analytics_primary_key
-        if self.log_analytics_workspace_id is not None:
-            body["log_analytics_workspace_id"] = self.log_analytics_workspace_id
+        if self.log_analytics_primary_key is not None: body['log_analytics_primary_key'] = self.log_analytics_primary_key
+        if self.log_analytics_workspace_id is not None: body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LogAnalyticsInfo:
         """Deserializes the LogAnalyticsInfo from a dictionary."""
-        return cls(
-            log_analytics_primary_key=d.get("log_analytics_primary_key", None),
-            log_analytics_workspace_id=d.get("log_analytics_workspace_id", None),
-        )
+        return cls(log_analytics_primary_key=d.get('log_analytics_primary_key', None), log_analytics_workspace_id=d.get('log_analytics_workspace_id', None))
+
+

 @dataclass
 class LogSyncStatus:
     """The log delivery status"""
-
+    
     last_attempted: Optional[int] = None
     """The timestamp of last attempt. If the last attempt fails, `last_exception` will contain the
     exception in the last attempt."""
-
+    
     last_exception: Optional[str] = None
     """The exception thrown in the last attempt, it would be null (omitted in the response) if there
     is no exception in last attempted."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the LogSyncStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.last_attempted is not None:
-            body["last_attempted"] = self.last_attempted
-        if self.last_exception is not None:
-            body["last_exception"] = self.last_exception
+        if self.last_attempted is not None: body['last_attempted'] = self.last_attempted
+        if self.last_exception is not None: body['last_exception'] = self.last_exception
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LogSyncStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.last_attempted is not None:
-            body["last_attempted"] = self.last_attempted
-        if self.last_exception is not None:
-            body["last_exception"] = self.last_exception
+        if self.last_attempted is not None: body['last_attempted'] = self.last_attempted
+        if self.last_exception is not None: body['last_exception'] = self.last_exception
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LogSyncStatus:
         """Deserializes the LogSyncStatus from a dictionary."""
-        return cls(last_attempted=d.get("last_attempted", None), last_exception=d.get("last_exception", None))
+        return cls(last_attempted=d.get('last_attempted', None), last_exception=d.get('last_exception', None))
+

-MapAny = Dict[str, Any]
+
+MapAny = Dict[str,Any]


 @dataclass
 class MavenLibrary:
     coordinates: str
     """Gradle-style maven coordinates. For example: "org.jsoup:jsoup:1.7.2"."""
-
+    
     exclusions: Optional[List[str]] = None
     """List of dependences to exclude. For example: `["slf4j:slf4j", "*:hadoop-client"]`.
     
     Maven dependency exclusions:
     https://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html."""
-
+    
     repo: Optional[str] = None
     """Maven repo to install the Maven package from. If omitted, both Maven Central Repository and
     Spark Packages are searched."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the MavenLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.coordinates is not None:
-            body["coordinates"] = self.coordinates
-        if self.exclusions:
-            body["exclusions"] = [v for v in self.exclusions]
-        if self.repo is not None:
-            body["repo"] = self.repo
+        if self.coordinates is not None: body['coordinates'] = self.coordinates
+        if self.exclusions: body['exclusions'] = [v for v in self.exclusions]
+        if self.repo is not None: body['repo'] = self.repo
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MavenLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.coordinates is not None:
-            body["coordinates"] = self.coordinates
-        if self.exclusions:
-            body["exclusions"] = self.exclusions
-        if self.repo is not None:
-            body["repo"] = self.repo
+        if self.coordinates is not None: body['coordinates'] = self.coordinates
+        if self.exclusions: body['exclusions'] = self.exclusions
+        if self.repo is not None: body['repo'] = self.repo
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MavenLibrary:
         """Deserializes the MavenLibrary from a dictionary."""
-        return cls(
-            coordinates=d.get("coordinates", None), exclusions=d.get("exclusions", None), repo=d.get("repo", None)
-        )
+        return cls(coordinates=d.get('coordinates', None), exclusions=d.get('exclusions', None), repo=d.get('repo', None))
+
+

 @dataclass
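[Editorial sketch, not part of the patch] A minimal construction sketch for MavenLibrary, reusing the coordinate and exclusion examples from the docstrings above:

    # Illustrative only: when `repo` is omitted, Maven Central and
    # Spark Packages are searched.
    from databricks.sdk.service.compute import MavenLibrary

    lib = MavenLibrary(
        coordinates="org.jsoup:jsoup:1.7.2",
        exclusions=["slf4j:slf4j", "*:hadoop-client"],
    )
    assert lib.as_dict() == {
        "coordinates": "org.jsoup:jsoup:1.7.2",
        "exclusions": ["slf4j:slf4j", "*:hadoop-client"],
    }
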
per local disk).""" - + local_nvme_disks: Optional[int] = None """Number of local nvme disks that are present on this instance.""" - + def as_dict(self) -> dict: """Serializes the NodeInstanceType into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_type_id is not None: - body["instance_type_id"] = self.instance_type_id - if self.local_disk_size_gb is not None: - body["local_disk_size_gb"] = self.local_disk_size_gb - if self.local_disks is not None: - body["local_disks"] = self.local_disks - if self.local_nvme_disk_size_gb is not None: - body["local_nvme_disk_size_gb"] = self.local_nvme_disk_size_gb - if self.local_nvme_disks is not None: - body["local_nvme_disks"] = self.local_nvme_disks + if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id + if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb + if self.local_disks is not None: body['local_disks'] = self.local_disks + if self.local_nvme_disk_size_gb is not None: body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb + if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks return body def as_shallow_dict(self) -> dict: """Serializes the NodeInstanceType into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_type_id is not None: - body["instance_type_id"] = self.instance_type_id - if self.local_disk_size_gb is not None: - body["local_disk_size_gb"] = self.local_disk_size_gb - if self.local_disks is not None: - body["local_disks"] = self.local_disks - if self.local_nvme_disk_size_gb is not None: - body["local_nvme_disk_size_gb"] = self.local_nvme_disk_size_gb - if self.local_nvme_disks is not None: - body["local_nvme_disks"] = self.local_nvme_disks + if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id + if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb + if self.local_disks is not None: body['local_disks'] = self.local_disks + if self.local_nvme_disk_size_gb is not None: body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb + if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NodeInstanceType: """Deserializes the NodeInstanceType from a dictionary.""" - return cls( - instance_type_id=d.get("instance_type_id", None), - local_disk_size_gb=d.get("local_disk_size_gb", None), - local_disks=d.get("local_disks", None), - local_nvme_disk_size_gb=d.get("local_nvme_disk_size_gb", None), - local_nvme_disks=d.get("local_nvme_disks", None), - ) + return cls(instance_type_id=d.get('instance_type_id', None), local_disk_size_gb=d.get('local_disk_size_gb', None), local_disks=d.get('local_disks', None), local_nvme_disk_size_gb=d.get('local_nvme_disk_size_gb', None), local_nvme_disks=d.get('local_nvme_disks', None)) + + @dataclass class NodeType: """A description of a Spark node type including both the dimensions of the node and the instance type on which it will be hosted.""" - + node_type_id: str """Unique identifier for this node type.""" - + memory_mb: int """Memory (in MB) available for this node type.""" - + num_cores: float """Number of CPU cores available for this node type. 
Note that this can be fractional, e.g., 2.5 cores, if the the number of cores on a machine instance is not divisible by the number of Spark nodes on that machine.""" - + description: str """A string description associated with this node type, e.g., "r3.xlarge".""" - + instance_type_id: str """An identifier for the type of hardware that this node runs on, e.g., "r3.2xlarge" in AWS.""" - + category: str """A descriptive category for this node type. Examples include "Memory Optimized" and "Compute Optimized".""" - + display_order: Optional[int] = None """An optional hint at the display order of node types in the UI. Within a node type category, lowest numbers come first.""" - + is_deprecated: Optional[bool] = None """Whether the node type is deprecated. Non-deprecated node types offer greater performance.""" - + is_encrypted_in_transit: Optional[bool] = None """AWS specific, whether this instance supports encryption in transit, used for hipaa and pci workloads.""" - + is_graviton: Optional[bool] = None """Whether this is an Arm-based instance.""" - + is_hidden: Optional[bool] = None """Whether this node is hidden from presentation in the UI.""" - + is_io_cache_enabled: Optional[bool] = None """Whether this node comes with IO cache enabled by default.""" - + node_info: Optional[CloudProviderNodeInfo] = None """A collection of node type info reported by the cloud provider""" - + node_instance_type: Optional[NodeInstanceType] = None """The NodeInstanceType object corresponding to instance_type_id""" - + num_gpus: Optional[int] = None """Number of GPUs available for this node type.""" - + photon_driver_capable: Optional[bool] = None - + photon_worker_capable: Optional[bool] = None - + support_cluster_tags: Optional[bool] = None """Whether this node type support cluster tags.""" - + support_ebs_volumes: Optional[bool] = None """Whether this node type support EBS volumes. 
EBS volumes is disabled for node types that we could place multiple corresponding containers on the same hosting instance.""" - + support_port_forwarding: Optional[bool] = None """Whether this node type supports port forwarding.""" - + def as_dict(self) -> dict: """Serializes the NodeType into a dictionary suitable for use as a JSON request body.""" body = {} - if self.category is not None: - body["category"] = self.category - if self.description is not None: - body["description"] = self.description - if self.display_order is not None: - body["display_order"] = self.display_order - if self.instance_type_id is not None: - body["instance_type_id"] = self.instance_type_id - if self.is_deprecated is not None: - body["is_deprecated"] = self.is_deprecated - if self.is_encrypted_in_transit is not None: - body["is_encrypted_in_transit"] = self.is_encrypted_in_transit - if self.is_graviton is not None: - body["is_graviton"] = self.is_graviton - if self.is_hidden is not None: - body["is_hidden"] = self.is_hidden - if self.is_io_cache_enabled is not None: - body["is_io_cache_enabled"] = self.is_io_cache_enabled - if self.memory_mb is not None: - body["memory_mb"] = self.memory_mb - if self.node_info: - body["node_info"] = self.node_info.as_dict() - if self.node_instance_type: - body["node_instance_type"] = self.node_instance_type.as_dict() - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_cores is not None: - body["num_cores"] = self.num_cores - if self.num_gpus is not None: - body["num_gpus"] = self.num_gpus - if self.photon_driver_capable is not None: - body["photon_driver_capable"] = self.photon_driver_capable - if self.photon_worker_capable is not None: - body["photon_worker_capable"] = self.photon_worker_capable - if self.support_cluster_tags is not None: - body["support_cluster_tags"] = self.support_cluster_tags - if self.support_ebs_volumes is not None: - body["support_ebs_volumes"] = self.support_ebs_volumes - if self.support_port_forwarding is not None: - body["support_port_forwarding"] = self.support_port_forwarding + if self.category is not None: body['category'] = self.category + if self.description is not None: body['description'] = self.description + if self.display_order is not None: body['display_order'] = self.display_order + if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id + if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated + if self.is_encrypted_in_transit is not None: body['is_encrypted_in_transit'] = self.is_encrypted_in_transit + if self.is_graviton is not None: body['is_graviton'] = self.is_graviton + if self.is_hidden is not None: body['is_hidden'] = self.is_hidden + if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled + if self.memory_mb is not None: body['memory_mb'] = self.memory_mb + if self.node_info: body['node_info'] = self.node_info.as_dict() + if self.node_instance_type: body['node_instance_type'] = self.node_instance_type.as_dict() + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_cores is not None: body['num_cores'] = self.num_cores + if self.num_gpus is not None: body['num_gpus'] = self.num_gpus + if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable + if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable + if self.support_cluster_tags is not None: body['support_cluster_tags'] = 
self.support_cluster_tags + if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes + if self.support_port_forwarding is not None: body['support_port_forwarding'] = self.support_port_forwarding return body def as_shallow_dict(self) -> dict: """Serializes the NodeType into a shallow dictionary of its immediate attributes.""" body = {} - if self.category is not None: - body["category"] = self.category - if self.description is not None: - body["description"] = self.description - if self.display_order is not None: - body["display_order"] = self.display_order - if self.instance_type_id is not None: - body["instance_type_id"] = self.instance_type_id - if self.is_deprecated is not None: - body["is_deprecated"] = self.is_deprecated - if self.is_encrypted_in_transit is not None: - body["is_encrypted_in_transit"] = self.is_encrypted_in_transit - if self.is_graviton is not None: - body["is_graviton"] = self.is_graviton - if self.is_hidden is not None: - body["is_hidden"] = self.is_hidden - if self.is_io_cache_enabled is not None: - body["is_io_cache_enabled"] = self.is_io_cache_enabled - if self.memory_mb is not None: - body["memory_mb"] = self.memory_mb - if self.node_info: - body["node_info"] = self.node_info - if self.node_instance_type: - body["node_instance_type"] = self.node_instance_type - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_cores is not None: - body["num_cores"] = self.num_cores - if self.num_gpus is not None: - body["num_gpus"] = self.num_gpus - if self.photon_driver_capable is not None: - body["photon_driver_capable"] = self.photon_driver_capable - if self.photon_worker_capable is not None: - body["photon_worker_capable"] = self.photon_worker_capable - if self.support_cluster_tags is not None: - body["support_cluster_tags"] = self.support_cluster_tags - if self.support_ebs_volumes is not None: - body["support_ebs_volumes"] = self.support_ebs_volumes - if self.support_port_forwarding is not None: - body["support_port_forwarding"] = self.support_port_forwarding + if self.category is not None: body['category'] = self.category + if self.description is not None: body['description'] = self.description + if self.display_order is not None: body['display_order'] = self.display_order + if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id + if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated + if self.is_encrypted_in_transit is not None: body['is_encrypted_in_transit'] = self.is_encrypted_in_transit + if self.is_graviton is not None: body['is_graviton'] = self.is_graviton + if self.is_hidden is not None: body['is_hidden'] = self.is_hidden + if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled + if self.memory_mb is not None: body['memory_mb'] = self.memory_mb + if self.node_info: body['node_info'] = self.node_info + if self.node_instance_type: body['node_instance_type'] = self.node_instance_type + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_cores is not None: body['num_cores'] = self.num_cores + if self.num_gpus is not None: body['num_gpus'] = self.num_gpus + if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable + if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable + if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags + 
if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes + if self.support_port_forwarding is not None: body['support_port_forwarding'] = self.support_port_forwarding return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NodeType: """Deserializes the NodeType from a dictionary.""" - return cls( - category=d.get("category", None), - description=d.get("description", None), - display_order=d.get("display_order", None), - instance_type_id=d.get("instance_type_id", None), - is_deprecated=d.get("is_deprecated", None), - is_encrypted_in_transit=d.get("is_encrypted_in_transit", None), - is_graviton=d.get("is_graviton", None), - is_hidden=d.get("is_hidden", None), - is_io_cache_enabled=d.get("is_io_cache_enabled", None), - memory_mb=d.get("memory_mb", None), - node_info=_from_dict(d, "node_info", CloudProviderNodeInfo), - node_instance_type=_from_dict(d, "node_instance_type", NodeInstanceType), - node_type_id=d.get("node_type_id", None), - num_cores=d.get("num_cores", None), - num_gpus=d.get("num_gpus", None), - photon_driver_capable=d.get("photon_driver_capable", None), - photon_worker_capable=d.get("photon_worker_capable", None), - support_cluster_tags=d.get("support_cluster_tags", None), - support_ebs_volumes=d.get("support_ebs_volumes", None), - support_port_forwarding=d.get("support_port_forwarding", None), - ) + return cls(category=d.get('category', None), description=d.get('description', None), display_order=d.get('display_order', None), instance_type_id=d.get('instance_type_id', None), is_deprecated=d.get('is_deprecated', None), is_encrypted_in_transit=d.get('is_encrypted_in_transit', None), is_graviton=d.get('is_graviton', None), is_hidden=d.get('is_hidden', None), is_io_cache_enabled=d.get('is_io_cache_enabled', None), memory_mb=d.get('memory_mb', None), node_info=_from_dict(d, 'node_info', CloudProviderNodeInfo), node_instance_type=_from_dict(d, 'node_instance_type', NodeInstanceType), node_type_id=d.get('node_type_id', None), num_cores=d.get('num_cores', None), num_gpus=d.get('num_gpus', None), photon_driver_capable=d.get('photon_driver_capable', None), photon_worker_capable=d.get('photon_worker_capable', None), support_cluster_tags=d.get('support_cluster_tags', None), support_ebs_volumes=d.get('support_ebs_volumes', None), support_port_forwarding=d.get('support_port_forwarding', None)) + + @dataclass class PendingInstanceError: """Error message of a failed pending instances""" - + instance_id: Optional[str] = None - + message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the PendingInstanceError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_id is not None: - body["instance_id"] = self.instance_id - if self.message is not None: - body["message"] = self.message + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.message is not None: body['message'] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the PendingInstanceError into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_id is not None: - body["instance_id"] = self.instance_id - if self.message is not None: - body["message"] = self.message + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.message is not None: body['message'] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PendingInstanceError: """Deserializes the PendingInstanceError from a dictionary.""" - return 
cls(instance_id=d.get("instance_id", None), message=d.get("message", None)) + return cls(instance_id=d.get('instance_id', None), message=d.get('message', None)) + + @dataclass class PermanentDeleteCluster: cluster_id: str """The cluster to be deleted.""" - + def as_dict(self) -> dict: """Serializes the PermanentDeleteCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteCluster: """Deserializes the PermanentDeleteCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) + return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass @@ -8114,30 +6686,32 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteClusterResponse: """Deserializes the PermanentDeleteClusterResponse from a dictionary.""" return cls() + + @dataclass class PinCluster: cluster_id: str - + def as_dict(self) -> dict: """Serializes the PinCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the PinCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PinCluster: """Deserializes the PinCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) + return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass @@ -8156,43 +6730,45 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PinClusterResponse: """Deserializes the PinClusterResponse from a dictionary.""" return cls() + + @dataclass class Policy: """Describes a Cluster Policy entity.""" - + created_at_timestamp: Optional[int] = None """Creation time. The timestamp (in millisecond) when this Cluster Policy was created.""" - + creator_user_name: Optional[str] = None """Creator user name. The field won't be included in the response if the user has already been deleted.""" - + definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + description: Optional[str] = None """Additional human-readable description of the cluster policy.""" - + is_default: Optional[bool] = None """If true, policy is a default policy created and managed by Databricks. Default policies cannot be deleted, and their policy families cannot be changed.""" - + libraries: Optional[List[Library]] = None """A list of libraries to be installed on the next cluster restart that uses this policy. 
The maximum number of libraries is 500.""" - + max_clusters_per_user: Optional[int] = None """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" - + name: Optional[str] = None """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 characters.""" - + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -8201,87 +6777,55 @@ class Policy: rules specified here are merged into the inherited policy definition. [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + policy_family_id: Optional[str] = None """ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition.""" - + policy_id: Optional[str] = None """Canonical unique identifier for the Cluster Policy.""" - + def as_dict(self) -> dict: """Serializes the Policy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at_timestamp is not None: - body["created_at_timestamp"] = self.created_at_timestamp - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.is_default is not None: - body["is_default"] = self.is_default - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id + if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.is_default is not None: body['is_default'] = self.is_default + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name is not None: body['name'] = self.name + if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.policy_id is not None: body['policy_id'] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the Policy into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at_timestamp is not None: - body["created_at_timestamp"] = self.created_at_timestamp - if self.creator_user_name is not None: - body["creator_user_name"] = 
self.creator_user_name - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.is_default is not None: - body["is_default"] = self.is_default - if self.libraries: - body["libraries"] = self.libraries - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id + if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.definition is not None: body['definition'] = self.definition + if self.description is not None: body['description'] = self.description + if self.is_default is not None: body['is_default'] = self.is_default + if self.libraries: body['libraries'] = self.libraries + if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user + if self.name is not None: body['name'] = self.name + if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides + if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.policy_id is not None: body['policy_id'] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Policy: """Deserializes the Policy from a dictionary.""" - return cls( - created_at_timestamp=d.get("created_at_timestamp", None), - creator_user_name=d.get("creator_user_name", None), - definition=d.get("definition", None), - description=d.get("description", None), - is_default=d.get("is_default", None), - libraries=_repeated_dict(d, "libraries", Library), - max_clusters_per_user=d.get("max_clusters_per_user", None), - name=d.get("name", None), - policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), - policy_family_id=d.get("policy_family_id", None), - policy_id=d.get("policy_id", None), - ) + return cls(created_at_timestamp=d.get('created_at_timestamp', None), creator_user_name=d.get('creator_user_name', None), definition=d.get('definition', None), description=d.get('description', None), is_default=d.get('is_default', None), libraries=_repeated_dict(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None), policy_id=d.get('policy_id', None)) + + @dataclass @@ -8290,51 +6834,40 @@ class PolicyFamily: """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. 
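[Editorial sketch, not part of the patch] The Policy fields above describe two mutually exclusive ways to supply a definition: a standalone `definition`, or a `policy_family_id` whose definition is merged with `policy_family_definition_overrides`. A sketch with a hypothetical family ID, noting that overrides must be a JSON string rather than a nested object:

    # Sketch only: the family ID below is hypothetical.
    import json
    from databricks.sdk.service.compute import Policy

    overrides = {"spark_version": {"type": "fixed", "value": "14.3.x-scala2.12"}}
    policy = Policy(
        name="team-standard-policy",
        policy_family_id="personal-vm",  # hypothetical family ID
        policy_family_definition_overrides=json.dumps(overrides),  # string, not dict
    )
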
@@ -8290,51 +6834,40 @@ class PolicyFamily:
     """Policy definition document expressed in [Databricks Cluster Policy Definition Language].
     
     [Databricks Cluster Policy Definition Language]:
     https://docs.databricks.com/administration-guide/clusters/policy-definition.html"""
-
+    
     description: Optional[str] = None
     """Human-readable description of the purpose of the policy family."""
-
+    
     name: Optional[str] = None
     """Name of the policy family."""
-
+    
     policy_family_id: Optional[str] = None
     """Unique identifier for the policy family."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the PolicyFamily into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.definition is not None:
-            body["definition"] = self.definition
-        if self.description is not None:
-            body["description"] = self.description
-        if self.name is not None:
-            body["name"] = self.name
-        if self.policy_family_id is not None:
-            body["policy_family_id"] = self.policy_family_id
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PolicyFamily into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.definition is not None:
-            body["definition"] = self.definition
-        if self.description is not None:
-            body["description"] = self.description
-        if self.name is not None:
-            body["name"] = self.name
-        if self.policy_family_id is not None:
-            body["policy_family_id"] = self.policy_family_id
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PolicyFamily:
         """Deserializes the PolicyFamily from a dictionary."""
-        return cls(
-            definition=d.get("definition", None),
-            description=d.get("description", None),
-            name=d.get("name", None),
-            policy_family_id=d.get("policy_family_id", None),
-        )
+        return cls(definition=d.get('definition', None), description=d.get('description', None), name=d.get('name', None), policy_family_id=d.get('policy_family_id', None))
+
+

 @dataclass
@@ -8342,89 +6875,85 @@ class PythonPyPiLibrary:
     package: str
     """The name of the pypi package to install. An optional exact version specification is also
     supported. Examples: "simplejson" and "simplejson==3.8.0"."""
-
+    
     repo: Optional[str] = None
     """The repository where the package can be found. If not specified, the default pip index is
     used."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the PythonPyPiLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.package is not None:
-            body["package"] = self.package
-        if self.repo is not None:
-            body["repo"] = self.repo
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
        return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PythonPyPiLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.package is not None:
-            body["package"] = self.package
-        if self.repo is not None:
-            body["repo"] = self.repo
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PythonPyPiLibrary:
         """Deserializes the PythonPyPiLibrary from a dictionary."""
-        return cls(package=d.get("package", None), repo=d.get("repo", None))
+        return cls(package=d.get('package', None), repo=d.get('repo', None))
+
+

 @dataclass
 class RCranLibrary:
     package: str
     """The name of the CRAN package to install."""
-
+    
     repo: Optional[str] = None
     """The repository where the package can be found. If not specified, the default CRAN repo is
     used."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the RCranLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.package is not None:
-            body["package"] = self.package
-        if self.repo is not None:
-            body["repo"] = self.repo
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RCranLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.package is not None:
-            body["package"] = self.package
-        if self.repo is not None:
-            body["repo"] = self.repo
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary:
         """Deserializes the RCranLibrary from a dictionary."""
-        return cls(package=d.get("package", None), repo=d.get("repo", None))
+        return cls(package=d.get('package', None), repo=d.get('repo', None))
+
+

 @dataclass
 class RemoveInstanceProfile:
     instance_profile_arn: str
     """The ARN of the instance profile to remove. This field is required."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the RemoveInstanceProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance_profile_arn is not None:
-            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance_profile_arn is not None:
-            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RemoveInstanceProfile:
         """Deserializes the RemoveInstanceProfile from a dictionary."""
-        return cls(instance_profile_arn=d.get("instance_profile_arn", None))
+        return cls(instance_profile_arn=d.get('instance_profile_arn', None))
+
+

 @dataclass
@@ -8443,17 +6972,19 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> RemoveResponse:
         """Deserializes the RemoveResponse from a dictionary."""
         return cls()
+
+

 @dataclass
 class ResizeCluster:
     cluster_id: str
     """The cluster to be resized."""
-
+    
     autoscale: Optional[AutoScale] = None
     """Parameters needed in order to automatically scale clusters up and down based on load. Note:
     autoscaling works best with DB runtime versions 3.0 or later."""
-
+    
     num_workers: Optional[int] = None
     """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
     `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -8463,37 +6994,29 @@ class ResizeCluster:
     from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
     workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the
     new nodes are provisioned."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ResizeCluster into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.autoscale:
-            body["autoscale"] = self.autoscale.as_dict()
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
+        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale:
-            body["autoscale"] = self.autoscale
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResizeCluster:
         """Deserializes the ResizeCluster from a dictionary."""
-        return cls(
-            autoscale=_from_dict(d, "autoscale", AutoScale),
-            cluster_id=d.get("cluster_id", None),
-            num_workers=d.get("num_workers", None),
-        )
+        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), cluster_id=d.get('cluster_id', None), num_workers=d.get('num_workers', None))
+
+

 @dataclass
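[Editorial sketch, not part of the patch] ResizeCluster accepts either a fixed worker count or an autoscale range, per the field docs above. A sketch assuming AutoScale (defined earlier in this module) exposes min_workers/max_workers; the cluster ID is hypothetical:

    # Sketch of the two sizing modes.
    from databricks.sdk.service.compute import AutoScale, ResizeCluster

    fixed = ResizeCluster(cluster_id="0123-456789-abcdefgh", num_workers=10)
    elastic = ResizeCluster(
        cluster_id="0123-456789-abcdefgh",
        autoscale=AutoScale(min_workers=2, max_workers=10),  # assumed field names
    )
    assert fixed.as_dict() == {"cluster_id": "0123-456789-abcdefgh", "num_workers": 10}
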
+7035,37 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ResizeClusterResponse: """Deserializes the ResizeClusterResponse from a dictionary.""" return cls() + + @dataclass class RestartCluster: cluster_id: str """The cluster to be started.""" - + restart_user: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the RestartCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.restart_user is not None: - body["restart_user"] = self.restart_user + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.restart_user is not None: body['restart_user'] = self.restart_user return body def as_shallow_dict(self) -> dict: """Serializes the RestartCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.restart_user is not None: - body["restart_user"] = self.restart_user + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.restart_user is not None: body['restart_user'] = self.restart_user return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestartCluster: """Deserializes the RestartCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), restart_user=d.get("restart_user", None)) + return cls(cluster_id=d.get('cluster_id', None), restart_user=d.get('restart_user', None)) + + @dataclass @@ -8561,129 +7084,102 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestartClusterResponse: """Deserializes the RestartClusterResponse from a dictionary.""" return cls() + -class ResultType(Enum): - - ERROR = "error" - IMAGE = "image" - IMAGES = "images" - TABLE = "table" - TEXT = "text" +class ResultType(Enum): + + + ERROR = 'error' + IMAGE = 'image' + IMAGES = 'images' + TABLE = 'table' + TEXT = 'text' @dataclass class Results: cause: Optional[str] = None """The cause of the error""" - + data: Optional[Any] = None - + file_name: Optional[str] = None """The image filename""" - + file_names: Optional[List[str]] = None - + is_json_schema: Optional[bool] = None """true if a JSON schema is returned instead of a string representation of the Hive type.""" - + pos: Optional[int] = None """internal field used by SDK""" - + result_type: Optional[ResultType] = None - - schema: Optional[List[Dict[str, Any]]] = None + + schema: Optional[List[Dict[str,Any]]] = None """The table schema""" - + summary: Optional[str] = None """The summary of the error""" - + truncated: Optional[bool] = None """true if partial results are returned.""" - + def as_dict(self) -> dict: """Serializes the Results into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cause is not None: - body["cause"] = self.cause - if self.data: - body["data"] = self.data - if self.file_name is not None: - body["fileName"] = self.file_name - if self.file_names: - body["fileNames"] = [v for v in self.file_names] - if self.is_json_schema is not None: - body["isJsonSchema"] = self.is_json_schema - if self.pos is not None: - body["pos"] = self.pos - if self.result_type is not None: - body["resultType"] = self.result_type.value - if self.schema: - body["schema"] = [v for v in self.schema] - if self.summary is not None: - body["summary"] = self.summary - if self.truncated is not None: - body["truncated"] = self.truncated + if self.cause is not None: body['cause'] = self.cause + if 
self.data: body['data'] = self.data + if self.file_name is not None: body['fileName'] = self.file_name + if self.file_names: body['fileNames'] = [v for v in self.file_names] + if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema + if self.pos is not None: body['pos'] = self.pos + if self.result_type is not None: body['resultType'] = self.result_type.value + if self.schema: body['schema'] = [v for v in self.schema] + if self.summary is not None: body['summary'] = self.summary + if self.truncated is not None: body['truncated'] = self.truncated return body def as_shallow_dict(self) -> dict: """Serializes the Results into a shallow dictionary of its immediate attributes.""" body = {} - if self.cause is not None: - body["cause"] = self.cause - if self.data: - body["data"] = self.data - if self.file_name is not None: - body["fileName"] = self.file_name - if self.file_names: - body["fileNames"] = self.file_names - if self.is_json_schema is not None: - body["isJsonSchema"] = self.is_json_schema - if self.pos is not None: - body["pos"] = self.pos - if self.result_type is not None: - body["resultType"] = self.result_type - if self.schema: - body["schema"] = self.schema - if self.summary is not None: - body["summary"] = self.summary - if self.truncated is not None: - body["truncated"] = self.truncated + if self.cause is not None: body['cause'] = self.cause + if self.data: body['data'] = self.data + if self.file_name is not None: body['fileName'] = self.file_name + if self.file_names: body['fileNames'] = self.file_names + if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema + if self.pos is not None: body['pos'] = self.pos + if self.result_type is not None: body['resultType'] = self.result_type + if self.schema: body['schema'] = self.schema + if self.summary is not None: body['summary'] = self.summary + if self.truncated is not None: body['truncated'] = self.truncated return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Results: """Deserializes the Results from a dictionary.""" - return cls( - cause=d.get("cause", None), - data=d.get("data", None), - file_name=d.get("fileName", None), - file_names=d.get("fileNames", None), - is_json_schema=d.get("isJsonSchema", None), - pos=d.get("pos", None), - result_type=_enum(d, "resultType", ResultType), - schema=d.get("schema", None), - summary=d.get("summary", None), - truncated=d.get("truncated", None), - ) - + return cls(cause=d.get('cause', None), data=d.get('data', None), file_name=d.get('fileName', None), file_names=d.get('fileNames', None), is_json_schema=d.get('isJsonSchema', None), pos=d.get('pos', None), result_type=_enum(d, 'resultType', ResultType), schema=d.get('schema', None), summary=d.get('summary', None), truncated=d.get('truncated', None)) + -class RuntimeEngine(Enum): - NULL = "NULL" - PHOTON = "PHOTON" - STANDARD = "STANDARD" +class RuntimeEngine(Enum): + + + NULL = 'NULL' + PHOTON = 'PHOTON' + STANDARD = 'STANDARD' @dataclass class S3StorageInfo: """A storage location in Amazon S3""" - + destination: str """S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using cluster iam role, please make sure you set cluster iam role and the role has write access to the destination. Please also note that you cannot use AWS keys to deliver logs.""" - + canned_acl: Optional[str] = None """(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`. 
If `canned_acl` is set, please make sure the cluster IAM role has `s3:PutObjectAcl` permission on @@ -8692,183 +7188,143 @@ class S3StorageInfo: that by default only the object owner gets full controls. If you are using a cross-account role for writing data, you may want to set `bucket-owner-full-control` to make the bucket owner able to read the logs.""" - + enable_encryption: Optional[bool] = None """(Optional) Flag to enable server side encryption, `false` by default.""" - + encryption_type: Optional[str] = None """(Optional) The encryption type; it could be `sse-s3` or `sse-kms`. It will be used only when encryption is enabled and the default type is `sse-s3`.""" - + endpoint: Optional[str] = None """S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be set. If both are set, endpoint will be used.""" - + kms_key: Optional[str] = None """(Optional) KMS key which will be used if encryption is enabled and encryption type is set to `sse-kms`.""" - + region: Optional[str] = None """S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set, endpoint will be used.""" - + def as_dict(self) -> dict: """Serializes the S3StorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.canned_acl is not None: - body["canned_acl"] = self.canned_acl - if self.destination is not None: - body["destination"] = self.destination - if self.enable_encryption is not None: - body["enable_encryption"] = self.enable_encryption - if self.encryption_type is not None: - body["encryption_type"] = self.encryption_type - if self.endpoint is not None: - body["endpoint"] = self.endpoint - if self.kms_key is not None: - body["kms_key"] = self.kms_key - if self.region is not None: - body["region"] = self.region + if self.canned_acl is not None: body['canned_acl'] = self.canned_acl + if self.destination is not None: body['destination'] = self.destination + if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption + if self.encryption_type is not None: body['encryption_type'] = self.encryption_type + if self.endpoint is not None: body['endpoint'] = self.endpoint + if self.kms_key is not None: body['kms_key'] = self.kms_key + if self.region is not None: body['region'] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the S3StorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.canned_acl is not None: - body["canned_acl"] = self.canned_acl - if self.destination is not None: - body["destination"] = self.destination - if self.enable_encryption is not None: - body["enable_encryption"] = self.enable_encryption - if self.encryption_type is not None: - body["encryption_type"] = self.encryption_type - if self.endpoint is not None: - body["endpoint"] = self.endpoint - if self.kms_key is not None: - body["kms_key"] = self.kms_key - if self.region is not None: - body["region"] = self.region + if self.canned_acl is not None: body['canned_acl'] = self.canned_acl + if self.destination is not None: body['destination'] = self.destination + if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption + if self.encryption_type is not None: body['encryption_type'] = self.encryption_type + if self.endpoint is not None: body['endpoint'] = self.endpoint + if self.kms_key is not None: body['kms_key'] = self.kms_key + if self.region is not None: body['region'] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) 
-> S3StorageInfo: """Deserializes the S3StorageInfo from a dictionary.""" - return cls( - canned_acl=d.get("canned_acl", None), - destination=d.get("destination", None), - enable_encryption=d.get("enable_encryption", None), - encryption_type=d.get("encryption_type", None), - endpoint=d.get("endpoint", None), - kms_key=d.get("kms_key", None), - region=d.get("region", None), - ) + return cls(canned_acl=d.get('canned_acl', None), destination=d.get('destination', None), enable_encryption=d.get('enable_encryption', None), encryption_type=d.get('encryption_type', None), endpoint=d.get('endpoint', None), kms_key=d.get('kms_key', None), region=d.get('region', None)) + + @dataclass class SparkNode: """Describes a specific Spark driver or executor.""" - + host_private_ip: Optional[str] = None """The private IP address of the host instance.""" - + instance_id: Optional[str] = None """Globally unique identifier for the host instance from the cloud provider.""" - + node_aws_attributes: Optional[SparkNodeAwsAttributes] = None """Attributes specific to AWS for a Spark node.""" - + node_id: Optional[str] = None """Globally unique identifier for this node.""" - + private_ip: Optional[str] = None """Private IP address (typically a 10.x.x.x address) of the Spark node. Note that this is different from the private IP address of the host instance.""" - + public_dns: Optional[str] = None """Public DNS address of this node. This address can be used to access the Spark JDBC server on the driver node. To communicate with the JDBC server, traffic must be manually authorized by adding security group rules to the "worker-unmanaged" security group via the AWS console.""" - + start_timestamp: Optional[int] = None """The timestamp (in milliseconds) when the Spark node is launched.""" - + def as_dict(self) -> dict: """Serializes the SparkNode into a dictionary suitable for use as a JSON request body.""" body = {} - if self.host_private_ip is not None: - body["host_private_ip"] = self.host_private_ip - if self.instance_id is not None: - body["instance_id"] = self.instance_id - if self.node_aws_attributes: - body["node_aws_attributes"] = self.node_aws_attributes.as_dict() - if self.node_id is not None: - body["node_id"] = self.node_id - if self.private_ip is not None: - body["private_ip"] = self.private_ip - if self.public_dns is not None: - body["public_dns"] = self.public_dns - if self.start_timestamp is not None: - body["start_timestamp"] = self.start_timestamp + if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes.as_dict() + if self.node_id is not None: body['node_id'] = self.node_id + if self.private_ip is not None: body['private_ip'] = self.private_ip + if self.public_dns is not None: body['public_dns'] = self.public_dns + if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp return body def as_shallow_dict(self) -> dict: """Serializes the SparkNode into a shallow dictionary of its immediate attributes.""" body = {} - if self.host_private_ip is not None: - body["host_private_ip"] = self.host_private_ip - if self.instance_id is not None: - body["instance_id"] = self.instance_id - if self.node_aws_attributes: - body["node_aws_attributes"] = self.node_aws_attributes - if self.node_id is not None: - body["node_id"] = self.node_id - if self.private_ip is not None: - body["private_ip"] = self.private_ip - 
if self.public_dns is not None: - body["public_dns"] = self.public_dns - if self.start_timestamp is not None: - body["start_timestamp"] = self.start_timestamp + if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip + if self.instance_id is not None: body['instance_id'] = self.instance_id + if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes + if self.node_id is not None: body['node_id'] = self.node_id + if self.private_ip is not None: body['private_ip'] = self.private_ip + if self.public_dns is not None: body['public_dns'] = self.public_dns + if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkNode: """Deserializes the SparkNode from a dictionary.""" - return cls( - host_private_ip=d.get("host_private_ip", None), - instance_id=d.get("instance_id", None), - node_aws_attributes=_from_dict(d, "node_aws_attributes", SparkNodeAwsAttributes), - node_id=d.get("node_id", None), - private_ip=d.get("private_ip", None), - public_dns=d.get("public_dns", None), - start_timestamp=d.get("start_timestamp", None), - ) + return cls(host_private_ip=d.get('host_private_ip', None), instance_id=d.get('instance_id', None), node_aws_attributes=_from_dict(d, 'node_aws_attributes', SparkNodeAwsAttributes), node_id=d.get('node_id', None), private_ip=d.get('private_ip', None), public_dns=d.get('public_dns', None), start_timestamp=d.get('start_timestamp', None)) + + @dataclass class SparkNodeAwsAttributes: """Attributes specific to AWS for a Spark node.""" - + is_spot: Optional[bool] = None """Whether this node is on an Amazon spot instance.""" - + def as_dict(self) -> dict: """Serializes the SparkNodeAwsAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_spot is not None: - body["is_spot"] = self.is_spot + if self.is_spot is not None: body['is_spot'] = self.is_spot return body def as_shallow_dict(self) -> dict: """Serializes the SparkNodeAwsAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_spot is not None: - body["is_spot"] = self.is_spot + if self.is_spot is not None: body['is_spot'] = self.is_spot return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkNodeAwsAttributes: """Deserializes the SparkNodeAwsAttributes from a dictionary.""" - return cls(is_spot=d.get("is_spot", None)) + return cls(is_spot=d.get('is_spot', None)) + + @dataclass @@ -8878,57 +7334,55 @@ class SparkVersion: the "spark_version" when creating a new cluster. 
Note that the exact Spark version may change over time for a "wildcard" version (i.e., "2.1.x-scala2.11" is a "wildcard" version) with minor bug fixes.""" - + name: Optional[str] = None """A descriptive name for this Spark version, for example "Spark 2.1".""" - + def as_dict(self) -> dict: """Serializes the SparkVersion into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the SparkVersion into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkVersion: """Deserializes the SparkVersion from a dictionary.""" - return cls(key=d.get("key", None), name=d.get("name", None)) + return cls(key=d.get('key', None), name=d.get('name', None)) + + @dataclass class StartCluster: cluster_id: str """The cluster to be started.""" - + def as_dict(self) -> dict: """Serializes the StartCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the StartCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StartCluster: """Deserializes the StartCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) + return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass @@ -8947,284 +7401,274 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> StartClusterResponse: """Deserializes the StartClusterResponse from a dictionary.""" return cls() + + class State(Enum): """The state of a Cluster. 
The current allowable state transitions are as follows: - + - `PENDING` -> `RUNNING` - `PENDING` -> `TERMINATING` - `RUNNING` -> `RESIZING` - `RUNNING` -> `RESTARTING` - `RUNNING` -> `TERMINATING` - `RESTARTING` -> `RUNNING` - `RESTARTING` -> `TERMINATING` - `RESIZING` -> `RUNNING` - `RESIZING` -> `TERMINATING` - `TERMINATING` -> `TERMINATED`""" - - ERROR = "ERROR" - PENDING = "PENDING" - RESIZING = "RESIZING" - RESTARTING = "RESTARTING" - RUNNING = "RUNNING" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - UNKNOWN = "UNKNOWN" - + + ERROR = 'ERROR' + PENDING = 'PENDING' + RESIZING = 'RESIZING' + RESTARTING = 'RESTARTING' + RUNNING = 'RUNNING' + TERMINATED = 'TERMINATED' + TERMINATING = 'TERMINATING' + UNKNOWN = 'UNKNOWN' @dataclass class TerminationReason: code: Optional[TerminationReasonCode] = None """status code indicating why the cluster was terminated""" - - parameters: Optional[Dict[str, str]] = None + + parameters: Optional[Dict[str,str]] = None """list of parameters that provide additional information about why the cluster was terminated""" - + type: Optional[TerminationReasonType] = None """type of the termination""" - + def as_dict(self) -> dict: """Serializes the TerminationReason into a dictionary suitable for use as a JSON request body.""" body = {} - if self.code is not None: - body["code"] = self.code.value - if self.parameters: - body["parameters"] = self.parameters - if self.type is not None: - body["type"] = self.type.value + if self.code is not None: body['code'] = self.code.value + if self.parameters: body['parameters'] = self.parameters + if self.type is not None: body['type'] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the TerminationReason into a shallow dictionary of its immediate attributes.""" body = {} - if self.code is not None: - body["code"] = self.code - if self.parameters: - body["parameters"] = self.parameters - if self.type is not None: - body["type"] = self.type + if self.code is not None: body['code'] = self.code + if self.parameters: body['parameters'] = self.parameters + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TerminationReason: """Deserializes the TerminationReason from a dictionary.""" - return cls( - code=_enum(d, "code", TerminationReasonCode), - parameters=d.get("parameters", None), - type=_enum(d, "type", TerminationReasonType), - ) + return cls(code=_enum(d, 'code', TerminationReasonCode), parameters=d.get('parameters', None), type=_enum(d, 'type', TerminationReasonType)) + + class TerminationReasonCode(Enum): """The status code indicating why the cluster was terminated""" - - ABUSE_DETECTED = "ABUSE_DETECTED" - ACCESS_TOKEN_FAILURE = "ACCESS_TOKEN_FAILURE" - ALLOCATION_TIMEOUT = "ALLOCATION_TIMEOUT" - ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY = "ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY" - ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS" - ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS" - ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS = "ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS" - ALLOCATION_TIMEOUT_NO_READY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_READY_CLUSTERS" - ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS = "ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS" - ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS" - ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE" - AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE" 
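# Editor's note: an illustrative sketch (not generated SDK code) restating the
# transition table documented on the State enum above. ERROR and UNKNOWN have no
# documented transitions and are omitted here by assumption.
_ALLOWED_STATE_TRANSITIONS = {
    State.PENDING: {State.RUNNING, State.TERMINATING},
    State.RUNNING: {State.RESIZING, State.RESTARTING, State.TERMINATING},
    State.RESTARTING: {State.RUNNING, State.TERMINATING},
    State.RESIZING: {State.RUNNING, State.TERMINATING},
    State.TERMINATING: {State.TERMINATED},
}

def _can_transition(src: State, dst: State) -> bool:
    # True if the docstring's table allows the transition src -> dst.
    return dst in _ALLOWED_STATE_TRANSITIONS.get(src, set())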
- AWS_INACCESSIBLE_KMS_KEY_FAILURE = "AWS_INACCESSIBLE_KMS_KEY_FAILURE" - AWS_INSTANCE_PROFILE_UPDATE_FAILURE = "AWS_INSTANCE_PROFILE_UPDATE_FAILURE" - AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" - AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" - AWS_INVALID_KEY_PAIR = "AWS_INVALID_KEY_PAIR" - AWS_INVALID_KMS_KEY_STATE = "AWS_INVALID_KMS_KEY_STATE" - AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" - AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED" - AWS_RESOURCE_QUOTA_EXCEEDED = "AWS_RESOURCE_QUOTA_EXCEEDED" - AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE" - AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE" - AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE" - AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE" - AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" - AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE = "AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE" - AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION" - AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING" - AZURE_RESOURCE_PROVIDER_THROTTLING = "AZURE_RESOURCE_PROVIDER_THROTTLING" - AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE" - AZURE_VM_EXTENSION_FAILURE = "AZURE_VM_EXTENSION_FAILURE" - AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE" - BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT" - BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" - BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = "BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG" - BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = "BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED" - BUDGET_POLICY_RESOLUTION_FAILURE = "BUDGET_POLICY_RESOLUTION_FAILURE" - CLOUD_ACCOUNT_SETUP_FAILURE = "CLOUD_ACCOUNT_SETUP_FAILURE" - CLOUD_OPERATION_CANCELLED = "CLOUD_OPERATION_CANCELLED" - CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE" - CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED = "CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED" - CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE" - CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG" - CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT" - CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG" - CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN" - CLUSTER_OPERATION_THROTTLED = "CLUSTER_OPERATION_THROTTLED" - CLUSTER_OPERATION_TIMEOUT = "CLUSTER_OPERATION_TIMEOUT" - COMMUNICATION_LOST = "COMMUNICATION_LOST" - CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE" - CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE" - CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG = "CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG" - DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" - DATA_ACCESS_CONFIG_CHANGED = "DATA_ACCESS_CONFIG_CHANGED" - DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" - DISASTER_RECOVERY_REPLICATION = "DISASTER_RECOVERY_REPLICATION" - DNS_RESOLUTION_ERROR = "DNS_RESOLUTION_ERROR" - DOCKER_CONTAINER_CREATION_EXCEPTION = "DOCKER_CONTAINER_CREATION_EXCEPTION" - DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" - DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = "DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" - DOCKER_INVALID_OS_EXCEPTION = 
"DOCKER_INVALID_OS_EXCEPTION" - DRIVER_EVICTION = "DRIVER_EVICTION" - DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" - DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" - DRIVER_OUT_OF_DISK = "DRIVER_OUT_OF_DISK" - DRIVER_OUT_OF_MEMORY = "DRIVER_OUT_OF_MEMORY" - DRIVER_POD_CREATION_FAILURE = "DRIVER_POD_CREATION_FAILURE" - DRIVER_UNEXPECTED_FAILURE = "DRIVER_UNEXPECTED_FAILURE" - DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE" - DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE" - DYNAMIC_SPARK_CONF_SIZE_EXCEEDED = "DYNAMIC_SPARK_CONF_SIZE_EXCEEDED" - EOS_SPARK_IMAGE = "EOS_SPARK_IMAGE" - EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY" - EXECUTOR_POD_UNSCHEDULED = "EXECUTOR_POD_UNSCHEDULED" - GCP_API_RATE_QUOTA_EXCEEDED = "GCP_API_RATE_QUOTA_EXCEEDED" - GCP_DENIED_BY_ORG_POLICY = "GCP_DENIED_BY_ORG_POLICY" - GCP_FORBIDDEN = "GCP_FORBIDDEN" - GCP_IAM_TIMEOUT = "GCP_IAM_TIMEOUT" - GCP_INACCESSIBLE_KMS_KEY_FAILURE = "GCP_INACCESSIBLE_KMS_KEY_FAILURE" - GCP_INSUFFICIENT_CAPACITY = "GCP_INSUFFICIENT_CAPACITY" - GCP_IP_SPACE_EXHAUSTED = "GCP_IP_SPACE_EXHAUSTED" - GCP_KMS_KEY_PERMISSION_DENIED = "GCP_KMS_KEY_PERMISSION_DENIED" - GCP_NOT_FOUND = "GCP_NOT_FOUND" - GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED" - GCP_RESOURCE_QUOTA_EXCEEDED = "GCP_RESOURCE_QUOTA_EXCEEDED" - GCP_SERVICE_ACCOUNT_ACCESS_DENIED = "GCP_SERVICE_ACCOUNT_ACCESS_DENIED" - GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED" - GCP_SERVICE_ACCOUNT_NOT_FOUND = "GCP_SERVICE_ACCOUNT_NOT_FOUND" - GCP_SUBNET_NOT_READY = "GCP_SUBNET_NOT_READY" - GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED = "GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED" - GKE_BASED_CLUSTER_TERMINATION = "GKE_BASED_CLUSTER_TERMINATION" - GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE" - HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE" - IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED" - INACTIVITY = "INACTIVITY" - INIT_CONTAINER_NOT_FINISHED = "INIT_CONTAINER_NOT_FINISHED" - INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE" - INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE" - INSTANCE_POOL_MAX_CAPACITY_REACHED = "INSTANCE_POOL_MAX_CAPACITY_REACHED" - INSTANCE_POOL_NOT_FOUND = "INSTANCE_POOL_NOT_FOUND" - INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE" - INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG = "INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG" - INTERNAL_CAPACITY_FAILURE = "INTERNAL_CAPACITY_FAILURE" - INTERNAL_ERROR = "INTERNAL_ERROR" - INVALID_ARGUMENT = "INVALID_ARGUMENT" - INVALID_AWS_PARAMETER = "INVALID_AWS_PARAMETER" - INVALID_INSTANCE_PLACEMENT_PROTOCOL = "INVALID_INSTANCE_PLACEMENT_PROTOCOL" - INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE" - INVALID_WORKER_IMAGE_FAILURE = "INVALID_WORKER_IMAGE_FAILURE" - IN_PENALTY_BOX = "IN_PENALTY_BOX" - IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE" - JOB_FINISHED = "JOB_FINISHED" - K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE" - K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" - LAZY_ALLOCATION_TIMEOUT = "LAZY_ALLOCATION_TIMEOUT" - MAINTENANCE_MODE = "MAINTENANCE_MODE" - METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY" - NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT" - NETVISOR_SETUP_TIMEOUT = "NETVISOR_SETUP_TIMEOUT" - NETWORK_CHECK_CONTROL_PLANE_FAILURE = "NETWORK_CHECK_CONTROL_PLANE_FAILURE" - NETWORK_CHECK_DNS_SERVER_FAILURE = "NETWORK_CHECK_DNS_SERVER_FAILURE" - NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE" - NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE" - 
NETWORK_CHECK_NIC_FAILURE = "NETWORK_CHECK_NIC_FAILURE" - NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE" - NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" - NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" - NO_MATCHED_K8S = "NO_MATCHED_K8S" - NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" - NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" - NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE" - POD_ASSIGNMENT_FAILURE = "POD_ASSIGNMENT_FAILURE" - POD_SCHEDULING_FAILURE = "POD_SCHEDULING_FAILURE" - REQUEST_REJECTED = "REQUEST_REJECTED" - REQUEST_THROTTLED = "REQUEST_THROTTLED" - RESOURCE_USAGE_BLOCKED = "RESOURCE_USAGE_BLOCKED" - SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" - SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" - SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" - SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" - SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" - SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" - SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES" - SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD" - SPARK_ERROR = "SPARK_ERROR" - SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE" - SPARK_IMAGE_DOWNLOAD_THROTTLED = "SPARK_IMAGE_DOWNLOAD_THROTTLED" - SPARK_IMAGE_NOT_FOUND = "SPARK_IMAGE_NOT_FOUND" - SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE" - SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION" - SSH_BOOTSTRAP_FAILURE = "SSH_BOOTSTRAP_FAILURE" - STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE" - STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG = "STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG" - STORAGE_DOWNLOAD_FAILURE_SLOW = "STORAGE_DOWNLOAD_FAILURE_SLOW" - STORAGE_DOWNLOAD_FAILURE_THROTTLED = "STORAGE_DOWNLOAD_FAILURE_THROTTLED" - STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE" - SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE" - TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" - TRIAL_EXPIRED = "TRIAL_EXPIRED" - UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE" - UNEXPECTED_POD_RECREATION = "UNEXPECTED_POD_RECREATION" - UNKNOWN = "UNKNOWN" - UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE" - UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE" - USER_INITIATED_VM_TERMINATION = "USER_INITIATED_VM_TERMINATION" - USER_REQUEST = "USER_REQUEST" - WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE" - WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR" - WORKSPACE_CONFIGURATION_ERROR = "WORKSPACE_CONFIGURATION_ERROR" - WORKSPACE_UPDATE = "WORKSPACE_UPDATE" - + + ABUSE_DETECTED = 'ABUSE_DETECTED' + ACCESS_TOKEN_FAILURE = 'ACCESS_TOKEN_FAILURE' + ALLOCATION_TIMEOUT = 'ALLOCATION_TIMEOUT' + ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY = 'ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY' + ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS' + ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS' + ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS' + ALLOCATION_TIMEOUT_NO_READY_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_READY_CLUSTERS' + ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS' + ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS' + ATTACH_PROJECT_FAILURE = 'ATTACH_PROJECT_FAILURE' + AWS_AUTHORIZATION_FAILURE = 'AWS_AUTHORIZATION_FAILURE' + AWS_INACCESSIBLE_KMS_KEY_FAILURE = 'AWS_INACCESSIBLE_KMS_KEY_FAILURE' + 
AWS_INSTANCE_PROFILE_UPDATE_FAILURE = 'AWS_INSTANCE_PROFILE_UPDATE_FAILURE' + AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = 'AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE' + AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = 'AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE' + AWS_INVALID_KEY_PAIR = 'AWS_INVALID_KEY_PAIR' + AWS_INVALID_KMS_KEY_STATE = 'AWS_INVALID_KMS_KEY_STATE' + AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = 'AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE' + AWS_REQUEST_LIMIT_EXCEEDED = 'AWS_REQUEST_LIMIT_EXCEEDED' + AWS_RESOURCE_QUOTA_EXCEEDED = 'AWS_RESOURCE_QUOTA_EXCEEDED' + AWS_UNSUPPORTED_FAILURE = 'AWS_UNSUPPORTED_FAILURE' + AZURE_BYOK_KEY_PERMISSION_FAILURE = 'AZURE_BYOK_KEY_PERMISSION_FAILURE' + AZURE_EPHEMERAL_DISK_FAILURE = 'AZURE_EPHEMERAL_DISK_FAILURE' + AZURE_INVALID_DEPLOYMENT_TEMPLATE = 'AZURE_INVALID_DEPLOYMENT_TEMPLATE' + AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = 'AZURE_OPERATION_NOT_ALLOWED_EXCEPTION' + AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE = 'AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE' + AZURE_QUOTA_EXCEEDED_EXCEPTION = 'AZURE_QUOTA_EXCEEDED_EXCEPTION' + AZURE_RESOURCE_MANAGER_THROTTLING = 'AZURE_RESOURCE_MANAGER_THROTTLING' + AZURE_RESOURCE_PROVIDER_THROTTLING = 'AZURE_RESOURCE_PROVIDER_THROTTLING' + AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = 'AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE' + AZURE_VM_EXTENSION_FAILURE = 'AZURE_VM_EXTENSION_FAILURE' + AZURE_VNET_CONFIGURATION_FAILURE = 'AZURE_VNET_CONFIGURATION_FAILURE' + BOOTSTRAP_TIMEOUT = 'BOOTSTRAP_TIMEOUT' + BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = 'BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION' + BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = 'BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG' + BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = 'BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED' + BUDGET_POLICY_RESOLUTION_FAILURE = 'BUDGET_POLICY_RESOLUTION_FAILURE' + CLOUD_ACCOUNT_SETUP_FAILURE = 'CLOUD_ACCOUNT_SETUP_FAILURE' + CLOUD_OPERATION_CANCELLED = 'CLOUD_OPERATION_CANCELLED' + CLOUD_PROVIDER_DISK_SETUP_FAILURE = 'CLOUD_PROVIDER_DISK_SETUP_FAILURE' + CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED = 'CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED' + CLOUD_PROVIDER_LAUNCH_FAILURE = 'CLOUD_PROVIDER_LAUNCH_FAILURE' + CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG = 'CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG' + CLOUD_PROVIDER_RESOURCE_STOCKOUT = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT' + CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG' + CLOUD_PROVIDER_SHUTDOWN = 'CLOUD_PROVIDER_SHUTDOWN' + CLUSTER_OPERATION_THROTTLED = 'CLUSTER_OPERATION_THROTTLED' + CLUSTER_OPERATION_TIMEOUT = 'CLUSTER_OPERATION_TIMEOUT' + COMMUNICATION_LOST = 'COMMUNICATION_LOST' + CONTAINER_LAUNCH_FAILURE = 'CONTAINER_LAUNCH_FAILURE' + CONTROL_PLANE_REQUEST_FAILURE = 'CONTROL_PLANE_REQUEST_FAILURE' + CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG = 'CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG' + DATABASE_CONNECTION_FAILURE = 'DATABASE_CONNECTION_FAILURE' + DATA_ACCESS_CONFIG_CHANGED = 'DATA_ACCESS_CONFIG_CHANGED' + DBFS_COMPONENT_UNHEALTHY = 'DBFS_COMPONENT_UNHEALTHY' + DISASTER_RECOVERY_REPLICATION = 'DISASTER_RECOVERY_REPLICATION' + DNS_RESOLUTION_ERROR = 'DNS_RESOLUTION_ERROR' + DOCKER_CONTAINER_CREATION_EXCEPTION = 'DOCKER_CONTAINER_CREATION_EXCEPTION' + DOCKER_IMAGE_PULL_FAILURE = 'DOCKER_IMAGE_PULL_FAILURE' + DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = 'DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION' + DOCKER_INVALID_OS_EXCEPTION = 'DOCKER_INVALID_OS_EXCEPTION' + DRIVER_EVICTION = 'DRIVER_EVICTION' + 
DRIVER_LAUNCH_TIMEOUT = 'DRIVER_LAUNCH_TIMEOUT' + DRIVER_NODE_UNREACHABLE = 'DRIVER_NODE_UNREACHABLE' + DRIVER_OUT_OF_DISK = 'DRIVER_OUT_OF_DISK' + DRIVER_OUT_OF_MEMORY = 'DRIVER_OUT_OF_MEMORY' + DRIVER_POD_CREATION_FAILURE = 'DRIVER_POD_CREATION_FAILURE' + DRIVER_UNEXPECTED_FAILURE = 'DRIVER_UNEXPECTED_FAILURE' + DRIVER_UNHEALTHY = 'DRIVER_UNHEALTHY' + DRIVER_UNREACHABLE = 'DRIVER_UNREACHABLE' + DRIVER_UNRESPONSIVE = 'DRIVER_UNRESPONSIVE' + DYNAMIC_SPARK_CONF_SIZE_EXCEEDED = 'DYNAMIC_SPARK_CONF_SIZE_EXCEEDED' + EOS_SPARK_IMAGE = 'EOS_SPARK_IMAGE' + EXECUTION_COMPONENT_UNHEALTHY = 'EXECUTION_COMPONENT_UNHEALTHY' + EXECUTOR_POD_UNSCHEDULED = 'EXECUTOR_POD_UNSCHEDULED' + GCP_API_RATE_QUOTA_EXCEEDED = 'GCP_API_RATE_QUOTA_EXCEEDED' + GCP_DENIED_BY_ORG_POLICY = 'GCP_DENIED_BY_ORG_POLICY' + GCP_FORBIDDEN = 'GCP_FORBIDDEN' + GCP_IAM_TIMEOUT = 'GCP_IAM_TIMEOUT' + GCP_INACCESSIBLE_KMS_KEY_FAILURE = 'GCP_INACCESSIBLE_KMS_KEY_FAILURE' + GCP_INSUFFICIENT_CAPACITY = 'GCP_INSUFFICIENT_CAPACITY' + GCP_IP_SPACE_EXHAUSTED = 'GCP_IP_SPACE_EXHAUSTED' + GCP_KMS_KEY_PERMISSION_DENIED = 'GCP_KMS_KEY_PERMISSION_DENIED' + GCP_NOT_FOUND = 'GCP_NOT_FOUND' + GCP_QUOTA_EXCEEDED = 'GCP_QUOTA_EXCEEDED' + GCP_RESOURCE_QUOTA_EXCEEDED = 'GCP_RESOURCE_QUOTA_EXCEEDED' + GCP_SERVICE_ACCOUNT_ACCESS_DENIED = 'GCP_SERVICE_ACCOUNT_ACCESS_DENIED' + GCP_SERVICE_ACCOUNT_DELETED = 'GCP_SERVICE_ACCOUNT_DELETED' + GCP_SERVICE_ACCOUNT_NOT_FOUND = 'GCP_SERVICE_ACCOUNT_NOT_FOUND' + GCP_SUBNET_NOT_READY = 'GCP_SUBNET_NOT_READY' + GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED = 'GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED' + GKE_BASED_CLUSTER_TERMINATION = 'GKE_BASED_CLUSTER_TERMINATION' + GLOBAL_INIT_SCRIPT_FAILURE = 'GLOBAL_INIT_SCRIPT_FAILURE' + HIVE_METASTORE_PROVISIONING_FAILURE = 'HIVE_METASTORE_PROVISIONING_FAILURE' + IMAGE_PULL_PERMISSION_DENIED = 'IMAGE_PULL_PERMISSION_DENIED' + INACTIVITY = 'INACTIVITY' + INIT_CONTAINER_NOT_FINISHED = 'INIT_CONTAINER_NOT_FINISHED' + INIT_SCRIPT_FAILURE = 'INIT_SCRIPT_FAILURE' + INSTANCE_POOL_CLUSTER_FAILURE = 'INSTANCE_POOL_CLUSTER_FAILURE' + INSTANCE_POOL_MAX_CAPACITY_REACHED = 'INSTANCE_POOL_MAX_CAPACITY_REACHED' + INSTANCE_POOL_NOT_FOUND = 'INSTANCE_POOL_NOT_FOUND' + INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE' + INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG = 'INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG' + INTERNAL_CAPACITY_FAILURE = 'INTERNAL_CAPACITY_FAILURE' + INTERNAL_ERROR = 'INTERNAL_ERROR' + INVALID_ARGUMENT = 'INVALID_ARGUMENT' + INVALID_AWS_PARAMETER = 'INVALID_AWS_PARAMETER' + INVALID_INSTANCE_PLACEMENT_PROTOCOL = 'INVALID_INSTANCE_PLACEMENT_PROTOCOL' + INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE' + INVALID_WORKER_IMAGE_FAILURE = 'INVALID_WORKER_IMAGE_FAILURE' + IN_PENALTY_BOX = 'IN_PENALTY_BOX' + IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE' + JOB_FINISHED = 'JOB_FINISHED' + K8S_AUTOSCALING_FAILURE = 'K8S_AUTOSCALING_FAILURE' + K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = 'K8S_DBR_CLUSTER_LAUNCH_TIMEOUT' + LAZY_ALLOCATION_TIMEOUT = 'LAZY_ALLOCATION_TIMEOUT' + MAINTENANCE_MODE = 'MAINTENANCE_MODE' + METASTORE_COMPONENT_UNHEALTHY = 'METASTORE_COMPONENT_UNHEALTHY' + NEPHOS_RESOURCE_MANAGEMENT = 'NEPHOS_RESOURCE_MANAGEMENT' + NETVISOR_SETUP_TIMEOUT = 'NETVISOR_SETUP_TIMEOUT' + NETWORK_CHECK_CONTROL_PLANE_FAILURE = 'NETWORK_CHECK_CONTROL_PLANE_FAILURE' + NETWORK_CHECK_DNS_SERVER_FAILURE = 'NETWORK_CHECK_DNS_SERVER_FAILURE' + NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = 'NETWORK_CHECK_METADATA_ENDPOINT_FAILURE' + NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = 'NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE' + NETWORK_CHECK_NIC_FAILURE = 
'NETWORK_CHECK_NIC_FAILURE' + NETWORK_CHECK_STORAGE_FAILURE = 'NETWORK_CHECK_STORAGE_FAILURE' + NETWORK_CONFIGURATION_FAILURE = 'NETWORK_CONFIGURATION_FAILURE' + NFS_MOUNT_FAILURE = 'NFS_MOUNT_FAILURE' + NO_MATCHED_K8S = 'NO_MATCHED_K8S' + NO_MATCHED_K8S_TESTING_TAG = 'NO_MATCHED_K8S_TESTING_TAG' + NPIP_TUNNEL_SETUP_FAILURE = 'NPIP_TUNNEL_SETUP_FAILURE' + NPIP_TUNNEL_TOKEN_FAILURE = 'NPIP_TUNNEL_TOKEN_FAILURE' + POD_ASSIGNMENT_FAILURE = 'POD_ASSIGNMENT_FAILURE' + POD_SCHEDULING_FAILURE = 'POD_SCHEDULING_FAILURE' + REQUEST_REJECTED = 'REQUEST_REJECTED' + REQUEST_THROTTLED = 'REQUEST_THROTTLED' + RESOURCE_USAGE_BLOCKED = 'RESOURCE_USAGE_BLOCKED' + SECRET_CREATION_FAILURE = 'SECRET_CREATION_FAILURE' + SECRET_PERMISSION_DENIED = 'SECRET_PERMISSION_DENIED' + SECRET_RESOLUTION_ERROR = 'SECRET_RESOLUTION_ERROR' + SECURITY_DAEMON_REGISTRATION_EXCEPTION = 'SECURITY_DAEMON_REGISTRATION_EXCEPTION' + SELF_BOOTSTRAP_FAILURE = 'SELF_BOOTSTRAP_FAILURE' + SERVERLESS_LONG_RUNNING_TERMINATED = 'SERVERLESS_LONG_RUNNING_TERMINATED' + SKIPPED_SLOW_NODES = 'SKIPPED_SLOW_NODES' + SLOW_IMAGE_DOWNLOAD = 'SLOW_IMAGE_DOWNLOAD' + SPARK_ERROR = 'SPARK_ERROR' + SPARK_IMAGE_DOWNLOAD_FAILURE = 'SPARK_IMAGE_DOWNLOAD_FAILURE' + SPARK_IMAGE_DOWNLOAD_THROTTLED = 'SPARK_IMAGE_DOWNLOAD_THROTTLED' + SPARK_IMAGE_NOT_FOUND = 'SPARK_IMAGE_NOT_FOUND' + SPARK_STARTUP_FAILURE = 'SPARK_STARTUP_FAILURE' + SPOT_INSTANCE_TERMINATION = 'SPOT_INSTANCE_TERMINATION' + SSH_BOOTSTRAP_FAILURE = 'SSH_BOOTSTRAP_FAILURE' + STORAGE_DOWNLOAD_FAILURE = 'STORAGE_DOWNLOAD_FAILURE' + STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG = 'STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG' + STORAGE_DOWNLOAD_FAILURE_SLOW = 'STORAGE_DOWNLOAD_FAILURE_SLOW' + STORAGE_DOWNLOAD_FAILURE_THROTTLED = 'STORAGE_DOWNLOAD_FAILURE_THROTTLED' + STS_CLIENT_SETUP_FAILURE = 'STS_CLIENT_SETUP_FAILURE' + SUBNET_EXHAUSTED_FAILURE = 'SUBNET_EXHAUSTED_FAILURE' + TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE' + TRIAL_EXPIRED = 'TRIAL_EXPIRED' + UNEXPECTED_LAUNCH_FAILURE = 'UNEXPECTED_LAUNCH_FAILURE' + UNEXPECTED_POD_RECREATION = 'UNEXPECTED_POD_RECREATION' + UNKNOWN = 'UNKNOWN' + UNSUPPORTED_INSTANCE_TYPE = 'UNSUPPORTED_INSTANCE_TYPE' + UPDATE_INSTANCE_PROFILE_FAILURE = 'UPDATE_INSTANCE_PROFILE_FAILURE' + USER_INITIATED_VM_TERMINATION = 'USER_INITIATED_VM_TERMINATION' + USER_REQUEST = 'USER_REQUEST' + WORKER_SETUP_FAILURE = 'WORKER_SETUP_FAILURE' + WORKSPACE_CANCELLED_ERROR = 'WORKSPACE_CANCELLED_ERROR' + WORKSPACE_CONFIGURATION_ERROR = 'WORKSPACE_CONFIGURATION_ERROR' + WORKSPACE_UPDATE = 'WORKSPACE_UPDATE' class TerminationReasonType(Enum): """type of the termination""" - - CLIENT_ERROR = "CLIENT_ERROR" - CLOUD_FAILURE = "CLOUD_FAILURE" - SERVICE_FAULT = "SERVICE_FAULT" - SUCCESS = "SUCCESS" - + + CLIENT_ERROR = 'CLIENT_ERROR' + CLOUD_FAILURE = 'CLOUD_FAILURE' + SERVICE_FAULT = 'SERVICE_FAULT' + SUCCESS = 'SUCCESS' @dataclass class UninstallLibraries: cluster_id: str """Unique identifier for the cluster on which to uninstall these libraries.""" - + libraries: List[Library] """The libraries to uninstall.""" - + def as_dict(self) -> dict: """Serializes the UninstallLibraries into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] return body def as_shallow_dict(self) -> dict: 
"""Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = self.libraries + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.libraries: body['libraries'] = self.libraries return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UninstallLibraries: """Deserializes the UninstallLibraries from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) + return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated_dict(d, 'libraries', Library)) + + @dataclass @@ -9243,30 +7687,32 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UninstallLibrariesResponse: """Deserializes the UninstallLibrariesResponse from a dictionary.""" return cls() + + @dataclass class UnpinCluster: cluster_id: str - + def as_dict(self) -> dict: """Serializes the UnpinCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the UnpinCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UnpinCluster: """Deserializes the UnpinCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) + return cls(cluster_id=d.get('cluster_id', None)) + + @dataclass @@ -9285,13 +7731,15 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UnpinClusterResponse: """Deserializes the UnpinClusterResponse from a dictionary.""" return cls() + + @dataclass class UpdateCluster: cluster_id: str """ID of the cluster.""" - + update_mask: str """Used to specify which cluster attributes and size fields to update. See https://google.aip.dev/161 for more details. @@ -9305,40 +7753,32 @@ class UpdateCluster: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + cluster: Optional[UpdateClusterResource] = None """The cluster to be updated.""" - + def as_dict(self) -> dict: """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster: - body["cluster"] = self.cluster.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.update_mask is not None: - body["update_mask"] = self.update_mask + if self.cluster: body['cluster'] = self.cluster.as_dict() + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.update_mask is not None: body['update_mask'] = self.update_mask return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster: - body["cluster"] = self.cluster - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.update_mask is not None: - body["update_mask"] = self.update_mask + if self.cluster: body['cluster'] = self.cluster + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.update_mask is not None: body['update_mask'] = self.update_mask return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCluster: """Deserializes the UpdateCluster from a dictionary.""" - return cls( - cluster=_from_dict(d, "cluster", UpdateClusterResource), - cluster_id=d.get("cluster_id", None), - update_mask=d.get("update_mask", None), - ) + return cls(cluster=_from_dict(d, 'cluster', UpdateClusterResource), cluster_id=d.get('cluster_id', None), update_mask=d.get('update_mask', None)) + + @dataclass @@ -9346,34 +7786,34 @@ class UpdateClusterResource: autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - + cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. 
For job clusters, the cluster name is automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -9381,7 +7821,7 @@ class UpdateClusterResource: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. @@ -9407,14 +7847,14 @@ class UpdateClusterResource: concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - + docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the driver of the cluster belongs. The cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. @@ -9422,33 +7862,33 @@ class UpdateClusterResource: This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space. This feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable LUKS on cluster VMs' local disks""" - + gcp_attributes: Optional[GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + is_single_node: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`""" - + kind: Optional[Kind] = None """The kind of compute described by this compute specification. @@ -9463,13 +7903,13 @@ class UpdateClusterResource: By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. [simple form]: https://docs.databricks.com/compute/simple-form.html""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. 
For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. @@ -9479,10 +7919,10 @@ class UpdateClusterResource: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + runtime_engine: Optional[RuntimeEngine] = None """Determines the cluster's runtime engine, either standard or Photon. @@ -9491,16 +7931,16 @@ class UpdateClusterResource: If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used.""" - + single_user_name: Optional[str] = None """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str, str]] = None + + spark_conf: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. Users can also pass in a string of extra JVM options to the driver and the executors via `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - - spark_env_vars: Optional[Dict[str, str]] = None + + spark_env_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -9512,185 +7952,99 @@ class UpdateClusterResource: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + spark_version: Optional[str] = None """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + use_ml_runtime: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. 
`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is a GPU node or not.""" - + workload_type: Optional[WorkloadType] = None """Cluster attributes showing the workload types for the cluster.""" - + def as_dict(self) -> dict: """Serializes the UpdateClusterResource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind.value - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() + if self.autoscale: body['autoscale'] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = 
self.data_security_mode.value
+        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
+        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateClusterResource into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale:
-            body["autoscale"] = self.autoscale
-        if self.autotermination_minutes is not None:
-            body["autotermination_minutes"] = self.autotermination_minutes
-        if self.aws_attributes:
-            body["aws_attributes"] = self.aws_attributes
-        if self.azure_attributes:
-            body["azure_attributes"] = self.azure_attributes
-        if self.cluster_log_conf:
-            body["cluster_log_conf"] = self.cluster_log_conf
-        if self.cluster_name is not None:
-            body["cluster_name"] = self.cluster_name
-        if self.custom_tags:
-            body["custom_tags"] = self.custom_tags
-        if self.data_security_mode is not None:
-            body["data_security_mode"] = self.data_security_mode
-        if self.docker_image:
-            body["docker_image"] = self.docker_image
-        if self.driver_instance_pool_id is not None:
-            body["driver_instance_pool_id"] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None:
-            body["driver_node_type_id"] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None:
-            body["enable_elastic_disk"] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None:
-            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
-        if self.gcp_attributes:
-            body["gcp_attributes"] = self.gcp_attributes
-        if self.init_scripts:
-            body["init_scripts"] = self.init_scripts
-        if self.instance_pool_id is not None:
-            body["instance_pool_id"] = self.instance_pool_id
-        if self.is_single_node is not None:
-            body["is_single_node"] = self.is_single_node
-        if self.kind is not None:
-            body["kind"] = self.kind
-        if self.node_type_id is not None:
-            body["node_type_id"] = self.node_type_id
-        if self.num_workers is not None:
-            body["num_workers"] = self.num_workers
-        if self.policy_id is not None:
-            body["policy_id"] = self.policy_id
-        if self.runtime_engine is not None:
-            body["runtime_engine"] = self.runtime_engine
-        if self.single_user_name is not None:
-            body["single_user_name"] = self.single_user_name
-        if self.spark_conf:
-            body["spark_conf"] = self.spark_conf
-        if self.spark_env_vars:
-            body["spark_env_vars"] = self.spark_env_vars
-        if self.spark_version is not None:
-            body["spark_version"] = self.spark_version
-        if self.ssh_public_keys:
-            body["ssh_public_keys"] = self.ssh_public_keys
-        if self.use_ml_runtime is not None:
-            body["use_ml_runtime"] = self.use_ml_runtime
-        if self.workload_type:
-            body["workload_type"] = self.workload_type
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateClusterResource:
         """Deserializes the UpdateClusterResource from a dictionary."""
-        return cls(
-            autoscale=_from_dict(d, "autoscale", AutoScale),
-            autotermination_minutes=d.get("autotermination_minutes", None),
-            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
-            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
-            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
-            cluster_name=d.get("cluster_name", None),
-            custom_tags=d.get("custom_tags", None),
-            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
-            docker_image=_from_dict(d, "docker_image", DockerImage),
-            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
-            driver_node_type_id=d.get("driver_node_type_id", None),
-            enable_elastic_disk=d.get("enable_elastic_disk", None),
-            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
-            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
-            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
-            instance_pool_id=d.get("instance_pool_id", None),
-            is_single_node=d.get("is_single_node", None),
-            kind=_enum(d, "kind", Kind),
-            node_type_id=d.get("node_type_id", None),
-            num_workers=d.get("num_workers", None),
-            policy_id=d.get("policy_id", None),
-            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
-            single_user_name=d.get("single_user_name", None),
-            spark_conf=d.get("spark_conf", None),
-            spark_env_vars=d.get("spark_env_vars", None),
-            spark_version=d.get("spark_version", None),
-            ssh_public_keys=d.get("ssh_public_keys", None),
-            use_ml_runtime=d.get("use_ml_runtime", None),
-            workload_type=_from_dict(d, "workload_type", WorkloadType),
-        )
+        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType))
+
+

 @dataclass
@@ -9709,6 +8063,8 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateClusterResponse:
         """Deserializes the UpdateClusterResponse from a dictionary."""
         return cls()
+
+

 @dataclass
@@ -9727,130 +8083,135 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
         return cls()
+
+
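The three helpers above follow one pattern across the generated dataclasses: as_dict() recursively serializes nested dataclasses into a JSON-ready body, as_shallow_dict() leaves nested objects in place, and from_dict() inverts as_dict(). A minimal round-trip sketch (not part of the generated diff; the version string is illustrative):

    from databricks.sdk.service import compute

    spec = compute.UpdateClusterResource(num_workers=4, spark_version='15.4.x-scala2.12')
    body = spec.as_dict()  # plain dict, ready to send as a JSON request body
    assert body == {'num_workers': 4, 'spark_version': '15.4.x-scala2.12'}
    assert compute.UpdateClusterResource.from_dict(body).num_workers == 4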
 @dataclass
 class VolumesStorageInfo:
     """A storage location backed by UC Volumes."""
-
+
     destination: str
     """UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh` or
     `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`"""
-
+
     def as_dict(self) -> dict:
         """Serializes the VolumesStorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the VolumesStorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> VolumesStorageInfo:
         """Deserializes the VolumesStorageInfo from a dictionary."""
-        return cls(destination=d.get("destination", None))
+        return cls(destination=d.get('destination', None))
+
+

 @dataclass
 class WorkloadType:
     """Cluster attributes showing the workload types supported by the cluster."""
-
+
     clients: ClientsTypes
     """Defines what type of clients can use the cluster, e.g. Notebooks, Jobs"""
-
+
     def as_dict(self) -> dict:
         """Serializes the WorkloadType into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.clients:
-            body["clients"] = self.clients.as_dict()
+        if self.clients: body['clients'] = self.clients.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the WorkloadType into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clients:
-            body["clients"] = self.clients
+        if self.clients: body['clients'] = self.clients
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> WorkloadType:
         """Deserializes the WorkloadType from a dictionary."""
-        return cls(clients=_from_dict(d, "clients", ClientsTypes))
+        return cls(clients=_from_dict(d, 'clients', ClientsTypes))
+
+

 @dataclass
 class WorkspaceStorageInfo:
     """A storage location in Workspace Filesystem (WSFS)"""
-
+
     destination: str
     """wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`"""
-
+
     def as_dict(self) -> dict:
         """Serializes the WorkspaceStorageInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceStorageInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination is not None:
-            body["destination"] = self.destination
+        if self.destination is not None: body['destination'] = self.destination
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> WorkspaceStorageInfo:
         """Deserializes the WorkspaceStorageInfo from a dictionary."""
-        return cls(destination=d.get("destination", None))
+        return cls(destination=d.get('destination', None))
+
+
+
+
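A short usage sketch for the storage and workload dataclasses above (not part of the generated diff; field names follow the definitions in this file, the destination path is illustrative):

    from databricks.sdk.service import compute

    # init script stored in a UC Volume
    init_script = compute.InitScriptInfo(
        volumes=compute.VolumesStorageInfo(
            destination='/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh'))
    # restrict the cluster to notebook workloads only
    workload = compute.WorkloadType(
        clients=compute.ClientsTypes(notebooks=True, jobs=False))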
class ClusterPoliciesAPI:
    """You can use cluster policies to control users' ability to configure clusters based on a set of rules.
    These rules specify which attributes or attribute values can be used during cluster creation. Cluster
    policies have ACLs that limit their use to specific users and groups.
-
+
    With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in
    the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed
    settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding
    some fields. - Manage costs by setting limits on attributes that impact the hourly rate.
-
+
    Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user
    creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted
    policy and create fully-configurable clusters. - A user who has both unrestricted cluster create
    permission and access to cluster policies can select the Unrestricted policy and policies they have
    access to. - A user who has access only to cluster policies can select the policies they have access to.
-
+
    If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
    edit, and delete policies. Admin users also have access to all policies."""

    def __init__(self, api_client):
        self._api = api_client
+

-    def create(
-        self,
-        *,
-        definition: Optional[str] = None,
-        description: Optional[str] = None,
-        libraries: Optional[List[Library]] = None,
-        max_clusters_per_user: Optional[int] = None,
-        name: Optional[str] = None,
-        policy_family_definition_overrides: Optional[str] = None,
-        policy_family_id: Optional[str] = None,
-    ) -> CreatePolicyResponse:
-        """Create a new policy.
+
-        Creates a new policy with prescribed settings.
+
+
+
+
+    def create(self
+
+               , *
+               , definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None) -> CreatePolicyResponse:
+        """Create a new policy.
+
+        Creates a new policy with prescribed settings.
+
        :param definition: str (optional)
          Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-
+
          [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
        :param description: str (optional)
          Additional human-readable description of the cluster policy.
@@ -9866,85 +8227,80 @@ def create(
        :param policy_family_definition_overrides: str (optional)
          Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
          document must be passed as a string and cannot be embedded in the requests.
-
+
          You can use this to customize the policy definition inherited from the policy family. Policy rules
          specified here are merged into the inherited policy definition.
-
+
          [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
        :param policy_family_id: str (optional)
          ID of the policy family. The cluster policy's policy definition inherits the policy family's
          policy definition.
-
+
          Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
          policy definition.
-
+
        :returns: :class:`CreatePolicyResponse`
        """
        body = {}
-        if definition is not None:
-            body["definition"] = definition
-        if description is not None:
-            body["description"] = description
-        if libraries is not None:
-            body["libraries"] = [v.as_dict() for v in libraries]
-        if max_clusters_per_user is not None:
-            body["max_clusters_per_user"] = max_clusters_per_user
-        if name is not None:
-            body["name"] = name
-        if policy_family_definition_overrides is not None:
-            body["policy_family_definition_overrides"] = policy_family_definition_overrides
-        if policy_family_id is not None:
-            body["policy_family_id"] = policy_family_id
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/policies/clusters/create", body=body, headers=headers)
+        if definition is not None: body['definition'] = definition
+        if description is not None: body['description'] = description
+        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
+        if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user
+        if name is not None: body['name'] = name
+        if policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = policy_family_definition_overrides
+        if policy_family_id is not None: body['policy_family_id'] = policy_family_id
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST','/api/2.0/policies/clusters/create', body=body
+
+        , headers=headers
+        )
        return CreatePolicyResponse.from_dict(res)
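A minimal sketch of calling create() as documented above (not part of the generated diff; the policy name and rule are illustrative). The definition document is ordinary JSON passed as a string:

    import json

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    created = w.cluster_policies.create(
        name='autoscale-capped',  # illustrative name
        definition=json.dumps({'autoscale.max_workers': {'type': 'range', 'maxValue': 10}}))
    print(created.policy_id)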
-    def delete(self, policy_id: str):
-        """Delete a cluster policy.
+
+
+
+
+    def delete(self
+               , policy_id: str
+               ):
+        """Delete a cluster policy.
+
+        Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.
+
        :param policy_id: str
          The ID of the policy to delete.
-
-
+
+
        """
        body = {}
-        if policy_id is not None:
-            body["policy_id"] = policy_id
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        self._api.do("POST", "/api/2.0/policies/clusters/delete", body=body, headers=headers)
-
-    def edit(
-        self,
-        policy_id: str,
-        *,
-        definition: Optional[str] = None,
-        description: Optional[str] = None,
-        libraries: Optional[List[Library]] = None,
-        max_clusters_per_user: Optional[int] = None,
-        name: Optional[str] = None,
-        policy_family_definition_overrides: Optional[str] = None,
-        policy_family_id: Optional[str] = None,
-    ):
-        """Update a cluster policy.
+        if policy_id is not None: body['policy_id'] = policy_id
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        self._api.do('POST','/api/2.0/policies/clusters/delete', body=body
+
+        , headers=headers
+        )
+
+
+
+
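Because edit() (below) replaces the policy rather than patching it, a typical flow reads the current entity first and resubmits its fields alongside the changed ones. A hedged sketch, not part of the generated diff, with a hypothetical policy ID:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    policy = w.cluster_policies.get(policy_id='ABC123')  # hypothetical ID
    w.cluster_policies.edit(
        policy_id=policy.policy_id,
        name=policy.name,
        definition=policy.definition,
        description='tightened for cost control')  # the one changed field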
+    def edit(self
+             , policy_id: str
+             , *
+             , definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None):
+        """Update a cluster policy.
+
+        Update an existing policy for a cluster. This operation may make some clusters governed by the
+        previous policy invalid.
+
        :param policy_id: str
          The ID of the policy to update.
        :param definition: str (optional)
          Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-
+
          [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
        :param description: str (optional)
          Additional human-readable description of the cluster policy.
@@ -9960,358 +8316,350 @@ def edit(
        :param policy_family_definition_overrides: str (optional)
          Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
          document must be passed as a string and cannot be embedded in the requests.
-
+
          You can use this to customize the policy definition inherited from the policy family. Policy rules
          specified here are merged into the inherited policy definition.
-
+
          [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
        :param policy_family_id: str (optional)
          ID of the policy family. The cluster policy's policy definition inherits the policy family's
          policy definition.
-
+
          Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
          policy definition.
-
-
+
+
        """
        body = {}
-        if definition is not None:
-            body["definition"] = definition
-        if description is not None:
-            body["description"] = description
-        if libraries is not None:
-            body["libraries"] = [v.as_dict() for v in libraries]
-        if max_clusters_per_user is not None:
-            body["max_clusters_per_user"] = max_clusters_per_user
-        if name is not None:
-            body["name"] = name
-        if policy_family_definition_overrides is not None:
-            body["policy_family_definition_overrides"] = policy_family_definition_overrides
-        if policy_family_id is not None:
-            body["policy_family_id"] = policy_family_id
-        if policy_id is not None:
-            body["policy_id"] = policy_id
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        self._api.do("POST", "/api/2.0/policies/clusters/edit", body=body, headers=headers)
-
-    def get(self, policy_id: str) -> Policy:
-        """Get a cluster policy.
+        if definition is not None: body['definition'] = definition
+        if description is not None: body['description'] = description
+        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
+        if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user
+        if name is not None: body['name'] = name
+        if policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = policy_family_definition_overrides
+        if policy_family_id is not None: body['policy_family_id'] = policy_family_id
+        if policy_id is not None: body['policy_id'] = policy_id
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        self._api.do('POST','/api/2.0/policies/clusters/edit', body=body
+
+        , headers=headers
+        )
+
-        Get a cluster policy entity. Creation and editing are available to admins only.
+
+
+
+    def get(self
+            , policy_id: str
+            ) -> Policy:
+        """Get a cluster policy.
+
+        Get a cluster policy entity. Creation and editing are available to admins only.
+
+        :param policy_id: str
+          Canonical unique identifier for the Cluster Policy.
-
+
        :returns: :class:`Policy`
        """
-
+
        query = {}
-        if policy_id is not None:
-            query["policy_id"] = policy_id
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", "/api/2.0/policies/clusters/get", query=query, headers=headers)
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET','/api/2.0/policies/clusters/get', query=query
+
+        , headers=headers
+        )
        return Policy.from_dict(res)

-    def get_permission_levels(self, cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse:
-        """Get cluster policy permission levels.
+
+
+
+
+    def get_permission_levels(self
+                              , cluster_policy_id: str
+                              ) -> GetClusterPolicyPermissionLevelsResponse:
+        """Get cluster policy permission levels.
+
+        Gets the permission levels that a user can have on an object.
+
        :param cluster_policy_id: str
          The cluster policy for which to get or manage permissions.
-
+
        :returns: :class:`GetClusterPolicyPermissionLevelsResponse`
        """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels", headers=headers
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels'
+
+        , headers=headers
+        )
        return GetClusterPolicyPermissionLevelsResponse.from_dict(res)

-    def get_permissions(self, cluster_policy_id: str) -> ClusterPolicyPermissions:
-        """Get cluster policy permissions.
+
+
+
+
+    def get_permissions(self
+                        , cluster_policy_id: str
+                        ) -> ClusterPolicyPermissions:
+        """Get cluster policy permissions.
+
+        Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root
+        object.
+
        :param cluster_policy_id: str
          The cluster policy for which to get or manage permissions.
+
        :returns: :class:`ClusterPolicyPermissions`
        """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}'
+
+        , headers=headers
+        )
        return ClusterPolicyPermissions.from_dict(res)
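A sketch tying the permission calls together (not part of the generated diff; IDs and group names are hypothetical): get_permissions() above reads the current grants, update_permissions() merges a new grant, and set_permissions() replaces all grants — see both methods further below.

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    current = w.cluster_policies.get_permissions(cluster_policy_id='ABC123')  # hypothetical ID
    w.cluster_policies.update_permissions(
        cluster_policy_id='ABC123',
        access_control_list=[
            compute.ClusterPolicyAccessControlRequest(
                group_name='data-engineers',  # hypothetical group
                permission_level=compute.ClusterPolicyPermissionLevel.CAN_USE)])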
-    def list(
-        self, *, sort_column: Optional[ListSortColumn] = None, sort_order: Optional[ListSortOrder] = None
-    ) -> Iterator[Policy]:
-        """List cluster policies.
+
+
+
+
+    def list(self
+
+             , *
+             , sort_column: Optional[ListSortColumn] = None, sort_order: Optional[ListSortOrder] = None) -> Iterator[Policy]:
+        """List cluster policies.
+
        Returns a list of policies accessible by the requesting user.
+
        :param sort_column: :class:`ListSortColumn` (optional)
          The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy
          creation time. * `POLICY_NAME` - Sort result list by policy name.
        :param sort_order: :class:`ListSortOrder` (optional)
          The order in which the policies get listed. * `DESC` - Sort result list in descending order. *
          `ASC` - Sort result list in ascending order.
-
+
        :returns: Iterator over :class:`Policy`
        """
-
+
        query = {}
-        if sort_column is not None:
-            query["sort_column"] = sort_column.value
-        if sort_order is not None:
-            query["sort_order"] = sort_order.value
-        headers = {
-            "Accept": "application/json",
-        }
-
-        json = self._api.do("GET", "/api/2.0/policies/clusters/list", query=query, headers=headers)
+        if sort_column is not None: query['sort_column'] = sort_column.value
+        if sort_order is not None: query['sort_order'] = sort_order.value
+        headers = {'Accept': 'application/json',}
+
+        json = self._api.do('GET','/api/2.0/policies/clusters/list', query=query
+
+        , headers=headers
+        )
        parsed = ListPoliciesResponse.from_dict(json).policies
        return parsed if parsed is not None else []
+

-    def set_permissions(
-        self, cluster_policy_id: str, *, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None
-    ) -> ClusterPolicyPermissions:
-        """Set cluster policy permissions.
+
+
+
+
+    def set_permissions(self
+                        , cluster_policy_id: str
+                        , *
+                        , access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None) -> ClusterPolicyPermissions:
+        """Set cluster policy permissions.
+
        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
        permissions if none are specified. Objects can inherit permissions from their root object.
+
        :param cluster_policy_id: str
          The cluster policy for which to get or manage permissions.
        :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
-
+
        :returns: :class:`ClusterPolicyPermissions`
        """
        body = {}
-        if access_control_list is not None:
-            body["access_control_list"] = [v.as_dict() for v in access_control_list]
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "PUT", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}", body=body, headers=headers
-        )
+        if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('PUT',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}', body=body
+
+        , headers=headers
+        )
        return ClusterPolicyPermissions.from_dict(res)

-    def update_permissions(
-        self, cluster_policy_id: str, *, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None
-    ) -> ClusterPolicyPermissions:
-        """Update cluster policy permissions.
+
+
+
+
+    def update_permissions(self
+                           , cluster_policy_id: str
+                           , *
+                           , access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None) -> ClusterPolicyPermissions:
+        """Update cluster policy permissions.
+
        Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root
        object.
+
        :param cluster_policy_id: str
          The cluster policy for which to get or manage permissions.
:param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional) - + :returns: :class:`ClusterPolicyPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}', body=body + + , headers=headers + ) return ClusterPolicyPermissions.from_dict(res) - + + class ClustersAPI: """The Clusters API allows you to create, start, edit, list, terminate, and delete clusters. - + Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing page for a list of the supported instance types and their corresponding DBUs. - + A Databricks cluster is a set of computation resources and configurations on which you run data engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming analytics, ad-hoc analytics, and machine learning. - + You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs. - + You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list.""" - + def __init__(self, api_client): self._api = api_client - - def wait_get_cluster_running( - self, - cluster_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[ClusterDetails], None]] = None, - ) -> ClusterDetails: - deadline = time.time() + timeout.total_seconds() - target_states = (State.RUNNING,) - failure_states = ( - State.ERROR, - State.TERMINATED, - ) - status_message = "polling..." 
- attempt = 1 - while time.time() < deadline: - poll = self.get(cluster_id=cluster_id) - status = poll.state - status_message = poll.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach RUNNING, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def wait_get_cluster_terminated( - self, - cluster_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[ClusterDetails], None]] = None, - ) -> ClusterDetails: - deadline = time.time() + timeout.total_seconds() - target_states = (State.TERMINATED,) - failure_states = (State.ERROR,) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get(cluster_id=cluster_id) - status = poll.state - status_message = poll.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach TERMINATED, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def change_owner(self, cluster_id: str, owner_username: str): + + + + + + def wait_get_cluster_running(self, cluster_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: + deadline = time.time() + timeout.total_seconds() + target_states = (State.RUNNING, ) + failure_states = (State.ERROR, State.TERMINATED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get(cluster_id=cluster_id) + status = poll.state + status_message = poll.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach RUNNING, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_cluster_terminated(self, cluster_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: + deadline = time.time() + timeout.total_seconds() + target_states = (State.TERMINATED, ) + failure_states = (State.ERROR, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(cluster_id=cluster_id) + status = poll.state + status_message = poll.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach TERMINATED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + + def change_owner(self + , cluster_id: str, owner_username: str + ): """Change cluster owner. - + Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform this operation. The service principal application ID can be supplied as an argument to `owner_username`. - + :param cluster_id: str :param owner_username: str New owner of the cluster_id after this RPC. - - + + """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - if owner_username is not None: - body["owner_username"] = owner_username - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.1/clusters/change-owner", body=body, headers=headers) - - def create( - self, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - clone_from: Optional[CloneCluster] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] = None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - is_single_node: Optional[bool] = None, - kind: Optional[Kind] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - use_ml_runtime: Optional[bool] = None, - workload_type: Optional[WorkloadType] = None, - ) -> Wait[ClusterDetails]: - """Create new cluster. 
+ if cluster_id is not None: body['cluster_id'] = cluster_id + if owner_username is not None: body['owner_username'] = owner_username + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.1/clusters/change-owner', body=body + + , headers=headers + ) + + + + + + def create(self + , spark_version: str + , * + , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, clone_from: Optional[CloneCluster] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: + """Create new cluster. + Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud provider limitations (account limits, spot price, etc.) or transient network issues. - + If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed. Otherwise the cluster will terminate with an informative error message. - + Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out the [create compute UI] and then copying the generated JSON definition from the UI. - + [create compute UI]: https://docs.databricks.com/compute/configure.html - + :param spark_version: str The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. @@ -10346,18 +8694,18 @@ def create( :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. 
* `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -10366,10 +8714,10 @@ def create( fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -10383,7 +8731,7 @@ def create( :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -10404,22 +8752,22 @@ def create( The optional ID of the instance pool to which the cluster belongs. :param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -10429,7 +8777,7 @@ def create( :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -10439,10 +8787,10 @@ def create( The ID of the cluster policy used to create the cluster if applicable. 
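(A hedged aside, not part of the generated diff, to make the `kind`/`is_single_node`/`data_security_mode` interplay above concrete — see the remaining parameters after this sketch. Enum members follow this file's definitions; the selector helpers are the SDK's existing convenience mixins, and the cluster name is illustrative:)

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    details = w.clusters.create_and_wait(
        cluster_name='single-node-preview',  # illustrative name
        spark_version=w.clusters.select_spark_version(latest=True),
        node_type_id=w.clusters.select_node_type(local_disk=True),
        kind=compute.Kind.CLASSIC_PREVIEW,
        is_single_node=True,  # single-node tags/conf/num_workers are set automatically
        data_security_mode=compute.DataSecurityMode.DATA_SECURITY_MODE_AUTO,
        autotermination_minutes=20)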
:param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. :param single_user_name: str (optional) @@ -10455,11 +8803,11 @@ def create( An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -10469,242 +8817,125 @@ def create( specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ body = {} - if apply_policy_default_values is not None: - body["apply_policy_default_values"] = apply_policy_default_values - if autoscale is not None: - body["autoscale"] = autoscale.as_dict() - if autotermination_minutes is not None: - body["autotermination_minutes"] = autotermination_minutes - if aws_attributes is not None: - body["aws_attributes"] = aws_attributes.as_dict() - if azure_attributes is not None: - body["azure_attributes"] = azure_attributes.as_dict() - if clone_from is not None: - body["clone_from"] = clone_from.as_dict() - if cluster_log_conf is not None: - body["cluster_log_conf"] = cluster_log_conf.as_dict() - if cluster_name is not None: - body["cluster_name"] = cluster_name - if custom_tags is not None: - body["custom_tags"] = custom_tags - if data_security_mode is not None: - body["data_security_mode"] = data_security_mode.value - if docker_image is not None: - body["docker_image"] = docker_image.as_dict() - if driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = driver_instance_pool_id - if driver_node_type_id is not None: - body["driver_node_type_id"] = driver_node_type_id - if enable_elastic_disk is not None: - body["enable_elastic_disk"] = enable_elastic_disk - if enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = enable_local_disk_encryption - if gcp_attributes is not None: - body["gcp_attributes"] = gcp_attributes.as_dict() - if init_scripts is not None: - body["init_scripts"] = [v.as_dict() for v in init_scripts] - if instance_pool_id is not None: - body["instance_pool_id"] = instance_pool_id - if is_single_node is not None: - body["is_single_node"] = is_single_node - if kind is not None: - body["kind"] = kind.value - if node_type_id is not None: - body["node_type_id"] = node_type_id - 
if num_workers is not None: - body["num_workers"] = num_workers - if policy_id is not None: - body["policy_id"] = policy_id - if runtime_engine is not None: - body["runtime_engine"] = runtime_engine.value - if single_user_name is not None: - body["single_user_name"] = single_user_name - if spark_conf is not None: - body["spark_conf"] = spark_conf - if spark_env_vars is not None: - body["spark_env_vars"] = spark_env_vars - if spark_version is not None: - body["spark_version"] = spark_version - if ssh_public_keys is not None: - body["ssh_public_keys"] = [v for v in ssh_public_keys] - if use_ml_runtime is not None: - body["use_ml_runtime"] = use_ml_runtime - if workload_type is not None: - body["workload_type"] = workload_type.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.1/clusters/create", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, - response=CreateClusterResponse.from_dict(op_response), - cluster_id=op_response["cluster_id"], - ) - - def create_and_wait( - self, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - clone_from: Optional[CloneCluster] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] = None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - is_single_node: Optional[bool] = None, - kind: Optional[Kind] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - use_ml_runtime: Optional[bool] = None, - workload_type: Optional[WorkloadType] = None, - timeout=timedelta(minutes=20), - ) -> ClusterDetails: - return self.create( - apply_policy_default_values=apply_policy_default_values, - autoscale=autoscale, - autotermination_minutes=autotermination_minutes, - aws_attributes=aws_attributes, - azure_attributes=azure_attributes, - clone_from=clone_from, - cluster_log_conf=cluster_log_conf, - cluster_name=cluster_name, - custom_tags=custom_tags, - data_security_mode=data_security_mode, - docker_image=docker_image, - driver_instance_pool_id=driver_instance_pool_id, - driver_node_type_id=driver_node_type_id, - enable_elastic_disk=enable_elastic_disk, - enable_local_disk_encryption=enable_local_disk_encryption, - gcp_attributes=gcp_attributes, - init_scripts=init_scripts, - instance_pool_id=instance_pool_id, - is_single_node=is_single_node, - kind=kind, - node_type_id=node_type_id, - num_workers=num_workers, - policy_id=policy_id, - runtime_engine=runtime_engine, - single_user_name=single_user_name, - spark_conf=spark_conf, - spark_env_vars=spark_env_vars, - 
spark_version=spark_version, - ssh_public_keys=ssh_public_keys, - use_ml_runtime=use_ml_runtime, - workload_type=workload_type, - ).result(timeout=timeout) - - def delete(self, cluster_id: str) -> Wait[ClusterDetails]: + if apply_policy_default_values is not None: body['apply_policy_default_values'] = apply_policy_default_values + if autoscale is not None: body['autoscale'] = autoscale.as_dict() + if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes + if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() + if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict() + if clone_from is not None: body['clone_from'] = clone_from.as_dict() + if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict() + if cluster_name is not None: body['cluster_name'] = cluster_name + if custom_tags is not None: body['custom_tags'] = custom_tags + if data_security_mode is not None: body['data_security_mode'] = data_security_mode.value + if docker_image is not None: body['docker_image'] = docker_image.as_dict() + if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id + if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id + if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk + if enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = enable_local_disk_encryption + if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() + if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] + if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id + if is_single_node is not None: body['is_single_node'] = is_single_node + if kind is not None: body['kind'] = kind.value + if node_type_id is not None: body['node_type_id'] = node_type_id + if num_workers is not None: body['num_workers'] = num_workers + if policy_id is not None: body['policy_id'] = policy_id + if runtime_engine is not None: body['runtime_engine'] = runtime_engine.value + if single_user_name is not None: body['single_user_name'] = single_user_name + if spark_conf is not None: body['spark_conf'] = spark_conf + if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars + if spark_version is not None: body['spark_version'] = spark_version + if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] + if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime + if workload_type is not None: body['workload_type'] = workload_type.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.1/clusters/create', body=body + + , headers=headers + ) + return Wait(self.wait_get_cluster_running + , response = CreateClusterResponse.from_dict(op_response) + , cluster_id=op_response['cluster_id']) + + + def create_and_wait(self + , spark_version: str + , * + , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, clone_from: Optional[CloneCluster] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, 
docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: + return self.create(apply_policy_default_values=apply_policy_default_values, autoscale=autoscale, autotermination_minutes=autotermination_minutes, aws_attributes=aws_attributes, azure_attributes=azure_attributes, clone_from=clone_from, cluster_log_conf=cluster_log_conf, cluster_name=cluster_name, custom_tags=custom_tags, data_security_mode=data_security_mode, docker_image=docker_image, driver_instance_pool_id=driver_instance_pool_id, driver_node_type_id=driver_node_type_id, enable_elastic_disk=enable_elastic_disk, enable_local_disk_encryption=enable_local_disk_encryption, gcp_attributes=gcp_attributes, init_scripts=init_scripts, instance_pool_id=instance_pool_id, is_single_node=is_single_node, kind=kind, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, runtime_engine=runtime_engine, single_user_name=single_user_name, spark_conf=spark_conf, spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, use_ml_runtime=use_ml_runtime, workload_type=workload_type).result(timeout=timeout) + + + + + def delete(self + , cluster_id: str + ) -> Wait[ClusterDetails]: """Terminate cluster. - + Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a `TERMINATING` or `TERMINATED` state, nothing will happen. - + :param cluster_id: str The cluster to be terminated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_terminated for more details. 
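The long-running-operation pattern documented above in one sketch (not part of the generated diff): create() returns a Wait handle whose result() blocks until the cluster reaches RUNNING, while the *_and_wait helpers block inline; delete_and_wait() below blocks until TERMINATED.

    from datetime import timedelta

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    waiter = w.clusters.create(
        spark_version=w.clusters.select_spark_version(latest=True),
        node_type_id=w.clusters.select_node_type(local_disk=True),
        num_workers=1)
    info = waiter.result(timeout=timedelta(minutes=25))  # polls until RUNNING
    w.clusters.delete_and_wait(cluster_id=info.cluster_id)  # terminates, blocks until TERMINATED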
""" body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.1/clusters/delete", body=body, headers=headers) - return Wait( - self.wait_get_cluster_terminated, - response=DeleteClusterResponse.from_dict(op_response), - cluster_id=cluster_id, - ) - - def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: + if cluster_id is not None: body['cluster_id'] = cluster_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.1/clusters/delete', body=body + + , headers=headers + ) + return Wait(self.wait_get_cluster_terminated + , response = DeleteClusterResponse.from_dict(op_response) + , cluster_id=cluster_id) + + + def delete_and_wait(self + , cluster_id: str + , + timeout=timedelta(minutes=20)) -> ClusterDetails: return self.delete(cluster_id=cluster_id).result(timeout=timeout) + + + - def edit( - self, - cluster_id: str, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] = None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - is_single_node: Optional[bool] = None, - kind: Optional[Kind] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - use_ml_runtime: Optional[bool] = None, - workload_type: Optional[WorkloadType] = None, - ) -> Wait[ClusterDetails]: + def edit(self + , cluster_id: str, spark_version: str + , * + , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: 
Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: """Update cluster configuration. - + Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. - + Clusters created by the Databricks Jobs service cannot be edited. - + :param cluster_id: str ID of the cluster :param spark_version: str @@ -10739,18 +8970,18 @@ def edit( :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -10759,10 +8990,10 @@ def edit( fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -10776,7 +9007,7 @@ def edit( :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -10797,22 +9028,22 @@ def edit( The optional ID of the instance pool to which the cluster belongs. 
:param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -10822,7 +9053,7 @@ def edit( :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -10832,10 +9063,10 @@ def edit( The ID of the cluster policy used to create the cluster if applicable. :param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. :param single_user_name: str (optional) @@ -10848,11 +9079,11 @@ def edit( An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -10862,177 +9093,78 @@ def edit( specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
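# Illustrative usage sketch for the edit/edit_and_wait call documented above,
# assuming a configured WorkspaceClient and an existing all-purpose cluster;
# the cluster ID, Spark version, and node type below are hypothetical values.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Blocks until the edited cluster reaches RUNNING (wait_get_cluster_running);
# a RUNNING cluster is restarted so the new attributes take effect.
details = w.clusters.edit_and_wait(
    cluster_id="0123-456789-abcdefgh",
    spark_version="15.4.x-scala2.12",
    node_type_id="i3.xlarge",
    num_workers=4,
    autotermination_minutes=60,
)
print(details.state)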
""" body = {} - if apply_policy_default_values is not None: - body["apply_policy_default_values"] = apply_policy_default_values - if autoscale is not None: - body["autoscale"] = autoscale.as_dict() - if autotermination_minutes is not None: - body["autotermination_minutes"] = autotermination_minutes - if aws_attributes is not None: - body["aws_attributes"] = aws_attributes.as_dict() - if azure_attributes is not None: - body["azure_attributes"] = azure_attributes.as_dict() - if cluster_id is not None: - body["cluster_id"] = cluster_id - if cluster_log_conf is not None: - body["cluster_log_conf"] = cluster_log_conf.as_dict() - if cluster_name is not None: - body["cluster_name"] = cluster_name - if custom_tags is not None: - body["custom_tags"] = custom_tags - if data_security_mode is not None: - body["data_security_mode"] = data_security_mode.value - if docker_image is not None: - body["docker_image"] = docker_image.as_dict() - if driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = driver_instance_pool_id - if driver_node_type_id is not None: - body["driver_node_type_id"] = driver_node_type_id - if enable_elastic_disk is not None: - body["enable_elastic_disk"] = enable_elastic_disk - if enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = enable_local_disk_encryption - if gcp_attributes is not None: - body["gcp_attributes"] = gcp_attributes.as_dict() - if init_scripts is not None: - body["init_scripts"] = [v.as_dict() for v in init_scripts] - if instance_pool_id is not None: - body["instance_pool_id"] = instance_pool_id - if is_single_node is not None: - body["is_single_node"] = is_single_node - if kind is not None: - body["kind"] = kind.value - if node_type_id is not None: - body["node_type_id"] = node_type_id - if num_workers is not None: - body["num_workers"] = num_workers - if policy_id is not None: - body["policy_id"] = policy_id - if runtime_engine is not None: - body["runtime_engine"] = runtime_engine.value - if single_user_name is not None: - body["single_user_name"] = single_user_name - if spark_conf is not None: - body["spark_conf"] = spark_conf - if spark_env_vars is not None: - body["spark_env_vars"] = spark_env_vars - if spark_version is not None: - body["spark_version"] = spark_version - if ssh_public_keys is not None: - body["ssh_public_keys"] = [v for v in ssh_public_keys] - if use_ml_runtime is not None: - body["use_ml_runtime"] = use_ml_runtime - if workload_type is not None: - body["workload_type"] = workload_type.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.1/clusters/edit", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) - - def edit_and_wait( - self, - cluster_id: str, - spark_version: str, - *, - apply_policy_default_values: Optional[bool] = None, - autoscale: Optional[AutoScale] = None, - autotermination_minutes: Optional[int] = None, - aws_attributes: Optional[AwsAttributes] = None, - azure_attributes: Optional[AzureAttributes] = None, - cluster_log_conf: Optional[ClusterLogConf] = None, - cluster_name: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - data_security_mode: Optional[DataSecurityMode] = None, - docker_image: Optional[DockerImage] = None, - driver_instance_pool_id: Optional[str] = None, - driver_node_type_id: Optional[str] = None, - enable_elastic_disk: Optional[bool] = None, - 
enable_local_disk_encryption: Optional[bool] = None, - gcp_attributes: Optional[GcpAttributes] = None, - init_scripts: Optional[List[InitScriptInfo]] = None, - instance_pool_id: Optional[str] = None, - is_single_node: Optional[bool] = None, - kind: Optional[Kind] = None, - node_type_id: Optional[str] = None, - num_workers: Optional[int] = None, - policy_id: Optional[str] = None, - runtime_engine: Optional[RuntimeEngine] = None, - single_user_name: Optional[str] = None, - spark_conf: Optional[Dict[str, str]] = None, - spark_env_vars: Optional[Dict[str, str]] = None, - ssh_public_keys: Optional[List[str]] = None, - use_ml_runtime: Optional[bool] = None, - workload_type: Optional[WorkloadType] = None, - timeout=timedelta(minutes=20), - ) -> ClusterDetails: - return self.edit( - apply_policy_default_values=apply_policy_default_values, - autoscale=autoscale, - autotermination_minutes=autotermination_minutes, - aws_attributes=aws_attributes, - azure_attributes=azure_attributes, - cluster_id=cluster_id, - cluster_log_conf=cluster_log_conf, - cluster_name=cluster_name, - custom_tags=custom_tags, - data_security_mode=data_security_mode, - docker_image=docker_image, - driver_instance_pool_id=driver_instance_pool_id, - driver_node_type_id=driver_node_type_id, - enable_elastic_disk=enable_elastic_disk, - enable_local_disk_encryption=enable_local_disk_encryption, - gcp_attributes=gcp_attributes, - init_scripts=init_scripts, - instance_pool_id=instance_pool_id, - is_single_node=is_single_node, - kind=kind, - node_type_id=node_type_id, - num_workers=num_workers, - policy_id=policy_id, - runtime_engine=runtime_engine, - single_user_name=single_user_name, - spark_conf=spark_conf, - spark_env_vars=spark_env_vars, - spark_version=spark_version, - ssh_public_keys=ssh_public_keys, - use_ml_runtime=use_ml_runtime, - workload_type=workload_type, - ).result(timeout=timeout) - - def events( - self, - cluster_id: str, - *, - end_time: Optional[int] = None, - event_types: Optional[List[EventType]] = None, - limit: Optional[int] = None, - offset: Optional[int] = None, - order: Optional[GetEventsOrder] = None, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - start_time: Optional[int] = None, - ) -> Iterator[ClusterEvent]: + if apply_policy_default_values is not None: body['apply_policy_default_values'] = apply_policy_default_values + if autoscale is not None: body['autoscale'] = autoscale.as_dict() + if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes + if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() + if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict() + if cluster_id is not None: body['cluster_id'] = cluster_id + if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict() + if cluster_name is not None: body['cluster_name'] = cluster_name + if custom_tags is not None: body['custom_tags'] = custom_tags + if data_security_mode is not None: body['data_security_mode'] = data_security_mode.value + if docker_image is not None: body['docker_image'] = docker_image.as_dict() + if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id + if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id + if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk + if enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = enable_local_disk_encryption + if 
gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() + if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] + if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id + if is_single_node is not None: body['is_single_node'] = is_single_node + if kind is not None: body['kind'] = kind.value + if node_type_id is not None: body['node_type_id'] = node_type_id + if num_workers is not None: body['num_workers'] = num_workers + if policy_id is not None: body['policy_id'] = policy_id + if runtime_engine is not None: body['runtime_engine'] = runtime_engine.value + if single_user_name is not None: body['single_user_name'] = single_user_name + if spark_conf is not None: body['spark_conf'] = spark_conf + if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars + if spark_version is not None: body['spark_version'] = spark_version + if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] + if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime + if workload_type is not None: body['workload_type'] = workload_type.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.1/clusters/edit', body=body + + , headers=headers + ) + return Wait(self.wait_get_cluster_running + , response = EditClusterResponse.from_dict(op_response) + , cluster_id=cluster_id) + + + def edit_and_wait(self + , cluster_id: str, spark_version: str + , * + , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: + return self.edit(apply_policy_default_values=apply_policy_default_values, autoscale=autoscale, autotermination_minutes=autotermination_minutes, aws_attributes=aws_attributes, azure_attributes=azure_attributes, cluster_id=cluster_id, cluster_log_conf=cluster_log_conf, cluster_name=cluster_name, custom_tags=custom_tags, data_security_mode=data_security_mode, docker_image=docker_image, driver_instance_pool_id=driver_instance_pool_id, driver_node_type_id=driver_node_type_id, enable_elastic_disk=enable_elastic_disk, enable_local_disk_encryption=enable_local_disk_encryption, gcp_attributes=gcp_attributes, init_scripts=init_scripts, instance_pool_id=instance_pool_id, 
is_single_node=is_single_node, kind=kind, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, runtime_engine=runtime_engine, single_user_name=single_user_name, spark_conf=spark_conf, spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, use_ml_runtime=use_ml_runtime, workload_type=workload_type).result(timeout=timeout) + + + + + def events(self + , cluster_id: str + , * + , end_time: Optional[int] = None, event_types: Optional[List[EventType]] = None, limit: Optional[int] = None, offset: Optional[int] = None, order: Optional[GetEventsOrder] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, start_time: Optional[int] = None) -> Iterator[ClusterEvent]: """List cluster activity events. - + Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more events to read, the response includes all the parameters necessary to request the next page of events. - + :param cluster_id: str The ID of the cluster to retrieve events about. :param end_time: int (optional) @@ -11041,12 +9173,12 @@ def events( An optional set of event types to filter on. If empty, all event types are returned. :param limit: int (optional) Deprecated: use page_token in combination with page_size instead. - + The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed value is 500. :param offset: int (optional) Deprecated: use page_token in combination with page_size instead. - + The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results are requested in descending order, the end_time field is required. :param order: :class:`GetEventsOrder` (optional) @@ -11061,113 +9193,127 @@ def events( previous page of events respectively. If page_token is empty, the first page is returned. :param start_time: int (optional) The start time in epoch milliseconds. If empty, returns events starting from the beginning of time. 
- + :returns: Iterator over :class:`ClusterEvent` """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - if end_time is not None: - body["end_time"] = end_time - if event_types is not None: - body["event_types"] = [v.value for v in event_types] - if limit is not None: - body["limit"] = limit - if offset is not None: - body["offset"] = offset - if order is not None: - body["order"] = order.value - if page_size is not None: - body["page_size"] = page_size - if page_token is not None: - body["page_token"] = page_token - if start_time is not None: - body["start_time"] = start_time - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - + if cluster_id is not None: body['cluster_id'] = cluster_id + if end_time is not None: body['end_time'] = end_time + if event_types is not None: body['event_types'] = [v.value for v in event_types] + if limit is not None: body['limit'] = limit + if offset is not None: body['offset'] = offset + if order is not None: body['order'] = order.value + if page_size is not None: body['page_size'] = page_size + if page_token is not None: body['page_token'] = page_token + if start_time is not None: body['start_time'] = start_time + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + + while True: - json = self._api.do("POST", "/api/2.1/clusters/events", body=body, headers=headers) - if "events" in json: - for v in json["events"]: - yield ClusterEvent.from_dict(v) - if "next_page" not in json or not json["next_page"]: - return - body = json["next_page"] - - def get(self, cluster_id: str) -> ClusterDetails: - """Get cluster info. + json = self._api.do('POST','/api/2.1/clusters/events', body=body + + , headers=headers + ) + if 'events' in json: + for v in json['events']: + yield ClusterEvent.from_dict(v) + if 'next_page' not in json or not json['next_page']: + return + body = json['next_page'] + + + + + + def get(self + , cluster_id: str + ) -> ClusterDetails: + """Get cluster info. + Retrieves the information for a cluster given its identifier. Clusters can be described while they are running, or up to 60 days after they are terminated. - + :param cluster_id: str The cluster about which to retrieve information. - + :returns: :class:`ClusterDetails` """ - + query = {} - if cluster_id is not None: - query["cluster_id"] = cluster_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.1/clusters/get", query=query, headers=headers) + if cluster_id is not None: query['cluster_id'] = cluster_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.1/clusters/get', query=query + + , headers=headers + ) return ClusterDetails.from_dict(res) - def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse: - """Get cluster permission levels. + + + + def get_permission_levels(self + , cluster_id: str + ) -> GetClusterPermissionLevelsResponse: + """Get cluster permission levels. + Gets the permission levels that a user can have on an object. - + :param cluster_id: str The cluster for which to get or manage permissions. 
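# Sketch of the two read paths shown above, assuming a configured
# WorkspaceClient; the cluster ID is hypothetical. get() returns a single
# ClusterDetails, while events() yields ClusterEvent objects and follows
# next_page tokens transparently inside the iterator.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
info = w.clusters.get(cluster_id="0123-456789-abcdefgh")
print(info.cluster_name, info.state)
for event in w.clusters.events(cluster_id="0123-456789-abcdefgh", page_size=50):
    print(event.timestamp, event.type)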
- + :returns: :class:`GetClusterPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/clusters/{cluster_id}/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/clusters/{cluster_id}/permissionLevels' + + , headers=headers + ) return GetClusterPermissionLevelsResponse.from_dict(res) - def get_permissions(self, cluster_id: str) -> ClusterPermissions: - """Get cluster permissions. + + + + def get_permissions(self + , cluster_id: str + ) -> ClusterPermissions: + """Get cluster permissions. + Gets the permissions of a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. - + :returns: :class:`ClusterPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/clusters/{cluster_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/clusters/{cluster_id}' + + , headers=headers + ) return ClusterPermissions.from_dict(res) - def list( - self, - *, - filter_by: Optional[ListClustersFilterBy] = None, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - sort_by: Optional[ListClustersSortBy] = None, - ) -> Iterator[ClusterDetails]: - """List clusters. + + + + def list(self + + , * + , filter_by: Optional[ListClustersFilterBy] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, sort_by: Optional[ListClustersSortBy] = None) -> Iterator[ClusterDetails]: + """List clusters. + Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. - + :param filter_by: :class:`ListClustersFilterBy` (optional) Filters to apply to the list of clusters. :param page_size: int (optional) @@ -11178,115 +9324,140 @@ def list( previous page of clusters respectively. :param sort_by: :class:`ListClustersSortBy` (optional) Sort the list of clusters by a specific criterion.
- + :returns: Iterator over :class:`ClusterDetails` """ - + query = {} - if filter_by is not None: - query["filter_by"] = filter_by.as_dict() - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - if sort_by is not None: - query["sort_by"] = sort_by.as_dict() - headers = { - "Accept": "application/json", - } - + if filter_by is not None: query['filter_by'] = filter_by.as_dict() + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + if sort_by is not None: query['sort_by'] = sort_by.as_dict() + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/clusters/list", query=query, headers=headers) - if "clusters" in json: - for v in json["clusters"]: - yield ClusterDetails.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] + json = self._api.do('GET','/api/2.1/clusters/list', query=query + + , headers=headers + ) + if 'clusters' in json: + for v in json['clusters']: + yield ClusterDetails.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + def list_node_types(self) -> ListNodeTypesResponse: """List node types. - + Returns a list of supported Spark node types. These node types can be used to launch a cluster. - + :returns: :class:`ListNodeTypesResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.1/clusters/list-node-types", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.1/clusters/list-node-types' + , headers=headers + ) return ListNodeTypesResponse.from_dict(res) + + + + def list_zones(self) -> ListAvailableZonesResponse: """List availability zones. - + Returns a list of availability zones in which clusters can be created (for example, us-west-2a). These zones can be used to launch a cluster. - + :returns: :class:`ListAvailableZonesResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.1/clusters/list-zones", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.1/clusters/list-zones' + , headers=headers + ) return ListAvailableZonesResponse.from_dict(res) - def permanent_delete(self, cluster_id: str): - """Permanently delete cluster. + + + + def permanent_delete(self + , cluster_id: str + ): + """Permanently delete cluster. + Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. - + In addition, users will no longer see permanently deleted clusters in the cluster list, and API users can no longer perform any action on permanently deleted clusters. - + :param cluster_id: str The cluster to be deleted. - - + + """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if cluster_id is not None: body['cluster_id'] = cluster_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.1/clusters/permanent-delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.1/clusters/permanent-delete", body=body, headers=headers) + + + - def pin(self + , cluster_id: str + ): """Pin cluster.
- + Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if cluster_id is not None: body['cluster_id'] = cluster_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.1/clusters/pin', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.1/clusters/pin", body=body, headers=headers) + + + - def resize( - self, cluster_id: str, *, autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None - ) -> Wait[ClusterDetails]: + def resize(self + , cluster_id: str + , * + , autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None) -> Wait[ClusterDetails]: """Resize cluster. - + Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a `RUNNING` state. - + :param cluster_id: str The cluster to be resized. :param autoscale: :class:`AutoScale` (optional) @@ -11295,176 +9466,204 @@ def resize( :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
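# Sketch for resize/resize_and_wait on a RUNNING cluster, assuming a
# configured WorkspaceClient; the cluster ID and sizes are hypothetical.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import AutoScale

w = WorkspaceClient()
# Fixed size: one Spark driver plus num_workers executors.
w.clusters.resize_and_wait(cluster_id="0123-456789-abcdefgh", num_workers=8)
# Alternatively, let the autoscaler pick a size within a range.
w.clusters.resize_and_wait(
    cluster_id="0123-456789-abcdefgh",
    autoscale=AutoScale(min_workers=2, max_workers=8),
)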
""" body = {} - if autoscale is not None: - body["autoscale"] = autoscale.as_dict() - if cluster_id is not None: - body["cluster_id"] = cluster_id - if num_workers is not None: - body["num_workers"] = num_workers - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.1/clusters/resize", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) - - def resize_and_wait( - self, - cluster_id: str, - *, - autoscale: Optional[AutoScale] = None, - num_workers: Optional[int] = None, - timeout=timedelta(minutes=20), - ) -> ClusterDetails: + if autoscale is not None: body['autoscale'] = autoscale.as_dict() + if cluster_id is not None: body['cluster_id'] = cluster_id + if num_workers is not None: body['num_workers'] = num_workers + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.1/clusters/resize', body=body + + , headers=headers + ) + return Wait(self.wait_get_cluster_running + , response = ResizeClusterResponse.from_dict(op_response) + , cluster_id=cluster_id) + + + def resize_and_wait(self + , cluster_id: str + , * + , autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: return self.resize(autoscale=autoscale, cluster_id=cluster_id, num_workers=num_workers).result(timeout=timeout) + + + - def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wait[ClusterDetails]: + def restart(self + , cluster_id: str + , * + , restart_user: Optional[str] = None) -> Wait[ClusterDetails]: """Restart cluster. - + Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state, nothing will happen. - + :param cluster_id: str The cluster to be started. :param restart_user: str (optional) - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
""" body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - if restart_user is not None: - body["restart_user"] = restart_user - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.1/clusters/restart", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) - - def restart_and_wait( - self, cluster_id: str, *, restart_user: Optional[str] = None, timeout=timedelta(minutes=20) - ) -> ClusterDetails: + if cluster_id is not None: body['cluster_id'] = cluster_id + if restart_user is not None: body['restart_user'] = restart_user + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.1/clusters/restart', body=body + + , headers=headers + ) + return Wait(self.wait_get_cluster_running + , response = RestartClusterResponse.from_dict(op_response) + , cluster_id=cluster_id) + + + def restart_and_wait(self + , cluster_id: str + , * + , restart_user: Optional[str] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: return self.restart(cluster_id=cluster_id, restart_user=restart_user).result(timeout=timeout) + + + - def set_permissions( - self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None - ) -> ClusterPermissions: + def set_permissions(self + , cluster_id: str + , * + , access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: """Set cluster permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/clusters/{cluster_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/clusters/{cluster_id}', body=body + + , headers=headers + ) return ClusterPermissions.from_dict(res) + + + + def spark_versions(self) -> GetSparkVersionsResponse: """List available Spark versions. - + Returns the list of available Spark versions. These versions can be used to launch a cluster. - + :returns: :class:`GetSparkVersionsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.1/clusters/spark-versions", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.1/clusters/spark-versions' + , headers=headers + ) return GetSparkVersionsResponse.from_dict(res) - def start(self, cluster_id: str) -> Wait[ClusterDetails]: - """Start terminated cluster. + + + + def start(self + , cluster_id: str + ) -> Wait[ClusterDetails]: + """Start terminated cluster. + Starts a terminated Spark cluster with the supplied ID. 
This works similarly to `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster starts with the last specified cluster size. - If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job cannot be started. - + :param cluster_id: str The cluster to be started. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.1/clusters/start", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) + if cluster_id is not None: body['cluster_id'] = cluster_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.1/clusters/start', body=body + + , headers=headers + ) + return Wait(self.wait_get_cluster_running + , response = StartClusterResponse.from_dict(op_response) + , cluster_id=cluster_id) - def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: + + def start_and_wait(self + , cluster_id: str + , + timeout=timedelta(minutes=20)) -> ClusterDetails: return self.start(cluster_id=cluster_id).result(timeout=timeout) + + + - def unpin(self + , cluster_id: str + ): """Unpin cluster. - + Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if cluster_id is not None: body['cluster_id'] = cluster_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.1/clusters/unpin', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.1/clusters/unpin", body=body, headers=headers) + + + - def update( - self, cluster_id: str, update_mask: str, *, cluster: Optional[UpdateClusterResource] = None - ) -> Wait[ClusterDetails]: + def update(self + , cluster_id: str, update_mask: str + , * + , cluster: Optional[UpdateClusterResource] = None) -> Wait[ClusterDetails]: """Update cluster configuration (partial). - + Updates the configuration of a cluster to match the partial set of attributes and size. Denote which fields to update using the `update_mask` field in the request body. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be @@ -11473,372 +9672,363 @@ def update( is started using the `clusters/start` API. Attempts to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be updated. - + :param cluster_id: str ID of the cluster. :param update_mask: str Used to specify which cluster attributes and size fields to update. See https://google.aip.dev/161 for more details.
- + The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. :param cluster: :class:`UpdateClusterResource` (optional) The cluster to be updated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ body = {} - if cluster is not None: - body["cluster"] = cluster.as_dict() - if cluster_id is not None: - body["cluster_id"] = cluster_id - if update_mask is not None: - body["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.1/clusters/update", body=body, headers=headers) - return Wait( - self.wait_get_cluster_running, response=UpdateClusterResponse.from_dict(op_response), cluster_id=cluster_id - ) - - def update_and_wait( - self, - cluster_id: str, - update_mask: str, - *, - cluster: Optional[UpdateClusterResource] = None, - timeout=timedelta(minutes=20), - ) -> ClusterDetails: + if cluster is not None: body['cluster'] = cluster.as_dict() + if cluster_id is not None: body['cluster_id'] = cluster_id + if update_mask is not None: body['update_mask'] = update_mask + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.1/clusters/update', body=body + + , headers=headers + ) + return Wait(self.wait_get_cluster_running + , response = UpdateClusterResponse.from_dict(op_response) + , cluster_id=cluster_id) + + + def update_and_wait(self + , cluster_id: str, update_mask: str + , * + , cluster: Optional[UpdateClusterResource] = None, + timeout=timedelta(minutes=20)) -> ClusterDetails: return self.update(cluster=cluster, cluster_id=cluster_id, update_mask=update_mask).result(timeout=timeout) + + + - def update_permissions( - self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None - ) -> ClusterPermissions: + def update_permissions(self + , cluster_id: str + , * + , access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: """Update cluster permissions. - + Updates the permissions on a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. 
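# Sketch of the partial update documented above: update_mask lists exactly the
# fields to change (comma-separated, no spaces) and all other attributes keep
# their current values. Assumes a configured WorkspaceClient; the cluster ID
# and new sizes are hypothetical.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import UpdateClusterResource

w = WorkspaceClient()
w.clusters.update_and_wait(
    cluster_id="0123-456789-abcdefgh",
    update_mask="num_workers,autotermination_minutes",
    cluster=UpdateClusterResource(num_workers=4, autotermination_minutes=30),
)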
:param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/permissions/clusters/{cluster_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/clusters/{cluster_id}', body=body + + , headers=headers + ) return ClusterPermissions.from_dict(res) - + + class CommandExecutionAPI: """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API only supports (classic) all-purpose clusters. Serverless compute is not supported.""" - + def __init__(self, api_client): self._api = api_client - - def wait_command_status_command_execution_cancelled( - self, - cluster_id: str, - command_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[CommandStatusResponse], None]] = None, - ) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.CANCELLED,) - failure_states = (CommandStatus.ERROR,) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f"current status: {status}" - if poll.results: - status_message = poll.results.cause - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach Cancelled, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def wait_context_status_command_execution_running( - self, - cluster_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[ContextStatusResponse], None]] = None, - ) -> ContextStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (ContextStatus.RUNNING,) - failure_states = (ContextStatus.ERROR,) - status_message = "polling..." 
- attempt = 1 - while time.time() < deadline: - poll = self.context_status(cluster_id=cluster_id, context_id=context_id) - status = poll.status - status_message = f"current status: {status}" - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach Running, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def wait_command_status_command_execution_finished_or_error( - self, - cluster_id: str, - command_id: str, - context_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[CommandStatusResponse], None]] = None, - ) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = ( - CommandStatus.FINISHED, - CommandStatus.ERROR, - ) - failure_states = ( - CommandStatus.CANCELLED, - CommandStatus.CANCELLING, - ) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f"current status: {status}" - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach Finished or Error, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def cancel( - self, *, cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None - ) -> Wait[CommandStatusResponse]: + + + + + + def wait_command_status_command_execution_cancelled(self, cluster_id: str, command_id: str, context_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (CommandStatus.CANCELLED, ) + failure_states = (CommandStatus.ERROR, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if poll.results: + status_message = poll.results.cause + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Cancelled, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_context_status_command_execution_running(self, cluster_id: str, context_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (ContextStatus.RUNNING, ) + failure_states = (ContextStatus.ERROR, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.context_status(cluster_id=cluster_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Running, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_command_status_command_execution_finished_or_error(self, cluster_id: str, command_id: str, context_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, ) + failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach Finished or Error, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + + def cancel(self + + , * + , cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None) -> Wait[CommandStatusResponse]: """Cancel a command. - + Cancels a currently running command within an execution context. 
- + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str (optional) :param command_id: str (optional) :param context_id: str (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_cancelled for more details. """ body = {} - if cluster_id is not None: - body["clusterId"] = cluster_id - if command_id is not None: - body["commandId"] = command_id - if context_id is not None: - body["contextId"] = context_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/1.2/commands/cancel", body=body, headers=headers) - return Wait( - self.wait_command_status_command_execution_cancelled, - response=CancelResponse.from_dict(op_response), - cluster_id=cluster_id, - command_id=command_id, - context_id=context_id, - ) - - def cancel_and_wait( - self, - *, - cluster_id: Optional[str] = None, - command_id: Optional[str] = None, - context_id: Optional[str] = None, - timeout=timedelta(minutes=20), - ) -> CommandStatusResponse: + if cluster_id is not None: body['clusterId'] = cluster_id + if command_id is not None: body['commandId'] = command_id + if context_id is not None: body['contextId'] = context_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/1.2/commands/cancel', body=body + + , headers=headers + ) + return Wait(self.wait_command_status_command_execution_cancelled + , response = CancelResponse.from_dict(op_response) + , cluster_id=cluster_id, command_id=command_id, context_id=context_id) + + + def cancel_and_wait(self + + , * + , cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None, + timeout=timedelta(minutes=20)) -> CommandStatusResponse: return self.cancel(cluster_id=cluster_id, command_id=command_id, context_id=context_id).result(timeout=timeout) + + + - def command_status(self, cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse: + def command_status(self + , cluster_id: str, context_id: str, command_id: str + ) -> CommandStatusResponse: """Get command info. - + Gets the status of and, if available, the results from a currently executing command. - + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str :param context_id: str :param command_id: str - + :returns: :class:`CommandStatusResponse` """ - + query = {} - if cluster_id is not None: - query["clusterId"] = cluster_id - if command_id is not None: - query["commandId"] = command_id - if context_id is not None: - query["contextId"] = context_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/1.2/commands/status", query=query, headers=headers) + if cluster_id is not None: query['clusterId'] = cluster_id + if command_id is not None: query['commandId'] = command_id + if context_id is not None: query['contextId'] = context_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/1.2/commands/status', query=query + + , headers=headers + ) return CommandStatusResponse.from_dict(res) - def context_status(self, cluster_id: str, context_id: str) -> ContextStatusResponse: - """Get status. + + + + def context_status(self + , cluster_id: str, context_id: str + ) -> ContextStatusResponse: + """Get status. + Gets the status for an execution context. 
- + :param cluster_id: str :param context_id: str - + :returns: :class:`ContextStatusResponse` """ - + query = {} - if cluster_id is not None: - query["clusterId"] = cluster_id - if context_id is not None: - query["contextId"] = context_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/1.2/contexts/status", query=query, headers=headers) + if cluster_id is not None: query['clusterId'] = cluster_id + if context_id is not None: query['contextId'] = context_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/1.2/contexts/status', query=query + + , headers=headers + ) return ContextStatusResponse.from_dict(res) - def create( - self, *, cluster_id: Optional[str] = None, language: Optional[Language] = None - ) -> Wait[ContextStatusResponse]: - """Create an execution context. + + + + def create(self + + , * + , cluster_id: Optional[str] = None, language: Optional[Language] = None) -> Wait[ContextStatusResponse]: + """Create an execution context. + Creates an execution context for running cluster commands. - + If successful, this method returns the ID of the new execution context. - + :param cluster_id: str (optional) Running cluster id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`ContextStatusResponse`. See :method:wait_context_status_command_execution_running for more details. """ body = {} - if cluster_id is not None: - body["clusterId"] = cluster_id - if language is not None: - body["language"] = language.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/1.2/contexts/create", body=body, headers=headers) - return Wait( - self.wait_context_status_command_execution_running, - response=Created.from_dict(op_response), - cluster_id=cluster_id, - context_id=op_response["id"], - ) - - def create_and_wait( - self, *, cluster_id: Optional[str] = None, language: Optional[Language] = None, timeout=timedelta(minutes=20) - ) -> ContextStatusResponse: + if cluster_id is not None: body['clusterId'] = cluster_id + if language is not None: body['language'] = language.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/1.2/contexts/create', body=body + + , headers=headers + ) + return Wait(self.wait_context_status_command_execution_running + , response = Created.from_dict(op_response) + , cluster_id=cluster_id, context_id=op_response['id']) + + + def create_and_wait(self + + , * + , cluster_id: Optional[str] = None, language: Optional[Language] = None, + timeout=timedelta(minutes=20)) -> ContextStatusResponse: return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout) + + + - def destroy(self, cluster_id: str, context_id: str): + def destroy(self + , cluster_id: str, context_id: str + ): """Delete an execution context. - + Deletes an execution context. 
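# Sketch of the execution-context lifecycle: create a context on a running
# cluster, run a command in it with execute_and_wait (defined just below),
# then destroy the context. Assumes a configured WorkspaceClient; the cluster
# ID is hypothetical.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Language

w = WorkspaceClient()
cluster_id = "0123-456789-abcdefgh"
ctx = w.command_execution.create_and_wait(cluster_id=cluster_id, language=Language.PYTHON)
result = w.command_execution.execute_and_wait(
    cluster_id=cluster_id,
    context_id=ctx.id,
    language=Language.PYTHON,
    command="print(40 + 2)",
)
print(result.results.data)
w.command_execution.destroy(cluster_id=cluster_id, context_id=ctx.id)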
- + :param cluster_id: str :param context_id: str - - + + """ body = {} - if cluster_id is not None: - body["clusterId"] = cluster_id - if context_id is not None: - body["contextId"] = context_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/1.2/contexts/destroy", body=body, headers=headers) - - def execute( - self, - *, - cluster_id: Optional[str] = None, - command: Optional[str] = None, - context_id: Optional[str] = None, - language: Optional[Language] = None, - ) -> Wait[CommandStatusResponse]: - """Run a command. + if cluster_id is not None: body['clusterId'] = cluster_id + if context_id is not None: body['contextId'] = context_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/1.2/contexts/destroy', body=body + + , headers=headers + ) + - Runs a cluster command in the given execution context, using the provided language. + + + + def execute(self + + , * + , cluster_id: Optional[str] = None, command: Optional[str] = None, context_id: Optional[str] = None, language: Optional[Language] = None) -> Wait[CommandStatusResponse]: + """Run a command. + + Runs a cluster command in the given execution context, using the provided language. + If successful, it returns an ID for tracking the status of the command's execution. - + :param cluster_id: str (optional) Running cluster id :param command: str (optional) @@ -11846,67 +10036,63 @@ def execute( :param context_id: str (optional) Running context id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_finished_or_error for more details. """ body = {} - if cluster_id is not None: - body["clusterId"] = cluster_id - if command is not None: - body["command"] = command - if context_id is not None: - body["contextId"] = context_id - if language is not None: - body["language"] = language.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/1.2/commands/execute", body=body, headers=headers) - return Wait( - self.wait_command_status_command_execution_finished_or_error, - response=Created.from_dict(op_response), - cluster_id=cluster_id, - command_id=op_response["id"], - context_id=context_id, - ) - - def execute_and_wait( - self, - *, - cluster_id: Optional[str] = None, - command: Optional[str] = None, - context_id: Optional[str] = None, - language: Optional[Language] = None, - timeout=timedelta(minutes=20), - ) -> CommandStatusResponse: - return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, language=language).result( - timeout=timeout - ) - + if cluster_id is not None: body['clusterId'] = cluster_id + if command is not None: body['command'] = command + if context_id is not None: body['contextId'] = context_id + if language is not None: body['language'] = language.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/1.2/commands/execute', body=body + + , headers=headers + ) + return Wait(self.wait_command_status_command_execution_finished_or_error + , response = Created.from_dict(op_response) + , cluster_id=cluster_id, command_id=op_response['id'], context_id=context_id) + + def execute_and_wait(self + + , * + , cluster_id: Optional[str] = None, command: Optional[str] = None, context_id: Optional[str] = None, 
language: Optional[Language] = None, + timeout=timedelta(minutes=20)) -> CommandStatusResponse: + return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, language=language).result(timeout=timeout) + + class GlobalInitScriptsAPI: """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace. These scripts run on every node in every cluster in the workspace. - + **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts. Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark container fails to launch and init scripts with later position are skipped. If enough containers fail, the entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, name: str, script: str, *, enabled: Optional[bool] = None, position: Optional[int] = None - ) -> CreateResponse: - """Create init script. + - Creates a new global init script in this workspace. + + + + + def create(self + , name: str, script: str + , * + , enabled: Optional[bool] = None, position: Optional[int] = None) -> CreateResponse: + """Create init script. + + Creates a new global init script in this workspace. + :param name: str The name of the script :param script: str @@ -11916,94 +10102,115 @@ def create( :param position: int (optional) The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. - + If you omit the numeric position for a new global init script, it defaults to last position. It will run after all current scripts. Setting any value greater than the position of the last script is equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2. Any position of (3) or greater puts the script in the last position. If an explicit position value conflicts with an existing script value, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1. - + :returns: :class:`CreateResponse` """ body = {} - if enabled is not None: - body["enabled"] = enabled - if name is not None: - body["name"] = name - if position is not None: - body["position"] = position - if script is not None: - body["script"] = script - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/global-init-scripts", body=body, headers=headers) + if enabled is not None: body['enabled'] = enabled + if name is not None: body['name'] = name + if position is not None: body['position'] = position + if script is not None: body['script'] = script + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/global-init-scripts', body=body + + , headers=headers + ) return CreateResponse.from_dict(res) - def delete(self, script_id: str): - """Delete init script. + + + + def delete(self + , script_id: str + ): + """Delete init script. + Deletes a global init script. - + :param script_id: str The ID of the global init script. 
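# --- Editor's illustrative sketch (not part of the generated patch) ---
# Creating a global init script per the semantics documented above: the script
# body is Base64-encoded, and omitting `position` appends the script last.
# Assumes a configured WorkspaceClient `w`; the script name is a placeholder.
import base64

created = w.global_init_scripts.create(
    name="editor-demo-script",
    script=base64.b64encode(b"echo hello").decode(),
    enabled=True)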
- - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/global-init-scripts/{script_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/global-init-scripts/{script_id}", headers=headers) + + + - def get(self, script_id: str) -> GlobalInitScriptDetailsWithContent: + def get(self + , script_id: str + ) -> GlobalInitScriptDetailsWithContent: """Get an init script. - + Gets all the details of a script, including its Base64-encoded contents. - + :param script_id: str The ID of the global init script. - + :returns: :class:`GlobalInitScriptDetailsWithContent` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/global-init-scripts/{script_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/global-init-scripts/{script_id}' + + , headers=headers + ) return GlobalInitScriptDetailsWithContent.from_dict(res) + + + + def list(self) -> Iterator[GlobalInitScriptDetails]: """Get init scripts. - + Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. To retrieve the contents of a script, use the [get a global init script](:method:globalinitscripts/get) operation. - + :returns: Iterator over :class:`GlobalInitScriptDetails` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/global-init-scripts", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/global-init-scripts' + , headers=headers + ) parsed = ListGlobalInitScriptsResponse.from_dict(json).scripts return parsed if parsed is not None else [] + - def update( - self, script_id: str, name: str, script: str, *, enabled: Optional[bool] = None, position: Optional[int] = None - ): - """Update init script. + + + + def update(self + , script_id: str, name: str, script: str + , * + , enabled: Optional[bool] = None, position: Optional[int] = None): + """Update init script. + Updates a global init script, specifying only the fields to change. All fields are optional. Unspecified fields retain their current value. - + :param script_id: str The ID of the global init script. :param name: str @@ -12015,73 +10222,66 @@ def update( :param position: int (optional) The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. To move the script to run first, set its position to 0. - + To move the script to the end, set its position to any value greater or equal to the position of the last script. Example, three existing scripts with positions 0, 1, and 2. Any position value of 2 or greater puts the script in the last position (2). - + If an explicit position value conflicts with an existing script, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1. 
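# --- Editor's illustrative sketch (not part of the generated patch) ---
# Moving an existing script to run first, per the position semantics described
# above. `update` requires `name` and `script` again, so they are re-read via
# `get` (which already returns the Base64-encoded contents). Assumes `w` as
# before and at least one existing script.
first = next(iter(w.global_init_scripts.list()))
details = w.global_init_scripts.get(script_id=first.script_id)
w.global_init_scripts.update(script_id=first.script_id, name=details.name,
                             script=details.script, position=0)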
- - + + """ body = {} - if enabled is not None: - body["enabled"] = enabled - if name is not None: - body["name"] = name - if position is not None: - body["position"] = position - if script is not None: - body["script"] = script - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/global-init-scripts/{script_id}", body=body, headers=headers) - + if enabled is not None: body['enabled'] = enabled + if name is not None: body['name'] = name + if position is not None: body['position'] = position + if script is not None: body['script'] = script + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/global-init-scripts/{script_id}', body=body + + , headers=headers + ) + + + class InstancePoolsAPI: """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times. - + Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle instances. If the pool has no idle instances, the pool expands by allocating a new instance from the instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that pool’s idle instances. - + You can specify a different pool for the driver node and worker nodes, or use the same pool for both. - + Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does apply. See pricing.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - instance_pool_name: str, - node_type_id: str, - *, - aws_attributes: Optional[InstancePoolAwsAttributes] = None, - azure_attributes: Optional[InstancePoolAzureAttributes] = None, - custom_tags: Optional[Dict[str, str]] = None, - disk_spec: Optional[DiskSpec] = None, - enable_elastic_disk: Optional[bool] = None, - gcp_attributes: Optional[InstancePoolGcpAttributes] = None, - idle_instance_autotermination_minutes: Optional[int] = None, - max_capacity: Optional[int] = None, - min_idle_instances: Optional[int] = None, - preloaded_docker_images: Optional[List[DockerImage]] = None, - preloaded_spark_versions: Optional[List[str]] = None, - ) -> CreateInstancePoolResponse: - """Create a new instance pool. + - Creates a new instance pool using idle and ready-to-use cloud instances. + + + + + def create(self + , instance_pool_name: str, node_type_id: str + , * + , aws_attributes: Optional[InstancePoolAwsAttributes] = None, azure_attributes: Optional[InstancePoolAzureAttributes] = None, custom_tags: Optional[Dict[str,str]] = None, disk_spec: Optional[DiskSpec] = None, enable_elastic_disk: Optional[bool] = None, gcp_attributes: Optional[InstancePoolGcpAttributes] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, preloaded_docker_images: Optional[List[DockerImage]] = None, preloaded_spark_versions: Optional[List[str]] = None) -> CreateInstancePoolResponse: + """Create a new instance pool. + + Creates a new instance pool using idle and ready-to-use cloud instances. + :param instance_pool_name: str Pool name requested by the user. Pool name must be unique. 
Length must be between 1 and 100 characters. @@ -12099,7 +10299,7 @@ def create( :param custom_tags: Dict[str,str] (optional) Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. @@ -12128,79 +10328,69 @@ def create( A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. - + :returns: :class:`CreateInstancePoolResponse` """ body = {} - if aws_attributes is not None: - body["aws_attributes"] = aws_attributes.as_dict() - if azure_attributes is not None: - body["azure_attributes"] = azure_attributes.as_dict() - if custom_tags is not None: - body["custom_tags"] = custom_tags - if disk_spec is not None: - body["disk_spec"] = disk_spec.as_dict() - if enable_elastic_disk is not None: - body["enable_elastic_disk"] = enable_elastic_disk - if gcp_attributes is not None: - body["gcp_attributes"] = gcp_attributes.as_dict() - if idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes - if instance_pool_name is not None: - body["instance_pool_name"] = instance_pool_name - if max_capacity is not None: - body["max_capacity"] = max_capacity - if min_idle_instances is not None: - body["min_idle_instances"] = min_idle_instances - if node_type_id is not None: - body["node_type_id"] = node_type_id - if preloaded_docker_images is not None: - body["preloaded_docker_images"] = [v.as_dict() for v in preloaded_docker_images] - if preloaded_spark_versions is not None: - body["preloaded_spark_versions"] = [v for v in preloaded_spark_versions] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/instance-pools/create", body=body, headers=headers) + if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() + if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict() + if custom_tags is not None: body['custom_tags'] = custom_tags + if disk_spec is not None: body['disk_spec'] = disk_spec.as_dict() + if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk + if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() + if idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes + if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name + if max_capacity is not None: body['max_capacity'] = max_capacity + if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances + if node_type_id is not None: body['node_type_id'] = node_type_id + if preloaded_docker_images is not None: body['preloaded_docker_images'] = [v.as_dict() for v in preloaded_docker_images] + if preloaded_spark_versions is not None: body['preloaded_spark_versions'] = [v for v in preloaded_spark_versions] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/instance-pools/create', body=body + + , headers=headers + ) 
return CreateInstancePoolResponse.from_dict(res) - def delete(self, instance_pool_id: str): - """Delete an instance pool. + + + + def delete(self + , instance_pool_id: str + ): + """Delete an instance pool. + Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously. - + :param instance_pool_id: str The instance pool to be terminated. - - + + """ body = {} - if instance_pool_id is not None: - body["instance_pool_id"] = instance_pool_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/instance-pools/delete", body=body, headers=headers) - - def edit( - self, - instance_pool_id: str, - instance_pool_name: str, - node_type_id: str, - *, - custom_tags: Optional[Dict[str, str]] = None, - idle_instance_autotermination_minutes: Optional[int] = None, - max_capacity: Optional[int] = None, - min_idle_instances: Optional[int] = None, - ): - """Edit an existing instance pool. + if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/instance-pools/delete', body=body + + , headers=headers + ) + - Modifies the configuration of an existing instance pool. + + + + def edit(self + , instance_pool_id: str, instance_pool_name: str, node_type_id: str + , * + , custom_tags: Optional[Dict[str,str]] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None): + """Edit an existing instance pool. + + Modifies the configuration of an existing instance pool. + :param instance_pool_id: str Instance pool ID :param instance_pool_name: str @@ -12214,7 +10404,7 @@ def edit( :param custom_tags: Dict[str,str] (optional) Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time @@ -12228,195 +10418,222 @@ def edit( upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool - - + + """ body = {} - if custom_tags is not None: - body["custom_tags"] = custom_tags - if idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes - if instance_pool_id is not None: - body["instance_pool_id"] = instance_pool_id - if instance_pool_name is not None: - body["instance_pool_name"] = instance_pool_name - if max_capacity is not None: - body["max_capacity"] = max_capacity - if min_idle_instances is not None: - body["min_idle_instances"] = min_idle_instances - if node_type_id is not None: - body["node_type_id"] = node_type_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/instance-pools/edit", body=body, headers=headers) - - def get(self, instance_pool_id: str) -> GetInstancePool: - """Get instance pool information. 
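# --- Editor's illustrative sketch (not part of the generated patch) ---
# Creating and then resizing a pool with the methods above. Note that `edit`
# takes `instance_pool_name` and `node_type_id` as required arguments again.
# Assumes `w` (WorkspaceClient) and a cloud-specific node type id placeholder.
pool = w.instance_pools.create(instance_pool_name="editor-demo-pool",
                               node_type_id="<node-type-id>",
                               min_idle_instances=1)
w.instance_pools.edit(instance_pool_id=pool.instance_pool_id,
                      instance_pool_name="editor-demo-pool",
                      node_type_id="<node-type-id>",
                      max_capacity=10)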
+ if custom_tags is not None: body['custom_tags'] = custom_tags + if idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes + if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id + if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name + if max_capacity is not None: body['max_capacity'] = max_capacity + if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances + if node_type_id is not None: body['node_type_id'] = node_type_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/instance-pools/edit', body=body + + , headers=headers + ) + - Retrieve the information for an instance pool based on its identifier. + + + + def get(self + , instance_pool_id: str + ) -> GetInstancePool: + """Get instance pool information. + + Retrieve the information for an instance pool based on its identifier. + :param instance_pool_id: str The canonical unique identifier for the instance pool. - + :returns: :class:`GetInstancePool` """ - + query = {} - if instance_pool_id is not None: - query["instance_pool_id"] = instance_pool_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/instance-pools/get", query=query, headers=headers) + if instance_pool_id is not None: query['instance_pool_id'] = instance_pool_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/instance-pools/get', query=query + + , headers=headers + ) return GetInstancePool.from_dict(res) - def get_permission_levels(self, instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse: - """Get instance pool permission levels. + + + + def get_permission_levels(self + , instance_pool_id: str + ) -> GetInstancePoolPermissionLevelsResponse: + """Get instance pool permission levels. + Gets the permission levels that a user can have on an object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. - + :returns: :class:`GetInstancePoolPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels' + + , headers=headers + ) return GetInstancePoolPermissionLevelsResponse.from_dict(res) - def get_permissions(self, instance_pool_id: str) -> InstancePoolPermissions: - """Get instance pool permissions. + + + + def get_permissions(self + , instance_pool_id: str + ) -> InstancePoolPermissions: + """Get instance pool permissions. + Gets the permissions of an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. - + :returns: :class:`InstancePoolPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/instance-pools/{instance_pool_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/instance-pools/{instance_pool_id}' + + , headers=headers + ) return InstancePoolPermissions.from_dict(res) + + + + def list(self) -> Iterator[InstancePoolAndStats]: """List instance pool info. 
- + Gets a list of instance pools with their statistics. - + :returns: Iterator over :class:`InstancePoolAndStats` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/instance-pools/list", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/instance-pools/list' + , headers=headers + ) parsed = ListInstancePools.from_dict(json).instance_pools return parsed if parsed is not None else [] + - def set_permissions( - self, instance_pool_id: str, *, access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - ) -> InstancePoolPermissions: - """Set instance pool permissions. + + + + def set_permissions(self + , instance_pool_id: str + , * + , access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None) -> InstancePoolPermissions: + """Set instance pool permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/instance-pools/{instance_pool_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/instance-pools/{instance_pool_id}', body=body + + , headers=headers + ) return InstancePoolPermissions.from_dict(res) - def update_permissions( - self, instance_pool_id: str, *, access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - ) -> InstancePoolPermissions: - """Update instance pool permissions. + + + + def update_permissions(self + , instance_pool_id: str + , * + , access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None) -> InstancePoolPermissions: + """Update instance pool permissions. + Updates the permissions on an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. 
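# --- Editor's illustrative sketch (not part of the generated patch) ---
# Granting a user access to a pool with the permissions methods above:
# `update_permissions` merges with existing grants, while `set_permissions`
# replaces them. The request/enum classes are defined elsewhere in this
# module; the user name is a placeholder.
from databricks.sdk.service.compute import (InstancePoolAccessControlRequest,
                                            InstancePoolPermissionLevel)

w.instance_pools.update_permissions(
    instance_pool_id="<pool-id>",
    access_control_list=[InstancePoolAccessControlRequest(
        user_name="someone@example.com",
        permission_level=InstancePoolPermissionLevel.CAN_ATTACH_TO)])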
:param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/permissions/instance-pools/{instance_pool_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/instance-pools/{instance_pool_id}', body=body + + , headers=headers + ) return InstancePoolPermissions.from_dict(res) - + + class InstanceProfilesAPI: """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 buckets] using instance profiles for more information. - - [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html - """ - + + [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html""" + def __init__(self, api_client): self._api = api_client + - def add( - self, - instance_profile_arn: str, - *, - iam_role_arn: Optional[str] = None, - is_meta_instance_profile: Optional[bool] = None, - skip_validation: Optional[bool] = None, - ): - """Register an instance profile. + + + + + + + def add(self + , instance_profile_arn: str + , * + , iam_role_arn: Optional[str] = None, is_meta_instance_profile: Optional[bool] = None, skip_validation: Optional[bool] = None): + """Register an instance profile. + Registers an instance profile in Databricks. In the UI, you can then give users the permission to use this instance profile when launching clusters. - + This API is only available to admin users. - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough @@ -12429,314 +10646,367 @@ def add( fails with an error message that does not indicate an IAM related permission issue, (e.g. “Your requested instance type is not supported in your requested availability zone”), you can pass this flag to skip the validation and forcibly add the instance profile. 
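# --- Editor's illustrative sketch (not part of the generated patch) ---
# Registering an AWS instance profile as described above. `skip_validation`
# bypasses the launch check mentioned in the docstring; the ARN below is a
# placeholder. Assumes an admin-authenticated WorkspaceClient `w`.
w.instance_profiles.add(
    instance_profile_arn="arn:aws:iam::<account-id>:instance-profile/<name>",
    skip_validation=False)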
- - + + """ body = {} - if iam_role_arn is not None: - body["iam_role_arn"] = iam_role_arn - if instance_profile_arn is not None: - body["instance_profile_arn"] = instance_profile_arn - if is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = is_meta_instance_profile - if skip_validation is not None: - body["skip_validation"] = skip_validation - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/instance-profiles/add", body=body, headers=headers) - - def edit( - self, - instance_profile_arn: str, - *, - iam_role_arn: Optional[str] = None, - is_meta_instance_profile: Optional[bool] = None, - ): - """Edit an instance profile. + if iam_role_arn is not None: body['iam_role_arn'] = iam_role_arn + if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn + if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile + if skip_validation is not None: body['skip_validation'] = skip_validation + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/instance-profiles/add', body=body + + , headers=headers + ) + + + + + + def edit(self + , instance_profile_arn: str + , * + , iam_role_arn: Optional[str] = None, is_meta_instance_profile: Optional[bool] = None): + """Edit an instance profile. + The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both of the following are true: - + * Your role name and instance profile name do not match. The name is the part after the last slash in each ARN. * You want to use the instance profile with [Databricks SQL Serverless]. - + To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses]. - + This API is only available to admin users. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains an meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. This field is optional, the default value is `false`. 
- - + + """ body = {} - if iam_role_arn is not None: - body["iam_role_arn"] = iam_role_arn - if instance_profile_arn is not None: - body["instance_profile_arn"] = instance_profile_arn - if is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = is_meta_instance_profile - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if iam_role_arn is not None: body['iam_role_arn'] = iam_role_arn + if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn + if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/instance-profiles/edit', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/instance-profiles/edit", body=body, headers=headers) + + + def list(self) -> Iterator[InstanceProfile]: """List available instance profiles. - + List the instance profiles that the calling user can use to launch a cluster. - + This API is available to all users. - + :returns: Iterator over :class:`InstanceProfile` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/instance-profiles/list", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/instance-profiles/list' + , headers=headers + ) parsed = ListInstanceProfilesResponse.from_dict(json).instance_profiles return parsed if parsed is not None else [] + - def remove(self, instance_profile_arn: str): - """Remove the instance profile. + + + + def remove(self + , instance_profile_arn: str + ): + """Remove the instance profile. + Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. - + This API is only accessible to admin users. - + :param instance_profile_arn: str The ARN of the instance profile to remove. This field is required. - - + + """ body = {} - if instance_profile_arn is not None: - body["instance_profile_arn"] = instance_profile_arn - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/instance-profiles/remove", body=body, headers=headers) - + if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/instance-profiles/remove', body=body + + , headers=headers + ) + + + class LibrariesAPI: """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster. - + To make third-party or custom code available to notebooks and jobs running on your clusters, you can install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories. - + Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library directly from a public repository such as PyPI or Maven, using a previously installed workspace library, or using an init script. - + When you uninstall a library from a cluster, the library is removed only when you restart the cluster. 
 Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending
 restart."""
-
+
     def __init__(self, api_client):
         self._api = api_client
+
+
+
+
+
+
+
+
     def all_cluster_statuses(self) -> Iterator[ClusterLibraryStatuses]:
         """Get all statuses.
-
+
         Get the status of all libraries on all clusters. A status is returned for all libraries installed
         on this cluster via the API or the libraries UI.
-
+
         :returns: Iterator over :class:`ClusterLibraryStatuses`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        json = self._api.do("GET", "/api/2.0/libraries/all-cluster-statuses", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        json = self._api.do('GET','/api/2.0/libraries/all-cluster-statuses'
+        , headers=headers
+        )
         parsed = ListAllClusterLibraryStatusesResponse.from_dict(json).statuses
         return parsed if parsed is not None else []
+

-    def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]:
-        """Get status.
+
+
+
+    def cluster_status(self
+    , cluster_id: str
+    ) -> Iterator[LibraryFullStatus]:
+        """Get status.
+
         Get the status of libraries on a cluster. A status is returned for all libraries installed on this
         cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries
         set to be installed on this cluster, in the order that the libraries were added to the cluster, are
         returned first. 2. Libraries that were previously requested to be installed on this cluster, but are
         now marked for removal, in no particular order, are returned last.
-
+
         :param cluster_id: str
           Unique identifier of the cluster whose status should be retrieved.
-
+
         :returns: Iterator over :class:`LibraryFullStatus`
         """
-
+
         query = {}
-        if cluster_id is not None:
-            query["cluster_id"] = cluster_id
-        headers = {
-            "Accept": "application/json",
-        }
-
-        json = self._api.do("GET", "/api/2.0/libraries/cluster-status", query=query, headers=headers)
+        if cluster_id is not None: query['cluster_id'] = cluster_id
+        headers = {'Accept': 'application/json',}
+
+        json = self._api.do('GET','/api/2.0/libraries/cluster-status', query=query
+
+        , headers=headers
+        )
         parsed = ClusterLibraryStatuses.from_dict(json).library_statuses
         return parsed if parsed is not None else []
+

-    def install(self, cluster_id: str, libraries: List[Library]):
-        """Add a library.
+
+
+
+    def install(self
+    , cluster_id: str, libraries: List[Library]
+    ):
+        """Add a library.
+
         Add libraries to install on a cluster. The installation is asynchronous; it happens in the
         background after the completion of this request.
-
+
         :param cluster_id: str
           Unique identifier for the cluster on which to install these libraries.
         :param libraries: List[:class:`Library`]
           The libraries to install.
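# --- Editor's illustrative sketch (not part of the generated patch) ---
# Installing a PyPI library and polling its status with the methods above.
# `install` is asynchronous, so `cluster_status` is how progress is observed.
# Assumes `w` (WorkspaceClient) and a running cluster id placeholder.
from databricks.sdk.service.compute import Library, PyPiLibrary

w.libraries.install(cluster_id="<cluster-id>",
                    libraries=[Library(pypi=PyPiLibrary(package="simplejson"))])
for status in w.libraries.cluster_status(cluster_id="<cluster-id>"):
    print(status.library, status.status)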
- - + + """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - if libraries is not None: - body["libraries"] = [v.as_dict() for v in libraries] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if cluster_id is not None: body['cluster_id'] = cluster_id + if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/libraries/install', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers) + + + - def uninstall(self, cluster_id: str, libraries: List[Library]): + def uninstall(self + , cluster_id: str, libraries: List[Library] + ): """Uninstall libraries. - + Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. - + :param cluster_id: str Unique identifier for the cluster on which to uninstall these libraries. :param libraries: List[:class:`Library`] The libraries to uninstall. - - + + """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - if libraries is not None: - body["libraries"] = [v.as_dict() for v in libraries] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers) - + if cluster_id is not None: body['cluster_id'] = cluster_id + if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/libraries/uninstall', body=body + + , headers=headers + ) + + + class PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace. - + A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce compliance API allows you to update a cluster to be compliant with the current version of its policy.""" - + def __init__(self, api_client): self._api = api_client + - def enforce_compliance( - self, cluster_id: str, *, validate_only: Optional[bool] = None - ) -> EnforceClusterComplianceResponse: - """Enforce cluster policy compliance. + + + + + + + def enforce_compliance(self + , cluster_id: str + , * + , validate_only: Optional[bool] = None) -> EnforceClusterComplianceResponse: + """Enforce cluster policy compliance. + Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the cluster is started, the new attributes will take effect. - + Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. - + :param cluster_id: str The ID of the cluster you want to enforce policy compliance on. 
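# --- Editor's illustrative sketch (not part of the generated patch) ---
# A dry run of policy enforcement using `validate_only`, per the docstring
# above: the response previews changes without restarting the cluster. The
# `has_changes`/`changes` field names are assumptions based on the generated
# response class, which is not shown in this hunk.
resp = w.policy_compliance_for_clusters.enforce_compliance(
    cluster_id="<cluster-id>", validate_only=True)
print(resp.has_changes, resp.changes)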
:param validate_only: bool (optional) If set, previews the changes that would be made to a cluster to enforce compliance but does not update the cluster. - + :returns: :class:`EnforceClusterComplianceResponse` """ body = {} - if cluster_id is not None: - body["cluster_id"] = cluster_id - if validate_only is not None: - body["validate_only"] = validate_only - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/policies/clusters/enforce-compliance", body=body, headers=headers) + if cluster_id is not None: body['cluster_id'] = cluster_id + if validate_only is not None: body['validate_only'] = validate_only + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/policies/clusters/enforce-compliance', body=body + + , headers=headers + ) return EnforceClusterComplianceResponse.from_dict(res) - def get_compliance(self, cluster_id: str) -> GetClusterComplianceResponse: - """Get cluster policy compliance. + + + + def get_compliance(self + , cluster_id: str + ) -> GetClusterComplianceResponse: + """Get cluster policy compliance. + Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param cluster_id: str The ID of the cluster to get the compliance status - + :returns: :class:`GetClusterComplianceResponse` """ - + query = {} - if cluster_id is not None: - query["cluster_id"] = cluster_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/policies/clusters/get-compliance", query=query, headers=headers) + if cluster_id is not None: query['cluster_id'] = cluster_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/policies/clusters/get-compliance', query=query + + , headers=headers + ) return GetClusterComplianceResponse.from_dict(res) - def list_compliance( - self, policy_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ClusterCompliance]: - """List cluster policy compliance. + + + + def list_compliance(self + , policy_id: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ClusterCompliance]: + """List cluster policy compliance. + Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param policy_id: str Canonical unique identifier for the cluster policy. :param page_size: int (optional) @@ -12745,96 +11015,121 @@ def list_compliance( :param page_token: str (optional) A page token that can be used to navigate to the next page or previous page as returned by `next_page_token` or `prev_page_token`. 
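# --- Editor's illustrative sketch (not part of the generated patch) ---
# The iterator returned by `list_compliance` hides the `page_token` loop shown
# in the implementation; callers simply iterate. The `is_compliant` field name
# is an assumption based on the generated `ClusterCompliance` class, which is
# not shown in this hunk.
for c in w.policy_compliance_for_clusters.list_compliance(policy_id="<policy-id>",
                                                          page_size=50):
    print(c.cluster_id, c.is_compliant)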
-
+
         :returns: Iterator over :class:`ClusterCompliance`
         """
-
+
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        if policy_id is not None:
-            query["policy_id"] = policy_id
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json',}
+
+
+
         while True:
-            json = self._api.do("GET", "/api/2.0/policies/clusters/list-compliance", query=query, headers=headers)
-            if "clusters" in json:
-                for v in json["clusters"]:
-                    yield ClusterCompliance.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
+          json = self._api.do('GET','/api/2.0/policies/clusters/list-compliance', query=query
+
+          , headers=headers
+          )
+          if 'clusters' in json:
+            for v in json['clusters']:
+              yield ClusterCompliance.from_dict(v)
+          if 'next_page_token' not in json or not json['next_page_token']:
+            return
+          query['page_token'] = json['next_page_token']
+
+
+
 class PolicyFamiliesAPI:
     """View available policy families. A policy family contains a policy definition providing best
     practices for configuring clusters for a particular use case.
-
+
     Databricks manages and provides policy families for several common cluster use cases. You cannot
     create, edit, or delete policy families.
-
+
     Policy families cannot be used directly to create clusters. Instead, you create cluster policies
     using a policy family. Cluster policies created using a policy family inherit the policy family's
-    policy definition.
-    """
-
+    policy definition."""
+
     def __init__(self, api_client):
         self._api = api_client
+

-    def get(self, policy_family_id: str, *, version: Optional[int] = None) -> PolicyFamily:
-        """Get policy family information.
+
-        Retrieve the information for an policy family based on its identifier and version
+
+
+
+    def get(self
+    , policy_family_id: str
+    , *
+    , version: Optional[int] = None) -> PolicyFamily:
+        """Get policy family information.
+
+        Retrieve the information for a policy family based on its identifier and version.
+
         :param policy_family_id: str
           The family ID about which to retrieve information.
         :param version: int (optional)
           The version number for the family to fetch. Defaults to the latest version.
-
+
         :returns: :class:`PolicyFamily`
         """
-
+
         query = {}
-        if version is not None:
-            query["version"] = version
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/policy-families/{policy_family_id}", query=query, headers=headers)
+        if version is not None: query['version'] = version
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/policy-families/{policy_family_id}', query=query
+
+        , headers=headers
+        )
         return PolicyFamily.from_dict(res)

-    def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PolicyFamily]:
-        """List policy families.
+
+
+
+    def list(self
+
+    , *
+    , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PolicyFamily]:
+        """List policy families.
+
         Returns the list of policy definition types available to use at their latest version. This API is
         paginated.
-
+
         :param max_results: int (optional)
           Maximum number of policy families to return.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.
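# --- Editor's illustrative sketch (not part of the generated patch) ---
# Listing policy families and fetching one definition with the methods above.
# The `policy_family_id`, `name`, and `description` field names follow the
# generated `PolicyFamily` class. Assumes `w` (WorkspaceClient).
for fam in w.policy_families.list(max_results=20):
    detail = w.policy_families.get(policy_family_id=fam.policy_family_id)
    print(detail.name, detail.description)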
- + :returns: Iterator over :class:`PolicyFamily` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/policy-families", query=query, headers=headers) - if "policy_families" in json: - for v in json["policy_families"]: - yield PolicyFamily.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] + json = self._api.do('GET','/api/2.0/policy-families', query=query + + , headers=headers + ) + if 'policy_families' in json: + for v in json['policy_families']: + yield PolicyFamily.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + \ No newline at end of file diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 6a3945727..1aab1390b 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -1,78 +1,68 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') from databricks.sdk.service import sql # all definitions in this file are in alphabetical order - @dataclass class AuthorizationDetails: grant_rules: Optional[List[AuthorizationDetailsGrantRule]] = None """Represents downscoped permission rules with specific access rights. This field is specific to `workspace_rule_set` constraint.""" - + resource_legacy_acl_path: Optional[str] = None """The acl path of the tree store resource resource.""" - + resource_name: Optional[str] = None """The resource name to which the authorization rule applies. This field is specific to `workspace_rule_set` constraint. Format: `workspaces/{workspace_id}/dashboards/{dashboard_id}`""" - + type: Optional[str] = None """The type of authorization downscoping policy. 
Ex: `workspace_rule_set` defines access rules for a specific workspace resource""" - + def as_dict(self) -> dict: """Serializes the AuthorizationDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.grant_rules: - body["grant_rules"] = [v.as_dict() for v in self.grant_rules] - if self.resource_legacy_acl_path is not None: - body["resource_legacy_acl_path"] = self.resource_legacy_acl_path - if self.resource_name is not None: - body["resource_name"] = self.resource_name - if self.type is not None: - body["type"] = self.type + if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules] + if self.resource_legacy_acl_path is not None: body['resource_legacy_acl_path'] = self.resource_legacy_acl_path + if self.resource_name is not None: body['resource_name'] = self.resource_name + if self.type is not None: body['type'] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the AuthorizationDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.grant_rules: - body["grant_rules"] = self.grant_rules - if self.resource_legacy_acl_path is not None: - body["resource_legacy_acl_path"] = self.resource_legacy_acl_path - if self.resource_name is not None: - body["resource_name"] = self.resource_name - if self.type is not None: - body["type"] = self.type + if self.grant_rules: body['grant_rules'] = self.grant_rules + if self.resource_legacy_acl_path is not None: body['resource_legacy_acl_path'] = self.resource_legacy_acl_path + if self.resource_name is not None: body['resource_name'] = self.resource_name + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetails: """Deserializes the AuthorizationDetails from a dictionary.""" - return cls( - grant_rules=_repeated_dict(d, "grant_rules", AuthorizationDetailsGrantRule), - resource_legacy_acl_path=d.get("resource_legacy_acl_path", None), - resource_name=d.get("resource_name", None), - type=d.get("type", None), - ) + return cls(grant_rules=_repeated_dict(d, 'grant_rules', AuthorizationDetailsGrantRule), resource_legacy_acl_path=d.get('resource_legacy_acl_path', None), resource_name=d.get('resource_name', None), type=d.get('type', None)) + + @dataclass @@ -81,95 +71,34 @@ class AuthorizationDetailsGrantRule: """Permission sets for dashboard are defined in iam-common/rbac-common/permission-sets/definitions/TreeStoreBasePermissionSets Ex: `permissionSets/dashboard.runner`""" - + def as_dict(self) -> dict: """Serializes the AuthorizationDetailsGrantRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_set is not None: - body["permission_set"] = self.permission_set + if self.permission_set is not None: body['permission_set'] = self.permission_set return body def as_shallow_dict(self) -> dict: """Serializes the AuthorizationDetailsGrantRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_set is not None: - body["permission_set"] = self.permission_set + if self.permission_set is not None: body['permission_set'] = self.permission_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetailsGrantRule: """Deserializes the AuthorizationDetailsGrantRule from a dictionary.""" - return cls(permission_set=d.get("permission_set", None)) - - -@dataclass -class CancelQueryExecutionResponse: - status: Optional[List[CancelQueryExecutionResponseStatus]] = None + return 
cls(permission_set=d.get('permission_set', None)) + - def as_dict(self) -> dict: - """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.status: - body["status"] = [v.as_dict() for v in self.status] - return body - def as_shallow_dict(self) -> dict: - """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.status: - body["status"] = self.status - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponse: - """Deserializes the CancelQueryExecutionResponse from a dictionary.""" - return cls(status=_repeated_dict(d, "status", CancelQueryExecutionResponseStatus)) -@dataclass -class CancelQueryExecutionResponseStatus: - data_token: str - """The token to poll for result asynchronously Example: - EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" - pending: Optional[Empty] = None - """Represents an empty message, similar to google.protobuf.Empty, which is not available in the - firm right now.""" - success: Optional[Empty] = None - """Represents an empty message, similar to google.protobuf.Empty, which is not available in the - firm right now.""" - def as_dict(self) -> dict: - """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data_token is not None: - body["data_token"] = self.data_token - if self.pending: - body["pending"] = self.pending.as_dict() - if self.success: - body["success"] = self.success.as_dict() - return body - def as_shallow_dict(self) -> dict: - """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data_token is not None: - body["data_token"] = self.data_token - if self.pending: - body["pending"] = self.pending - if self.success: - body["success"] = self.success - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelQueryExecutionResponseStatus: - """Deserializes the CancelQueryExecutionResponseStatus from a dictionary.""" - return cls( - data_token=d.get("data_token", None), - pending=_from_dict(d, "pending", Empty), - success=_from_dict(d, "success", Empty), - ) @dataclass @@ -179,63 +108,61 @@ class CronSchedule: Trigger] for details. [Cron Trigger]: http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - + timezone_id: str """A Java timezone id. The schedule will be resolved with respect to this timezone. See [Java TimeZone] for details. 
[Java TimeZone]: https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html""" - + def as_dict(self) -> dict: """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" - return cls(quartz_cron_expression=d.get("quartz_cron_expression", None), timezone_id=d.get("timezone_id", None)) + return cls(quartz_cron_expression=d.get('quartz_cron_expression', None), timezone_id=d.get('timezone_id', None)) + + @dataclass class Dashboard: create_time: Optional[str] = None """The timestamp of when the dashboard was created.""" - + dashboard_id: Optional[str] = None """UUID identifying the dashboard.""" - + display_name: Optional[str] = None """The display name of the dashboard.""" - + etag: Optional[str] = None """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has not been modified since the last read. This field is excluded in List Dashboards responses.""" - + lifecycle_state: Optional[LifecycleState] = None """The state of the dashboard resource. Used for tracking trashed status.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the dashboard. Includes leading slash and no trailing slash. This field is excluded in List Dashboards responses.""" - + path: Optional[str] = None """The workspace path of the dashboard asset, including the file name. Exported dashboards always have the file extension `.lvdash.json`. This field is excluded in List Dashboards responses.""" - + serialized_dashboard: Optional[str] = None """The contents of the dashboard in serialized string form. This field is excluded in List Dashboards responses. Use the [get dashboard API] to retrieve an example response, which @@ -243,84 +170,58 @@ class Dashboard: that represents the dashboard's layout and components. [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get""" - + update_time: Optional[str] = None """The timestamp of when the dashboard was last updated by the user. 
This field is excluded in List Dashboards responses.""" - + warehouse_id: Optional[str] = None """The warehouse ID used to run the dashboard.""" - + def as_dict(self) -> dict: """Serializes the Dashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.display_name is not None: - body["display_name"] = self.display_name - if self.etag is not None: - body["etag"] = self.etag - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state.value - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.path is not None: - body["path"] = self.path - if self.serialized_dashboard is not None: - body["serialized_dashboard"] = self.serialized_dashboard - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.etag is not None: body['etag'] = self.etag + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.path is not None: body['path'] = self.path + if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the Dashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.display_name is not None: - body["display_name"] = self.display_name - if self.etag is not None: - body["etag"] = self.etag - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.path is not None: - body["path"] = self.path - if self.serialized_dashboard is not None: - body["serialized_dashboard"] = self.serialized_dashboard - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.etag is not None: body['etag'] = self.etag + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.path is not None: body['path'] = self.path + if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Dashboard: """Deserializes the 
Dashboard from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - dashboard_id=d.get("dashboard_id", None), - display_name=d.get("display_name", None), - etag=d.get("etag", None), - lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), - parent_path=d.get("parent_path", None), - path=d.get("path", None), - serialized_dashboard=d.get("serialized_dashboard", None), - update_time=d.get("update_time", None), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(create_time=d.get('create_time', None), dashboard_id=d.get('dashboard_id', None), display_name=d.get('display_name', None), etag=d.get('etag', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), parent_path=d.get('parent_path', None), path=d.get('path', None), serialized_dashboard=d.get('serialized_dashboard', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None)) + + class DashboardView(Enum): + + + DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC' + - DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC" @dataclass @@ -339,6 +240,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteScheduleResponse: """Deserializes the DeleteScheduleResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -357,282 +263,175 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteSubscriptionResponse: """Deserializes the DeleteSubscriptionResponse from a dictionary.""" return cls() + -@dataclass -class Empty: - """Represents an empty message, similar to google.protobuf.Empty, which is not available in the - firm right now.""" - - def as_dict(self) -> dict: - """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Empty into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Empty: - """Deserializes the Empty from a dictionary.""" - return cls() - - -@dataclass -class ExecutePublishedDashboardQueryRequest: - """Execute query request for published Dashboards. 
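# A usage sketch for the Dashboard dataclass above, with made-up ids. Note the
# asymmetry between the two serializers: as_dict() emits the LifecycleState
# enum via .value, while as_shallow_dict() keeps the enum object itself.
from databricks.sdk.service.dashboards import Dashboard, LifecycleState

d = Dashboard(
    dashboard_id="01ef-example",       # hypothetical UUID
    display_name="Sales overview",
    lifecycle_state=LifecycleState.ACTIVE,
    warehouse_id="abc123",             # hypothetical warehouse id
)
assert d.as_dict()["lifecycle_state"] == "ACTIVE"
assert d.as_shallow_dict()["lifecycle_state"] is LifecycleState.ACTIVE
assert Dashboard.from_dict(d.as_dict()).lifecycle_state is LifecycleState.ACTIVE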
Since published dashboards have the option of - running as the publisher, the datasets, warehouse_id are excluded from the request and instead - read from the source (lakeview-config) via the additional parameters (dashboardName and - dashboardRevisionId)""" - - dashboard_name: str - """Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains - the list of datasets, warehouse_id, and embedded_credentials""" - - dashboard_revision_id: str - - override_warehouse_id: Optional[str] = None - """A dashboard schedule can override the warehouse used as compute for processing the published - dashboard queries""" - - def as_dict(self) -> dict: - """Serializes the ExecutePublishedDashboardQueryRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_name is not None: - body["dashboard_name"] = self.dashboard_name - if self.dashboard_revision_id is not None: - body["dashboard_revision_id"] = self.dashboard_revision_id - if self.override_warehouse_id is not None: - body["override_warehouse_id"] = self.override_warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExecutePublishedDashboardQueryRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_name is not None: - body["dashboard_name"] = self.dashboard_name - if self.dashboard_revision_id is not None: - body["dashboard_revision_id"] = self.dashboard_revision_id - if self.override_warehouse_id is not None: - body["override_warehouse_id"] = self.override_warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExecutePublishedDashboardQueryRequest: - """Deserializes the ExecutePublishedDashboardQueryRequest from a dictionary.""" - return cls( - dashboard_name=d.get("dashboard_name", None), - dashboard_revision_id=d.get("dashboard_revision_id", None), - override_warehouse_id=d.get("override_warehouse_id", None), - ) - - -@dataclass -class ExecuteQueryResponse: - def as_dict(self) -> dict: - """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExecuteQueryResponse: - """Deserializes the ExecuteQueryResponse from a dictionary.""" - return cls() - @dataclass class GenieAttachment: """Genie AI Response""" - + attachment_id: Optional[str] = None """Attachment ID""" - + query: Optional[GenieQueryAttachment] = None """Query Attachment if Genie responds with a SQL query""" - + text: Optional[TextAttachment] = None """Text Attachment if Genie responds with text""" - + def as_dict(self) -> dict: """Serializes the GenieAttachment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attachment_id is not None: - body["attachment_id"] = self.attachment_id - if self.query: - body["query"] = self.query.as_dict() - if self.text: - body["text"] = self.text.as_dict() + if self.attachment_id is not None: body['attachment_id'] = self.attachment_id + if self.query: body['query'] = self.query.as_dict() + if self.text: body['text'] = self.text.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GenieAttachment into a shallow dictionary of its immediate attributes.""" body = {} - if self.attachment_id is not None: - body["attachment_id"] = 
self.attachment_id - if self.query: - body["query"] = self.query - if self.text: - body["text"] = self.text + if self.attachment_id is not None: body['attachment_id'] = self.attachment_id + if self.query: body['query'] = self.query + if self.text: body['text'] = self.text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieAttachment: """Deserializes the GenieAttachment from a dictionary.""" - return cls( - attachment_id=d.get("attachment_id", None), - query=_from_dict(d, "query", GenieQueryAttachment), - text=_from_dict(d, "text", TextAttachment), - ) + return cls(attachment_id=d.get('attachment_id', None), query=_from_dict(d, 'query', GenieQueryAttachment), text=_from_dict(d, 'text', TextAttachment)) + + @dataclass class GenieConversation: id: str """Conversation ID. Legacy identifier, use conversation_id instead""" - + space_id: str """Genie space ID""" - + user_id: int """ID of the user who created the conversation""" - + title: str """Conversation title""" - + conversation_id: str """Conversation ID""" - + created_timestamp: Optional[int] = None """Timestamp when the message was created""" - + last_updated_timestamp: Optional[int] = None """Timestamp when the message was last updated""" - + def as_dict(self) -> dict: """Serializes the GenieConversation into a dictionary suitable for use as a JSON request body.""" body = {} - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.created_timestamp is not None: - body["created_timestamp"] = self.created_timestamp - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.space_id is not None: - body["space_id"] = self.space_id - if self.title is not None: - body["title"] = self.title - if self.user_id is not None: - body["user_id"] = self.user_id + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.space_id is not None: body['space_id'] = self.space_id + if self.title is not None: body['title'] = self.title + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieConversation into a shallow dictionary of its immediate attributes.""" body = {} - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.created_timestamp is not None: - body["created_timestamp"] = self.created_timestamp - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.space_id is not None: - body["space_id"] = self.space_id - if self.title is not None: - body["title"] = self.title - if self.user_id is not None: - body["user_id"] = self.user_id + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.space_id is not None: body['space_id'] = self.space_id + if self.title is not None: body['title'] = self.title + if 
self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieConversation: """Deserializes the GenieConversation from a dictionary.""" - return cls( - conversation_id=d.get("conversation_id", None), - created_timestamp=d.get("created_timestamp", None), - id=d.get("id", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - space_id=d.get("space_id", None), - title=d.get("title", None), - user_id=d.get("user_id", None), - ) + return cls(conversation_id=d.get('conversation_id', None), created_timestamp=d.get('created_timestamp', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), space_id=d.get('space_id', None), title=d.get('title', None), user_id=d.get('user_id', None)) + + @dataclass class GenieCreateConversationMessageRequest: content: str """User message content.""" - + conversation_id: Optional[str] = None """The ID associated with the conversation.""" - + space_id: Optional[str] = None """The ID associated with the Genie space where the conversation is started.""" - + def as_dict(self) -> dict: """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.space_id is not None: - body["space_id"] = self.space_id + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.space_id is not None: body['space_id'] = self.space_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.space_id is not None: - body["space_id"] = self.space_id + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.space_id is not None: body['space_id'] = self.space_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieCreateConversationMessageRequest: """Deserializes the GenieCreateConversationMessageRequest from a dictionary.""" - return cls( - content=d.get("content", None), - conversation_id=d.get("conversation_id", None), - space_id=d.get("space_id", None), - ) + return cls(content=d.get('content', None), conversation_id=d.get('conversation_id', None), space_id=d.get('space_id', None)) + + + + + + + + + + + @dataclass class GenieGenerateDownloadFullQueryResultResponse: download_id: Optional[str] = None """Download ID. 
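# GenieCreateConversationMessageRequest above is the body for posting a
# follow-up message into an existing Genie conversation. Through the
# high-level client this is typically a single call; create_message_and_wait
# and its parameters are assumed from the generated Genie client, and the ids
# are made up:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
message = w.genie.create_message_and_wait(
    space_id="01ef-space",
    conversation_id="01ef-conversation",
    content="Break revenue down by region",
)
print(message.status, message.content)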
Use this ID to track the download request in subsequent polling calls""" - + def as_dict(self) -> dict: """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.download_id is not None: - body["download_id"] = self.download_id + if self.download_id is not None: body['download_id'] = self.download_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.download_id is not None: - body["download_id"] = self.download_id + if self.download_id is not None: body['download_id'] = self.download_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse: """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary.""" - return cls(download_id=d.get("download_id", None)) + return cls(download_id=d.get('download_id', None)) + + + + + + + + @dataclass @@ -640,25 +439,31 @@ class GenieGetDownloadFullQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None """SQL Statement Execution response. See [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) for more details.""" - + def as_dict(self) -> dict: """Serializes the GenieGetDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.statement_response: - body["statement_response"] = self.statement_response.as_dict() + if self.statement_response: body['statement_response'] = self.statement_response.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GenieGetDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.statement_response: - body["statement_response"] = self.statement_response + if self.statement_response: body['statement_response'] = self.statement_response return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieGetDownloadFullQueryResultResponse: """Deserializes the GenieGetDownloadFullQueryResultResponse from a dictionary.""" - return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) + return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse)) + + + + + + + + @dataclass @@ -666,60 +471,99 @@ class GenieGetMessageQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None """SQL Statement Execution response. 
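# The generate/get download result types above form a generate-then-poll pair:
# first obtain a download_id, then exchange it for a statement_response once
# the result is ready. The method names and parameters here are assumptions
# matching these response types, with made-up ids:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
gen = w.genie.generate_download_full_query_result(
    space_id="space-1", conversation_id="conv-1",
    message_id="msg-1", attachment_id="att-1",
)
result = w.genie.get_download_full_query_result(
    space_id="space-1", conversation_id="conv-1",
    message_id="msg-1", attachment_id="att-1",
    download_id=gen.download_id,
)
if result.statement_response:
    print(result.statement_response.status)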
See [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) for more details.""" - + def as_dict(self) -> dict: """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.statement_response: - body["statement_response"] = self.statement_response.as_dict() + if self.statement_response: body['statement_response'] = self.statement_response.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.statement_response: - body["statement_response"] = self.statement_response + if self.statement_response: body['statement_response'] = self.statement_response return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieGetMessageQueryResultResponse: """Deserializes the GenieGetMessageQueryResultResponse from a dictionary.""" - return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) + return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse)) + + + + + + + + + + + + + +@dataclass +class GenieListSpacesResponse: + next_page_token: Optional[str] = None + """Token to get the next page of results""" + + spaces: Optional[List[GenieSpace]] = None + """List of Genie spaces""" + + def as_dict(self) -> dict: + """Serializes the GenieListSpacesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.spaces: body['spaces'] = [v.as_dict() for v in self.spaces] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenieListSpacesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.spaces: body['spaces'] = self.spaces + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenieListSpacesResponse: + """Deserializes the GenieListSpacesResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), spaces=_repeated_dict(d, 'spaces', GenieSpace)) + + @dataclass class GenieMessage: id: str """Message ID. Legacy identifier, use message_id instead""" - + space_id: str """Genie space ID""" - + conversation_id: str """Conversation ID""" - + content: str """User message content""" - + message_id: str """Message ID""" - + attachments: Optional[List[GenieAttachment]] = None """AI-generated response to the message""" - + created_timestamp: Optional[int] = None """Timestamp when the message was created""" - + error: Optional[MessageError] = None """Error message if Genie failed to respond to the message""" - + last_updated_timestamp: Optional[int] = None """Timestamp when the message was last updated""" - + query_result: Optional[Result] = None """The result of SQL query if the message includes a query attachment. Deprecated. Use `query_result_metadata` in `GenieQueryAttachment` instead.""" - + status: Optional[MessageStatus] = None """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * @@ -735,330 +579,247 @@ class GenieMessage: anymore. The user needs to rerun the query. 
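# GenieListSpacesResponse above follows the standard token-pagination shape:
# keep requesting pages until next_page_token comes back empty. The
# list_spaces() call and its page_token parameter are assumptions inferred
# from this response type, not a verified client signature:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
page_token = None
while True:
    resp = w.genie.list_spaces(page_token=page_token)
    for space in resp.spaces or []:
        print(space.space_id, space.title)
    page_token = resp.next_page_token
    if not page_token:
        break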
Rerun the SQL query result by calling [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`: Message has been cancelled.""" - + user_id: Optional[int] = None """ID of the user who created the message""" - + def as_dict(self) -> dict: """Serializes the GenieMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attachments: - body["attachments"] = [v.as_dict() for v in self.attachments] - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.created_timestamp is not None: - body["created_timestamp"] = self.created_timestamp - if self.error: - body["error"] = self.error.as_dict() - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.message_id is not None: - body["message_id"] = self.message_id - if self.query_result: - body["query_result"] = self.query_result.as_dict() - if self.space_id is not None: - body["space_id"] = self.space_id - if self.status is not None: - body["status"] = self.status.value - if self.user_id is not None: - body["user_id"] = self.user_id + if self.attachments: body['attachments'] = [v.as_dict() for v in self.attachments] + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.error: body['error'] = self.error.as_dict() + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.message_id is not None: body['message_id'] = self.message_id + if self.query_result: body['query_result'] = self.query_result.as_dict() + if self.space_id is not None: body['space_id'] = self.space_id + if self.status is not None: body['status'] = self.status.value + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.attachments: - body["attachments"] = self.attachments - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.created_timestamp is not None: - body["created_timestamp"] = self.created_timestamp - if self.error: - body["error"] = self.error - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.message_id is not None: - body["message_id"] = self.message_id - if self.query_result: - body["query_result"] = self.query_result - if self.space_id is not None: - body["space_id"] = self.space_id - if self.status is not None: - body["status"] = self.status - if self.user_id is not None: - body["user_id"] = self.user_id + if self.attachments: body['attachments'] = self.attachments + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.error: body['error'] = self.error + if self.id is not None: body['id'] = self.id + if 
self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.message_id is not None: body['message_id'] = self.message_id + if self.query_result: body['query_result'] = self.query_result + if self.space_id is not None: body['space_id'] = self.space_id + if self.status is not None: body['status'] = self.status + if self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieMessage: """Deserializes the GenieMessage from a dictionary.""" - return cls( - attachments=_repeated_dict(d, "attachments", GenieAttachment), - content=d.get("content", None), - conversation_id=d.get("conversation_id", None), - created_timestamp=d.get("created_timestamp", None), - error=_from_dict(d, "error", MessageError), - id=d.get("id", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - message_id=d.get("message_id", None), - query_result=_from_dict(d, "query_result", Result), - space_id=d.get("space_id", None), - status=_enum(d, "status", MessageStatus), - user_id=d.get("user_id", None), - ) + return cls(attachments=_repeated_dict(d, 'attachments', GenieAttachment), content=d.get('content', None), conversation_id=d.get('conversation_id', None), created_timestamp=d.get('created_timestamp', None), error=_from_dict(d, 'error', MessageError), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), message_id=d.get('message_id', None), query_result=_from_dict(d, 'query_result', Result), space_id=d.get('space_id', None), status=_enum(d, 'status', MessageStatus), user_id=d.get('user_id', None)) + + @dataclass class GenieQueryAttachment: description: Optional[str] = None """Description of the query""" - + id: Optional[str] = None - + last_updated_timestamp: Optional[int] = None """Time when the user updated the query last""" - + query: Optional[str] = None """AI generated SQL query""" - + query_result_metadata: Optional[GenieResultMetadata] = None """Metadata associated with the query result.""" - + statement_id: Optional[str] = None """Statement Execution API statement id. 
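# GenieMessage.from_dict() above parses nested attachments recursively, so a
# raw REST payload can be inspected through typed fields; the payload below is
# made up for illustration:
from databricks.sdk.service.dashboards import GenieMessage, MessageStatus

payload = {
    "id": "msg-1",
    "message_id": "msg-1",
    "space_id": "space-1",
    "conversation_id": "conv-1",
    "content": "Top customers by spend",
    "status": "COMPLETED",
    "attachments": [{"attachment_id": "att-1", "query": {"query": "SELECT 1"}}],
}
msg = GenieMessage.from_dict(payload)
assert msg.status is MessageStatus.COMPLETED
for att in msg.attachments or []:
    if att.query:
        print(att.query.query)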
Use [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) to get the full result data.""" - + title: Optional[str] = None """Name of the query""" - + def as_dict(self) -> dict: """Serializes the GenieQueryAttachment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.query is not None: - body["query"] = self.query - if self.query_result_metadata: - body["query_result_metadata"] = self.query_result_metadata.as_dict() - if self.statement_id is not None: - body["statement_id"] = self.statement_id - if self.title is not None: - body["title"] = self.title + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.query is not None: body['query'] = self.query + if self.query_result_metadata: body['query_result_metadata'] = self.query_result_metadata.as_dict() + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.title is not None: body['title'] = self.title return body def as_shallow_dict(self) -> dict: """Serializes the GenieQueryAttachment into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.query is not None: - body["query"] = self.query - if self.query_result_metadata: - body["query_result_metadata"] = self.query_result_metadata - if self.statement_id is not None: - body["statement_id"] = self.statement_id - if self.title is not None: - body["title"] = self.title + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.query is not None: body['query'] = self.query + if self.query_result_metadata: body['query_result_metadata'] = self.query_result_metadata + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.title is not None: body['title'] = self.title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieQueryAttachment: """Deserializes the GenieQueryAttachment from a dictionary.""" - return cls( - description=d.get("description", None), - id=d.get("id", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - query=d.get("query", None), - query_result_metadata=_from_dict(d, "query_result_metadata", GenieResultMetadata), - statement_id=d.get("statement_id", None), - title=d.get("title", None), - ) + return cls(description=d.get('description', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), query=d.get('query', None), query_result_metadata=_from_dict(d, 'query_result_metadata', GenieResultMetadata), statement_id=d.get('statement_id', None), title=d.get('title', None)) + + @dataclass class GenieResultMetadata: is_truncated: Optional[bool] = None """Indicates whether the result set is truncated.""" - + row_count: Optional[int] = None """The number of rows 
in the result set.""" - + def as_dict(self) -> dict: """Serializes the GenieResultMetadata into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_truncated is not None: - body["is_truncated"] = self.is_truncated - if self.row_count is not None: - body["row_count"] = self.row_count + if self.is_truncated is not None: body['is_truncated'] = self.is_truncated + if self.row_count is not None: body['row_count'] = self.row_count return body def as_shallow_dict(self) -> dict: """Serializes the GenieResultMetadata into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_truncated is not None: - body["is_truncated"] = self.is_truncated - if self.row_count is not None: - body["row_count"] = self.row_count + if self.is_truncated is not None: body['is_truncated'] = self.is_truncated + if self.row_count is not None: body['row_count'] = self.row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieResultMetadata: """Deserializes the GenieResultMetadata from a dictionary.""" - return cls(is_truncated=d.get("is_truncated", None), row_count=d.get("row_count", None)) + return cls(is_truncated=d.get('is_truncated', None), row_count=d.get('row_count', None)) + + @dataclass class GenieSpace: space_id: str """Genie space ID""" - + title: str """Title of the Genie Space""" - + description: Optional[str] = None """Description of the Genie Space""" - + def as_dict(self) -> dict: """Serializes the GenieSpace into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.space_id is not None: - body["space_id"] = self.space_id - if self.title is not None: - body["title"] = self.title + if self.description is not None: body['description'] = self.description + if self.space_id is not None: body['space_id'] = self.space_id + if self.title is not None: body['title'] = self.title return body def as_shallow_dict(self) -> dict: """Serializes the GenieSpace into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.space_id is not None: - body["space_id"] = self.space_id - if self.title is not None: - body["title"] = self.title + if self.description is not None: body['description'] = self.description + if self.space_id is not None: body['space_id'] = self.space_id + if self.title is not None: body['title'] = self.title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieSpace: """Deserializes the GenieSpace from a dictionary.""" - return cls(description=d.get("description", None), space_id=d.get("space_id", None), title=d.get("title", None)) + return cls(description=d.get('description', None), space_id=d.get('space_id', None), title=d.get('title', None)) + + @dataclass class GenieStartConversationMessageRequest: content: str """The text of the message that starts the conversation.""" - + space_id: Optional[str] = None """The ID associated with the Genie space where you want to start a conversation.""" - + def as_dict(self) -> dict: """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.space_id is not None: - body["space_id"] = self.space_id + if self.content is not None: body['content'] = self.content + if self.space_id is not None: body['space_id'] = self.space_id return body def 
as_shallow_dict(self) -> dict: """Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.space_id is not None: - body["space_id"] = self.space_id + if self.content is not None: body['content'] = self.content + if self.space_id is not None: body['space_id'] = self.space_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationMessageRequest: """Deserializes the GenieStartConversationMessageRequest from a dictionary.""" - return cls(content=d.get("content", None), space_id=d.get("space_id", None)) + return cls(content=d.get('content', None), space_id=d.get('space_id', None)) + + @dataclass class GenieStartConversationResponse: message_id: str """Message ID""" - + conversation_id: str """Conversation ID""" - + conversation: Optional[GenieConversation] = None - + message: Optional[GenieMessage] = None - + def as_dict(self) -> dict: """Serializes the GenieStartConversationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.conversation: - body["conversation"] = self.conversation.as_dict() - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.message: - body["message"] = self.message.as_dict() - if self.message_id is not None: - body["message_id"] = self.message_id + if self.conversation: body['conversation'] = self.conversation.as_dict() + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.message: body['message'] = self.message.as_dict() + if self.message_id is not None: body['message_id'] = self.message_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieStartConversationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.conversation: - body["conversation"] = self.conversation - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.message: - body["message"] = self.message - if self.message_id is not None: - body["message_id"] = self.message_id + if self.conversation: body['conversation'] = self.conversation + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.message: body['message'] = self.message + if self.message_id is not None: body['message_id'] = self.message_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationResponse: """Deserializes the GenieStartConversationResponse from a dictionary.""" - return cls( - conversation=_from_dict(d, "conversation", GenieConversation), - conversation_id=d.get("conversation_id", None), - message=_from_dict(d, "message", GenieMessage), - message_id=d.get("message_id", None), - ) + return cls(conversation=_from_dict(d, 'conversation', GenieConversation), conversation_id=d.get('conversation_id', None), message=_from_dict(d, 'message', GenieMessage), message_id=d.get('message_id', None)) + + + + + + + -@dataclass -class GetPublishedDashboardEmbeddedResponse: - def as_dict(self) -> dict: - """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - def as_shallow_dict(self) -> dict: - """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
GetPublishedDashboardEmbeddedResponse: - """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary.""" - return cls() @dataclass @@ -1067,84 +828,84 @@ class GetPublishedDashboardTokenInfoResponse: """Authorization constraints for accessing the published dashboard. Currently includes `workspace_rule_set` and could be enriched with `unity_catalog_privileges` before oAuth token generation.""" - + custom_claim: Optional[str] = None """Custom claim generated from external_value and external_viewer_id. Format: `urn:aibi:external_data:::`""" - + scope: Optional[str] = None """Scope defining access permissions.""" - + def as_dict(self) -> dict: """Serializes the GetPublishedDashboardTokenInfoResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authorization_details: - body["authorization_details"] = [v.as_dict() for v in self.authorization_details] - if self.custom_claim is not None: - body["custom_claim"] = self.custom_claim - if self.scope is not None: - body["scope"] = self.scope + if self.authorization_details: body['authorization_details'] = [v.as_dict() for v in self.authorization_details] + if self.custom_claim is not None: body['custom_claim'] = self.custom_claim + if self.scope is not None: body['scope'] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedDashboardTokenInfoResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.authorization_details: - body["authorization_details"] = self.authorization_details - if self.custom_claim is not None: - body["custom_claim"] = self.custom_claim - if self.scope is not None: - body["scope"] = self.scope + if self.authorization_details: body['authorization_details'] = self.authorization_details + if self.custom_claim is not None: body['custom_claim'] = self.custom_claim + if self.scope is not None: body['scope'] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedDashboardTokenInfoResponse: """Deserializes the GetPublishedDashboardTokenInfoResponse from a dictionary.""" - return cls( - authorization_details=_repeated_dict(d, "authorization_details", AuthorizationDetails), - custom_claim=d.get("custom_claim", None), - scope=d.get("scope", None), - ) + return cls(authorization_details=_repeated_dict(d, 'authorization_details', AuthorizationDetails), custom_claim=d.get('custom_claim', None), scope=d.get('scope', None)) + + + + + + + + class LifecycleState(Enum): + + + ACTIVE = 'ACTIVE' + TRASHED = 'TRASHED' + - ACTIVE = "ACTIVE" - TRASHED = "TRASHED" @dataclass class ListDashboardsResponse: dashboards: Optional[List[Dashboard]] = None - + next_page_token: Optional[str] = None """A token, which can be sent as `page_token` to retrieve the next page. 
If this field is omitted, there are no subsequent dashboards.""" - + def as_dict(self) -> dict: """Serializes the ListDashboardsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboards: - body["dashboards"] = [v.as_dict() for v in self.dashboards] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.dashboards: body['dashboards'] = [v.as_dict() for v in self.dashboards] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListDashboardsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboards: - body["dashboards"] = self.dashboards - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.dashboards: body['dashboards'] = self.dashboards + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListDashboardsResponse: """Deserializes the ListDashboardsResponse from a dictionary.""" - return cls( - dashboards=_repeated_dict(d, "dashboards", Dashboard), next_page_token=d.get("next_page_token", None) - ) + return cls(dashboards=_repeated_dict(d, 'dashboards', Dashboard), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass @@ -1152,31 +913,32 @@ class ListSchedulesResponse: next_page_token: Optional[str] = None """A token that can be used as a `page_token` in subsequent requests to retrieve the next page of results. If this field is omitted, there are no subsequent schedules.""" - + schedules: Optional[List[Schedule]] = None - + def as_dict(self) -> dict: """Serializes the ListSchedulesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schedules: - body["schedules"] = [v.as_dict() for v in self.schedules] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schedules: body['schedules'] = [v.as_dict() for v in self.schedules] return body def as_shallow_dict(self) -> dict: """Serializes the ListSchedulesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schedules: - body["schedules"] = self.schedules + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.schedules: body['schedules'] = self.schedules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListSchedulesResponse: """Deserializes the ListSchedulesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), schedules=_repeated_dict(d, "schedules", Schedule)) + return cls(next_page_token=d.get('next_page_token', None), schedules=_repeated_dict(d, 'schedules', Schedule)) + + + + + @dataclass @@ -1184,122 +946,115 @@ class ListSubscriptionsResponse: next_page_token: Optional[str] = None """A token that can be used as a `page_token` in subsequent requests to retrieve the next page of results. 
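# ListDashboardsResponse and ListSchedulesResponse above share the same
# next_page_token contract. The generated Lakeview client normally hides that
# loop behind an iterator; treat the exact method name as an assumption:
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for dashboard in w.lakeview.list():  # paginates via next_page_token internally
    print(dashboard.dashboard_id, dashboard.display_name)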
If this field is omitted, there are no subsequent subscriptions.""" - + subscriptions: Optional[List[Subscription]] = None - + def as_dict(self) -> dict: """Serializes the ListSubscriptionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.subscriptions: - body["subscriptions"] = [v.as_dict() for v in self.subscriptions] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> dict: """Serializes the ListSubscriptionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.subscriptions: - body["subscriptions"] = self.subscriptions + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.subscriptions: body['subscriptions'] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListSubscriptionsResponse: """Deserializes the ListSubscriptionsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - subscriptions=_repeated_dict(d, "subscriptions", Subscription), - ) + return cls(next_page_token=d.get('next_page_token', None), subscriptions=_repeated_dict(d, 'subscriptions', Subscription)) + + @dataclass class MessageError: error: Optional[str] = None - + type: Optional[MessageErrorType] = None - + def as_dict(self) -> dict: """Serializes the MessageError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error is not None: - body["error"] = self.error - if self.type is not None: - body["type"] = self.type.value + if self.error is not None: body['error'] = self.error + if self.type is not None: body['type'] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the MessageError into a shallow dictionary of its immediate attributes.""" body = {} - if self.error is not None: - body["error"] = self.error - if self.type is not None: - body["type"] = self.type + if self.error is not None: body['error'] = self.error + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MessageError: """Deserializes the MessageError from a dictionary.""" - return cls(error=d.get("error", None), type=_enum(d, "type", MessageErrorType)) - + return cls(error=d.get('error', None), type=_enum(d, 'type', MessageErrorType)) + -class MessageErrorType(Enum): - BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = "BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION" - CHAT_COMPLETION_CLIENT_EXCEPTION = "CHAT_COMPLETION_CLIENT_EXCEPTION" - CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = "CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION" - CHAT_COMPLETION_NETWORK_EXCEPTION = "CHAT_COMPLETION_NETWORK_EXCEPTION" - CONTENT_FILTER_EXCEPTION = "CONTENT_FILTER_EXCEPTION" - CONTEXT_EXCEEDED_EXCEPTION = "CONTEXT_EXCEEDED_EXCEPTION" - COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION = "COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION" - COULD_NOT_GET_UC_SCHEMA_EXCEPTION = "COULD_NOT_GET_UC_SCHEMA_EXCEPTION" - DEPLOYMENT_NOT_FOUND_EXCEPTION = "DEPLOYMENT_NOT_FOUND_EXCEPTION" - DESCRIBE_QUERY_INVALID_SQL_ERROR = "DESCRIBE_QUERY_INVALID_SQL_ERROR" - DESCRIBE_QUERY_TIMEOUT = "DESCRIBE_QUERY_TIMEOUT" - DESCRIBE_QUERY_UNEXPECTED_FAILURE = "DESCRIBE_QUERY_UNEXPECTED_FAILURE" 
- FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION" - FUNCTION_ARGUMENTS_INVALID_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_EXCEPTION" - FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION" - FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION" - FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = "FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION" - GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION = "GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION" - GENERIC_CHAT_COMPLETION_EXCEPTION = "GENERIC_CHAT_COMPLETION_EXCEPTION" - GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION" - GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION" - ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION" - INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" - INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" - INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" - INVALID_CHAT_COMPLETION_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" - INVALID_COMPLETION_REQUEST_EXCEPTION = "INVALID_COMPLETION_REQUEST_EXCEPTION" - INVALID_FUNCTION_CALL_EXCEPTION = "INVALID_FUNCTION_CALL_EXCEPTION" - INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION = "INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION" - INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION = "INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION" - INVALID_SQL_UNKNOWN_TABLE_EXCEPTION = "INVALID_SQL_UNKNOWN_TABLE_EXCEPTION" - INVALID_TABLE_IDENTIFIER_EXCEPTION = "INVALID_TABLE_IDENTIFIER_EXCEPTION" - LOCAL_CONTEXT_EXCEEDED_EXCEPTION = "LOCAL_CONTEXT_EXCEEDED_EXCEPTION" - MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION" - MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION" - MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION" - MISSING_SQL_QUERY_EXCEPTION = "MISSING_SQL_QUERY_EXCEPTION" - NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = "NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE" - NO_QUERY_TO_VISUALIZE_EXCEPTION = "NO_QUERY_TO_VISUALIZE_EXCEPTION" - NO_TABLES_TO_QUERY_EXCEPTION = "NO_TABLES_TO_QUERY_EXCEPTION" - RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = "RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION" - RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = "RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION" - REPLY_PROCESS_TIMEOUT_EXCEPTION = "REPLY_PROCESS_TIMEOUT_EXCEPTION" - RETRYABLE_PROCESSING_EXCEPTION = "RETRYABLE_PROCESSING_EXCEPTION" - SQL_EXECUTION_EXCEPTION = "SQL_EXECUTION_EXCEPTION" - STOP_PROCESS_DUE_TO_AUTO_REGENERATE = "STOP_PROCESS_DUE_TO_AUTO_REGENERATE" - TABLES_MISSING_EXCEPTION = "TABLES_MISSING_EXCEPTION" - TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = "TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION" - TOO_MANY_TABLES_EXCEPTION = "TOO_MANY_TABLES_EXCEPTION" - UNEXPECTED_REPLY_PROCESS_EXCEPTION = "UNEXPECTED_REPLY_PROCESS_EXCEPTION" - UNKNOWN_AI_MODEL = "UNKNOWN_AI_MODEL" - WAREHOUSE_ACCESS_MISSING_EXCEPTION = "WAREHOUSE_ACCESS_MISSING_EXCEPTION" - WAREHOUSE_NOT_FOUND_EXCEPTION = "WAREHOUSE_NOT_FOUND_EXCEPTION" +class MessageErrorType(Enum): + + + BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = 'BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION' + CHAT_COMPLETION_CLIENT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_EXCEPTION' + CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION' + CHAT_COMPLETION_NETWORK_EXCEPTION = 
'CHAT_COMPLETION_NETWORK_EXCEPTION' + CONTENT_FILTER_EXCEPTION = 'CONTENT_FILTER_EXCEPTION' + CONTEXT_EXCEEDED_EXCEPTION = 'CONTEXT_EXCEEDED_EXCEPTION' + COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION = 'COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION' + COULD_NOT_GET_UC_SCHEMA_EXCEPTION = 'COULD_NOT_GET_UC_SCHEMA_EXCEPTION' + DEPLOYMENT_NOT_FOUND_EXCEPTION = 'DEPLOYMENT_NOT_FOUND_EXCEPTION' + DESCRIBE_QUERY_INVALID_SQL_ERROR = 'DESCRIBE_QUERY_INVALID_SQL_ERROR' + DESCRIBE_QUERY_TIMEOUT = 'DESCRIBE_QUERY_TIMEOUT' + DESCRIBE_QUERY_UNEXPECTED_FAILURE = 'DESCRIBE_QUERY_UNEXPECTED_FAILURE' + FUNCTIONS_NOT_AVAILABLE_EXCEPTION = 'FUNCTIONS_NOT_AVAILABLE_EXCEPTION' + FUNCTION_ARGUMENTS_INVALID_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_EXCEPTION' + FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION' + FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION' + FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = 'FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION' + GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION = 'GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION' + GENERIC_CHAT_COMPLETION_EXCEPTION = 'GENERIC_CHAT_COMPLETION_EXCEPTION' + GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = 'GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION' + GENERIC_SQL_EXEC_API_CALL_EXCEPTION = 'GENERIC_SQL_EXEC_API_CALL_EXCEPTION' + ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = 'ILLEGAL_PARAMETER_DEFINITION_EXCEPTION' + INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION' + INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION' + INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION' + INVALID_CHAT_COMPLETION_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_JSON_EXCEPTION' + INVALID_COMPLETION_REQUEST_EXCEPTION = 'INVALID_COMPLETION_REQUEST_EXCEPTION' + INVALID_FUNCTION_CALL_EXCEPTION = 'INVALID_FUNCTION_CALL_EXCEPTION' + INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION = 'INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION' + INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION = 'INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION' + INVALID_SQL_UNKNOWN_TABLE_EXCEPTION = 'INVALID_SQL_UNKNOWN_TABLE_EXCEPTION' + INVALID_TABLE_IDENTIFIER_EXCEPTION = 'INVALID_TABLE_IDENTIFIER_EXCEPTION' + LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION' + MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION' + MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION' + MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION' + MISSING_SQL_QUERY_EXCEPTION = 'MISSING_SQL_QUERY_EXCEPTION' + NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = 'NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE' + NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION' + NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION' + RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION' + RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION' + REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION' + RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION' + SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION' + STOP_PROCESS_DUE_TO_AUTO_REGENERATE = 'STOP_PROCESS_DUE_TO_AUTO_REGENERATE' + TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION' + TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION' + TOO_MANY_TABLES_EXCEPTION = 
'TOO_MANY_TABLES_EXCEPTION' + UNEXPECTED_REPLY_PROCESS_EXCEPTION = 'UNEXPECTED_REPLY_PROCESS_EXCEPTION' + UNKNOWN_AI_MODEL = 'UNKNOWN_AI_MODEL' + WAREHOUSE_ACCESS_MISSING_EXCEPTION = 'WAREHOUSE_ACCESS_MISSING_EXCEPTION' + WAREHOUSE_NOT_FOUND_EXCEPTION = 'WAREHOUSE_NOT_FOUND_EXCEPTION' class MessageStatus(Enum): """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data @@ -1316,678 +1071,418 @@ class MessageStatus(Enum): anymore. The user needs to rerun the query. Rerun the SQL query result by calling [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`: Message has been cancelled.""" - - ASKING_AI = "ASKING_AI" - CANCELLED = "CANCELLED" - COMPLETED = "COMPLETED" - EXECUTING_QUERY = "EXECUTING_QUERY" - FAILED = "FAILED" - FETCHING_METADATA = "FETCHING_METADATA" - FILTERING_CONTEXT = "FILTERING_CONTEXT" - PENDING_WAREHOUSE = "PENDING_WAREHOUSE" - QUERY_RESULT_EXPIRED = "QUERY_RESULT_EXPIRED" - SUBMITTED = "SUBMITTED" - + + ASKING_AI = 'ASKING_AI' + CANCELLED = 'CANCELLED' + COMPLETED = 'COMPLETED' + EXECUTING_QUERY = 'EXECUTING_QUERY' + FAILED = 'FAILED' + FETCHING_METADATA = 'FETCHING_METADATA' + FILTERING_CONTEXT = 'FILTERING_CONTEXT' + PENDING_WAREHOUSE = 'PENDING_WAREHOUSE' + QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED' + SUBMITTED = 'SUBMITTED' @dataclass class MigrateDashboardRequest: source_dashboard_id: str """UUID of the dashboard to be migrated.""" - + display_name: Optional[str] = None """Display name for the new Lakeview dashboard.""" - + parent_path: Optional[str] = None """The workspace path of the folder to contain the migrated Lakeview dashboard.""" - + update_parameter_syntax: Optional[bool] = None """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax (:param) when converting datasets in the dashboard.""" - + def as_dict(self) -> dict: """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.source_dashboard_id is not None: - body["source_dashboard_id"] = self.source_dashboard_id - if self.update_parameter_syntax is not None: - body["update_parameter_syntax"] = self.update_parameter_syntax + if self.display_name is not None: body['display_name'] = self.display_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id + if self.update_parameter_syntax is not None: body['update_parameter_syntax'] = self.update_parameter_syntax return body def as_shallow_dict(self) -> dict: """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.source_dashboard_id is not None: - body["source_dashboard_id"] = self.source_dashboard_id - if self.update_parameter_syntax is not None: - body["update_parameter_syntax"] = self.update_parameter_syntax + if self.display_name is not None: body['display_name'] = self.display_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id + if 
 @dataclass
 class MigrateDashboardRequest:
     source_dashboard_id: str
     """UUID of the dashboard to be migrated."""
-
+
     display_name: Optional[str] = None
     """Display name for the new Lakeview dashboard."""
-
+
     parent_path: Optional[str] = None
     """The workspace path of the folder to contain the migrated Lakeview dashboard."""
-
+
     update_parameter_syntax: Optional[bool] = None
     """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named
     syntax (:param) when converting datasets in the dashboard."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.parent_path is not None:
-            body["parent_path"] = self.parent_path
-        if self.source_dashboard_id is not None:
-            body["source_dashboard_id"] = self.source_dashboard_id
-        if self.update_parameter_syntax is not None:
-            body["update_parameter_syntax"] = self.update_parameter_syntax
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.update_parameter_syntax is not None: body['update_parameter_syntax'] = self.update_parameter_syntax
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.parent_path is not None:
-            body["parent_path"] = self.parent_path
-        if self.source_dashboard_id is not None:
-            body["source_dashboard_id"] = self.source_dashboard_id
-        if self.update_parameter_syntax is not None:
-            body["update_parameter_syntax"] = self.update_parameter_syntax
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.update_parameter_syntax is not None: body['update_parameter_syntax'] = self.update_parameter_syntax
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MigrateDashboardRequest:
         """Deserializes the MigrateDashboardRequest from a dictionary."""
-        return cls(
-            display_name=d.get("display_name", None),
-            parent_path=d.get("parent_path", None),
-            source_dashboard_id=d.get("source_dashboard_id", None),
-            update_parameter_syntax=d.get("update_parameter_syntax", None),
-        )
-
-
-@dataclass
-class PendingStatus:
-    data_token: str
-    """The token to poll for result asynchronously Example:
-    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
-
-    def as_dict(self) -> dict:
-        """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.data_token is not None:
-            body["data_token"] = self.data_token
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the PendingStatus into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.data_token is not None:
-            body["data_token"] = self.data_token
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> PendingStatus:
-        """Deserializes the PendingStatus from a dictionary."""
-        return cls(data_token=d.get("data_token", None))
-
-
-@dataclass
-class PollQueryStatusResponse:
-    data: Optional[List[PollQueryStatusResponseData]] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.data:
-            body["data"] = [v.as_dict() for v in self.data]
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.data:
-            body["data"] = self.data
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponse:
-        """Deserializes the PollQueryStatusResponse from a dictionary."""
-        return cls(data=_repeated_dict(d, "data", PollQueryStatusResponseData))
-
-
-@dataclass
-class PollQueryStatusResponseData:
-    status: QueryResponseStatus
-
-    def as_dict(self) -> dict:
-        """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.status:
-            body["status"] = self.status.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.status:
-            body["status"] = self.status
-        return body
+        return cls(display_name=d.get('display_name', None), parent_path=d.get('parent_path', None), source_dashboard_id=d.get('source_dashboard_id', None), update_parameter_syntax=d.get('update_parameter_syntax', None))
+
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> PollQueryStatusResponseData:
-        """Deserializes the PollQueryStatusResponseData from a dictionary."""
-        return cls(status=_from_dict(d, "status", QueryResponseStatus))

 @dataclass
 class PublishRequest:
     dashboard_id: Optional[str] = None
     """UUID identifying the dashboard to be published."""
-
+
     embed_credentials: Optional[bool] = None
     """Flag to indicate if the publisher's credentials should be embedded in the published dashboard.
     These embedded credentials will be used to execute the published dashboard's queries."""
-
+
     warehouse_id: Optional[str] = None
     """The ID of the warehouse that can be used to override the warehouse which was set in the
     draft."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PublishRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.embed_credentials is not None:
-            body["embed_credentials"] = self.embed_credentials
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PublishRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.embed_credentials is not None:
-            body["embed_credentials"] = self.embed_credentials
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PublishRequest:
         """Deserializes the PublishRequest from a dictionary."""
-        return cls(
-            dashboard_id=d.get("dashboard_id", None),
-            embed_credentials=d.get("embed_credentials", None),
-            warehouse_id=d.get("warehouse_id", None),
-        )
+        return cls(dashboard_id=d.get('dashboard_id', None), embed_credentials=d.get('embed_credentials', None), warehouse_id=d.get('warehouse_id', None))
+

 @dataclass
 class PublishedDashboard:
     display_name: Optional[str] = None
     """The display name of the published dashboard."""
-
+
     embed_credentials: Optional[bool] = None
     """Indicates whether credentials are embedded in the published dashboard."""
-
+
     revision_create_time: Optional[str] = None
     """The timestamp of when the published dashboard was last revised."""
-
+
     warehouse_id: Optional[str] = None
     """The warehouse ID used to run the published dashboard."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PublishedDashboard into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.embed_credentials is not None:
-            body["embed_credentials"] = self.embed_credentials
-        if self.revision_create_time is not None:
-            body["revision_create_time"] = self.revision_create_time
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PublishedDashboard into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.embed_credentials is not None:
-            body["embed_credentials"] = self.embed_credentials
-        if self.revision_create_time is not None:
-            body["revision_create_time"] = self.revision_create_time
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PublishedDashboard:
         """Deserializes the PublishedDashboard from a dictionary."""
-        return cls(
-            display_name=d.get("display_name", None),
-            embed_credentials=d.get("embed_credentials", None),
-            revision_create_time=d.get("revision_create_time", None),
-            warehouse_id=d.get("warehouse_id", None),
-        )
-
-
-@dataclass
-class QueryResponseStatus:
-    canceled: Optional[Empty] = None
-    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
-    firm right now."""
-
-    closed: Optional[Empty] = None
-    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
-    firm right now."""
-
-    pending: Optional[PendingStatus] = None
-
-    statement_id: Optional[str] = None
-    """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be
-    identical to data_token in SuccessStatus and PendingStatus. This field is created for audit
-    logging purpose to record the statement_id of all QueryResponseStatus."""
-
-    success: Optional[SuccessStatus] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.canceled:
-            body["canceled"] = self.canceled.as_dict()
-        if self.closed:
-            body["closed"] = self.closed.as_dict()
-        if self.pending:
-            body["pending"] = self.pending.as_dict()
-        if self.statement_id is not None:
-            body["statement_id"] = self.statement_id
-        if self.success:
-            body["success"] = self.success.as_dict()
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.canceled:
-            body["canceled"] = self.canceled
-        if self.closed:
-            body["closed"] = self.closed
-        if self.pending:
-            body["pending"] = self.pending
-        if self.statement_id is not None:
-            body["statement_id"] = self.statement_id
-        if self.success:
-            body["success"] = self.success
-        return body
+
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> QueryResponseStatus:
-        """Deserializes the QueryResponseStatus from a dictionary."""
-        return cls(
-            canceled=_from_dict(d, "canceled", Empty),
-            closed=_from_dict(d, "closed", Empty),
-            pending=_from_dict(d, "pending", PendingStatus),
-            statement_id=d.get("statement_id", None),
-            success=_from_dict(d, "success", SuccessStatus),
-        )

 @dataclass
 class Result:
     is_truncated: Optional[bool] = None
     """If result is truncated"""
-
+
     row_count: Optional[int] = None
     """Row count of the result"""
-
+
     statement_id: Optional[str] = None
     """Statement Execution API statement id. Use [Get status, manifest, and result first
     chunk](:method:statementexecution/getstatement) to get the full result data."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Result into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.is_truncated is not None:
-            body["is_truncated"] = self.is_truncated
-        if self.row_count is not None:
-            body["row_count"] = self.row_count
-        if self.statement_id is not None:
-            body["statement_id"] = self.statement_id
+        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Result into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.is_truncated is not None:
-            body["is_truncated"] = self.is_truncated
-        if self.row_count is not None:
-            body["row_count"] = self.row_count
-        if self.statement_id is not None:
-            body["statement_id"] = self.statement_id
+        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Result:
         """Deserializes the Result from a dictionary."""
-        return cls(
-            is_truncated=d.get("is_truncated", None),
-            row_count=d.get("row_count", None),
-            statement_id=d.get("statement_id", None),
-        )
+        return cls(is_truncated=d.get('is_truncated', None), row_count=d.get('row_count', None), statement_id=d.get('statement_id', None))
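All of the generated dataclasses in this module share the same serialization contract: `as_dict`
drops `None` fields and recursively serializes nested types, `as_shallow_dict` leaves nested
objects unserialized, and `from_dict` tolerates missing keys. A quick round-trip sketch using
`Result` (the field values are illustrative):

    from databricks.sdk.service.dashboards import Result

    r = Result(is_truncated=False, row_count=42,
               statement_id="01eef5da-c56e-1f36-bafa-21906587d6ba")
    body = r.as_dict()                       # False survives: only None fields are dropped
    assert Result.from_dict(body) == r       # from_dict() inverts as_dict()
    assert Result.from_dict({}) == Result()  # missing keys deserialize to None defaults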
 @dataclass
 class Schedule:
     cron_schedule: CronSchedule
     """The cron expression describing the frequency of the periodic refresh for this schedule."""
-
+
     create_time: Optional[str] = None
     """A timestamp indicating when the schedule was created."""
-
+
     dashboard_id: Optional[str] = None
     """UUID identifying the dashboard to which the schedule belongs."""
-
+
     display_name: Optional[str] = None
     """The display name for schedule."""
-
+
     etag: Optional[str] = None
     """The etag for the schedule. Must be left empty on create, must be provided on updates to ensure
     that the schedule has not been modified since the last read, and can be optionally provided on
     delete."""
-
+
     pause_status: Optional[SchedulePauseStatus] = None
     """The status indicates whether this schedule is paused or not."""
-
+
     schedule_id: Optional[str] = None
     """UUID identifying the schedule."""
-
+
     update_time: Optional[str] = None
     """A timestamp indicating when the schedule was last updated."""
-
+
     warehouse_id: Optional[str] = None
     """The warehouse id to run the dashboard with for the schedule."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Schedule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None:
-            body["create_time"] = self.create_time
-        if self.cron_schedule:
-            body["cron_schedule"] = self.cron_schedule.as_dict()
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.etag is not None:
-            body["etag"] = self.etag
-        if self.pause_status is not None:
-            body["pause_status"] = self.pause_status.value
-        if self.schedule_id is not None:
-            body["schedule_id"] = self.schedule_id
-        if self.update_time is not None:
-            body["update_time"] = self.update_time
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict()
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Schedule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None:
-            body["create_time"] = self.create_time
-        if self.cron_schedule:
-            body["cron_schedule"] = self.cron_schedule
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.etag is not None:
-            body["etag"] = self.etag
-        if self.pause_status is not None:
-            body["pause_status"] = self.pause_status
-        if self.schedule_id is not None:
-            body["schedule_id"] = self.schedule_id
-        if self.update_time is not None:
-            body["update_time"] = self.update_time
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Schedule:
         """Deserializes the Schedule from a dictionary."""
-        return cls(
-            create_time=d.get("create_time", None),
-            cron_schedule=_from_dict(d, "cron_schedule", CronSchedule),
-            dashboard_id=d.get("dashboard_id", None),
-            display_name=d.get("display_name", None),
-            etag=d.get("etag", None),
-            pause_status=_enum(d, "pause_status", SchedulePauseStatus),
-            schedule_id=d.get("schedule_id", None),
-            update_time=d.get("update_time", None),
-            warehouse_id=d.get("warehouse_id", None),
-        )
-
+        return cls(create_time=d.get('create_time', None), cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule), dashboard_id=d.get('dashboard_id', None), display_name=d.get('display_name', None), etag=d.get('etag', None), pause_status=_enum(d, 'pause_status', SchedulePauseStatus), schedule_id=d.get('schedule_id', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None))

-class SchedulePauseStatus(Enum):
-    PAUSED = "PAUSED"
-    UNPAUSED = "UNPAUSED"
+class SchedulePauseStatus(Enum):
+
+    PAUSED = 'PAUSED'
+    UNPAUSED = 'UNPAUSED'
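`cron_schedule` is the only required field; everything else is either optional or server-populated.
A sketch of building a request body for the `create_schedule` call defined later in this file,
assuming `CronSchedule` keeps the `quartz_cron_expression`/`timezone_id` fields it has elsewhere
in this module (the expression below is illustrative):

    from databricks.sdk.service.dashboards import CronSchedule, Schedule, SchedulePauseStatus

    schedule = Schedule(
        cron_schedule=CronSchedule(
            quartz_cron_expression="0 0 8 * * ?",  # every day at 08:00
            timezone_id="Europe/Amsterdam",
        ),
        display_name="daily refresh",
        pause_status=SchedulePauseStatus.UNPAUSED,
    )
    # Server-populated fields (schedule_id, create_time, update_time, etag) stay unset,
    # and as_dict() omits them because they are None.
    assert "schedule_id" not in schedule.as_dict()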
 @dataclass
 class Subscriber:
     destination_subscriber: Optional[SubscriptionSubscriberDestination] = None
     """The destination to receive the subscription email. This parameter is mutually exclusive with
     `user_subscriber`."""
-
+
     user_subscriber: Optional[SubscriptionSubscriberUser] = None
     """The user to receive the subscription email. This parameter is mutually exclusive with
     `destination_subscriber`."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Subscriber into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_subscriber:
-            body["destination_subscriber"] = self.destination_subscriber.as_dict()
-        if self.user_subscriber:
-            body["user_subscriber"] = self.user_subscriber.as_dict()
+        if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber.as_dict()
+        if self.user_subscriber: body['user_subscriber'] = self.user_subscriber.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Subscriber into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_subscriber:
-            body["destination_subscriber"] = self.destination_subscriber
-        if self.user_subscriber:
-            body["user_subscriber"] = self.user_subscriber
+        if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber
+        if self.user_subscriber: body['user_subscriber'] = self.user_subscriber
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Subscriber:
         """Deserializes the Subscriber from a dictionary."""
-        return cls(
-            destination_subscriber=_from_dict(d, "destination_subscriber", SubscriptionSubscriberDestination),
-            user_subscriber=_from_dict(d, "user_subscriber", SubscriptionSubscriberUser),
-        )
+        return cls(destination_subscriber=_from_dict(d, 'destination_subscriber', SubscriptionSubscriberDestination), user_subscriber=_from_dict(d, 'user_subscriber', SubscriptionSubscriberUser))
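Exactly one of the two fields should be set, since the API treats them as mutually exclusive. A
short sketch using the subscriber dataclasses defined just below (IDs are placeholders):

    from databricks.sdk.service.dashboards import (Subscriber, SubscriptionSubscriberDestination,
                                                   SubscriptionSubscriberUser)

    # Subscribe a workspace user...
    by_user = Subscriber(user_subscriber=SubscriptionSubscriberUser(user_id=1234567890))
    # ...or a notification destination, but never both on the same Subscriber.
    by_destination = Subscriber(
        destination_subscriber=SubscriptionSubscriberDestination(
            destination_id="<destination-uuid>"))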
Optional[str] = None """The etag for the subscription. Must be left empty on create, can be optionally provided on delete to ensure that the subscription has not been deleted since the last read.""" - + schedule_id: Optional[str] = None """UUID identifying the schedule to which the subscription belongs.""" - + subscription_id: Optional[str] = None """UUID identifying the subscription.""" - + update_time: Optional[str] = None """A timestamp indicating when the subscription was last updated.""" - + def as_dict(self) -> dict: """Serializes the Subscription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.created_by_user_id is not None: - body["created_by_user_id"] = self.created_by_user_id - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.etag is not None: - body["etag"] = self.etag - if self.schedule_id is not None: - body["schedule_id"] = self.schedule_id - if self.subscriber: - body["subscriber"] = self.subscriber.as_dict() - if self.subscription_id is not None: - body["subscription_id"] = self.subscription_id - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.etag is not None: body['etag'] = self.etag + if self.schedule_id is not None: body['schedule_id'] = self.schedule_id + if self.subscriber: body['subscriber'] = self.subscriber.as_dict() + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the Subscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.created_by_user_id is not None: - body["created_by_user_id"] = self.created_by_user_id - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.etag is not None: - body["etag"] = self.etag - if self.schedule_id is not None: - body["schedule_id"] = self.schedule_id - if self.subscriber: - body["subscriber"] = self.subscriber - if self.subscription_id is not None: - body["subscription_id"] = self.subscription_id - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.etag is not None: body['etag'] = self.etag + if self.schedule_id is not None: body['schedule_id'] = self.schedule_id + if self.subscriber: body['subscriber'] = self.subscriber + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Subscription: """Deserializes the Subscription from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - created_by_user_id=d.get("created_by_user_id", None), - dashboard_id=d.get("dashboard_id", None), - etag=d.get("etag", None), - schedule_id=d.get("schedule_id", 
None), - subscriber=_from_dict(d, "subscriber", Subscriber), - subscription_id=d.get("subscription_id", None), - update_time=d.get("update_time", None), - ) + return cls(create_time=d.get('create_time', None), created_by_user_id=d.get('created_by_user_id', None), dashboard_id=d.get('dashboard_id', None), etag=d.get('etag', None), schedule_id=d.get('schedule_id', None), subscriber=_from_dict(d, 'subscriber', Subscriber), subscription_id=d.get('subscription_id', None), update_time=d.get('update_time', None)) + + @dataclass class SubscriptionSubscriberDestination: destination_id: str """The canonical identifier of the destination to receive email notification.""" - + def as_dict(self) -> dict: """Serializes the SubscriptionSubscriberDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id + if self.destination_id is not None: body['destination_id'] = self.destination_id return body def as_shallow_dict(self) -> dict: """Serializes the SubscriptionSubscriberDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id + if self.destination_id is not None: body['destination_id'] = self.destination_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberDestination: """Deserializes the SubscriptionSubscriberDestination from a dictionary.""" - return cls(destination_id=d.get("destination_id", None)) + return cls(destination_id=d.get('destination_id', None)) + + @dataclass class SubscriptionSubscriberUser: user_id: int """UserId of the subscriber.""" - + def as_dict(self) -> dict: """Serializes the SubscriptionSubscriberUser into a dictionary suitable for use as a JSON request body.""" body = {} - if self.user_id is not None: - body["user_id"] = self.user_id + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the SubscriptionSubscriberUser into a shallow dictionary of its immediate attributes.""" body = {} - if self.user_id is not None: - body["user_id"] = self.user_id + if self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberUser: """Deserializes the SubscriptionSubscriberUser from a dictionary.""" - return cls(user_id=d.get("user_id", None)) - - -@dataclass -class SuccessStatus: - data_token: str - """The token to poll for result asynchronously Example: - EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ""" - - truncated: Optional[bool] = None - """Whether the query result is truncated (either by byte limit or row limit)""" - - def as_dict(self) -> dict: - """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data_token is not None: - body["data_token"] = self.data_token - if self.truncated is not None: - body["truncated"] = self.truncated - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data_token is not None: - body["data_token"] = self.data_token - if self.truncated is not None: - body["truncated"] = self.truncated - return body + return cls(user_id=d.get('user_id', None)) + - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SuccessStatus: - """Deserializes the SuccessStatus 
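The `etag` fields on `Schedule` and `Subscription` implement optimistic concurrency: pass the etag
from your last read so a delete fails if the object changed in between. A sketch against the
`LakeviewAPI` methods defined later in this file (IDs are placeholders):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    dashboard_id, schedule_id = "<dashboard-uuid>", "<schedule-uuid>"

    schedule = w.lakeview.get_schedule(dashboard_id=dashboard_id, schedule_id=schedule_id)
    # Delete only if the schedule is still at the revision we just read.
    w.lakeview.delete_schedule(dashboard_id=dashboard_id, schedule_id=schedule_id,
                               etag=schedule.etag)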
from a dictionary.""" - return cls(data_token=d.get("data_token", None), truncated=d.get("truncated", None)) @dataclass class TextAttachment: content: Optional[str] = None """AI generated message""" - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.id is not None: - body["id"] = self.id + if self.content is not None: body['content'] = self.content + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the TextAttachment into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.id is not None: - body["id"] = self.id + if self.content is not None: body['content'] = self.content + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TextAttachment: """Deserializes the TextAttachment from a dictionary.""" - return cls(content=d.get("content", None), id=d.get("id", None)) + return cls(content=d.get('content', None), id=d.get('id', None)) + + + + + @dataclass @@ -2006,6 +1501,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TrashDashboardResponse: """Deserializes the TrashDashboardResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -2024,6 +1524,16 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UnpublishDashboardResponse: """Deserializes the UnpublishDashboardResponse from a dictionary.""" return cls() + + + + + + + + + + class GenieAPI: @@ -2031,98 +1541,95 @@ class GenieAPI: business users can use to ask questions using natural language. Genie uses data registered to Unity Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks Assistant must be enabled.""" - + def __init__(self, api_client): self._api = api_client + - def wait_get_message_genie_completed( - self, - conversation_id: str, - message_id: str, - space_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[GenieMessage], None]] = None, - ) -> GenieMessage: - deadline = time.time() + timeout.total_seconds() - target_states = (MessageStatus.COMPLETED,) - failure_states = (MessageStatus.FAILED,) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) - status = poll.status - status_message = f"current status: {status}" - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach COMPLETED, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]: - """Create conversation message. 
+ + + + def wait_get_message_genie_completed(self, conversation_id: str, message_id: str, space_id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage: + deadline = time.time() + timeout.total_seconds() + target_states = (MessageStatus.COMPLETED, ) + failure_states = (MessageStatus.FAILED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach COMPLETED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + def create_message(self + , space_id: str, conversation_id: str, content: str + ) -> Wait[GenieMessage]: + """Create conversation message. + Create new message in a [conversation](:method:genie/startconversation). The AI response uses all previously created messages in the conversation to respond. - + :param space_id: str The ID associated with the Genie space where the conversation is started. :param conversation_id: str The ID associated with the conversation. :param content: str User message content. - + :returns: Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details. """ body = {} - if content is not None: - body["content"] = content - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do( - "POST", - f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages", - body=body, - headers=headers, - ) - return Wait( - self.wait_get_message_genie_completed, - response=GenieMessage.from_dict(op_response), - conversation_id=conversation_id, - message_id=op_response["message_id"], - space_id=space_id, - ) - - def create_message_and_wait( - self, space_id: str, conversation_id: str, content: str, timeout=timedelta(minutes=20) - ) -> GenieMessage: - return self.create_message(content=content, conversation_id=conversation_id, space_id=space_id).result( - timeout=timeout - ) - - def execute_message_attachment_query( - self, space_id: str, conversation_id: str, message_id: str, attachment_id: str - ) -> GenieGetMessageQueryResultResponse: - """Execute message attachment SQL query. 
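Together with `start_conversation_and_wait` further down in this class, this gives a blocking
conversation loop. A sketch, assuming a configured `WorkspaceClient`; the space ID and question
text are placeholders, and `GenieMessage` is assumed to carry its `conversation_id` as defined
earlier in this file:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    first = w.genie.start_conversation_and_wait(
        space_id="<space-id>", content="Which region had the most orders last week?")
    # Follow-up messages reuse the conversation, so Genie keeps the context.
    followup = w.genie.create_message_and_wait(
        space_id="<space-id>", conversation_id=first.conversation_id,
        content="Break that down by product line.")
    print(followup.status)  # MessageStatus.COMPLETED on success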

+
+
+    def execute_message_attachment_query(self
+        , space_id: str, conversation_id: str, message_id: str, attachment_id: str
+    ) -> GenieGetMessageQueryResultResponse:
+        """Execute message attachment SQL query.
+
     Execute the SQL for a message query attachment. Use this API when the query attachment has expired
     and needs to be re-executed.
-
+
     :param space_id: str
       Genie space ID
     :param conversation_id: str
@@ -2131,60 +1638,62 @@ def execute_message_attachment_query(
       Message ID
     :param attachment_id: str
       Attachment ID
-
+
     :returns: :class:`GenieGetMessageQueryResultResponse`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "POST",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/execute-query",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/execute-query'
+
+        , headers=headers
+        )
         return GenieGetMessageQueryResultResponse.from_dict(res)

-    def execute_message_query(
-        self, space_id: str, conversation_id: str, message_id: str
-    ) -> GenieGetMessageQueryResultResponse:
-        """[Deprecated] Execute SQL query in a conversation message.
+
+
+    def execute_message_query(self
+        , space_id: str, conversation_id: str, message_id: str
+    ) -> GenieGetMessageQueryResultResponse:
+        """[Deprecated] Execute SQL query in a conversation message.
+
     Execute the SQL query in the message.
-
+
     :param space_id: str
       Genie space ID
     :param conversation_id: str
      Conversation ID
     :param message_id: str
      Message ID
-
+
     :returns: :class:`GenieGetMessageQueryResultResponse`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "POST",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query'
+
+        , headers=headers
+        )
        return GenieGetMessageQueryResultResponse.from_dict(res)
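The attachment-scoped variant above supersedes this deprecated method; migrating is mostly a
matter of passing an attachment ID, which a completed message carries on its attachments. A
sketch — `attachments`, `attachment_id`, and the message `id` field are assumed from the
`GenieMessage`/`GenieAttachment` dataclasses defined earlier in this file:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    msg = w.genie.create_message_and_wait(space_id="<space-id>",
                                          conversation_id="<conversation-id>",
                                          content="How many active users do we have?")

    for attachment in msg.attachments or []:
        # Deprecated: w.genie.execute_message_query(space_id, conversation_id, message_id)
        # Preferred: scope the re-execution to a single query attachment.
        result = w.genie.execute_message_attachment_query(
            space_id="<space-id>", conversation_id=msg.conversation_id,
            message_id=msg.id, attachment_id=attachment.attachment_id)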

+
+
-    def generate_download_full_query_result(
-        self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
-    ) -> GenieGenerateDownloadFullQueryResultResponse:
-        """Generate full query result download.
+    def generate_download_full_query_result(self
+        , space_id: str, conversation_id: str, message_id: str, attachment_id: str
+    ) -> GenieGenerateDownloadFullQueryResultResponse:
+        """Generate full query result download.
+
     Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of
     the download. The query result is stored in an external link and can be retrieved using the [Get
     Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks
     strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS`
     disposition. See [Execute Statement](:method:statementexecution/executestatement) for more details.
-
+
     :param space_id: str
       Genie space ID
     :param conversation_id: str
@@ -2193,26 +1702,27 @@ def generate_download_full_query_result(
      Message ID
     :param attachment_id: str
      Attachment ID
-
+
     :returns: :class:`GenieGenerateDownloadFullQueryResultResponse`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "POST",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads'
+
+        , headers=headers
+        )
         return GenieGenerateDownloadFullQueryResultResponse.from_dict(res)

-    def get_download_full_query_result(
-        self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str
-    ) -> GenieGetDownloadFullQueryResultResponse:
-        """Get download full query result.
+
+
+    def get_download_full_query_result(self
+        , space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str
+    ) -> GenieGetDownloadFullQueryResultResponse:
+        """Get download full query result.
+
     After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and
     successfully receiving a `download_id`, use this API to poll the download progress. When the
     download is complete, the API returns one or more external links to the query result files. Warning:
     Databricks
@@ -2220,7 +1730,7 @@ def get_download_full_query_result(
     You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS`
     disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute
     Statement](:method:statementexecution/executestatement) for more details.
-
+
     :param space_id: str
       Genie space ID
     :param conversation_id: str
@@ -2232,55 +1742,59 @@ def get_download_full_query_result(
     :param download_id: str
      Download ID. This ID is provided by the [Generate Download
      endpoint](:method:genie/generateDownloadFullQueryResult)
-
+
     :returns: :class:`GenieGetDownloadFullQueryResultResponse`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}'
+
+        , headers=headers
+        )
        return GenieGetDownloadFullQueryResultResponse.from_dict(res)
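These two endpoints form a generate-then-poll pair. A sketch of the flow; `download_id` on the
generate response is grounded in the docstring above, but how the poll response exposes the
external links depends on the response dataclass defined elsewhere in this file, so treat the
last step as an assumption:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    ids = dict(space_id="<space-id>", conversation_id="<conversation-id>",
               message_id="<message-id>", attachment_id="<attachment-id>")

    gen = w.genie.generate_download_full_query_result(**ids)
    # Poll until the response carries the external links described above; then fetch the
    # presigned URLs WITHOUT an Authorization header, per the warning in the docstring.
    result = w.genie.get_download_full_query_result(**ids, download_id=gen.download_id)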

+
+
-    def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
-        """Get conversation message.
+    def get_message(self
+        , space_id: str, conversation_id: str, message_id: str
+    ) -> GenieMessage:
+        """Get conversation message.
+
     Get message from conversation.
-
+
     :param space_id: str
      The ID associated with the Genie space where the target conversation is located.
     :param conversation_id: str
      The ID associated with the target conversation.
     :param message_id: str
      The ID associated with the target message from the identified conversation.
-
+
     :returns: :class:`GenieMessage`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}'
+
+        , headers=headers
+        )
         return GenieMessage.from_dict(res)

-    def get_message_attachment_query_result(
-        self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
-    ) -> GenieGetMessageQueryResultResponse:
-        """Get message attachment SQL query result.
+
+
+    def get_message_attachment_query_result(self
+        , space_id: str, conversation_id: str, message_id: str, attachment_id: str
+    ) -> GenieGetMessageQueryResultResponse:
+        """Get message attachment SQL query result.
+
     Get the result of SQL query if the message has a query attachment. This is only available if a
     message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
-
+
     :param space_id: str
      Genie space ID
     :param conversation_id: str
@@ -2289,58 +1803,60 @@ def get_message_attachment_query_result(
      Message ID
     :param attachment_id: str
      Attachment ID
-
+
     :returns: :class:`GenieGetMessageQueryResultResponse`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/query-result",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/query-result'
+
+        , headers=headers
+        )
         return GenieGetMessageQueryResultResponse.from_dict(res)

-    def get_message_query_result(
-        self, space_id: str, conversation_id: str, message_id: str
-    ) -> GenieGetMessageQueryResultResponse:
-        """[Deprecated] Get conversation message SQL query result.
+
+
+    def get_message_query_result(self
+        , space_id: str, conversation_id: str, message_id: str
+    ) -> GenieGetMessageQueryResultResponse:
+        """[Deprecated] Get conversation message SQL query result.
+
     Get the result of SQL query if the message has a query attachment. This is only available if a
     message has a query attachment and the message status is `EXECUTING_QUERY`.
-
+
     :param space_id: str
      Genie space ID
     :param conversation_id: str
      Conversation ID
     :param message_id: str
      Message ID
-
+
     :returns: :class:`GenieGetMessageQueryResultResponse`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result'
+
+        , headers=headers
+        )
         return GenieGetMessageQueryResultResponse.from_dict(res)

-    def get_message_query_result_by_attachment(
-        self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
-    ) -> GenieGetMessageQueryResultResponse:
-        """[Deprecated] Get conversation message SQL query result.
+
+
+    def get_message_query_result_by_attachment(self
+        , space_id: str, conversation_id: str, message_id: str, attachment_id: str
+    ) -> GenieGetMessageQueryResultResponse:
+        """[Deprecated] Get conversation message SQL query result.
+
     Get the result of SQL query if the message has a query attachment. This is only available if a
     message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`.
-
+
     :param space_id: str
      Genie space ID
     :param conversation_id: str
@@ -2349,147 +1865,212 @@ def get_message_query_result_by_attachment(
      Message ID
     :param attachment_id: str
      Attachment ID
-
+
     :returns: :class:`GenieGetMessageQueryResultResponse`
     """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET",
-            f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}'
+
+        , headers=headers
+        )
         return GenieGetMessageQueryResultResponse.from_dict(res)

-    def get_space(self, space_id: str) -> GenieSpace:
-        """Get Genie Space.
+
+
+    def get_space(self
+        , space_id: str
+    ) -> GenieSpace:
+        """Get Genie Space.
+
     Get details of a Genie Space.
-
+
     :param space_id: str
      The ID associated with the Genie space
-
+
     :returns: :class:`GenieSpace`
     """
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}'
+
+        , headers=headers
+        )
+        return GenieSpace.from_dict(res)

-        headers = {
-            "Accept": "application/json",
-        }
-        res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}", headers=headers)
-        return GenieSpace.from_dict(res)

+    def list_spaces(self
+
+        , *
+        , page_size: Optional[int] = None, page_token: Optional[str] = None) -> GenieListSpacesResponse:
+        """List Genie spaces.
+
+    Get list of Genie Spaces.
+
+    :param page_size: int (optional)
+      Maximum number of spaces to return per page
+    :param page_token: str (optional)
+      Pagination token for getting the next page of results
+
+    :returns: :class:`GenieListSpacesResponse`
+    """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET','/api/2.0/genie/spaces', query=query
+
+        , headers=headers
+        )
+        return GenieListSpacesResponse.from_dict(res)
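`list_spaces` is new in this update and, unlike the Lakeview `list` methods below, returns one
page at a time, so callers drive the pagination loop themselves. A sketch, assuming the response
carries `spaces` and `next_page_token` the way other paginated responses in this module do:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    page_token = None
    while True:
        page = w.genie.list_spaces(page_size=50, page_token=page_token)
        for space in page.spaces or []:
            print(space.space_id, space.title)
        page_token = page.next_page_token
        if not page_token:
            break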
""" body = {} - if content is not None: - body["content"] = content - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do( - "POST", f"/api/2.0/genie/spaces/{space_id}/start-conversation", body=body, headers=headers - ) - return Wait( - self.wait_get_message_genie_completed, - response=GenieStartConversationResponse.from_dict(op_response), - conversation_id=op_response["conversation_id"], - message_id=op_response["message_id"], - space_id=space_id, - ) - - def start_conversation_and_wait(self, space_id: str, content: str, timeout=timedelta(minutes=20)) -> GenieMessage: - return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) - + if content is not None: body['content'] = content + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/start-conversation', body=body + + , headers=headers + ) + return Wait(self.wait_get_message_genie_completed + , response = GenieStartConversationResponse.from_dict(op_response) + , conversation_id=op_response['conversation_id'], message_id=op_response['message_id'], space_id=space_id) + + def start_conversation_and_wait(self + , space_id: str, content: str + , + timeout=timedelta(minutes=20)) -> GenieMessage: + return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) + + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can be done with Workspace API (import, export, get-status, list, delete).""" - + def __init__(self, api_client): self._api = api_client + - def create(self, dashboard: Dashboard) -> Dashboard: - """Create dashboard. + - Create a draft dashboard. + - :param dashboard: :class:`Dashboard` + + + def create(self + , dashboard: Dashboard + ) -> Dashboard: + """Create dashboard. + + Create a draft dashboard. + + :param dashboard: :class:`Dashboard` + :returns: :class:`Dashboard` """ body = dashboard.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/lakeview/dashboards", body=body, headers=headers) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/lakeview/dashboards', body=body + + , headers=headers + ) return Dashboard.from_dict(res) - def create_schedule(self, dashboard_id: str, schedule: Schedule) -> Schedule: - """Create dashboard schedule. + + + + def create_schedule(self + , dashboard_id: str, schedule: Schedule + ) -> Schedule: + """Create dashboard schedule. + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule: :class:`Schedule` - + :returns: :class:`Schedule` """ body = schedule.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules", body=body, headers=headers) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules', body=body + + , headers=headers + ) return Schedule.from_dict(res) - def create_subscription(self, dashboard_id: str, schedule_id: str, subscription: Subscription) -> Subscription: - """Create schedule subscription. 
+ + + + def create_subscription(self + , dashboard_id: str, schedule_id: str, subscription: Subscription + ) -> Subscription: + """Create schedule subscription. + :param dashboard_id: str UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str UUID identifying the schedule to which the subscription belongs. :param subscription: :class:`Subscription` - + :returns: :class:`Subscription` """ body = subscription.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions", - body=body, - headers=headers, - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions', body=body + + , headers=headers + ) return Subscription.from_dict(res) - def delete_schedule(self, dashboard_id: str, schedule_id: str, *, etag: Optional[str] = None): - """Delete dashboard schedule. + + + + def delete_schedule(self + , dashboard_id: str, schedule_id: str + , * + , etag: Optional[str] = None): + """Delete dashboard schedule. + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str @@ -2497,29 +2078,30 @@ def delete_schedule(self, dashboard_id: str, schedule_id: str, *, etag: Optional :param etag: str (optional) The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been modified from its last retrieval. - - + + """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}", - query=query, - headers=headers, - ) - - def delete_subscription( - self, dashboard_id: str, schedule_id: str, subscription_id: str, *, etag: Optional[str] = None - ): - """Delete schedule subscription. + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}', query=query + + , headers=headers + ) + + + + + + def delete_subscription(self + , dashboard_id: str, schedule_id: str, subscription_id: str + , * + , etag: Optional[str] = None): + """Delete schedule subscription. + :param dashboard_id: str UUID identifying the dashboard which the subscription belongs. :param schedule_id: str @@ -2529,114 +2111,132 @@ def delete_subscription( :param etag: str (optional) The etag for the subscription. Can be optionally provided to ensure that the subscription has not been modified since the last read. - - + + """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}", - query=query, - headers=headers, - ) - - def get(self, dashboard_id: str) -> Dashboard: - """Get dashboard. + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}', query=query + + , headers=headers + ) + - Get a draft dashboard. + + + + def get(self + , dashboard_id: str + ) -> Dashboard: + """Get dashboard. + + Get a draft dashboard. 

+
+
+    def get(self
+        , dashboard_id: str
+    ) -> Dashboard:
+        """Get dashboard.
+
    Get a draft dashboard.
+
    :param dashboard_id: str
      UUID identifying the dashboard.
-
+
    :returns: :class:`Dashboard`
    """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}'
+
+        , headers=headers
+        )
         return Dashboard.from_dict(res)

-    def get_published(self, dashboard_id: str) -> PublishedDashboard:
-        """Get published dashboard.
+
+
+    def get_published(self
+        , dashboard_id: str
+    ) -> PublishedDashboard:
+        """Get published dashboard.
+
    Get the current published dashboard.
-
    :param dashboard_id: str
      UUID identifying the published dashboard.
-
+
    :returns: :class:`PublishedDashboard`
    """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published'
+
+        , headers=headers
+        )
         return PublishedDashboard.from_dict(res)

-    def get_schedule(self, dashboard_id: str, schedule_id: str) -> Schedule:
-        """Get dashboard schedule.
+
+
+    def get_schedule(self
+        , dashboard_id: str, schedule_id: str
+    ) -> Schedule:
+        """Get dashboard schedule.
+
    :param dashboard_id: str
      UUID identifying the dashboard to which the schedule belongs.
    :param schedule_id: str
      UUID identifying the schedule.
-
+
    :returns: :class:`Schedule`
    """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}", headers=headers
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}'
+
+        , headers=headers
+        )
         return Schedule.from_dict(res)

-    def get_subscription(self, dashboard_id: str, schedule_id: str, subscription_id: str) -> Subscription:
-        """Get schedule subscription.
+
+
+    def get_subscription(self
+        , dashboard_id: str, schedule_id: str, subscription_id: str
+    ) -> Subscription:
+        """Get schedule subscription.
+
    :param dashboard_id: str
      UUID identifying the dashboard which the subscription belongs.
    :param schedule_id: str
      UUID identifying the schedule which the subscription belongs.
    :param subscription_id: str
      UUID identifying the subscription.
-
+
    :returns: :class:`Subscription`
    """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do(
-            "GET",
-            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}",
-            headers=headers,
-        )
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}'
+
+        , headers=headers
+        )
         return Subscription.from_dict(res)

-    def list(
-        self,
-        *,
-        page_size: Optional[int] = None,
-        page_token: Optional[str] = None,
-        show_trashed: Optional[bool] = None,
-        view: Optional[DashboardView] = None,
-    ) -> Iterator[Dashboard]:
-        """List dashboards.
+
+
+    def list(self
+
+        , *
+        , page_size: Optional[int] = None, page_token: Optional[str] = None, show_trashed: Optional[bool] = None, view: Optional[DashboardView] = None) -> Iterator[Dashboard]:
+        """List dashboards.
+
    :param page_size: int (optional)
      The number of dashboards to return per page.
    :param page_token: str (optional)
@@ -2647,37 +2247,42 @@ def list(
      returned.
    :param view: :class:`DashboardView` (optional)
      `DASHBOARD_VIEW_BASIC`only includes summary metadata from the dashboard.
-
+
    :returns: Iterator over :class:`Dashboard`
    """
-
        query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        if show_trashed is not None:
-            query["show_trashed"] = show_trashed
-        if view is not None:
-            query["view"] = view.value
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if show_trashed is not None: query['show_trashed'] = show_trashed
+        if view is not None: query['view'] = view.value
+        headers = {'Accept': 'application/json',}
+
+
        while True:
-            json = self._api.do("GET", "/api/2.0/lakeview/dashboards", query=query, headers=headers)
-            if "dashboards" in json:
-                for v in json["dashboards"]:
-                    yield Dashboard.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def list_schedules(
-        self, dashboard_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[Schedule]:
-        """List dashboard schedules.
+            json = self._api.do('GET','/api/2.0/lakeview/dashboards', query=query
+
+            , headers=headers
+            )
+            if 'dashboards' in json:
+                for v in json['dashboards']:
+                    yield Dashboard.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+    def list_schedules(self
+        , dashboard_id: str
+        , *
+        , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Schedule]:
+        """List dashboard schedules.
+
    :param dashboard_id: str
      UUID identifying the dashboard to which the schedules belongs.
    :param page_size: int (optional)
@@ -2685,35 +2290,40 @@ def list_schedules(
    :param page_token: str (optional)
      A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent
      page.
-
+
    :returns: Iterator over :class:`Schedule`
    """
-
        query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+
+
        while True:
-            json = self._api.do(
-                "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules", query=query, headers=headers
-            )
-            if "schedules" in json:
-                for v in json["schedules"]:
-                    yield Schedule.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
+            json = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules', query=query
+
+            , headers=headers
+            )
+            if 'schedules' in json:
+                for v in json['schedules']:
+                    yield Schedule.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+ json = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules', query=query + + , headers=headers + ) + if 'schedules' in json: + for v in json['schedules']: + yield Schedule.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def list_subscriptions(self + , dashboard_id: str, schedule_id: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Subscription]: + """List schedule subscriptions. + :param dashboard_id: str UUID identifying the dashboard which the subscriptions belongs. :param schedule_id: str @@ -2723,45 +2333,42 @@ def list_subscriptions( :param page_token: str (optional) A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent page. - + :returns: Iterator over :class:`Subscription` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions", - query=query, - headers=headers, - ) - if "subscriptions" in json: - for v in json["subscriptions"]: - yield Subscription.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def migrate( - self, - source_dashboard_id: str, - *, - display_name: Optional[str] = None, - parent_path: Optional[str] = None, - update_parameter_syntax: Optional[bool] = None, - ) -> Dashboard: - """Migrate dashboard. + json = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions', query=query + + , headers=headers + ) + if 'subscriptions' in json: + for v in json['subscriptions']: + yield Subscription.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Migrates a classic SQL dashboard to Lakeview. + + + + def migrate(self + , source_dashboard_id: str + , * + , display_name: Optional[str] = None, parent_path: Optional[str] = None, update_parameter_syntax: Optional[bool] = None) -> Dashboard: + """Migrate dashboard. + + Migrates a classic SQL dashboard to Lakeview. + :param source_dashboard_id: str UUID of the dashboard to be migrated. :param display_name: str (optional) @@ -2771,33 +2378,34 @@ def migrate( :param update_parameter_syntax: bool (optional) Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax (:param) when converting datasets in the dashboard. 
- + :returns: :class:`Dashboard` """ body = {} - if display_name is not None: - body["display_name"] = display_name - if parent_path is not None: - body["parent_path"] = parent_path - if source_dashboard_id is not None: - body["source_dashboard_id"] = source_dashboard_id - if update_parameter_syntax is not None: - body["update_parameter_syntax"] = update_parameter_syntax - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/lakeview/dashboards/migrate", body=body, headers=headers) + if display_name is not None: body['display_name'] = display_name + if parent_path is not None: body['parent_path'] = parent_path + if source_dashboard_id is not None: body['source_dashboard_id'] = source_dashboard_id + if update_parameter_syntax is not None: body['update_parameter_syntax'] = update_parameter_syntax + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/lakeview/dashboards/migrate', body=body + + , headers=headers + ) return Dashboard.from_dict(res) - def publish( - self, dashboard_id: str, *, embed_credentials: Optional[bool] = None, warehouse_id: Optional[str] = None - ) -> PublishedDashboard: - """Publish dashboard. + + + + def publish(self + , dashboard_id: str + , * + , embed_credentials: Optional[bool] = None, warehouse_id: Optional[str] = None) -> PublishedDashboard: + """Publish dashboard. + Publish the current draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard to be published. :param embed_credentials: bool (optional) @@ -2805,244 +2413,173 @@ def publish( embedded credentials will be used to execute the published dashboard's queries. :param warehouse_id: str (optional) The ID of the warehouse that can be used to override the warehouse which was set in the draft. - + :returns: :class:`PublishedDashboard` """ body = {} - if embed_credentials is not None: - body["embed_credentials"] = embed_credentials - if warehouse_id is not None: - body["warehouse_id"] = warehouse_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", body=body, headers=headers) + if embed_credentials is not None: body['embed_credentials'] = embed_credentials + if warehouse_id is not None: body['warehouse_id'] = warehouse_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', body=body + + , headers=headers + ) return PublishedDashboard.from_dict(res) - def trash(self, dashboard_id: str): - """Trash dashboard. + + + + def trash(self + , dashboard_id: str + ): + """Trash dashboard. + Trash a dashboard. - + :param dashboard_id: str UUID identifying the dashboard. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}", headers=headers) + + + - def unpublish(self, dashboard_id: str): + def unpublish(self + , dashboard_id: str + ): """Unpublish dashboard. - + Unpublish the dashboard. - + :param dashboard_id: str UUID identifying the published dashboard. 
- - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", headers=headers) + + + - def update(self, dashboard_id: str, dashboard: Dashboard) -> Dashboard: + def update(self + , dashboard_id: str, dashboard: Dashboard + ) -> Dashboard: """Update dashboard. - + Update a draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard. :param dashboard: :class:`Dashboard` - + :returns: :class:`Dashboard` """ body = dashboard.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/lakeview/dashboards/{dashboard_id}", body=body, headers=headers) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/lakeview/dashboards/{dashboard_id}', body=body + + , headers=headers + ) return Dashboard.from_dict(res) - def update_schedule(self, dashboard_id: str, schedule_id: str, schedule: Schedule) -> Schedule: - """Update dashboard schedule. + + + + def update_schedule(self + , dashboard_id: str, schedule_id: str, schedule: Schedule + ) -> Schedule: + """Update dashboard schedule. + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str UUID identifying the schedule. :param schedule: :class:`Schedule` - + :returns: :class:`Schedule` """ body = schedule.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}", body=body, headers=headers - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}', body=body + + , headers=headers + ) return Schedule.from_dict(res) - + + class LakeviewEmbeddedAPI: """Token-based Lakeview APIs for embedding dashboards in external applications.""" - + def __init__(self, api_client): self._api = api_client + - def get_published_dashboard_embedded(self, dashboard_id: str): - """Read a published dashboard in an embedded ui. - - Get the current published dashboard within an embedded context. - - :param dashboard_id: str - UUID identifying the published dashboard. - - - """ + - headers = { - "Accept": "application/json", - } + - self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded", headers=headers) + + - def get_published_dashboard_token_info( - self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None - ) -> GetPublishedDashboardTokenInfoResponse: + def get_published_dashboard_token_info(self + , dashboard_id: str + , * + , external_value: Optional[str] = None, external_viewer_id: Optional[str] = None) -> GetPublishedDashboardTokenInfoResponse: """Read an information of a published dashboard to mint an OAuth token. - + Get a required authorization details and scopes of a published dashboard to mint an OAuth token. The `authorization_details` can be enriched to apply additional restriction. 
- + Example: Adding the following `authorization_details` object to downscope the viewer permission to specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], object_type: "TABLE", object_full_path: "main.default.testdata" } ``` - + :param dashboard_id: str UUID identifying the published dashboard. :param external_value: str (optional) Provided external value to be included in the custom claim. :param external_viewer_id: str (optional) Provided external viewer id to be included in the custom claim. - + :returns: :class:`GetPublishedDashboardTokenInfoResponse` """ - + query = {} - if external_value is not None: - query["external_value"] = external_value - if external_viewer_id is not None: - query["external_viewer_id"] = external_viewer_id - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/tokeninfo", query=query, headers=headers - ) + if external_value is not None: query['external_value'] = external_value + if external_viewer_id is not None: query['external_viewer_id'] = external_viewer_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published/tokeninfo', query=query + + , headers=headers + ) return GetPublishedDashboardTokenInfoResponse.from_dict(res) - -class QueryExecutionAPI: - """Query execution APIs for AI / BI Dashboards""" - - def __init__(self, api_client): - self._api = api_client - - def cancel_published_query_execution( - self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None - ) -> CancelQueryExecutionResponse: - """Cancel the results for the a query for a published, embedded dashboard. - - :param dashboard_name: str - :param dashboard_revision_id: str - :param tokens: List[str] (optional) - Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - - :returns: :class:`CancelQueryExecutionResponse` - """ - - query = {} - if dashboard_name is not None: - query["dashboard_name"] = dashboard_name - if dashboard_revision_id is not None: - query["dashboard_revision_id"] = dashboard_revision_id - if tokens is not None: - query["tokens"] = [v for v in tokens] - headers = { - "Accept": "application/json", - } - - res = self._api.do("DELETE", "/api/2.0/lakeview-query/query/published", query=query, headers=headers) - return CancelQueryExecutionResponse.from_dict(res) - - def execute_published_dashboard_query( - self, dashboard_name: str, dashboard_revision_id: str, *, override_warehouse_id: Optional[str] = None - ): - """Execute a query for a published dashboard. 
- - :param dashboard_name: str - Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains the - list of datasets, warehouse_id, and embedded_credentials - :param dashboard_revision_id: str - :param override_warehouse_id: str (optional) - A dashboard schedule can override the warehouse used as compute for processing the published - dashboard queries - - - """ - body = {} - if dashboard_name is not None: - body["dashboard_name"] = dashboard_name - if dashboard_revision_id is not None: - body["dashboard_revision_id"] = dashboard_revision_id - if override_warehouse_id is not None: - body["override_warehouse_id"] = override_warehouse_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/lakeview-query/query/published", body=body, headers=headers) - - def poll_published_query_status( - self, dashboard_name: str, dashboard_revision_id: str, *, tokens: Optional[List[str]] = None - ) -> PollQueryStatusResponse: - """Poll the results for the a query for a published, embedded dashboard. - - :param dashboard_name: str - :param dashboard_revision_id: str - :param tokens: List[str] (optional) - Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ - - :returns: :class:`PollQueryStatusResponse` - """ - - query = {} - if dashboard_name is not None: - query["dashboard_name"] = dashboard_name - if dashboard_revision_id is not None: - query["dashboard_revision_id"] = dashboard_revision_id - if tokens is not None: - query["tokens"] = [v for v in tokens] - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/lakeview-query/query/published", query=query, headers=headers) - return PollQueryStatusResponse.from_dict(res) + + \ No newline at end of file diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py new file mode 100755 index 000000000..0d5a2c1e1 --- /dev/null +++ b/databricks/sdk/service/database.py @@ -0,0 +1,1276 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
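+
+# Illustrative usage sketch (not generated; the `w.database` accessor and the
+# workflow below are assumptions based on this spec, so verify against the
+# released SDK before relying on them):
+#
+#   from databricks.sdk import WorkspaceClient
+#   from databricks.sdk.service.database import DatabaseInstance
+#
+#   w = WorkspaceClient()
+#   instance = w.database.create_database_instance(
+#       DatabaseInstance(name="my-instance", capacity="CU_1"))
+#   cred = w.database.generate_database_credential(
+#       instance_names=[instance.name], request_id="req-0001")
+#   # `read_write_dns` may be unset until the instance reaches AVAILABLE.
+#   print(instance.read_write_dns, cred.token)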
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import timedelta
+from enum import Enum
+from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
+import time
+import random
+import logging
+import requests
+import threading
+
+from ..errors import OperationTimeout, OperationFailed
+from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter
+from ..oauth import Token
+
+_LOG = logging.getLogger('databricks.sdk')
+
+# all definitions in this file are in alphabetical order
+
+@dataclass
+class DatabaseCatalog:
+    name: str
+    """The name of the catalog in UC."""
+
+    database_instance_name: str
+    """The name of the DatabaseInstance housing the database."""
+
+    database_name: str
+    """The name of the database (in an instance) associated with the catalog."""
+
+    create_database_if_not_exists: Optional[bool] = None
+
+    uid: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseCatalog into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.create_database_if_not_exists is not None: body['create_database_if_not_exists'] = self.create_database_if_not_exists
+        if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name
+        if self.database_name is not None: body['database_name'] = self.database_name
+        if self.name is not None: body['name'] = self.name
+        if self.uid is not None: body['uid'] = self.uid
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_database_if_not_exists is not None: body['create_database_if_not_exists'] = self.create_database_if_not_exists
+        if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name
+        if self.database_name is not None: body['database_name'] = self.database_name
+        if self.name is not None: body['name'] = self.name
+        if self.uid is not None: body['uid'] = self.uid
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog:
+        """Deserializes the DatabaseCatalog from a dictionary."""
+        return cls(create_database_if_not_exists=d.get('create_database_if_not_exists', None), database_instance_name=d.get('database_instance_name', None), database_name=d.get('database_name', None), name=d.get('name', None), uid=d.get('uid', None))
+
+@dataclass
+class DatabaseCredential:
+    token: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseCredential into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.token is not None: body['token'] = self.token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token is not None: body['token'] = self.token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential:
+        """Deserializes the DatabaseCredential from a dictionary."""
+        return cls(token=d.get('token', None))
+
+@dataclass
+class DatabaseInstance:
+    """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and
+    storage."""
+
+    name: str
+    """The name of the instance. This is the unique identifier for the instance."""
+
+    capacity: Optional[str] = None
+    """The SKU of the instance.
Valid values are "CU_1", "CU_2", "CU_4", "CU_8".""" + + creation_time: Optional[str] = None + """The timestamp when the instance was created.""" + + creator: Optional[str] = None + """The email of the creator of the instance.""" + + pg_version: Optional[str] = None + """The version of Postgres running on the instance.""" + + read_write_dns: Optional[str] = None + """The DNS endpoint to connect to the instance for read+write access.""" + + state: Optional[DatabaseInstanceState] = None + """The current state of the instance.""" + + stopped: Optional[bool] = None + """Whether the instance is stopped.""" + + uid: Optional[str] = None + """An immutable UUID identifier for the instance.""" + + def as_dict(self) -> dict: + """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.capacity is not None: body['capacity'] = self.capacity + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.creator is not None: body['creator'] = self.creator + if self.name is not None: body['name'] = self.name + if self.pg_version is not None: body['pg_version'] = self.pg_version + if self.read_write_dns is not None: body['read_write_dns'] = self.read_write_dns + if self.state is not None: body['state'] = self.state.value + if self.stopped is not None: body['stopped'] = self.stopped + if self.uid is not None: body['uid'] = self.uid + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes.""" + body = {} + if self.capacity is not None: body['capacity'] = self.capacity + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.creator is not None: body['creator'] = self.creator + if self.name is not None: body['name'] = self.name + if self.pg_version is not None: body['pg_version'] = self.pg_version + if self.read_write_dns is not None: body['read_write_dns'] = self.read_write_dns + if self.state is not None: body['state'] = self.state + if self.stopped is not None: body['stopped'] = self.stopped + if self.uid is not None: body['uid'] = self.uid + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: + """Deserializes the DatabaseInstance from a dictionary.""" + return cls(capacity=d.get('capacity', None), creation_time=d.get('creation_time', None), creator=d.get('creator', None), name=d.get('name', None), pg_version=d.get('pg_version', None), read_write_dns=d.get('read_write_dns', None), state=_enum(d, 'state', DatabaseInstanceState), stopped=d.get('stopped', None), uid=d.get('uid', None)) + + + + +class DatabaseInstanceState(Enum): + + + AVAILABLE = 'AVAILABLE' + DELETING = 'DELETING' + FAILING_OVER = 'FAILING_OVER' + STARTING = 'STARTING' + STOPPED = 'STOPPED' + UPDATING = 'UPDATING' + +@dataclass +class DatabaseTable: + """Next field marker: 13""" + + name: str + """Full three-part (catalog, schema, table) name of the table.""" + + database_instance_name: Optional[str] = None + """Name of the target database instance. This is required when creating database tables in standard + catalogs. This is optional when creating database tables in registered catalogs. If this field + is specified when creating database tables in registered catalogs, the database instance name + MUST match that of the registered catalog (or the request will be rejected).""" + + logical_database_name: Optional[str] = None + """Target Postgres database object (logical database) name for this table. 
This field is optional
+    in all scenarios.
+
+    When creating a table in a registered Postgres catalog, the target Postgres database name is
+    inferred to be that of the registered catalog. If this field is specified in this scenario, the
+    Postgres database name MUST match that of the registered catalog (or the request will be
+    rejected).
+
+    When creating a table in a standard catalog, the target database name is inferred to be that of
+    the standard catalog. In this scenario, specifying this field will allow targeting an arbitrary
+    Postgres database. Note that this has implications for the `create_database_objects_if_missing`
+    field in `spec`."""
+
+    table_serving_url: Optional[str] = None
+    """Data serving REST API URL for this table"""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name
+        if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name
+        if self.name is not None: body['name'] = self.name
+        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name
+        if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name
+        if self.name is not None: body['name'] = self.name
+        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable:
+        """Deserializes the DatabaseTable from a dictionary."""
+        return cls(database_instance_name=d.get('database_instance_name', None), logical_database_name=d.get('logical_database_name', None), name=d.get('name', None), table_serving_url=d.get('table_serving_url', None))
+
+@dataclass
+class DeleteDatabaseCatalogResponse:
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse:
+        """Deserializes the DeleteDatabaseCatalogResponse from a dictionary."""
+        return cls()
+
+@dataclass
+class DeleteDatabaseInstanceResponse:
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse:
+        """Deserializes the DeleteDatabaseInstanceResponse from a dictionary."""
+        return cls()
+
+@dataclass
+class DeleteDatabaseTableResponse:
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) ->
dict: + """Serializes the DeleteDatabaseTableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseTableResponse: + """Deserializes the DeleteDatabaseTableResponse from a dictionary.""" + return cls() + + + + + + + +@dataclass +class DeleteSyncedDatabaseTableResponse: + def as_dict(self) -> dict: + """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse: + """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary.""" + return cls() + + + + + + + +@dataclass +class GenerateDatabaseCredentialRequest: + """Generates a credential that can be used to access database instances""" + + instance_names: Optional[List[str]] = None + """Instances to which the token will be scoped.""" + + request_id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.instance_names: body['instance_names'] = [v for v in self.instance_names] + if self.request_id is not None: body['request_id'] = self.request_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_names: body['instance_names'] = self.instance_names + if self.request_id is not None: body['request_id'] = self.request_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest: + """Deserializes the GenerateDatabaseCredentialRequest from a dictionary.""" + return cls(instance_names=d.get('instance_names', None), request_id=d.get('request_id', None)) + + + + + + + + + + + + + + + + + + + +@dataclass +class ListDatabaseInstancesResponse: + database_instances: Optional[List[DatabaseInstance]] = None + """List of instances.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of instances.""" + + def as_dict(self) -> dict: + """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_instances: body['database_instances'] = [v.as_dict() for v in self.database_instances] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_instances: body['database_instances'] = self.database_instances + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse: + """Deserializes the ListDatabaseInstancesResponse from a dictionary.""" + return cls(database_instances=_repeated_dict(d, 'database_instances', DatabaseInstance), next_page_token=d.get('next_page_token', None)) + + + + +@dataclass +class NewPipelineSpec: + """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. 
Note that other + fields of pipeline are still inferred by table def internally""" + + storage_catalog: Optional[str] = None + """UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This + needs to be a standard catalog where the user has permissions to create Delta tables.""" + + storage_schema: Optional[str] = None + """UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs + to be in the standard catalog where the user has permissions to create Delta tables.""" + + def as_dict(self) -> dict: + """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.storage_catalog is not None: body['storage_catalog'] = self.storage_catalog + if self.storage_schema is not None: body['storage_schema'] = self.storage_schema + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.storage_catalog is not None: body['storage_catalog'] = self.storage_catalog + if self.storage_schema is not None: body['storage_schema'] = self.storage_schema + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec: + """Deserializes the NewPipelineSpec from a dictionary.""" + return cls(storage_catalog=d.get('storage_catalog', None), storage_schema=d.get('storage_schema', None)) + + + + +class ProvisioningInfoState(Enum): + + + ACTIVE = 'ACTIVE' + DEGRADED = 'DEGRADED' + DELETING = 'DELETING' + FAILED = 'FAILED' + PROVISIONING = 'PROVISIONING' + UPDATING = 'UPDATING' + +@dataclass +class SyncedDatabaseTable: + """Next field marker: 12""" + + name: str + """Full three-part (catalog, schema, table) name of the table.""" + + data_synchronization_status: Optional[SyncedTableStatus] = None + """Synced Table data synchronization status""" + + database_instance_name: Optional[str] = None + """Name of the target database instance. This is required when creating synced database tables in + standard catalogs. This is optional when creating synced database tables in registered catalogs. + If this field is specified when creating synced database tables in registered catalogs, the + database instance name MUST match that of the registered catalog (or the request will be + rejected).""" + + logical_database_name: Optional[str] = None + """Target Postgres database object (logical database) name for this table. This field is optional + in all scenarios. + + When creating a synced table in a registered Postgres catalog, the target Postgres database name + is inferred to be that of the registered catalog. If this field is specified in this scenario, + the Postgres database name MUST match that of the registered catalog (or the request will be + rejected). + + When creating a synced table in a standard catalog, the target database name is inferred to be + that of the standard catalog. In this scenario, specifying this field will allow targeting an + arbitrary postgres database.""" + + spec: Optional[SyncedTableSpec] = None + """Specification of a synced database table.""" + + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None + """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the + state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline + may be in "PROVISIONING" as it runs asynchronously).""" + + def as_dict(self) -> dict: + """Serializes the SyncedDatabaseTable into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.data_synchronization_status: body['data_synchronization_status'] = self.data_synchronization_status.as_dict() + if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name + if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name + if self.name is not None: body['name'] = self.name + if self.spec: body['spec'] = self.spec.as_dict() + if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedDatabaseTable into a shallow dictionary of its immediate attributes.""" + body = {} + if self.data_synchronization_status: body['data_synchronization_status'] = self.data_synchronization_status + if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name + if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name + if self.name is not None: body['name'] = self.name + if self.spec: body['spec'] = self.spec + if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable: + """Deserializes the SyncedDatabaseTable from a dictionary.""" + return cls(data_synchronization_status=_from_dict(d, 'data_synchronization_status', SyncedTableStatus), database_instance_name=d.get('database_instance_name', None), logical_database_name=d.get('logical_database_name', None), name=d.get('name', None), spec=_from_dict(d, 'spec', SyncedTableSpec), table_serving_url=d.get('table_serving_url', None), unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state', ProvisioningInfoState)) + + + + +@dataclass +class SyncedTableContinuousUpdateStatus: + """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + or the SYNCED_UPDATING_PIPELINE_RESOURCES state.""" + + initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None + """Progress of the initial data synchronization.""" + + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was synced to the synced table. 
Note that this Delta + version may not be completely synced to the synced table yet.""" + + timestamp: Optional[str] = None + """The timestamp of the last time any data was synchronized from the source table to the synced + table.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict() + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableContinuousUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableContinuousUpdateStatus: + """Deserializes the SyncedTableContinuousUpdateStatus from a dictionary.""" + return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', SyncedTablePipelineProgress), last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None)) + + + + +@dataclass +class SyncedTableFailedStatus: + """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + SYNCED_PIPELINE_FAILED state.""" + + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was synced to the synced table. Note that this Delta + version may only be partially synced to the synced table. Only populated if the table is still + synced and available for serving.""" + + timestamp: Optional[str] = None + """The timestamp of the last time any data was synchronized from the source table to the synced + table. 
Only populated if the table is still synced and available for serving.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableFailedStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableFailedStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableFailedStatus: + """Deserializes the SyncedTableFailedStatus from a dictionary.""" + return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None)) + + + + +@dataclass +class SyncedTablePipelineProgress: + """Progress information of the Synced Table data synchronization pipeline.""" + + estimated_completion_time_seconds: Optional[float] = None + """The estimated time remaining to complete this update in seconds.""" + + latest_version_currently_processing: Optional[int] = None + """The source table Delta version that was last processed by the pipeline. The pipeline may not + have completely processed this version yet.""" + + sync_progress_completion: Optional[float] = None + """The completion ratio of this update. This is a number between 0 and 1.""" + + synced_row_count: Optional[int] = None + """The number of rows that have been synced in this update.""" + + total_row_count: Optional[int] = None + """The total number of rows that need to be synced in this update. 
This number may be an estimate.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTablePipelineProgress into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion + if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count + if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTablePipelineProgress into a shallow dictionary of its immediate attributes.""" + body = {} + if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion + if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count + if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePipelineProgress: + """Deserializes the SyncedTablePipelineProgress from a dictionary.""" + return cls(estimated_completion_time_seconds=d.get('estimated_completion_time_seconds', None), latest_version_currently_processing=d.get('latest_version_currently_processing', None), sync_progress_completion=d.get('sync_progress_completion', None), synced_row_count=d.get('synced_row_count', None), total_row_count=d.get('total_row_count', None)) + + + + +@dataclass +class SyncedTableProvisioningStatus: + """Detailed status of a synced table. Shown if the synced table is in the + PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" + + initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None + """Details about initial data synchronization. 
Only populated when in the + PROVISIONING_INITIAL_SNAPSHOT state.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableProvisioningStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableProvisioningStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableProvisioningStatus: + """Deserializes the SyncedTableProvisioningStatus from a dictionary.""" + return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', SyncedTablePipelineProgress)) + + + + +class SyncedTableSchedulingPolicy(Enum): + + + CONTINUOUS = 'CONTINUOUS' + SNAPSHOT = 'SNAPSHOT' + TRIGGERED = 'TRIGGERED' + +@dataclass +class SyncedTableSpec: + """Specification of a synced database table.""" + + create_database_objects_if_missing: Optional[bool] = None + """If true, the synced table's logical database and schema resources in PG will be created if they + do not already exist.""" + + new_pipeline_spec: Optional[NewPipelineSpec] = None + """Spec of new pipeline. Should be empty if pipeline_id is set""" + + pipeline_id: Optional[str] = None + """ID of the associated pipeline. Should be empty if new_pipeline_spec is set""" + + primary_key_columns: Optional[List[str]] = None + """Primary Key columns to be used for data insert/update in the destination.""" + + scheduling_policy: Optional[SyncedTableSchedulingPolicy] = None + """Scheduling policy of the underlying pipeline.""" + + source_table_full_name: Optional[str] = None + """Three-part (catalog, schema, table) name of the source Delta table.""" + + timeseries_key: Optional[str] = None + """Time series key to deduplicate (tie-break) rows with the same primary key.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.create_database_objects_if_missing is not None: body['create_database_objects_if_missing'] = self.create_database_objects_if_missing + if self.new_pipeline_spec: body['new_pipeline_spec'] = self.new_pipeline_spec.as_dict() + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.primary_key_columns: body['primary_key_columns'] = [v for v in self.primary_key_columns] + if self.scheduling_policy is not None: body['scheduling_policy'] = self.scheduling_policy.value + if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name + if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.create_database_objects_if_missing is not None: body['create_database_objects_if_missing'] = self.create_database_objects_if_missing + if self.new_pipeline_spec: body['new_pipeline_spec'] = self.new_pipeline_spec + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns + if self.scheduling_policy is not None: body['scheduling_policy'] = 
self.scheduling_policy + if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name + if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec: + """Deserializes the SyncedTableSpec from a dictionary.""" + return cls(create_database_objects_if_missing=d.get('create_database_objects_if_missing', None), new_pipeline_spec=_from_dict(d, 'new_pipeline_spec', NewPipelineSpec), pipeline_id=d.get('pipeline_id', None), primary_key_columns=d.get('primary_key_columns', None), scheduling_policy=_enum(d, 'scheduling_policy', SyncedTableSchedulingPolicy), source_table_full_name=d.get('source_table_full_name', None), timeseries_key=d.get('timeseries_key', None)) + + + + +class SyncedTableState(Enum): + """The state of a synced table.""" + + SYNCED_TABLED_OFFLINE = 'SYNCED_TABLED_OFFLINE' + SYNCED_TABLE_OFFLINE_FAILED = 'SYNCED_TABLE_OFFLINE_FAILED' + SYNCED_TABLE_ONLINE = 'SYNCED_TABLE_ONLINE' + SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE = 'SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE' + SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE = 'SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE' + SYNCED_TABLE_ONLINE_PIPELINE_FAILED = 'SYNCED_TABLE_ONLINE_PIPELINE_FAILED' + SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE = 'SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE' + SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES = 'SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES' + SYNCED_TABLE_PROVISIONING = 'SYNCED_TABLE_PROVISIONING' + SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT = 'SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT' + SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES = 'SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES' + +@dataclass +class SyncedTableStatus: + """Status of a synced table.""" + + continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None + """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE + or the SYNCED_UPDATING_PIPELINE_RESOURCES state.""" + + detailed_state: Optional[SyncedTableState] = None + """The state of the synced table.""" + + failed_status: Optional[SyncedTableFailedStatus] = None + """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the + SYNCED_PIPELINE_FAILED state.""" + + message: Optional[str] = None + """A text description of the current state of the synced table.""" + + provisioning_status: Optional[SyncedTableProvisioningStatus] = None + """Detailed status of a synced table. Shown if the synced table is in the + PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" + + triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None + """Detailed status of a synced table. 
Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE + or the SYNCED_NO_PENDING_UPDATE state.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status.as_dict() + if self.detailed_state is not None: body['detailed_state'] = self.detailed_state.value + if self.failed_status: body['failed_status'] = self.failed_status.as_dict() + if self.message is not None: body['message'] = self.message + if self.provisioning_status: body['provisioning_status'] = self.provisioning_status.as_dict() + if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status + if self.detailed_state is not None: body['detailed_state'] = self.detailed_state + if self.failed_status: body['failed_status'] = self.failed_status + if self.message is not None: body['message'] = self.message + if self.provisioning_status: body['provisioning_status'] = self.provisioning_status + if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableStatus: + """Deserializes the SyncedTableStatus from a dictionary.""" + return cls(continuous_update_status=_from_dict(d, 'continuous_update_status', SyncedTableContinuousUpdateStatus), detailed_state=_enum(d, 'detailed_state', SyncedTableState), failed_status=_from_dict(d, 'failed_status', SyncedTableFailedStatus), message=d.get('message', None), provisioning_status=_from_dict(d, 'provisioning_status', SyncedTableProvisioningStatus), triggered_update_status=_from_dict(d, 'triggered_update_status', SyncedTableTriggeredUpdateStatus)) + + + + +@dataclass +class SyncedTableTriggeredUpdateStatus: + """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE + or the SYNCED_NO_PENDING_UPDATE state.""" + + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was synced to the synced table. 
Note that this Delta + version may not be completely synced to the synced table yet.""" + + timestamp: Optional[str] = None + """The timestamp of the last time any data was synchronized from the source table to the synced + table.""" + + triggered_update_progress: Optional[SyncedTablePipelineProgress] = None + """Progress of the active data synchronization pipeline.""" + + def as_dict(self) -> dict: + """Serializes the SyncedTableTriggeredUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SyncedTableTriggeredUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SyncedTableTriggeredUpdateStatus: + """Deserializes the SyncedTableTriggeredUpdateStatus from a dictionary.""" + return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None), triggered_update_progress=_from_dict(d, 'triggered_update_progress', SyncedTablePipelineProgress)) + + + + + + + + + +class DatabaseAPI: + """Database Instances provide access to a database via REST API or direct SQL.""" + + def __init__(self, api_client): + self._api = api_client + + + + + + + + + + def create_database_catalog(self + , catalog: DatabaseCatalog + ) -> DatabaseCatalog: + """Create a Database Catalog. + + :param catalog: :class:`DatabaseCatalog` + + :returns: :class:`DatabaseCatalog` + """ + body = catalog.as_dict() + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/database/catalogs', body=body + + , headers=headers + ) + return DatabaseCatalog.from_dict(res) + + + + + + def create_database_instance(self + , database_instance: DatabaseInstance + ) -> DatabaseInstance: + """Create a Database Instance. + + :param database_instance: :class:`DatabaseInstance` + A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + + :returns: :class:`DatabaseInstance` + """ + body = database_instance.as_dict() + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/database/instances', body=body + + , headers=headers + ) + return DatabaseInstance.from_dict(res) + + + + + + def create_database_table(self + , table: DatabaseTable + ) -> DatabaseTable: + """Create a Database Table. 
+
+        :param table: :class:`DatabaseTable`
+          Next field marker: 13
+
+        :returns: :class:`DatabaseTable`
+        """
+        body = table.as_dict()
+        query = {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
+
+        res = self._api.do('POST', '/api/2.0/database/tables', body=body, headers=headers)
+        return DatabaseTable.from_dict(res)
+
+    def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable:
+        """Create a Synced Database Table.
+
+        :param synced_table: :class:`SyncedDatabaseTable`
+          Next field marker: 12
+
+        :returns: :class:`SyncedDatabaseTable`
+        """
+        body = synced_table.as_dict()
+        query = {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
+
+        res = self._api.do('POST', '/api/2.0/database/synced_tables', body=body, headers=headers)
+        return SyncedDatabaseTable.from_dict(res)
+
+    def delete_database_catalog(self, name: str):
+        """Delete a Database Catalog.
+
+        :param name: str
+
+        """
+
+        headers = {'Accept': 'application/json'}
+
+        self._api.do('DELETE', f'/api/2.0/database/catalogs/{name}', headers=headers)
+
+    def delete_database_instance(self, name: str, *, force: Optional[bool] = None, purge: Optional[bool] = None):
+        """Delete a Database Instance.
+
+        :param name: str
+          Name of the instance to delete.
+        :param force: bool (optional)
+          By default, an instance cannot be deleted if it has descendant instances created via PITR. If this
+          flag is specified as true, all descendant instances will be deleted as well.
+        :param purge: bool (optional)
+          If false, the database instance is soft deleted. Soft deleted instances behave as if they are
+          deleted, and cannot be used for CRUD operations nor connected to. However, they can be undeleted
+          by calling the undelete API for a limited time. If true, the database instance is hard deleted and
+          cannot be undeleted.
+
+        """
+
+        query = {}
+        if force is not None: query['force'] = force
+        if purge is not None: query['purge'] = purge
+        headers = {'Accept': 'application/json'}
+
+        self._api.do('DELETE', f'/api/2.0/database/instances/{name}', query=query, headers=headers)
+
+    def delete_database_table(self, name: str):
+        """Delete a Database Table.
+
+        :param name: str
+
+        """
+
+        headers = {'Accept': 'application/json'}
+
+        self._api.do('DELETE', f'/api/2.0/database/tables/{name}', headers=headers)
+
+    def delete_synced_database_table(self, name: str):
+        """Delete a Synced Database Table.
+
+        :param name: str
+
+        """
+
+        headers = {'Accept': 'application/json'}
+
+        self._api.do('DELETE', f'/api/2.0/database/synced_tables/{name}', headers=headers)
+
+    def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance:
+        """Find a Database Instance by uid.
+
+        :param uid: str (optional)
+          UID of the instance to get.
+
+        :returns: :class:`DatabaseInstance`
+        """
+
+        query = {}
+        if uid is not None: query['uid'] = uid
+        headers = {'Accept': 'application/json'}
+
+        res = self._api.do('GET', '/api/2.0/database/instances:findByUid', query=query, headers=headers)
+        return DatabaseInstance.from_dict(res)
+
+    def generate_database_credential(self, *, instance_names: Optional[List[str]] = None, request_id: Optional[str] = None) -> DatabaseCredential:
+        """Generates a credential that can be used to access database instances.
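+
+        Example (illustrative; the returned token is typically used as the
+        Postgres password when connecting to an instance's ``read_write_dns``
+        endpoint, though exact usage may vary)::
+
+            cred = w.database.generate_database_credential(
+                instance_names=["my-instance"], request_id="req-0001")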
+ + :param instance_names: List[str] (optional) + Instances to which the token will be scoped. + :param request_id: str (optional) + + :returns: :class:`DatabaseCredential` + """ + body = {} + if instance_names is not None: body['instance_names'] = [v for v in instance_names] + if request_id is not None: body['request_id'] = request_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/database/credentials', body=body + + , headers=headers + ) + return DatabaseCredential.from_dict(res) + + + + + + def get_database_catalog(self + , name: str + ) -> DatabaseCatalog: + """Get a Database Catalog. + + :param name: str + + :returns: :class:`DatabaseCatalog` + """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/database/catalogs/{name}' + + , headers=headers + ) + return DatabaseCatalog.from_dict(res) + + + + + + def get_database_instance(self + , name: str + ) -> DatabaseInstance: + """Get a Database Instance. + + :param name: str + Name of the instance to get. + + :returns: :class:`DatabaseInstance` + """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/database/instances/{name}' + + , headers=headers + ) + return DatabaseInstance.from_dict(res) + + + + + + def get_database_table(self + , name: str + ) -> DatabaseTable: + """Get a Database Table. + + :param name: str + + :returns: :class:`DatabaseTable` + """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/database/tables/{name}' + + , headers=headers + ) + return DatabaseTable.from_dict(res) + + + + + + def get_synced_database_table(self + , name: str + ) -> SyncedDatabaseTable: + """Get a Synced Database Table. + + :param name: str + + :returns: :class:`SyncedDatabaseTable` + """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/database/synced_tables/{name}' + + , headers=headers + ) + return SyncedDatabaseTable.from_dict(res) + + + + + + def list_database_instances(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[DatabaseInstance]: + """List Database Instances. + + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of Database Instances. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseInstance` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + + while True: + json = self._api.do('GET','/api/2.0/database/instances', query=query + + , headers=headers + ) + if 'database_instances' in json: + for v in json['database_instances']: + yield DatabaseInstance.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update_database_instance(self + , name: str, database_instance: DatabaseInstance, update_mask: str + ) -> DatabaseInstance: + """Update a Database Instance. + + :param name: str + The name of the instance. This is the unique identifier for the instance. + :param database_instance: :class:`DatabaseInstance` + A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + :param update_mask: str + The list of fields to update.
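+ For example (illustrative, not from the spec): pass ``update_mask="*"`` to replace all mutable
+ fields, or a comma-separated list of field paths to update only the named fields.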
+ + :returns: :class:`DatabaseInstance` + """ + body = database_instance.as_dict() + query = {} + if update_mask is not None: query['update_mask'] = update_mask + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/database/instances/{name}', query=query, body=body + + , headers=headers + ) + return DatabaseInstance.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index 52496e84b..51edd6751 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -1,49 +1,54 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging from dataclasses import dataclass -from typing import Any, BinaryIO, Dict, Iterator, List, Optional +from datetime import timedelta +from enum import Enum +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _escape_multi_segment_path_parameter, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AddBlock: handle: int """The handle on an open stream.""" - + data: str """The base64-encoded data to append to the stream. This has a limit of 1 MB.""" - + def as_dict(self) -> dict: """Serializes the AddBlock into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data is not None: - body["data"] = self.data - if self.handle is not None: - body["handle"] = self.handle + if self.data is not None: body['data'] = self.data + if self.handle is not None: body['handle'] = self.handle return body def as_shallow_dict(self) -> dict: """Serializes the AddBlock into a shallow dictionary of its immediate attributes.""" body = {} - if self.data is not None: - body["data"] = self.data - if self.handle is not None: - body["handle"] = self.handle + if self.data is not None: body['data'] = self.data + if self.handle is not None: body['handle'] = self.handle return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AddBlock: """Deserializes the AddBlock from a dictionary.""" - return cls(data=d.get("data", None), handle=d.get("handle", None)) + return cls(data=d.get('data', None), handle=d.get('handle', None)) + + @dataclass @@ -62,31 +67,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AddBlockResponse: """Deserializes the AddBlockResponse from a dictionary.""" return cls() + + @dataclass class Close: handle: int """The handle on an open stream.""" - + def as_dict(self) -> dict: """Serializes the Close into a dictionary suitable for use as a JSON request body.""" body = {} - if self.handle is not None: - body["handle"] = self.handle + if self.handle is not None: body['handle'] = self.handle return body def as_shallow_dict(self) -> dict: """Serializes the Close into a shallow dictionary of its immediate attributes.""" body = {} - if self.handle is not None: - body["handle"] = self.handle + if self.handle is not None: body['handle'] = self.handle return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> Close: """Deserializes the Close from a dictionary.""" - return cls(handle=d.get("handle", None)) + return cls(handle=d.get('handle', None)) + + @dataclass @@ -105,38 +112,41 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CloseResponse: """Deserializes the CloseResponse from a dictionary.""" return cls() + + @dataclass class Create: path: str """The path of the new file. The path should be the absolute DBFS path.""" - + overwrite: Optional[bool] = None """The flag that specifies whether to overwrite existing file/files.""" - + def as_dict(self) -> dict: """Serializes the Create into a dictionary suitable for use as a JSON request body.""" body = {} - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the Create into a shallow dictionary of its immediate attributes.""" body = {} - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Create: """Deserializes the Create from a dictionary.""" - return cls(overwrite=d.get("overwrite", None), path=d.get("path", None)) + return cls(overwrite=d.get('overwrite', None), path=d.get('path', None)) + + + + + @dataclass @@ -155,6 +165,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CreateDirectoryResponse: """Deserializes the CreateDirectoryResponse from a dictionary.""" return cls() + + @dataclass @@ -162,58 +174,59 @@ class CreateResponse: handle: Optional[int] = None """Handle which should subsequently be passed into the AddBlock and Close calls when writing to a file through a stream.""" - + def as_dict(self) -> dict: """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.handle is not None: - body["handle"] = self.handle + if self.handle is not None: body['handle'] = self.handle return body def as_shallow_dict(self) -> dict: """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.handle is not None: - body["handle"] = self.handle + if self.handle is not None: body['handle'] = self.handle return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" - return cls(handle=d.get("handle", None)) + return cls(handle=d.get('handle', None)) + + @dataclass class Delete: path: str """The path of the file or directory to delete. The path should be the absolute DBFS path.""" - + recursive: Optional[bool] = None """Whether or not to recursively delete the directory's contents. 
Deleting empty directories can be done without providing the recursive flag.""" - + def as_dict(self) -> dict: """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive + if self.path is not None: body['path'] = self.path + if self.recursive is not None: body['recursive'] = self.recursive return body def as_shallow_dict(self) -> dict: """Serializes the Delete into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive + if self.path is not None: body['path'] = self.path + if self.recursive is not None: body['recursive'] = self.recursive return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Delete: """Deserializes the Delete from a dictionary.""" - return cls(path=d.get("path", None), recursive=d.get("recursive", None)) + return cls(path=d.get('path', None), recursive=d.get('recursive', None)) + + + + + @dataclass @@ -232,6 +245,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteDirectoryResponse: """Deserializes the DeleteDirectoryResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -250,165 +268,137 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + @dataclass class DirectoryEntry: file_size: Optional[int] = None """The length of the file in bytes. This field is omitted for directories.""" - + is_directory: Optional[bool] = None """True if the path is a directory.""" - + last_modified: Optional[int] = None """Last modification time of given file in milliseconds since unix epoch.""" - + name: Optional[str] = None """The name of the file or directory. 
This is the last component of the path.""" - + path: Optional[str] = None """The absolute path of the file or directory.""" - + def as_dict(self) -> dict: """Serializes the DirectoryEntry into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_size is not None: - body["file_size"] = self.file_size - if self.is_directory is not None: - body["is_directory"] = self.is_directory - if self.last_modified is not None: - body["last_modified"] = self.last_modified - if self.name is not None: - body["name"] = self.name - if self.path is not None: - body["path"] = self.path + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_directory is not None: body['is_directory'] = self.is_directory + if self.last_modified is not None: body['last_modified'] = self.last_modified + if self.name is not None: body['name'] = self.name + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the DirectoryEntry into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_size is not None: - body["file_size"] = self.file_size - if self.is_directory is not None: - body["is_directory"] = self.is_directory - if self.last_modified is not None: - body["last_modified"] = self.last_modified - if self.name is not None: - body["name"] = self.name - if self.path is not None: - body["path"] = self.path + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_directory is not None: body['is_directory'] = self.is_directory + if self.last_modified is not None: body['last_modified'] = self.last_modified + if self.name is not None: body['name'] = self.name + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DirectoryEntry: """Deserializes the DirectoryEntry from a dictionary.""" - return cls( - file_size=d.get("file_size", None), - is_directory=d.get("is_directory", None), - last_modified=d.get("last_modified", None), - name=d.get("name", None), - path=d.get("path", None), - ) + return cls(file_size=d.get('file_size', None), is_directory=d.get('is_directory', None), last_modified=d.get('last_modified', None), name=d.get('name', None), path=d.get('path', None)) + + + + + @dataclass class DownloadResponse: content_length: Optional[int] = None """The length of the HTTP response body in bytes.""" - + content_type: Optional[str] = None - + contents: Optional[BinaryIO] = None - + last_modified: Optional[str] = None """The last modified time of the file in HTTP-date (RFC 7231) format.""" - + def as_dict(self) -> dict: """Serializes the DownloadResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content_length is not None: - body["content-length"] = self.content_length - if self.content_type is not None: - body["content-type"] = self.content_type - if self.contents: - body["contents"] = self.contents - if self.last_modified is not None: - body["last-modified"] = self.last_modified + if self.content_length is not None: body['content-length'] = self.content_length + if self.content_type is not None: body['content-type'] = self.content_type + if self.contents: body['contents'] = self.contents + if self.last_modified is not None: body['last-modified'] = self.last_modified return body def as_shallow_dict(self) -> dict: """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.content_length is not None: - body["content-length"] = 
self.content_length - if self.content_type is not None: - body["content-type"] = self.content_type - if self.contents: - body["contents"] = self.contents - if self.last_modified is not None: - body["last-modified"] = self.last_modified + if self.content_length is not None: body['content-length'] = self.content_length + if self.content_type is not None: body['content-type'] = self.content_type + if self.contents: body['contents'] = self.contents + if self.last_modified is not None: body['last-modified'] = self.last_modified return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DownloadResponse: """Deserializes the DownloadResponse from a dictionary.""" - return cls( - content_length=int(d.get("content-length", None)), - content_type=d.get("content-type", None), - contents=d.get("contents", None), - last_modified=d.get("last-modified", None), - ) + return cls(content_length= int(d.get('content-length', None)), content_type=d.get('content-type', None), contents=d.get('contents', None), last_modified=d.get('last-modified', None)) + + @dataclass class FileInfo: file_size: Optional[int] = None """The length of the file in bytes. This field is omitted for directories.""" - + is_dir: Optional[bool] = None """True if the path is a directory.""" - + modification_time: Optional[int] = None """Last modification time of given file in milliseconds since epoch.""" - + path: Optional[str] = None """The absolute path of the file or directory.""" - + def as_dict(self) -> dict: """Serializes the FileInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_size is not None: - body["file_size"] = self.file_size - if self.is_dir is not None: - body["is_dir"] = self.is_dir - if self.modification_time is not None: - body["modification_time"] = self.modification_time - if self.path is not None: - body["path"] = self.path + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_dir is not None: body['is_dir'] = self.is_dir + if self.modification_time is not None: body['modification_time'] = self.modification_time + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the FileInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_size is not None: - body["file_size"] = self.file_size - if self.is_dir is not None: - body["is_dir"] = self.is_dir - if self.modification_time is not None: - body["modification_time"] = self.modification_time - if self.path is not None: - body["path"] = self.path + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_dir is not None: body['is_dir'] = self.is_dir + if self.modification_time is not None: body['modification_time'] = self.modification_time + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileInfo: """Deserializes the FileInfo from a dictionary.""" - return cls( - file_size=d.get("file_size", None), - is_dir=d.get("is_dir", None), - modification_time=d.get("modification_time", None), - path=d.get("path", None), - ) + return cls(file_size=d.get('file_size', None), is_dir=d.get('is_dir', None), modification_time=d.get('modification_time', None), path=d.get('path', None)) + + + + + @dataclass @@ -427,132 +417,134 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> GetDirectoryMetadataResponse: """Deserializes the GetDirectoryMetadataResponse from a dictionary.""" return cls() + + + + + @dataclass 
class GetMetadataResponse: content_length: Optional[int] = None """The length of the HTTP response body in bytes.""" - + content_type: Optional[str] = None - + last_modified: Optional[str] = None """The last modified time of the file in HTTP-date (RFC 7231) format.""" - + def as_dict(self) -> dict: """Serializes the GetMetadataResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content_length is not None: - body["content-length"] = self.content_length - if self.content_type is not None: - body["content-type"] = self.content_type - if self.last_modified is not None: - body["last-modified"] = self.last_modified + if self.content_length is not None: body['content-length'] = self.content_length + if self.content_type is not None: body['content-type'] = self.content_type + if self.last_modified is not None: body['last-modified'] = self.last_modified return body def as_shallow_dict(self) -> dict: """Serializes the GetMetadataResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.content_length is not None: - body["content-length"] = self.content_length - if self.content_type is not None: - body["content-type"] = self.content_type - if self.last_modified is not None: - body["last-modified"] = self.last_modified + if self.content_length is not None: body['content-length'] = self.content_length + if self.content_type is not None: body['content-type'] = self.content_type + if self.last_modified is not None: body['last-modified'] = self.last_modified return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetMetadataResponse: """Deserializes the GetMetadataResponse from a dictionary.""" - return cls( - content_length=int(d.get("content-length", None)), - content_type=d.get("content-type", None), - last_modified=d.get("last-modified", None), - ) + return cls(content_length= int(d.get('content-length', None)), content_type=d.get('content-type', None), last_modified=d.get('last-modified', None)) + + + + + + + + + + + @dataclass class ListDirectoryResponse: contents: Optional[List[DirectoryEntry]] = None """Array of DirectoryEntry.""" - + next_page_token: Optional[str] = None """A token, which can be sent as `page_token` to retrieve the next page.""" - + def as_dict(self) -> dict: """Serializes the ListDirectoryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: - body["contents"] = [v.as_dict() for v in self.contents] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.contents: body['contents'] = [v.as_dict() for v in self.contents] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListDirectoryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: - body["contents"] = self.contents - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.contents: body['contents'] = self.contents + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListDirectoryResponse: """Deserializes the ListDirectoryResponse from a dictionary.""" - return cls( - contents=_repeated_dict(d, "contents", DirectoryEntry), next_page_token=d.get("next_page_token", None) - ) + return cls(contents=_repeated_dict(d, 'contents', DirectoryEntry), next_page_token=d.get('next_page_token', 
None)) + + @dataclass class ListStatusResponse: files: Optional[List[FileInfo]] = None """A list of FileInfo's that describe contents of directory or file. See example above.""" - + def as_dict(self) -> dict: """Serializes the ListStatusResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.files: - body["files"] = [v.as_dict() for v in self.files] + if self.files: body['files'] = [v.as_dict() for v in self.files] return body def as_shallow_dict(self) -> dict: """Serializes the ListStatusResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.files: - body["files"] = self.files + if self.files: body['files'] = self.files return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListStatusResponse: """Deserializes the ListStatusResponse from a dictionary.""" - return cls(files=_repeated_dict(d, "files", FileInfo)) + return cls(files=_repeated_dict(d, 'files', FileInfo)) + + @dataclass class MkDirs: path: str """The path of the new directory. The path should be the absolute DBFS path.""" - + def as_dict(self) -> dict: """Serializes the MkDirs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: - body["path"] = self.path + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the MkDirs into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: - body["path"] = self.path + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MkDirs: """Deserializes the MkDirs from a dictionary.""" - return cls(path=d.get("path", None)) + return cls(path=d.get('path', None)) + + @dataclass @@ -571,38 +563,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> MkDirsResponse: """Deserializes the MkDirsResponse from a dictionary.""" return cls() + + @dataclass class Move: source_path: str """The source path of the file or directory. The path should be the absolute DBFS path.""" - + destination_path: str """The destination path of the file or directory. 
The path should be the absolute DBFS path.""" - + def as_dict(self) -> dict: """Serializes the Move into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_path is not None: - body["destination_path"] = self.destination_path - if self.source_path is not None: - body["source_path"] = self.source_path + if self.destination_path is not None: body['destination_path'] = self.destination_path + if self.source_path is not None: body['source_path'] = self.source_path return body def as_shallow_dict(self) -> dict: """Serializes the Move into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_path is not None: - body["destination_path"] = self.destination_path - if self.source_path is not None: - body["source_path"] = self.source_path + if self.destination_path is not None: body['destination_path'] = self.destination_path + if self.source_path is not None: body['source_path'] = self.source_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Move: """Deserializes the Move from a dictionary.""" - return cls(destination_path=d.get("destination_path", None), source_path=d.get("source_path", None)) + return cls(destination_path=d.get('destination_path', None), source_path=d.get('source_path', None)) + + @dataclass @@ -621,45 +613,43 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> MoveResponse: """Deserializes the MoveResponse from a dictionary.""" return cls() + + @dataclass class Put: path: str """The path of the new file. The path should be the absolute DBFS path.""" - + contents: Optional[str] = None """This parameter might be absent, and instead a posted file will be used.""" - + overwrite: Optional[bool] = None """The flag that specifies whether to overwrite existing file/files.""" - + def as_dict(self) -> dict: """Serializes the Put into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents is not None: - body["contents"] = self.contents - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path + if self.contents is not None: body['contents'] = self.contents + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the Put into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents is not None: - body["contents"] = self.contents - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path + if self.contents is not None: body['contents'] = self.contents + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Put: """Deserializes the Put from a dictionary.""" - return cls(contents=d.get("contents", None), overwrite=d.get("overwrite", None), path=d.get("path", None)) + return cls(contents=d.get('contents', None), overwrite=d.get('overwrite', None), path=d.get('path', None)) + + @dataclass @@ -678,6 +668,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PutResponse: """Deserializes the PutResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -685,32 +680,33 @@ class ReadResponse: bytes_read: Optional[int] = None """The number of bytes read (could be less than ``length`` if we hit end of file). 
This refers to the number of bytes read in the unencoded version (response data is base64-encoded).""" - + data: Optional[str] = None """The base64-encoded contents of the file read.""" - + def as_dict(self) -> dict: """Serializes the ReadResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bytes_read is not None: - body["bytes_read"] = self.bytes_read - if self.data is not None: - body["data"] = self.data + if self.bytes_read is not None: body['bytes_read'] = self.bytes_read + if self.data is not None: body['data'] = self.data return body def as_shallow_dict(self) -> dict: """Serializes the ReadResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bytes_read is not None: - body["bytes_read"] = self.bytes_read - if self.data is not None: - body["data"] = self.data + if self.bytes_read is not None: body['bytes_read'] = self.bytes_read + if self.data is not None: body['data'] = self.data return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ReadResponse: """Deserializes the ReadResponse from a dictionary.""" - return cls(bytes_read=d.get("bytes_read", None), data=d.get("data", None)) + return cls(bytes_read=d.get('bytes_read', None), data=d.get('data', None)) + + + + + @dataclass @@ -729,283 +725,349 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UploadResponse: """Deserializes the UploadResponse from a dictionary.""" return cls() + + + + class DbfsAPI: """DBFS API makes it simple to interact with various data sources without having to include a user's credentials every time to read a file.""" - + def __init__(self, api_client): self._api = api_client + - def add_block(self, handle: int, data: str): - """Append data block. + + + + + + + def add_block(self + , handle: int, data: str + ): + """Append data block. + Appends a block of data to the stream specified by the input handle. If the handle does not exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. - + If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``. - + :param handle: int The handle on an open stream. :param data: str The base64-encoded data to append to the stream. This has a limit of 1 MB. - - + + """ body = {} - if data is not None: - body["data"] = data - if handle is not None: - body["handle"] = handle - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/dbfs/add-block", body=body, headers=headers) - - def close(self, handle: int): + if data is not None: body['data'] = data + if handle is not None: body['handle'] = handle + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/dbfs/add-block', body=body + + , headers=headers + ) + + + + + + + def close(self + , handle: int + ): """Close the stream. - + Closes the stream specified by the input handle. If the handle does not exist, this call throws an exception with ``RESOURCE_DOES_NOT_EXIST``. - + :param handle: int The handle on an open stream.
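+ 
+         Example of the full streaming-upload workflow (illustrative; assumes a configured
+         :class:`WorkspaceClient` ``w``)::
+ 
+             import base64
+ 
+             handle = w.dbfs.create('/FileStore/example.txt', overwrite=True).handle
+             w.dbfs.add_block(handle, base64.b64encode(b'hello').decode())
+             w.dbfs.close(handle)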
- - + + """ body = {} - if handle is not None: - body["handle"] = handle - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/dbfs/close", body=body, headers=headers) - - def create(self, path: str, *, overwrite: Optional[bool] = None) -> CreateResponse: + if handle is not None: body['handle'] = handle + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/dbfs/close', body=body + + , headers=headers + ) + + + + + + + def create(self + , path: str + , * + , overwrite: Optional[bool] = None) -> CreateResponse: """Open a stream. - + Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``. - + A typical workflow for file upload would be: - + 1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with the handle you have. 3. Issue a ``close`` call with the handle you have. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing file/files. - + :returns: :class:`CreateResponse` """ body = {} - if overwrite is not None: - body["overwrite"] = overwrite - if path is not None: - body["path"] = path - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/dbfs/create", body=body, headers=headers) + if overwrite is not None: body['overwrite'] = overwrite + if path is not None: body['path'] = path + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/dbfs/create', body=body + + , headers=headers + ) return CreateResponse.from_dict(res) - def delete(self, path: str, *, recursive: Optional[bool] = None): - """Delete a file/directory. + + + + def delete(self + , path: str + , * + , recursive: Optional[bool] = None): + """Delete a file/directory. + Delete the file or directory (optionally recursively delete all files in the directory). This call throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive` is set to `false` or on other similar errors. - + When you delete a large number of files, the delete operation is done in increments. The call returns a response after approximately 45 seconds with an error message (503 Service Unavailable) asking you to re-invoke the delete operation until the directory structure is fully deleted. - + For operations that delete more than 10K files, we discourage using the DBFS REST API, but advise you to perform such operations in the context of a cluster, using the [File system utility (dbutils.fs)](/dev-tools/databricks-utils.html#dbutils-fs). `dbutils.fs` covers the functional scope of the DBFS REST API, but from notebooks. Running such operations using notebooks provides better control and manageability, such as selective deletes, and the possibility to automate periodic delete jobs. - + :param path: str The path of the file or directory to delete. The path should be the absolute DBFS path. :param recursive: bool (optional) Whether or not to recursively delete the directory's contents. Deleting empty directories can be done without providing the recursive flag. 
- - + + """ body = {} - if path is not None: - body["path"] = path - if recursive is not None: - body["recursive"] = recursive - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/dbfs/delete", body=body, headers=headers) - - def get_status(self, path: str) -> FileInfo: + if path is not None: body['path'] = path + if recursive is not None: body['recursive'] = recursive + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/dbfs/delete', body=body + + , headers=headers + ) + + + + + + + def get_status(self + , path: str + ) -> FileInfo: """Get the information of a file or directory. - + Gets the file information for a file or directory. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The path of the file or directory. The path should be the absolute DBFS path. - + :returns: :class:`FileInfo` """ - + query = {} - if path is not None: - query["path"] = path - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/dbfs/get-status", query=query, headers=headers) + if path is not None: query['path'] = path + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/dbfs/get-status', query=query + + , headers=headers + ) return FileInfo.from_dict(res) - def list(self, path: str) -> Iterator[FileInfo]: - """List directory contents or file details. + + + + def list(self + , path: str + ) -> Iterator[FileInfo]: + """List directory contents or file details. + List the contents of a directory, or details of the file. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. - + When calling list on a large directory, the list operation will time out after approximately 60 seconds. We strongly recommend using list only on directories containing less than 10K files and discourage using the DBFS REST API for operations that list more than 10K files. Instead, we recommend that you perform such operations in the context of a cluster, using the [File system utility (dbutils.fs)](/dev-tools/databricks-utils.html#dbutils-fs), which provides the same functionality without timing out. - + :param path: str The path of the file or directory. The path should be the absolute DBFS path. - + :returns: Iterator over :class:`FileInfo` """ - + query = {} - if path is not None: - query["path"] = path - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/dbfs/list", query=query, headers=headers) + if path is not None: query['path'] = path + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/dbfs/list', query=query + + , headers=headers + ) parsed = ListStatusResponse.from_dict(json).files return parsed if parsed is not None else [] + - def mkdirs(self, path: str): - """Create a directory. + + + + def mkdirs(self + , path: str + ): + """Create a directory. + Creates the given directory and necessary parent directories if they do not exist. If a file (not a directory) exists at any prefix of the input path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in creating some of the necessary parent directories. - + :param path: str The path of the new directory. The path should be the absolute DBFS path. 
- - + + """ body = {} - if path is not None: - body["path"] = path - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/dbfs/mkdirs", body=body, headers=headers) - - def move(self, source_path: str, destination_path: str): + if path is not None: body['path'] = path + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/dbfs/mkdirs', body=body + + , headers=headers + ) + + + + + + + def move(self + , source_path: str, destination_path: str + ): """Move a file. - + Moves a file from one location to another location within DBFS. If the source file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source path is a directory, this call always recursively moves all files. - + :param source_path: str The source path of the file or directory. The path should be the absolute DBFS path. :param destination_path: str The destination path of the file or directory. The path should be the absolute DBFS path. - - + + """ body = {} - if destination_path is not None: - body["destination_path"] = destination_path - if source_path is not None: - body["source_path"] = source_path - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/dbfs/move", body=body, headers=headers) - - def put(self, path: str, *, contents: Optional[str] = None, overwrite: Optional[bool] = None): + if destination_path is not None: body['destination_path'] = destination_path + if source_path is not None: body['source_path'] = source_path + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/dbfs/move', body=body + + , headers=headers + ) + + + + + + + def put(self + , path: str + , * + , contents: Optional[str] = None, overwrite: Optional[bool] = None): """Upload a file. - + Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but can also be used as a convenient single call for data upload. - + Alternatively you can pass contents as base64 string. - + The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded. - + If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create, :method:dbfs/addBlock, :method:dbfs/close. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param contents: str (optional) This parameter might be absent, and instead a posted file will be used. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing file/files. 
- - + + """ body = {} - if contents is not None: - body["contents"] = contents - if overwrite is not None: - body["overwrite"] = overwrite - if path is not None: - body["path"] = path - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/dbfs/put", body=body, headers=headers) - - def read(self, path: str, *, length: Optional[int] = None, offset: Optional[int] = None) -> ReadResponse: + if contents is not None: body['contents'] = contents + if overwrite is not None: body['overwrite'] = overwrite + if path is not None: body['path'] = path + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/dbfs/put', body=body + + , headers=headers + ) + + + + + + + def read(self + , path: str + , * + , length: Optional[int] = None, offset: Optional[int] = None) -> ReadResponse: """Get the contents of a file. - + Returns the contents of a file. If the file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`. - + If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of file. - + :param path: str The path of the file to read. The path should be the absolute DBFS path. :param length: int (optional) @@ -1013,196 +1075,237 @@ def read(self, path: str, *, length: Optional[int] = None, offset: Optional[int] of 0.5 MB. :param offset: int (optional) The offset to read from in bytes. - + :returns: :class:`ReadResponse` """ - + query = {} - if length is not None: - query["length"] = length - if offset is not None: - query["offset"] = offset - if path is not None: - query["path"] = path - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/dbfs/read", query=query, headers=headers) + if length is not None: query['length'] = length + if offset is not None: query['offset'] = offset + if path is not None: query['path'] = path + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/dbfs/read', query=query + + , headers=headers + ) return ReadResponse.from_dict(res) - + + class FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI. The API makes working with file content as raw bytes easier and more efficient. - + The API supports [Unity Catalog volumes], where files and directories to operate on are specified using their volume URI path, which follows the format /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>. - + The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI path. The path is always absolute. - + Some Files API client features are currently experimental. To enable them, set `enable_experimental_files_api_client = True` in your configuration profile or use the environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - + + Use of Files API may incur Databricks data transfer charges. 
+ [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html""" - + def __init__(self, api_client): self._api = api_client + - def create_directory(self, directory_path: str): - """Create a directory. + + + + + + + def create_directory(self + , directory_path: str + ): + """Create a directory. + Creates an empty directory. If necessary, also creates any parent directories of the new, empty directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success response; this method is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + + """ - + headers = {} - - self._api.do( - "PUT", f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", headers=headers - ) - - def delete(self, file_path: str): + + self._api.do('PUT',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}' + + , headers=headers + ) + + + + + + + def delete(self + , file_path: str + ): """Delete a file. - + Deletes a file. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. - - + + """ - + headers = {} - - self._api.do("DELETE", f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", headers=headers) - - def delete_directory(self, directory_path: str): + + self._api.do('DELETE',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}' + + , headers=headers + ) + + + + + + + def delete_directory(self + , directory_path: str + ): """Delete a directory. - + Deletes an empty directory. - + To delete a non-empty directory, first delete all of its contents. This can be done by listing the directory contents and deleting each file and subdirectory recursively. - + :param directory_path: str The absolute path of a directory. - - + + """ - + headers = {} - - self._api.do( - "DELETE", f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", headers=headers - ) - - def download(self, file_path: str) -> DownloadResponse: + + self._api.do('DELETE',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}' + + , headers=headers + ) + + + + + + + def download(self + , file_path: str + ) -> DownloadResponse: """Download a file. - + Downloads a file. The file contents are the response body. This is a standard HTTP file download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. - + :param file_path: str The absolute path of the file. - + :returns: :class:`DownloadResponse` """ - - headers = { - "Accept": "application/octet-stream", - } - response_headers = [ - "content-length", - "content-type", - "last-modified", - ] - res = self._api.do( - "GET", - f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", - headers=headers, - response_headers=response_headers, - raw=True, - ) + + headers = {'Accept': 'application/octet-stream',} + response_headers = ['content-length','content-type','last-modified',] + res = self._api.do('GET',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}' + + , headers=headers + , response_headers=response_headers, raw=True) return DownloadResponse.from_dict(res) - def get_directory_metadata(self, directory_path: str): - """Get directory metadata. + + + + def get_directory_metadata(self + , directory_path: str + ): + """Get directory metadata. + Get the metadata of a directory. The response HTTP headers contain the metadata. 
There is no response body. - + This method is useful to check if a directory exists and the caller has access to it. - + If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory if it does not exist, and is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + + """ - + headers = {} - - self._api.do( - "HEAD", f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", headers=headers - ) - - def get_metadata(self, file_path: str) -> GetMetadataResponse: + + self._api.do('HEAD',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}' + + , headers=headers + ) + + + + + + + def get_metadata(self + , file_path: str + ) -> GetMetadataResponse: """Get file metadata. - + Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body. - + :param file_path: str The absolute path of the file. - + :returns: :class:`GetMetadataResponse` """ - + headers = {} - response_headers = [ - "content-length", - "content-type", - "last-modified", - ] - res = self._api.do( - "HEAD", - f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", - headers=headers, - response_headers=response_headers, - ) + response_headers = ['content-length','content-type','last-modified',] + res = self._api.do('HEAD',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}' + + , headers=headers + , response_headers=response_headers) return GetMetadataResponse.from_dict(res) - def list_directory_contents( - self, directory_path: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[DirectoryEntry]: - """List directory contents. + + + + def list_directory_contents(self + , directory_path: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[DirectoryEntry]: + """List directory contents. + Returns the contents of a directory. If there is no directory at the specified path, the API returns an HTTP 404 error. - + :param directory_path: str The absolute path of a directory. :param page_size: int (optional) The maximum number of directory entries to return. The response may contain fewer entries. If the response contains a `next_page_token`, there may be more entries, even if fewer than `page_size` entries are in the response. - + We recommend not setting this value unless you are intentionally listing less than the complete directory contents. - + If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values above 1000 will be coerced to 1000. :param page_token: str (optional) @@ -1212,62 +1315,64 @@ def list_directory_contents( request. To list all of the entries in a directory, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete.
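+ 
+         Example (illustrative; the returned iterator requests further pages as needed, so callers
+         normally do not need to pass ``page_token`` themselves)::
+ 
+             for entry in w.files.list_directory_contents('/Volumes/main/default/my_volume'):
+                 print(entry.path, entry.file_size)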
- + :returns: Iterator over :class:`DirectoryEntry` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", - query=query, - headers=headers, - ) - if "contents" in json: - for v in json["contents"]: - yield DirectoryEntry.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def upload(self, file_path: str, contents: BinaryIO, *, overwrite: Optional[bool] = None): + json = self._api.do('GET',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}', query=query + + , headers=headers + ) + if 'contents' in json: + for v in json['contents']: + yield DirectoryEntry.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def upload(self + , file_path: str, contents: BinaryIO + , * + , overwrite: Optional[bool] = None): """Upload a file. - + Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The contents of the resulting file will be exactly the bytes sent in the request body. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. :param contents: BinaryIO :param overwrite: bool (optional) If true or unspecified, an existing file will be overwritten. If false, an error will be returned if the path points to an existing file. - - + + """ - + query = {} - if overwrite is not None: - query["overwrite"] = overwrite - headers = { - "Content-Type": "application/octet-stream", - } - - self._api.do( - "PUT", - f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", - query=query, - headers=headers, - data=contents, - ) + if overwrite is not None: query['overwrite'] = overwrite + headers = {'Content-Type': 'application/octet-stream',} + + self._api.do('PUT',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}', query=query + + , headers=headers + , data=contents) + + + + \ No newline at end of file diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 0d8c72fe8..637b3c578 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -1,266 +1,243 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 0d8c72fe8..637b3c578 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -1,266 +1,243 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 from __future__ import annotations
-
-import logging
 from dataclasses import dataclass
+from datetime import timedelta
 from enum import Enum
-from typing import Any, Dict, Iterator, List, Optional
+from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
+import time
+import random
+import logging
+import requests
+import threading
 
-from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
+from ..errors import OperationTimeout, OperationFailed
+from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter
+from ..oauth import Token
 
-_LOG = logging.getLogger("databricks.sdk")
+_LOG = logging.getLogger('databricks.sdk')
 
-# all definitions in this file are in alphabetical order
+# all definitions in this file are in alphabetical order
 
 
 @dataclass
 class AccessControlRequest:
     group_name: Optional[str] = None
     """name of the group"""
-
+
     permission_level: Optional[PermissionLevel] = None
     """Permission level"""
-
+
     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the AccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level.value
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the AccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> AccessControlRequest:
         """Deserializes the AccessControlRequest from a dictionary."""
-        return cls(
-            group_name=d.get("group_name", None),
-            permission_level=_enum(d, "permission_level", PermissionLevel),
-            service_principal_name=d.get("service_principal_name", None),
-            user_name=d.get("user_name", None),
-        )
+        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
 
 
 @dataclass
 class AccessControlResponse:
     all_permissions: Optional[List[Permission]] = None
     """All permissions."""
-
+
     display_name: Optional[str] = None
     """Display name of the user or service principal."""
-
+
     group_name: Optional[str] = None
     """name of the group"""
-
+
     service_principal_name: Optional[str] = None
     """Name of the service principal."""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the AccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions:
-            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the AccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions:
-            body["all_permissions"] = self.all_permissions
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> AccessControlResponse:
         """Deserializes the AccessControlResponse from a dictionary."""
-        return cls(
-            all_permissions=_repeated_dict(d, "all_permissions", Permission),
-            display_name=d.get("display_name", None),
-            group_name=d.get("group_name", None),
-            service_principal_name=d.get("service_principal_name", None),
-            user_name=d.get("user_name", None),
-        )
+        return cls(all_permissions=_repeated_dict(d, 'all_permissions', Permission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
 
 
 @dataclass
 class Actor:
     """represents an identity trying to access a resource - user or a service principal group can be a
     principal of a permission set assignment but an actor is always a user or a service principal"""
-
+
     actor_id: Optional[int] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the Actor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.actor_id is not None:
-            body["actor_id"] = self.actor_id
+        if self.actor_id is not None: body['actor_id'] = self.actor_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Actor into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.actor_id is not None:
-            body["actor_id"] = self.actor_id
+        if self.actor_id is not None: body['actor_id'] = self.actor_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Actor:
         """Deserializes the Actor from a dictionary."""
-        return cls(actor_id=d.get("actor_id", None))
+        return cls(actor_id=d.get('actor_id', None))
+
+
+
+
 
 
 @dataclass
 class CheckPolicyResponse:
     consistency_token: ConsistencyToken
-
+
     is_permitted: Optional[bool] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the CheckPolicyResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.consistency_token:
-            body["consistency_token"] = self.consistency_token.as_dict()
-        if self.is_permitted is not None:
-            body["is_permitted"] = self.is_permitted
+        if self.consistency_token: body['consistency_token'] = self.consistency_token.as_dict()
+        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CheckPolicyResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.consistency_token:
-            body["consistency_token"] = self.consistency_token
-        if self.is_permitted is not None:
-            body["is_permitted"] = self.is_permitted
+        if self.consistency_token: body['consistency_token'] = self.consistency_token
+        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CheckPolicyResponse:
         """Deserializes the CheckPolicyResponse from a dictionary."""
-        return cls(
-            consistency_token=_from_dict(d, "consistency_token", ConsistencyToken),
-            is_permitted=d.get("is_permitted", None),
-        )
+        return cls(consistency_token=_from_dict(d, 'consistency_token', ConsistencyToken), is_permitted=d.get('is_permitted', None))
+
+
 
 
 @dataclass
 class ComplexValue:
     display: Optional[str] = None
-
+
     primary: Optional[bool] = None
-
+
     ref: Optional[str] = None
-
+
     type: Optional[str] = None
-
+
     value: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ComplexValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display is not None:
-            body["display"] = self.display
-        if self.primary is not None:
-            body["primary"] = self.primary
-        if self.ref is not None:
-            body["$ref"] = self.ref
-        if self.type is not None:
-            body["type"] = self.type
-        if self.value is not None:
-            body["value"] = self.value
+        if self.display is not None: body['display'] = self.display
+        if self.primary is not None: body['primary'] = self.primary
+        if self.ref is not None: body['$ref'] = self.ref
+        if self.type is not None: body['type'] = self.type
+        if self.value is not None: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ComplexValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display is not None:
-            body["display"] = self.display
-        if self.primary is not None:
-            body["primary"] = self.primary
-        if self.ref is not None:
-            body["$ref"] = self.ref
-        if self.type is not None:
-            body["type"] = self.type
-        if self.value is not None:
-            body["value"] = self.value
+        if self.display is not None: body['display'] = self.display
+        if self.primary is not None: body['primary'] = self.primary
+        if self.ref is not None: body['$ref'] = self.ref
+        if self.type is not None: body['type'] = self.type
+        if self.value is not None: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ComplexValue:
         """Deserializes the ComplexValue from a dictionary."""
-        return cls(
-            display=d.get("display", None),
-            primary=d.get("primary", None),
-            ref=d.get("$ref", None),
-            type=d.get("type", None),
-            value=d.get("value", None),
-        )
+        return cls(display=d.get('display', None), primary=d.get('primary', None), ref=d.get('$ref', None), type=d.get('type', None), value=d.get('value', None))
+
+
 
 
 @dataclass
 class ConsistencyToken:
     value: str
-
+
     def as_dict(self) -> dict:
         """Serializes the ConsistencyToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None:
-            body["value"] = self.value
+        if self.value is not None: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ConsistencyToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None:
-            body["value"] = self.value
+        if self.value is not None: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ConsistencyToken:
         """Deserializes the ConsistencyToken from a dictionary."""
-        return cls(value=d.get("value", None))
+        return cls(value=d.get('value', None))
+
+
+
+
+
+
+
+
+
+
+
+
+
+
 
 
 @dataclass
@@ -279,6 +256,17 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
         return cls()
+
+
+
+
+
+
+
+
+
+
 
 
 @dataclass
@@ -297,898 +285,803 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteWorkspacePermissionAssignmentResponse:
         """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary."""
         return cls()
+
+
+
+
+
+
+
+
+
+
+
+
+
 
 
 @dataclass
 class GetAssignableRolesForResourceResponse:
     roles: Optional[List[Role]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetAssignableRolesForResourceResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.roles:
-            body["roles"] = [v.as_dict() for v in self.roles]
+        if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetAssignableRolesForResourceResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.roles:
-            body["roles"] = self.roles
+        if self.roles: body['roles'] = self.roles
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetAssignableRolesForResourceResponse:
         """Deserializes the GetAssignableRolesForResourceResponse from a dictionary."""
-        return cls(roles=_repeated_dict(d, "roles", Role))
+        return cls(roles=_repeated_dict(d, 'roles', Role))
+
+
+
+
 
 
 @dataclass
 class GetPasswordPermissionLevelsResponse:
     permission_levels: Optional[List[PasswordPermissionsDescription]] = None
     """Specific permission levels"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetPasswordPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels:
-            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetPasswordPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels:
-            body["permission_levels"] = self.permission_levels
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetPasswordPermissionLevelsResponse:
         """Deserializes the GetPasswordPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, "permission_levels", PasswordPermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, 'permission_levels', PasswordPermissionsDescription))
+
+
+
+
 
 
 @dataclass
 class GetPermissionLevelsResponse:
     permission_levels: Optional[List[PermissionsDescription]] = None
     """Specific permission levels"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels:
-            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels:
-            body["permission_levels"] = self.permission_levels
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetPermissionLevelsResponse:
         """Deserializes the GetPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, "permission_levels", PermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, 'permission_levels', PermissionsDescription))
+
+
+
+
+
+
+
+
+
+
 
 
 class GetSortOrder(Enum):
+
+
+    ASCENDING = 'ascending'
+    DESCENDING = 'descending'
+
+
+
+
-    ASCENDING = "ascending"
-    DESCENDING = "descending"
 
 
 @dataclass
 class GrantRule:
     role: str
     """Role that is assigned to the list of principals."""
-
+
     principals: Optional[List[str]] = None
     """Principals this grant rule applies to. A principal can be a user (for end users), a service
     principal (for applications and compute workloads), or an account group. Each principal has its
     own identifier format: * users/ * groups/ * servicePrincipals/"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GrantRule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.principals:
-            body["principals"] = [v for v in self.principals]
-        if self.role is not None:
-            body["role"] = self.role
+        if self.principals: body['principals'] = [v for v in self.principals]
+        if self.role is not None: body['role'] = self.role
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GrantRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.principals:
-            body["principals"] = self.principals
-        if self.role is not None:
-            body["role"] = self.role
+        if self.principals: body['principals'] = self.principals
+        if self.role is not None: body['role'] = self.role
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GrantRule:
         """Deserializes the GrantRule from a dictionary."""
-        return cls(principals=d.get("principals", None), role=d.get("role", None))
+        return cls(principals=d.get('principals', None), role=d.get('role', None))
+
+
 
 
 @dataclass
 class Group:
     display_name: Optional[str] = None
     """String that represents a human-readable group name"""
-
+
     entitlements: Optional[List[ComplexValue]] = None
     """Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
     values.
 
     [assigning entitlements]:
     https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements"""
-
+
     external_id: Optional[str] = None
-
+
     groups: Optional[List[ComplexValue]] = None
-
+
     id: Optional[str] = None
     """Databricks group ID"""
-
+
     members: Optional[List[ComplexValue]] = None
-
+
     meta: Optional[ResourceMeta] = None
     """Container for the group identifier. Workspace local versus account."""
-
+
     roles: Optional[List[ComplexValue]] = None
     """Corresponds to AWS instance profile/arn role."""
-
+
     schemas: Optional[List[GroupSchema]] = None
     """The schema of the group."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Group into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None:
-            body["displayName"] = self.display_name
-        if self.entitlements:
-            body["entitlements"] = [v.as_dict() for v in self.entitlements]
-        if self.external_id is not None:
-            body["externalId"] = self.external_id
-        if self.groups:
-            body["groups"] = [v.as_dict() for v in self.groups]
-        if self.id is not None:
-            body["id"] = self.id
-        if self.members:
-            body["members"] = [v.as_dict() for v in self.members]
-        if self.meta:
-            body["meta"] = self.meta.as_dict()
-        if self.roles:
-            body["roles"] = [v.as_dict() for v in self.roles]
-        if self.schemas:
-            body["schemas"] = [v.value for v in self.schemas]
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements]
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = [v.as_dict() for v in self.groups]
+        if self.id is not None: body['id'] = self.id
+        if self.members: body['members'] = [v.as_dict() for v in self.members]
+        if self.meta: body['meta'] = self.meta.as_dict()
+        if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
+        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Group into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.display_name is not None:
-            body["displayName"] = self.display_name
-        if self.entitlements:
-            body["entitlements"] = self.entitlements
-        if self.external_id is not None:
-            body["externalId"] = self.external_id
-        if self.groups:
-            body["groups"] = self.groups
-        if self.id is not None:
-            body["id"] = self.id
-        if self.members:
-            body["members"] = self.members
-        if self.meta:
-            body["meta"] = self.meta
-        if self.roles:
-            body["roles"] = self.roles
-        if self.schemas:
-            body["schemas"] = self.schemas
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.entitlements: body['entitlements'] = self.entitlements
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = self.groups
+        if self.id is not None: body['id'] = self.id
+        if self.members: body['members'] = self.members
+        if self.meta: body['meta'] = self.meta
+        if self.roles: body['roles'] = self.roles
+        if self.schemas: body['schemas'] = self.schemas
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Group:
         """Deserializes the Group from a dictionary."""
-        return cls(
-            display_name=d.get("displayName", None),
-            entitlements=_repeated_dict(d, "entitlements", ComplexValue),
-            external_id=d.get("externalId", None),
-            groups=_repeated_dict(d, "groups", ComplexValue),
-            id=d.get("id", None),
-            members=_repeated_dict(d, "members", ComplexValue),
-            meta=_from_dict(d, "meta", ResourceMeta),
-            roles=_repeated_dict(d, "roles", ComplexValue),
-            schemas=_repeated_enum(d, "schemas", GroupSchema),
-        )
+        return cls(display_name=d.get('displayName', None), entitlements=_repeated_dict(d, 'entitlements', ComplexValue), external_id=d.get('externalId', None), groups=_repeated_dict(d, 'groups', ComplexValue), id=d.get('id', None), members=_repeated_dict(d, 'members', ComplexValue), meta=_from_dict(d, 'meta', ResourceMeta), roles=_repeated_dict(d, 'roles', ComplexValue), schemas=_repeated_enum(d, 'schemas', GroupSchema))
+
+
 
 
 class GroupSchema(Enum):
+
+
+    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP = 'urn:ietf:params:scim:schemas:core:2.0:Group'
+
+
+
+
+
+
+
+
+
-    URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP = "urn:ietf:params:scim:schemas:core:2.0:Group"
 
 
 @dataclass
 class ListGroupsResponse:
     items_per_page: Optional[int] = None
     """Total results returned in the response."""
-
+
     resources: Optional[List[Group]] = None
     """User objects returned in the response."""
-
+
     schemas: Optional[List[ListResponseSchema]] = None
     """The schema of the service principal."""
-
+
     start_index: Optional[int] = None
     """Starting index of all the results that matched the request filters. First item is number 1."""
-
+
     total_results: Optional[int] = None
     """Total results that match the request filters."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items_per_page is not None:
-            body["itemsPerPage"] = self.items_per_page
-        if self.resources:
-            body["Resources"] = [v.as_dict() for v in self.resources]
-        if self.schemas:
-            body["schemas"] = [v.value for v in self.schemas]
-        if self.start_index is not None:
-            body["startIndex"] = self.start_index
-        if self.total_results is not None:
-            body["totalResults"] = self.total_results
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = [v.as_dict() for v in self.resources]
+        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items_per_page is not None:
-            body["itemsPerPage"] = self.items_per_page
-        if self.resources:
-            body["Resources"] = self.resources
-        if self.schemas:
-            body["schemas"] = self.schemas
-        if self.start_index is not None:
-            body["startIndex"] = self.start_index
-        if self.total_results is not None:
-            body["totalResults"] = self.total_results
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse:
         """Deserializes the ListGroupsResponse from a dictionary."""
-        return cls(
-            items_per_page=d.get("itemsPerPage", None),
-            resources=_repeated_dict(d, "Resources", Group),
-            schemas=_repeated_enum(d, "schemas", ListResponseSchema),
-            start_index=d.get("startIndex", None),
-            total_results=d.get("totalResults", None),
-        )
-
+        return cls(items_per_page=d.get('itemsPerPage', None), resources=_repeated_dict(d, 'Resources', Group), schemas=_repeated_enum(d, 'schemas', ListResponseSchema), start_index=d.get('startIndex', None), total_results=d.get('totalResults', None))
+
 
-class ListResponseSchema(Enum):
-    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = "urn:ietf:params:scim:api:messages:2.0:ListResponse"
+class ListResponseSchema(Enum):
+
+
+    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = 'urn:ietf:params:scim:api:messages:2.0:ListResponse'
 
 
 @dataclass
 class ListServicePrincipalResponse:
     items_per_page: Optional[int] = None
     """Total results returned in the response."""
-
+
     resources: Optional[List[ServicePrincipal]] = None
     """User objects returned in the response."""
-
+
     schemas: Optional[List[ListResponseSchema]] = None
     """The schema of the List response."""
-
+
     start_index: Optional[int] = None
     """Starting index of all the results that matched the request filters. First item is number 1."""
-
+
     total_results: Optional[int] = None
     """Total results that match the request filters."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListServicePrincipalResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items_per_page is not None:
-            body["itemsPerPage"] = self.items_per_page
-        if self.resources:
-            body["Resources"] = [v.as_dict() for v in self.resources]
-        if self.schemas:
-            body["schemas"] = [v.value for v in self.schemas]
-        if self.start_index is not None:
-            body["startIndex"] = self.start_index
-        if self.total_results is not None:
-            body["totalResults"] = self.total_results
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = [v.as_dict() for v in self.resources]
+        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items_per_page is not None:
-            body["itemsPerPage"] = self.items_per_page
-        if self.resources:
-            body["Resources"] = self.resources
-        if self.schemas:
-            body["schemas"] = self.schemas
-        if self.start_index is not None:
-            body["startIndex"] = self.start_index
-        if self.total_results is not None:
-            body["totalResults"] = self.total_results
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalResponse:
         """Deserializes the ListServicePrincipalResponse from a dictionary."""
-        return cls(
-            items_per_page=d.get("itemsPerPage", None),
-            resources=_repeated_dict(d, "Resources", ServicePrincipal),
-            schemas=_repeated_enum(d, "schemas", ListResponseSchema),
-            start_index=d.get("startIndex", None),
-            total_results=d.get("totalResults", None),
-        )
+        return cls(items_per_page=d.get('itemsPerPage', None), resources=_repeated_dict(d, 'Resources', ServicePrincipal), schemas=_repeated_enum(d, 'schemas', ListResponseSchema), start_index=d.get('startIndex', None), total_results=d.get('totalResults', None))
+
+
+
+
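The SCIM list responses above (ListGroupsResponse, ListServicePrincipalResponse, and ListUsersResponse below) share one wire shape: 'Resources', 'itemsPerPage', 'startIndex', and 'totalResults' on the wire, snake_case attributes in Python, with from_dict/as_dict doing the mapping. A minimal round-trip sketch; the payload values are invented for illustration:

    from databricks.sdk.service.iam import ListGroupsResponse

    payload = {
        "schemas": ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
        "Resources": [{"id": "123", "displayName": "data-engineers"}],
        "itemsPerPage": 1,
        "startIndex": 1,
        "totalResults": 1,
    }
    resp = ListGroupsResponse.from_dict(payload)
    # Attributes are snake_case even though the wire keys are SCIM-style.
    assert resp.resources[0].display_name == "data-engineers"
    # as_dict() restores the SCIM wire names.
    assert resp.as_dict()["Resources"][0]["displayName"] == "data-engineers"
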
+
 class ListSortOrder(Enum):
+
+
+    ASCENDING = 'ascending'
+    DESCENDING = 'descending'
+
-    ASCENDING = "ascending"
-    DESCENDING = "descending"
 
 
 @dataclass
 class ListUsersResponse:
     items_per_page: Optional[int] = None
     """Total results returned in the response."""
-
+
     resources: Optional[List[User]] = None
     """User objects returned in the response."""
-
+
     schemas: Optional[List[ListResponseSchema]] = None
     """The schema of the List response."""
-
+
     start_index: Optional[int] = None
     """Starting index of all the results that matched the request filters. First item is number 1."""
-
+
     total_results: Optional[int] = None
     """Total results that match the request filters."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items_per_page is not None:
-            body["itemsPerPage"] = self.items_per_page
-        if self.resources:
-            body["Resources"] = [v.as_dict() for v in self.resources]
-        if self.schemas:
-            body["schemas"] = [v.value for v in self.schemas]
-        if self.start_index is not None:
-            body["startIndex"] = self.start_index
-        if self.total_results is not None:
-            body["totalResults"] = self.total_results
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = [v.as_dict() for v in self.resources]
+        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items_per_page is not None:
-            body["itemsPerPage"] = self.items_per_page
-        if self.resources:
-            body["Resources"] = self.resources
-        if self.schemas:
-            body["schemas"] = self.schemas
-        if self.start_index is not None:
-            body["startIndex"] = self.start_index
-        if self.total_results is not None:
-            body["totalResults"] = self.total_results
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse:
         """Deserializes the ListUsersResponse from a dictionary."""
-        return cls(
-            items_per_page=d.get("itemsPerPage", None),
-            resources=_repeated_dict(d, "Resources", User),
-            schemas=_repeated_enum(d, "schemas", ListResponseSchema),
-            start_index=d.get("startIndex", None),
-            total_results=d.get("totalResults", None),
-        )
+        return cls(items_per_page=d.get('itemsPerPage', None), resources=_repeated_dict(d, 'Resources', User), schemas=_repeated_enum(d, 'schemas', ListResponseSchema), start_index=d.get('startIndex', None), total_results=d.get('totalResults', None))
+
+
+
+
 
 
 @dataclass
 class MigratePermissionsRequest:
     workspace_id: int
     """WorkspaceId of the associated workspace where the permission migration will occur."""
-
+
     from_workspace_group_name: str
     """The name of the workspace group that permissions will be migrated from."""
-
+
     to_account_group_name: str
     """The name of the account group that permissions will be migrated to."""
-
+
     size: Optional[int] = None
     """The maximum number of permissions that will be migrated."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MigratePermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.from_workspace_group_name is not None:
-            body["from_workspace_group_name"] = self.from_workspace_group_name
-        if self.size is not None:
-            body["size"] = self.size
-        if self.to_account_group_name is not None:
-            body["to_account_group_name"] = self.to_account_group_name
-        if self.workspace_id is not None:
-            body["workspace_id"] = self.workspace_id
+        if self.from_workspace_group_name is not None: body['from_workspace_group_name'] = self.from_workspace_group_name
+        if self.size is not None: body['size'] = self.size
+        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.from_workspace_group_name is not None:
-            body["from_workspace_group_name"] = self.from_workspace_group_name
-        if self.size is not None:
-            body["size"] = self.size
-        if self.to_account_group_name is not None:
-            body["to_account_group_name"] = self.to_account_group_name
-        if self.workspace_id is not None:
-            body["workspace_id"] = self.workspace_id
+        if self.from_workspace_group_name is not None: body['from_workspace_group_name'] = self.from_workspace_group_name
+        if self.size is not None: body['size'] = self.size
+        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MigratePermissionsRequest:
         """Deserializes the MigratePermissionsRequest from a dictionary."""
-        return cls(
-            from_workspace_group_name=d.get("from_workspace_group_name", None),
-            size=d.get("size", None),
-            to_account_group_name=d.get("to_account_group_name", None),
-            workspace_id=d.get("workspace_id", None),
-        )
+        return cls(from_workspace_group_name=d.get('from_workspace_group_name', None), size=d.get('size', None), to_account_group_name=d.get('to_account_group_name', None), workspace_id=d.get('workspace_id', None))
+
+
 
 
 @dataclass
 class MigratePermissionsResponse:
     permissions_migrated: Optional[int] = None
     """Number of permissions migrated."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MigratePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permissions_migrated is not None:
-            body["permissions_migrated"] = self.permissions_migrated
+        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MigratePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permissions_migrated is not None:
-            body["permissions_migrated"] = self.permissions_migrated
+        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MigratePermissionsResponse:
         """Deserializes the MigratePermissionsResponse from a dictionary."""
-        return cls(permissions_migrated=d.get("permissions_migrated", None))
+        return cls(permissions_migrated=d.get('permissions_migrated', None))
+
+
 
 
 @dataclass
 class Name:
     family_name: Optional[str] = None
     """Family name of the Databricks user."""
-
+
     given_name: Optional[str] = None
     """Given name of the Databricks user."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Name into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.family_name is not None:
-            body["familyName"] = self.family_name
-        if self.given_name is not None:
-            body["givenName"] = self.given_name
+        if self.family_name is not None: body['familyName'] = self.family_name
+        if self.given_name is not None: body['givenName'] = self.given_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Name into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.family_name is not None:
-            body["familyName"] = self.family_name
-        if self.given_name is not None:
-            body["givenName"] = self.given_name
+        if self.family_name is not None: body['familyName'] = self.family_name
+        if self.given_name is not None: body['givenName'] = self.given_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Name:
         """Deserializes the Name from a dictionary."""
-        return cls(family_name=d.get("familyName", None), given_name=d.get("givenName", None))
+        return cls(family_name=d.get('familyName', None), given_name=d.get('givenName', None))
+
+
 
 
 @dataclass
 class ObjectPermissions:
     access_control_list: Optional[List[AccessControlResponse]] = None
-
+
     object_id: Optional[str] = None
-
+
     object_type: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ObjectPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None:
-            body["object_id"] = self.object_id
-        if self.object_type is not None:
-            body["object_type"] = self.object_type
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ObjectPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.object_id is not None:
-            body["object_id"] = self.object_id
-        if self.object_type is not None:
-            body["object_type"] = self.object_type
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ObjectPermissions:
         """Deserializes the ObjectPermissions from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", AccessControlResponse),
-            object_id=d.get("object_id", None),
-            object_type=d.get("object_type", None),
-        )
+        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
+
+
 
 
 @dataclass
 class PartialUpdate:
     id: Optional[str] = None
     """Unique ID in the Databricks workspace."""
-
+
     operations: Optional[List[Patch]] = None
-
+
     schemas: Optional[List[PatchSchema]] = None
     """The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PartialUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
-        if self.operations:
-            body["Operations"] = [v.as_dict() for v in self.operations]
-        if self.schemas:
-            body["schemas"] = [v.value for v in self.schemas]
+        if self.id is not None: body['id'] = self.id
+        if self.operations: body['Operations'] = [v.as_dict() for v in self.operations]
+        if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
-        if self.operations:
-            body["Operations"] = self.operations
-        if self.schemas:
-            body["schemas"] = self.schemas
+        if self.id is not None: body['id'] = self.id
+        if self.operations: body['Operations'] = self.operations
+        if self.schemas: body['schemas'] = self.schemas
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PartialUpdate:
         """Deserializes the PartialUpdate from a dictionary."""
-        return cls(
-            id=d.get("id", None),
-            operations=_repeated_dict(d, "Operations", Patch),
-            schemas=_repeated_enum(d, "schemas", PatchSchema),
-        )
+        return cls(id=d.get('id', None), operations=_repeated_dict(d, 'Operations', Patch), schemas=_repeated_enum(d, 'schemas', PatchSchema))
+
+
 
 
 @dataclass
 class PasswordAccessControlRequest:
     group_name: Optional[str] = None
     """name of the group"""
-
+
     permission_level: Optional[PasswordPermissionLevel] = None
     """Permission level"""
-
+
     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the PasswordAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level.value
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PasswordAccessControlRequest:
         """Deserializes the PasswordAccessControlRequest from a dictionary."""
-        return cls(
-            group_name=d.get("group_name", None),
-            permission_level=_enum(d, "permission_level", PasswordPermissionLevel),
-            service_principal_name=d.get("service_principal_name", None),
-            user_name=d.get("user_name", None),
-        )
+        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PasswordPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
 
 
 @dataclass
 class PasswordAccessControlResponse:
     all_permissions: Optional[List[PasswordPermission]] = None
     """All permissions."""
-
+
     display_name: Optional[str] = None
     """Display name of the user or service principal."""
-
+
     group_name: Optional[str] = None
     """name of the group"""
-
+
     service_principal_name: Optional[str] = None
     """Name of the service principal."""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the PasswordAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions:
-            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions:
-            body["all_permissions"] = self.all_permissions
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.group_name is not None:
-            body["group_name"] = self.group_name
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PasswordAccessControlResponse:
         """Deserializes the PasswordAccessControlResponse from a dictionary."""
-        return cls(
-            all_permissions=_repeated_dict(d, "all_permissions", PasswordPermission),
-            display_name=d.get("display_name", None),
-            group_name=d.get("group_name", None),
-            service_principal_name=d.get("service_principal_name", None),
-            user_name=d.get("user_name", None),
-        )
+        return cls(all_permissions=_repeated_dict(d, 'all_permissions', PasswordPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
 
 
 @dataclass
 class PasswordPermission:
     inherited: Optional[bool] = None
-
+
     inherited_from_object: Optional[List[str]] = None
-
+
     permission_level: Optional[PasswordPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the PasswordPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None:
-            body["inherited"] = self.inherited
-        if self.inherited_from_object:
-            body["inherited_from_object"] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level.value
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None:
-            body["inherited"] = self.inherited
-        if self.inherited_from_object:
-            body["inherited_from_object"] = self.inherited_from_object
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PasswordPermission:
         """Deserializes the PasswordPermission from a dictionary."""
-        return cls(
-            inherited=d.get("inherited", None),
-            inherited_from_object=d.get("inherited_from_object", None),
-            permission_level=_enum(d, "permission_level", PasswordPermissionLevel),
-        )
+        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', PasswordPermissionLevel))
+
+
 
 
 class PasswordPermissionLevel(Enum):
     """Permission level"""
-
-    CAN_USE = "CAN_USE"
-
+
+    CAN_USE = 'CAN_USE'
 
 
 @dataclass
 class PasswordPermissions:
     access_control_list: Optional[List[PasswordAccessControlResponse]] = None
-
+
     object_id: Optional[str] = None
-
+
     object_type: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the PasswordPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None:
-            body["object_id"] = self.object_id
-        if self.object_type is not None:
-            body["object_type"] = self.object_type
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
-        if self.object_id is not None:
-            body["object_id"] = self.object_id
-        if self.object_type is not None:
-            body["object_type"] = self.object_type
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissions:
         """Deserializes the PasswordPermissions from a dictionary."""
-        return cls(
-            access_control_list=_repeated_dict(d, "access_control_list", PasswordAccessControlResponse),
-            object_id=d.get("object_id", None),
-            object_type=d.get("object_type", None),
-        )
+        return cls(access_control_list=_repeated_dict(d, 'access_control_list', PasswordAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
+
+
 
 
 @dataclass
 class PasswordPermissionsDescription:
     description: Optional[str] = None
-
+
     permission_level: Optional[PasswordPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the PasswordPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level.value
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.permission_level is not None:
-            body["permission_level"] = self.permission_level
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsDescription:
         """Deserializes the PasswordPermissionsDescription from a dictionary."""
-        return cls(
-            description=d.get("description", None),
-            permission_level=_enum(d, "permission_level", PasswordPermissionLevel),
-        )
+        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', PasswordPermissionLevel))
+
+
 
 
 @dataclass
 class PasswordPermissionsRequest:
     access_control_list: Optional[List[PasswordAccessControlRequest]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the PasswordPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list:
-            body["access_control_list"] = self.access_control_list
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsRequest:
         """Deserializes the PasswordPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, "access_control_list", PasswordAccessControlRequest))
+        return cls(access_control_list=_repeated_dict(d, 'access_control_list', PasswordAccessControlRequest))
+
+
 
 
 @dataclass
 class Patch:
     op: Optional[PatchOp] = None
     """Type of patch operation."""
-
+
     path: Optional[str] = None
     """Selection of patch operation"""
-
+
     value: Optional[Any] = None
     """Value to modify"""
-
+
     def as_dict(self) -> dict:
         """Serializes the Patch into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.op is not None:
-            body["op"] = self.op.value
-        if self.path is not None:
-            body["path"] = self.path
-        if self.value:
-            body["value"] = self.value
+        if self.op is not None: body['op'] = self.op.value
+        if self.path is not None: body['path'] = self.path
+        if self.value: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Patch into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.op is not None:
-            body["op"] = self.op
-        if self.path is not None:
-            body["path"] = self.path
-        if self.value:
-            body["value"] = self.value
+        if self.op is not None: body['op'] = self.op
+        if self.path is not None: body['path'] = self.path
+        if self.value: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Patch:
         """Deserializes the Patch from a dictionary."""
-        return cls(op=_enum(d, "op", PatchOp), path=d.get("path", None), value=d.get("value", None))
+        return cls(op=_enum(d, 'op', PatchOp), path=d.get('path', None), value=d.get('value', None))
+
+
 
 
 class PatchOp(Enum):
     """Type of patch operation."""
-
-    ADD = "add"
-    REMOVE = "remove"
-    REPLACE = "replace"
-
+
+    ADD = 'add'
+    REMOVE = 'remove'
+    REPLACE = 'replace'
 
 
 @dataclass
 class PatchResponse:
@@ -1206,325 +1099,279 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> PatchResponse:
         """Deserializes the PatchResponse from a dictionary."""
         return cls()
+
 
-class PatchSchema(Enum):
-
-    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = "urn:ietf:params:scim:api:messages:2.0:PatchOp"
+class PatchSchema(Enum):
+
+
+    URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = 'urn:ietf:params:scim:api:messages:2.0:PatchOp'
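PartialUpdate, Patch, PatchOp, and PatchSchema above together model a SCIM 2.0 PatchOp request. A hedged sketch of composing one with these generated dataclasses; the group id and member id are invented for illustration:

    from databricks.sdk.service.iam import PartialUpdate, Patch, PatchOp, PatchSchema

    op = Patch(op=PatchOp.ADD, path="members", value=[{"value": "12345"}])
    update = PartialUpdate(
        id="987",
        operations=[op],
        schemas=[PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
    )
    # as_dict() produces the SCIM wire shape, with 'Operations' capitalized:
    # {'id': '987', 'Operations': [{'op': 'add', ...}], 'schemas': [...]}
    body = update.as_dict()

In the SDK the same fields are usually passed to the generated patch methods (for example GroupsAPI.patch) rather than built by hand; the dataclasses simply make the serialization explicit.
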
body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Permission: """Deserializes the Permission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", PermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', PermissionLevel)) + + @dataclass class PermissionAssignment: """The output format for existing workspace PermissionAssignment records, which contains some info for user consumption.""" - + error: Optional[str] = None """Error response associated with a workspace permission assignment, if any.""" - + permissions: Optional[List[WorkspacePermission]] = None """The permissions level of the principal.""" - + principal: Optional[PrincipalOutput] = None """Information about the principal assigned to the workspace.""" - + def as_dict(self) -> dict: """Serializes the PermissionAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error is not None: - body["error"] = self.error - if self.permissions: - body["permissions"] = [v.value for v in self.permissions] - if self.principal: - body["principal"] = self.principal.as_dict() + if self.error is not None: body['error'] = self.error + if self.permissions: body['permissions'] = [v.value for v in self.permissions] + if self.principal: body['principal'] = self.principal.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PermissionAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.error is not None: - body["error"] = self.error - if self.permissions: - body["permissions"] = self.permissions - if self.principal: - body["principal"] = self.principal + if self.error is not None: body['error'] = self.error + if self.permissions: body['permissions'] = self.permissions + if self.principal: body['principal'] = self.principal return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionAssignment: """Deserializes the PermissionAssignment from a dictionary.""" - return cls( - error=d.get("error", None), - permissions=_repeated_enum(d, "permissions", WorkspacePermission), - principal=_from_dict(d, "principal", PrincipalOutput), - ) + return cls(error=d.get('error', None), permissions=_repeated_enum(d, 'permissions', WorkspacePermission), principal=_from_dict(d, 'principal', PrincipalOutput)) + + @dataclass class PermissionAssignments: permission_assignments: Optional[List[PermissionAssignment]] = None """Array of permissions assignments defined for a workspace.""" - + def as_dict(self) -> dict: """Serializes the PermissionAssignments into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_assignments: - body["permission_assignments"] = [v.as_dict() for v in self.permission_assignments] + if self.permission_assignments: body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments] return body def as_shallow_dict(self) -> dict: """Serializes the PermissionAssignments into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_assignments: - body["permission_assignments"] = self.permission_assignments + if self.permission_assignments: body['permission_assignments'] = self.permission_assignments return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
PermissionAssignments: """Deserializes the PermissionAssignments from a dictionary.""" - return cls(permission_assignments=_repeated_dict(d, "permission_assignments", PermissionAssignment)) + return cls(permission_assignments=_repeated_dict(d, 'permission_assignments', PermissionAssignment)) + + class PermissionLevel(Enum): """Permission level""" - - CAN_ATTACH_TO = "CAN_ATTACH_TO" - CAN_BIND = "CAN_BIND" - CAN_CREATE = "CAN_CREATE" - CAN_EDIT = "CAN_EDIT" - CAN_EDIT_METADATA = "CAN_EDIT_METADATA" - CAN_MANAGE = "CAN_MANAGE" - CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS" - CAN_MANAGE_RUN = "CAN_MANAGE_RUN" - CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS" - CAN_MONITOR = "CAN_MONITOR" - CAN_MONITOR_ONLY = "CAN_MONITOR_ONLY" - CAN_QUERY = "CAN_QUERY" - CAN_READ = "CAN_READ" - CAN_RESTART = "CAN_RESTART" - CAN_RUN = "CAN_RUN" - CAN_USE = "CAN_USE" - CAN_VIEW = "CAN_VIEW" - CAN_VIEW_METADATA = "CAN_VIEW_METADATA" - IS_OWNER = "IS_OWNER" - + + CAN_ATTACH_TO = 'CAN_ATTACH_TO' + CAN_BIND = 'CAN_BIND' + CAN_CREATE = 'CAN_CREATE' + CAN_EDIT = 'CAN_EDIT' + CAN_EDIT_METADATA = 'CAN_EDIT_METADATA' + CAN_MANAGE = 'CAN_MANAGE' + CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' + CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' + CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' + CAN_MONITOR = 'CAN_MONITOR' + CAN_MONITOR_ONLY = 'CAN_MONITOR_ONLY' + CAN_QUERY = 'CAN_QUERY' + CAN_READ = 'CAN_READ' + CAN_RESTART = 'CAN_RESTART' + CAN_RUN = 'CAN_RUN' + CAN_USE = 'CAN_USE' + CAN_VIEW = 'CAN_VIEW' + CAN_VIEW_METADATA = 'CAN_VIEW_METADATA' + IS_OWNER = 'IS_OWNER' @dataclass class PermissionOutput: description: Optional[str] = None """The results of a permissions query.""" - + permission_level: Optional[WorkspacePermission] = None - + def as_dict(self) -> dict: """Serializes the PermissionOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PermissionOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionOutput: """Deserializes the PermissionOutput from a dictionary.""" - return cls( - description=d.get("description", None), permission_level=_enum(d, "permission_level", WorkspacePermission) - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', WorkspacePermission)) + + @dataclass class PermissionsDescription: description: Optional[str] = None - + permission_level: Optional[PermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the PermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - 
body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionsDescription: """Deserializes the PermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), permission_level=_enum(d, "permission_level", PermissionLevel) - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', PermissionLevel)) + + @dataclass class PrincipalOutput: """Information about the principal assigned to the workspace.""" - + display_name: Optional[str] = None """The display name of the principal.""" - + group_name: Optional[str] = None """The group name of the group. Present only if the principal is a group.""" - + principal_id: Optional[int] = None """The unique, opaque id of the principal.""" - + service_principal_name: Optional[str] = None """The name of the service principal. Present only if the principal is a service principal.""" - + user_name: Optional[str] = None """The username of the user. Present only if the principal is a user.""" - + def as_dict(self) -> dict: """Serializes the PrincipalOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.principal_id is not None: body['principal_id'] = self.principal_id + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PrincipalOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.principal_id is not None: body['principal_id'] = self.principal_id + if self.service_principal_name is not None: body['service_principal_name'] = 
self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrincipalOutput: """Deserializes the PrincipalOutput from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - principal_id=d.get("principal_id", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(display_name=d.get('display_name', None), group_name=d.get('group_name', None), principal_id=d.get('principal_id', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + class RequestAuthzIdentity(Enum): """Defines the identity to be used for authZ of the request on the server side. See one pager for more information: http://go/acl/service-identity""" - - REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = "REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY" - REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = "REQUEST_AUTHZ_IDENTITY_USER_CONTEXT" - + + REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = 'REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY' + REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = 'REQUEST_AUTHZ_IDENTITY_USER_CONTEXT' @dataclass class ResourceInfo: id: str """Id of the current resource.""" - + legacy_acl_path: Optional[str] = None """The legacy acl path of the current resource.""" - + parent_resource_info: Optional[ResourceInfo] = None """Parent resource info for the current resource. The parent may have another parent.""" - + def as_dict(self) -> dict: """Serializes the ResourceInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.legacy_acl_path is not None: - body["legacy_acl_path"] = self.legacy_acl_path - if self.parent_resource_info: - body["parent_resource_info"] = self.parent_resource_info.as_dict() + if self.id is not None: body['id'] = self.id + if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path + if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ResourceInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.legacy_acl_path is not None: - body["legacy_acl_path"] = self.legacy_acl_path - if self.parent_resource_info: - body["parent_resource_info"] = self.parent_resource_info + if self.id is not None: body['id'] = self.id + if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path + if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResourceInfo: """Deserializes the ResourceInfo from a dictionary.""" - return cls( - id=d.get("id", None), - legacy_acl_path=d.get("legacy_acl_path", None), - parent_resource_info=_from_dict(d, "parent_resource_info", ResourceInfo), - ) + return cls(id=d.get('id', None), legacy_acl_path=d.get('legacy_acl_path', None), parent_resource_info=_from_dict(d, 'parent_resource_info', ResourceInfo)) + + @dataclass @@ -1532,57 +1379,57 @@ class ResourceMeta: resource_type: Optional[str] = None """Identifier for group type.
Can be local workspace group (`WorkspaceGroup`) or account group (`Group`).""" - + def as_dict(self) -> dict: """Serializes the ResourceMeta into a dictionary suitable for use as a JSON request body.""" body = {} - if self.resource_type is not None: - body["resourceType"] = self.resource_type + if self.resource_type is not None: body['resourceType'] = self.resource_type return body def as_shallow_dict(self) -> dict: """Serializes the ResourceMeta into a shallow dictionary of its immediate attributes.""" body = {} - if self.resource_type is not None: - body["resourceType"] = self.resource_type + if self.resource_type is not None: body['resourceType'] = self.resource_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResourceMeta: """Deserializes the ResourceMeta from a dictionary.""" - return cls(resource_type=d.get("resourceType", None)) + return cls(resource_type=d.get('resourceType', None)) + + @dataclass class Role: name: str """Role to assign to a principal or a list of principals on a resource.""" - + def as_dict(self) -> dict: """Serializes the Role into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the Role into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Role: """Deserializes the Role from a dictionary.""" - return cls(name=d.get("name", None)) + return cls(name=d.get('name', None)) + + @dataclass class RuleSetResponse: name: str """Name of the rule set.""" - + etag: str """Identifies the version of the rule set returned. Etag used for versioning. The response is at least as fresh as the eTag provided. 
Etag is used for optimistic concurrency control as a way to @@ -1591,44 +1438,38 @@ class RuleSetResponse: rule set updates in order to avoid race conditions; that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating.""" - + grant_rules: Optional[List[GrantRule]] = None - + def as_dict(self) -> dict: """Serializes the RuleSetResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.grant_rules: - body["grant_rules"] = [v.as_dict() for v in self.grant_rules] - if self.name is not None: - body["name"] = self.name + if self.etag is not None: body['etag'] = self.etag + if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules] + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the RuleSetResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.grant_rules: - body["grant_rules"] = self.grant_rules - if self.name is not None: - body["name"] = self.name + if self.etag is not None: body['etag'] = self.etag + if self.grant_rules: body['grant_rules'] = self.grant_rules + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RuleSetResponse: """Deserializes the RuleSetResponse from a dictionary.""" - return cls( - etag=d.get("etag", None), grant_rules=_repeated_dict(d, "grant_rules", GrantRule), name=d.get("name", None) - ) + return cls(etag=d.get('etag', None), grant_rules=_repeated_dict(d, 'grant_rules', GrantRule), name=d.get('name', None)) + + @dataclass class RuleSetUpdateRequest: name: str """Name of the rule set.""" - + etag: str """Identifies the version of the rule set returned. Etag used for versioning. The response is at least as fresh as the eTag provided.
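The etag contract spelled out in these docstrings implies a read -> modify -> write loop. A minimal sketch, assuming an authenticated AccountClient named `a` that exposes this service as `a.access_control`; the account and group ids in the rule-set name are placeholders:

    # Assumed sketch of the read -> modify -> write pattern: fetch the current
    # rule set (an empty etag means no freshness requirement), then pass its
    # etag back on the PUT so a concurrent update fails instead of being
    # silently overwritten.
    name = "accounts/<account-id>/groups/<group-id>/ruleSets/default"
    current = a.access_control.get_rule_set(name=name, etag="")
    a.access_control.update_rule_set(
        name=name,
        rule_set=RuleSetUpdateRequest(name=name, etag=current.etag, grant_rules=current.grant_rules),
    )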
Etag is used for optimistic concurrency control as a way to @@ -1637,222 +1478,174 @@ class RuleSetUpdateRequest: rule set updates in order to avoid race conditions; that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating.""" - + grant_rules: Optional[List[GrantRule]] = None - + def as_dict(self) -> dict: """Serializes the RuleSetUpdateRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.grant_rules: - body["grant_rules"] = [v.as_dict() for v in self.grant_rules] - if self.name is not None: - body["name"] = self.name + if self.etag is not None: body['etag'] = self.etag + if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules] + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the RuleSetUpdateRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.grant_rules: - body["grant_rules"] = self.grant_rules - if self.name is not None: - body["name"] = self.name + if self.etag is not None: body['etag'] = self.etag + if self.grant_rules: body['grant_rules'] = self.grant_rules + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RuleSetUpdateRequest: """Deserializes the RuleSetUpdateRequest from a dictionary.""" - return cls( - etag=d.get("etag", None), grant_rules=_repeated_dict(d, "grant_rules", GrantRule), name=d.get("name", None) - ) + return cls(etag=d.get('etag', None), grant_rules=_repeated_dict(d, 'grant_rules', GrantRule), name=d.get('name', None)) + + @dataclass class ServicePrincipal: active: Optional[bool] = None """If this user is active""" - + application_id: Optional[str] = None """UUID relating to the service principal""" - + display_name: Optional[str] = None """String that represents a concatenation of given and family names.""" - + entitlements: Optional[List[ComplexValue]] = None """Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values.
[assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements""" - + external_id: Optional[str] = None - + groups: Optional[List[ComplexValue]] = None - + id: Optional[str] = None """Databricks service principal ID.""" - + roles: Optional[List[ComplexValue]] = None """Corresponds to AWS instance profile/arn role.""" - + schemas: Optional[List[ServicePrincipalSchema]] = None """The schema of the List response.""" - + def as_dict(self) -> dict: """Serializes the ServicePrincipal into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active is not None: - body["active"] = self.active - if self.application_id is not None: - body["applicationId"] = self.application_id - if self.display_name is not None: - body["displayName"] = self.display_name - if self.entitlements: - body["entitlements"] = [v.as_dict() for v in self.entitlements] - if self.external_id is not None: - body["externalId"] = self.external_id - if self.groups: - body["groups"] = [v.as_dict() for v in self.groups] - if self.id is not None: - body["id"] = self.id - if self.roles: - body["roles"] = [v.as_dict() for v in self.roles] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] + if self.active is not None: body['active'] = self.active + if self.application_id is not None: body['applicationId'] = self.application_id + if self.display_name is not None: body['displayName'] = self.display_name + if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements] + if self.external_id is not None: body['externalId'] = self.external_id + if self.groups: body['groups'] = [v.as_dict() for v in self.groups] + if self.id is not None: body['id'] = self.id + if self.roles: body['roles'] = [v.as_dict() for v in self.roles] + if self.schemas: body['schemas'] = [v.value for v in self.schemas] return body def as_shallow_dict(self) -> dict: """Serializes the ServicePrincipal into a shallow dictionary of its immediate attributes.""" body = {} - if self.active is not None: - body["active"] = self.active - if self.application_id is not None: - body["applicationId"] = self.application_id - if self.display_name is not None: - body["displayName"] = self.display_name - if self.entitlements: - body["entitlements"] = self.entitlements - if self.external_id is not None: - body["externalId"] = self.external_id - if self.groups: - body["groups"] = self.groups - if self.id is not None: - body["id"] = self.id - if self.roles: - body["roles"] = self.roles - if self.schemas: - body["schemas"] = self.schemas + if self.active is not None: body['active'] = self.active + if self.application_id is not None: body['applicationId'] = self.application_id + if self.display_name is not None: body['displayName'] = self.display_name + if self.entitlements: body['entitlements'] = self.entitlements + if self.external_id is not None: body['externalId'] = self.external_id + if self.groups: body['groups'] = self.groups + if self.id is not None: body['id'] = self.id + if self.roles: body['roles'] = self.roles + if self.schemas: body['schemas'] = self.schemas return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServicePrincipal: """Deserializes the ServicePrincipal from a dictionary.""" - return cls( - active=d.get("active", None), - application_id=d.get("applicationId", None), - display_name=d.get("displayName", None), - entitlements=_repeated_dict(d, "entitlements", ComplexValue), - external_id=d.get("externalId", None), - 
groups=_repeated_dict(d, "groups", ComplexValue), - id=d.get("id", None), - roles=_repeated_dict(d, "roles", ComplexValue), - schemas=_repeated_enum(d, "schemas", ServicePrincipalSchema), - ) - + return cls(active=d.get('active', None), application_id=d.get('applicationId', None), display_name=d.get('displayName', None), entitlements=_repeated_dict(d, 'entitlements', ComplexValue), external_id=d.get('externalId', None), groups=_repeated_dict(d, 'groups', ComplexValue), id=d.get('id', None), roles=_repeated_dict(d, 'roles', ComplexValue), schemas=_repeated_enum(d, 'schemas', ServicePrincipalSchema)) + -class ServicePrincipalSchema(Enum): - URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = "urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal" +class ServicePrincipalSchema(Enum): + + + URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = 'urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal' @dataclass class SetObjectPermissions: access_control_list: Optional[List[AccessControlRequest]] = None - + request_object_id: Optional[str] = None """The id of the request object.""" - + request_object_type: Optional[str] = None """The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - + def as_dict(self) -> dict: """Serializes the SetObjectPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.request_object_id is not None: body['request_object_id'] = self.request_object_id + if self.request_object_type is not None: body['request_object_type'] = self.request_object_type return body def as_shallow_dict(self) -> dict: """Serializes the SetObjectPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.request_object_id is not None: body['request_object_id'] = self.request_object_id + if self.request_object_type is not None: body['request_object_type'] = self.request_object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetObjectPermissions: """Deserializes the SetObjectPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), - request_object_id=d.get("request_object_id", None), - request_object_type=d.get("request_object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlRequest), request_object_id=d.get('request_object_id', None), request_object_type=d.get('request_object_type', None)) + + @dataclass class UpdateObjectPermissions: access_control_list: 
Optional[List[AccessControlRequest]] = None - + request_object_id: Optional[str] = None """The id of the request object.""" - + request_object_type: Optional[str] = None """The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - + def as_dict(self) -> dict: """Serializes the UpdateObjectPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.request_object_id is not None: body['request_object_id'] = self.request_object_id + if self.request_object_type is not None: body['request_object_type'] = self.request_object_type return body def as_shallow_dict(self) -> dict: """Serializes the UpdateObjectPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.request_object_id is not None: body['request_object_id'] = self.request_object_id + if self.request_object_type is not None: body['request_object_type'] = self.request_object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateObjectPermissions: """Deserializes the UpdateObjectPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), - request_object_id=d.get("request_object_id", None), - request_object_type=d.get("request_object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlRequest), request_object_id=d.get('request_object_id', None), request_object_type=d.get('request_object_type', None)) + + @dataclass @@ -1871,37 +1664,37 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() + + @dataclass class UpdateRuleSetRequest: name: str """Name of the rule set.""" - + rule_set: RuleSetUpdateRequest - + def as_dict(self) -> dict: """Serializes the UpdateRuleSetRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.rule_set: - body["rule_set"] = self.rule_set.as_dict() + if self.name is not None: body['name'] = self.name + if self.rule_set: body['rule_set'] = self.rule_set.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.rule_set: - body["rule_set"] = self.rule_set + if self.name is not None: body['name'] = self.name + if self.rule_set: body['rule_set'] 
= self.rule_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRuleSetRequest: """Deserializes the UpdateRuleSetRequest from a dictionary.""" - return cls(name=d.get("name", None), rule_set=_from_dict(d, "rule_set", RuleSetUpdateRequest)) + return cls(name=d.get('name', None), rule_set=_from_dict(d, 'rule_set', RuleSetUpdateRequest)) + + @dataclass @@ -1912,215 +1705,177 @@ class UpdateWorkspaceAssignments: values will be ignored. Note that excluding this field, or providing unsupported values, will have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal.""" - + principal_id: Optional[int] = None """The ID of the user, service principal, or group.""" - + workspace_id: Optional[int] = None """The workspace ID.""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permissions: - body["permissions"] = [v.value for v in self.permissions] - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.permissions: body['permissions'] = [v.value for v in self.permissions] + if self.principal_id is not None: body['principal_id'] = self.principal_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes.""" body = {} - if self.permissions: - body["permissions"] = self.permissions - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.permissions: body['permissions'] = self.permissions + if self.principal_id is not None: body['principal_id'] = self.principal_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceAssignments: """Deserializes the UpdateWorkspaceAssignments from a dictionary.""" - return cls( - permissions=_repeated_enum(d, "permissions", WorkspacePermission), - principal_id=d.get("principal_id", None), - workspace_id=d.get("workspace_id", None), - ) + return cls(permissions=_repeated_enum(d, 'permissions', WorkspacePermission), principal_id=d.get('principal_id', None), workspace_id=d.get('workspace_id', None)) + + @dataclass class User: active: Optional[bool] = None """If this user is active""" - + display_name: Optional[str] = None """String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation""" - + emails: Optional[List[ComplexValue]] = None """All the emails associated with the Databricks user.""" - + entitlements: Optional[List[ComplexValue]] = None """Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements""" - + external_id: Optional[str] = None """External ID is not currently supported. 
It is reserved for future use.""" - + groups: Optional[List[ComplexValue]] = None - + id: Optional[str] = None """Databricks user ID.""" - + name: Optional[Name] = None - + roles: Optional[List[ComplexValue]] = None """Corresponds to AWS instance profile/arn role.""" - + schemas: Optional[List[UserSchema]] = None """The schema of the user.""" - + user_name: Optional[str] = None """Email address of the Databricks user.""" - + def as_dict(self) -> dict: """Serializes the User into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active is not None: - body["active"] = self.active - if self.display_name is not None: - body["displayName"] = self.display_name - if self.emails: - body["emails"] = [v.as_dict() for v in self.emails] - if self.entitlements: - body["entitlements"] = [v.as_dict() for v in self.entitlements] - if self.external_id is not None: - body["externalId"] = self.external_id - if self.groups: - body["groups"] = [v.as_dict() for v in self.groups] - if self.id is not None: - body["id"] = self.id - if self.name: - body["name"] = self.name.as_dict() - if self.roles: - body["roles"] = [v.as_dict() for v in self.roles] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] - if self.user_name is not None: - body["userName"] = self.user_name + if self.active is not None: body['active'] = self.active + if self.display_name is not None: body['displayName'] = self.display_name + if self.emails: body['emails'] = [v.as_dict() for v in self.emails] + if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements] + if self.external_id is not None: body['externalId'] = self.external_id + if self.groups: body['groups'] = [v.as_dict() for v in self.groups] + if self.id is not None: body['id'] = self.id + if self.name: body['name'] = self.name.as_dict() + if self.roles: body['roles'] = [v.as_dict() for v in self.roles] + if self.schemas: body['schemas'] = [v.value for v in self.schemas] + if self.user_name is not None: body['userName'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the User into a shallow dictionary of its immediate attributes.""" body = {} - if self.active is not None: - body["active"] = self.active - if self.display_name is not None: - body["displayName"] = self.display_name - if self.emails: - body["emails"] = self.emails - if self.entitlements: - body["entitlements"] = self.entitlements - if self.external_id is not None: - body["externalId"] = self.external_id - if self.groups: - body["groups"] = self.groups - if self.id is not None: - body["id"] = self.id - if self.name: - body["name"] = self.name - if self.roles: - body["roles"] = self.roles - if self.schemas: - body["schemas"] = self.schemas - if self.user_name is not None: - body["userName"] = self.user_name + if self.active is not None: body['active'] = self.active + if self.display_name is not None: body['displayName'] = self.display_name + if self.emails: body['emails'] = self.emails + if self.entitlements: body['entitlements'] = self.entitlements + if self.external_id is not None: body['externalId'] = self.external_id + if self.groups: body['groups'] = self.groups + if self.id is not None: body['id'] = self.id + if self.name: body['name'] = self.name + if self.roles: body['roles'] = self.roles + if self.schemas: body['schemas'] = self.schemas + if self.user_name is not None: body['userName'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> User: """Deserializes the User from a 
dictionary.""" - return cls( - active=d.get("active", None), - display_name=d.get("displayName", None), - emails=_repeated_dict(d, "emails", ComplexValue), - entitlements=_repeated_dict(d, "entitlements", ComplexValue), - external_id=d.get("externalId", None), - groups=_repeated_dict(d, "groups", ComplexValue), - id=d.get("id", None), - name=_from_dict(d, "name", Name), - roles=_repeated_dict(d, "roles", ComplexValue), - schemas=_repeated_enum(d, "schemas", UserSchema), - user_name=d.get("userName", None), - ) - + return cls(active=d.get('active', None), display_name=d.get('displayName', None), emails=_repeated_dict(d, 'emails', ComplexValue), entitlements=_repeated_dict(d, 'entitlements', ComplexValue), external_id=d.get('externalId', None), groups=_repeated_dict(d, 'groups', ComplexValue), id=d.get('id', None), name=_from_dict(d, 'name', Name), roles=_repeated_dict(d, 'roles', ComplexValue), schemas=_repeated_enum(d, 'schemas', UserSchema), user_name=d.get('userName', None)) + -class UserSchema(Enum): - URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER = "urn:ietf:params:scim:schemas:core:2.0:User" - URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER = ( - "urn:ietf:params:scim:schemas:extension:workspace:2.0:User" - ) +class UserSchema(Enum): + + + URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER = 'urn:ietf:params:scim:schemas:core:2.0:User' + URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER = 'urn:ietf:params:scim:schemas:extension:workspace:2.0:User' class WorkspacePermission(Enum): - - ADMIN = "ADMIN" - UNKNOWN = "UNKNOWN" - USER = "USER" - + + + ADMIN = 'ADMIN' + UNKNOWN = 'UNKNOWN' + USER = 'USER' @dataclass class WorkspacePermissions: permissions: Optional[List[PermissionOutput]] = None """Array of permissions defined for a workspace.""" - + def as_dict(self) -> dict: """Serializes the WorkspacePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permissions: - body["permissions"] = [v.as_dict() for v in self.permissions] + if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions] return body def as_shallow_dict(self) -> dict: """Serializes the WorkspacePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.permissions: - body["permissions"] = self.permissions + if self.permissions: body['permissions'] = self.permissions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspacePermissions: """Deserializes the WorkspacePermissions from a dictionary.""" - return cls(permissions=_repeated_dict(d, "permissions", PermissionOutput)) + return cls(permissions=_repeated_dict(d, 'permissions', PermissionOutput)) + + + + class AccessControlAPI: """Rule based Access Control for Databricks Resources.""" - + def __init__(self, api_client): self._api = api_client + - def check_policy( - self, - actor: Actor, - permission: str, - resource: str, - consistency_token: ConsistencyToken, - authz_identity: RequestAuthzIdentity, - *, - resource_info: Optional[ResourceInfo] = None, - ) -> CheckPolicyResponse: - """Check access policy to a resource. + + + + + + + def check_policy(self + , actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity + , * + , resource_info: Optional[ResourceInfo] = None) -> CheckPolicyResponse: + """Check access policy to a resource. 
+ :param actor: :class:`Actor` :param permission: str :param resource: str @@ -2129,80 +1884,87 @@ def check_policy( :param consistency_token: :class:`ConsistencyToken` :param authz_identity: :class:`RequestAuthzIdentity` :param resource_info: :class:`ResourceInfo` (optional) - + :returns: :class:`CheckPolicyResponse` """ - + query = {} - if actor is not None: - query["actor"] = actor.as_dict() - if authz_identity is not None: - query["authz_identity"] = authz_identity.value - if consistency_token is not None: - query["consistency_token"] = consistency_token.as_dict() - if permission is not None: - query["permission"] = permission - if resource is not None: - query["resource"] = resource - if resource_info is not None: - query["resource_info"] = resource_info.as_dict() - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/access-control/check-policy-v2", query=query, headers=headers) + if actor is not None: query['actor'] = actor.as_dict() + if authz_identity is not None: query['authz_identity'] = authz_identity.value + if consistency_token is not None: query['consistency_token'] = consistency_token.as_dict() + if permission is not None: query['permission'] = permission + if resource is not None: query['resource'] = resource + if resource_info is not None: query['resource_info'] = resource_info.as_dict() + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/access-control/check-policy-v2', query=query + + , headers=headers + ) return CheckPolicyResponse.from_dict(res) - + + class AccountAccessControlAPI: """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is called a rule set.""" - + def __init__(self, api_client): self._api = api_client + - def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse: - """Get assignable roles for a resource. + + + + + + + def get_assignable_roles_for_resource(self + , resource: str + ) -> GetAssignableRolesForResourceResponse: + """Get assignable roles for a resource. + Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse` """ - + query = {} - if resource is not None: - query["resource"] = resource - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/assignable-roles", - query=query, - headers=headers, - ) + if resource is not None: query['resource'] = resource + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/assignable-roles', query=query + + , headers=headers + ) return GetAssignableRolesForResourceResponse.from_dict(res) - def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: - """Get a rule set. + + + + def get_rule_set(self + , name: str, etag: str + ) -> RuleSetResponse: + """Get a rule set. 
+ Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on that resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account. `name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -2215,108 +1977,116 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: modify -> write pattern to perform rule set updates in order to avoid race conditions; that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoding a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - if name is not None: - query["name"] = name - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + if name is not None: query['name'] = name + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets', query=query + + , headers=headers + ) return RuleSetResponse.from_dict(res) - def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse: - """Update a rule set. + + + + def update_rule_set(self , name: str, rule_set: RuleSetUpdateRequest ) -> RuleSetResponse: """Update a rule set. + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set. :param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse` """ body = {} - if name is not None: - body["name"] = name - if rule_set is not None: - body["rule_set"] = rule_set.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", - f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets", - body=body, - headers=headers, - ) + if name is not None: body['name'] = name + if rule_set is not None: body['rule_set'] = rule_set.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets', body=body + + , headers=headers + ) return RuleSetResponse.from_dict(res) - + + class AccountAccessControlProxyAPI: """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is called a rule set. A workspace must belong to an account for these APIs to work.""" - + def __init__(self, api_client): self._api = api_client + - def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse: - """Get assignable roles for a resource.
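A short usage sketch for the proxy variant, assuming it is reachable from a WorkspaceClient `w` as `w.account_access_control_proxy` and that the response carries a `roles` list of the `Role` dataclass above; the resource string is a placeholder:

    # Assumed sketch: list grantable roles on the account resource through the
    # workspace-level proxy.
    resp = w.account_access_control_proxy.get_assignable_roles_for_resource(
        resource="accounts/<account-id>"
    )
    for role in resp.roles or []:
        print(role.name)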
+ + + + + + + def get_assignable_roles_for_resource(self , resource: str ) -> GetAssignableRolesForResourceResponse: """Get assignable roles for a resource. + Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse` """ - + query = {} - if resource is not None: - query["resource"] = resource - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/preview/accounts/access-control/assignable-roles", query=query, headers=headers - ) + if resource is not None: query['resource'] = resource + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/preview/accounts/access-control/assignable-roles', query=query + + , headers=headers + ) return GetAssignableRolesForResourceResponse.from_dict(res) - def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: - """Get a rule set. + + + + def get_rule_set(self , name: str, etag: str ) -> RuleSetResponse: """Get a rule set. + Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on that resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account. `name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -2329,86 +2099,89 @@ def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: modify -> write pattern to perform rule set updates in order to avoid race conditions; that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoding a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - if name is not None: - query["name"] = name - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/preview/accounts/access-control/rule-sets", query=query, headers=headers) + if etag is not None: query['etag'] = etag + if name is not None: query['name'] = name + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/preview/accounts/access-control/rule-sets', query=query + + , headers=headers + ) return RuleSetResponse.from_dict(res) - def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse: - """Update a rule set. + + + + def update_rule_set(self , name: str, rule_set: RuleSetUpdateRequest ) -> RuleSetResponse: """Update a rule set. + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set.
:param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse` """ body = {} - if name is not None: - body["name"] = name - if rule_set is not None: - body["rule_set"] = rule_set.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", "/api/2.0/preview/accounts/access-control/rule-sets", body=body, headers=headers) + if name is not None: body['name'] = name + if rule_set is not None: body['rule_set'] = rule_set.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT','/api/2.0/preview/accounts/access-control/rule-sets', body=body + + , headers=headers + ) return RuleSetResponse.from_dict(res) - + + class AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks account identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ) -> Group: - """Create a new group. + - Creates a group in the Databricks account with a unique name, using the supplied group details. + + + + + def create(self + + , * + , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None) -> Group: + """Create a new group. + + Creates a group in the Databricks account with a unique name, using the supplied group details. + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2421,86 +2194,89 @@ def create( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. 
- + :returns: :class:`Group` """ body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", body=body, headers=headers - ) + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if id is not None: body['id'] = id + if members is not None: body['members'] = [v.as_dict() for v in members] + if meta is not None: body['meta'] = meta.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups', body=body + + , headers=headers + ) return Group.from_dict(res) - def delete(self, id: str): - """Delete a group. + + + + def delete(self + , id: str + ): + """Delete a group. + Deletes a group from the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) + + + - def get(self, id: str) -> Group: + def get(self + , id: str + ) -> Group: """Get group details. - + Gets the information for a specific group in the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - + :returns: :class:`Group` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}' + + , headers=headers + ) return Group.from_dict(res) - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[Group]: - """List group details. + + + + def list(self + + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[Group]: + """List group details. + Gets all details of the groups associated with the Databricks account. 
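A consumption sketch, assuming an AccountClient named `a`; the iterator pages through SCIM results itself, so callers only supply the optional filter and sort arguments described next:

    # Sketch: iterate account groups whose display name contains "eng".
    # The filter string follows the SCIM syntax described in :param filter:.
    for g in a.groups.list(filter='displayName co "eng"', sort_by="displayName"):
        print(g.id, g.display_name)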
- + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -2512,7 +2288,7 @@ def list( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -2520,92 +2296,85 @@ def list( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`Group` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + # deduplicate items that may have been added during iteration seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 100 + query['startIndex'] =1 + if "count" not in query: query['count'] = 10000 while True: - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", query=query, headers=headers - ) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield Group.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update group details. + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups', query=query + + , headers=headers + ) + if 'Resources' in json: + for v in json['Resources']: + i = v['id'] + if i in seen: + continue + seen.add(i) + yield Group.from_dict(v) + if 'Resources' not in json or not json['Resources']: + return + query['startIndex'] += len(json['Resources']) + - Partially updates the details of a group. + + + + def patch(self + , id: str + , * + , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Update group details. + + Partially updates the details of a group. + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
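A sketch of such a partial update, assuming `Patch`, `PatchOp`, and `PatchSchema` are the SCIM helper types defined earlier in this module and `a` is an AccountClient; the group id and new name are placeholders:

    # Assumed sketch: SCIM PATCH that renames a group in place.
    a.groups.patch(
        id="<group-id>",
        operations=[Patch(op=PatchOp.REPLACE, path="displayName", value="new-name")],
        schemas=[PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
    )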
- - + + """ body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers - ) - - def update( - self, - id: str, - *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ): - """Replace a group. + if operations is not None: body['Operations'] = [v.as_dict() for v in operations] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}', body=body + + , headers=headers + ) + - Updates the details of a group by replacing the entire group entity. + + + + def update(self + , id: str + , * + , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None): + """Replace a group. + + Updates the details of a group by replacing the entire group entity. + :param id: str Databricks group ID :param display_name: str (optional) @@ -2613,7 +2382,7 @@ def update( :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2624,60 +2393,54 @@ def update( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. 
- - + + """ body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers) - + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if members is not None: body['members'] = [v.as_dict() for v in members] + if meta is not None: body['meta'] = meta.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}', body=body + + , headers=headers + ) + + + class AccountServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[ServicePrincipalSchema]] = None, - ) -> ServicePrincipal: - """Create a service principal. + - Creates a new service principal in the Databricks account. + + + + + def create(self + + , * + , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None) -> ServicePrincipal: + """Create a service principal. + + Creates a new service principal in the Databricks account. + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -2687,7 +2450,7 @@ def create( :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. 
- + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2697,90 +2460,89 @@ def create( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - + :returns: :class:`ServicePrincipal` """ body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", body=body, headers=headers - ) + if active is not None: body['active'] = active + if application_id is not None: body['applicationId'] = application_id + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if id is not None: body['id'] = id + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals', body=body + + , headers=headers + ) return ServicePrincipal.from_dict(res) - def delete(self, id: str): - """Delete a service principal. + + + + def delete(self + , id: str + ): + """Delete a service principal. + Delete a single service principal in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}' + + , headers=headers + ) + - self._api.do( - "DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers - ) + + + - def get(self, id: str) -> ServicePrincipal: + def get(self + , id: str + ) -> ServicePrincipal: """Get service principal details. - + Gets the details for a single service principal define in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account. 
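# The create/delete lifecycle for an account service principal, per the
# methods above; the display name is illustrative.
from databricks.sdk import AccountClient

a = AccountClient()
sp = a.service_principals.create(display_name="ci-cd-runner")
print(sp.id, sp.application_id)
a.service_principals.delete(id=sp.id)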
- + :returns: :class:`ServicePrincipal` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}' + + , headers=headers + ) return ServicePrincipal.from_dict(res) - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[ServicePrincipal]: - """List service principals. + + + + def list(self + + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[ServicePrincipal]: + """List service principals. + Gets the set of service principals associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -2792,7 +2554,7 @@ def list( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -2800,100 +2562,87 @@ def list( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. 
- + :returns: Iterator over :class:`ServicePrincipal` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + # deduplicate items that may have been added during iteration seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 100 + query['startIndex'] =1 + if "count" not in query: query['count'] = 10000 while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", - query=query, - headers=headers, - ) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield ServicePrincipal.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update service principal details. + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals', query=query + + , headers=headers + ) + if 'Resources' in json: + for v in json['Resources']: + i = v['id'] + if i in seen: + continue + seen.add(i) + yield ServicePrincipal.from_dict(v) + if 'Resources' not in json or not json['Resources']: + return + query['startIndex'] += len(json['Resources']) + - Partially updates the details of a single service principal in the Databricks account. + + + + def patch(self + , id: str + , * + , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Update service principal details. + + Partially updates the details of a single service principal in the Databricks account. + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
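# Server-side sorting on list(): sort_order takes the ListSortOrder enum,
# which the query serializer above converts to its string value.
from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()
for sp in a.service_principals.list(sort_by="displayName",
                                    sort_order=iam.ListSortOrder.ASCENDING):
    print(sp.display_name)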
- - + + """ body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", - body=body, - headers=headers, - ) - - def update( - self, - id: str, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[ServicePrincipalSchema]] = None, - ): - """Replace service principal. + if operations is not None: body['Operations'] = [v.as_dict() for v in operations] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}', body=body + + , headers=headers + ) + - Updates the details of a single service principal. + + + + def update(self + , id: str + , * + , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None): + """Replace service principal. + + Updates the details of a single service principal. + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -2905,7 +2654,7 @@ def update( :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2913,41 +2662,31 @@ def update( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. 
- - + + """ body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", - body=body, - headers=headers, - ) - + if active is not None: body['active'] = active + if application_id is not None: body['applicationId'] = application_id + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}', body=body + + , headers=headers + ) + + + class AccountUsersAPI: """User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in Databricks account and give them the proper level of @@ -2955,43 +2694,40 @@ class AccountUsersAPI: terminate the user in your identity provider and that user’s account will also be removed from Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[UserSchema]] = None, - user_name: Optional[str] = None, - ) -> User: - """Create a new user. + + + + + + + def create(self + + , * + , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None) -> User: + """Create a new user. + Creates a new user in the Databricks account. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. 
For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -3005,74 +2741,67 @@ def create( The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` """ body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - if user_name is not None: - body["userName"] = user_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", body=body, headers=headers - ) + if active is not None: body['active'] = active + if display_name is not None: body['displayName'] = display_name + if emails is not None: body['emails'] = [v.as_dict() for v in emails] + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if id is not None: body['id'] = id + if name is not None: body['name'] = name.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + if user_name is not None: body['userName'] = user_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users', body=body + + , headers=headers + ) return User.from_dict(res) - def delete(self, id: str): - """Delete a user. - + + + + + def delete(self + , id: str + ): + """Delete a user. + Deletes a user. Deleting a user from a Databricks account also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks account. 
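# Creating an account-level user, per the create() above; the email address
# is a placeholder.
from databricks.sdk import AccountClient

a = AccountClient()
user = a.users.create(user_name="jane.doe@example.com", display_name="Jane Doe")
print(user.id)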
- - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", headers=headers) - - def get( - self, - id: str, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[GetSortOrder] = None, - start_index: Optional[int] = None, - ) -> User: - """Get user details. + + + + def get(self + , id: str + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[GetSortOrder] = None, start_index: Optional[int] = None) -> User: + """Get user details. + Gets information for a specific user in Databricks account. - + :param id: str Unique ID for a user in the Databricks account. :param attributes: str (optional) @@ -3086,7 +2815,7 @@ def get( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -3095,49 +2824,38 @@ def get( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", query=query, headers=headers - ) + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}', query=query + + , headers=headers + ) return User.from_dict(res) - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[User]: - """List users. 
+ + + + def list(self + + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[User]: + """List users. + Gets details for all the users associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -3149,7 +2867,7 @@ def list( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -3158,94 +2876,85 @@ def list( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`User` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + # deduplicate items that may have been added during iteration seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 100 + query['startIndex'] =1 + if "count" not in query: query['count'] = 10000 while True: - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", query=query, headers=headers - ) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield User.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update user details. + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users', query=query + + , headers=headers + ) + if 'Resources' in json: + for v in json['Resources']: + i = v['id'] + if i in seen: + continue + seen.add(i) + yield User.from_dict(v) + if 'Resources' not in json or not json['Resources']: + return + query['startIndex'] += len(json['Resources']) + - Partially updates a user resource by applying the supplied operations on specific user attributes. + + + + def patch(self + , id: str + , * + , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Update user details. 
+ + Partially updates a user resource by applying the supplied operations on specific user attributes. + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + """ body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers - ) - - def update( - self, - id: str, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[UserSchema]] = None, - user_name: Optional[str] = None, - ): - """Replace a user. + if operations is not None: body['Operations'] = [v.as_dict() for v in operations] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}', body=body + + , headers=headers + ) + - Replaces a user's information with the data supplied in request. + + + + def update(self + , id: str + , * + , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None): + """Replace a user. + + Replaces a user's information with the data supplied in request. + :param id: str Databricks user ID. :param active: bool (optional) @@ -3254,13 +2963,13 @@ def update( String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -3272,93 +2981,94 @@ def update( The schema of the user. :param user_name: str (optional) Email address of the Databricks user. 
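# Deactivating a user with patch() rather than a full replace; a common SCIM
# pattern, sketched with a placeholder id.
from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()
a.users.patch(
    id="123",
    operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value=False)],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)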
- - + + """ body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - if user_name is not None: - body["userName"] = user_name - headers = { - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers) - + if active is not None: body['active'] = active + if display_name is not None: body['displayName'] = display_name + if emails is not None: body['emails'] = [v.as_dict() for v in emails] + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if name is not None: body['name'] = name.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + if user_name is not None: body['userName'] = user_name + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}', body=body + + , headers=headers + ) + + + class CurrentUserAPI: """This API allows retrieving information about currently authenticated user or service principal.""" - + def __init__(self, api_client): self._api = api_client + + + + + + + + def me(self) -> User: """Get current user info. - + Get details about the current method caller's identity. - + :returns: :class:`User` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/preview/scim/v2/Me", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/preview/scim/v2/Me' + , headers=headers + ) return User.from_dict(res) - + + class GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks workspace identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ) -> Group: - """Create a new group. + - Creates a group in the Databricks workspace with a unique name, using the supplied group details. 
+ + + + + def create(self + + , * + , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None) -> Group: + """Create a new group. + + Creates a group in the Databricks workspace with a unique name, using the supplied group details. + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -3371,84 +3081,89 @@ def create( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - + :returns: :class:`Group` """ body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/scim/v2/Groups", body=body, headers=headers) + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if id is not None: body['id'] = id + if members is not None: body['members'] = [v.as_dict() for v in members] + if meta is not None: body['meta'] = meta.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/preview/scim/v2/Groups', body=body + + , headers=headers + ) return Group.from_dict(res) - def delete(self, id: str): - """Delete a group. + + + + def delete(self + , id: str + ): + """Delete a group. + Deletes a group from the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/preview/scim/v2/Groups/{id}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) + + + - def get(self, id: str) -> Group: + def get(self + , id: str + ) -> Group: """Get group details. - + Gets the information for a specific group in the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. 
- + :returns: :class:`Group` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/preview/scim/v2/Groups/{id}' + + , headers=headers + ) return Group.from_dict(res) - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[Group]: - """List group details. + + + + def list(self + + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[Group]: + """List group details. + Gets all details of the groups associated with the Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -3460,7 +3175,7 @@ def list( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -3468,88 +3183,85 @@ def list( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`Group` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + # deduplicate items that may have been added during iteration seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 100 + query['startIndex'] =1 + if "count" not in query: query['count'] = 10000 while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield Group.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update group details. 
+ json = self._api.do('GET','/api/2.0/preview/scim/v2/Groups', query=query + + , headers=headers + ) + if 'Resources' in json: + for v in json['Resources']: + i = v['id'] + if i in seen: + continue + seen.add(i) + yield Group.from_dict(v) + if 'Resources' not in json or not json['Resources']: + return + query['startIndex'] += len(json['Resources']) + - Partially updates the details of a group. + + + + def patch(self + , id: str + , * + , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Update group details. + + Partially updates the details of a group. + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + """ body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) - - def update( - self, - id: str, - *, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - members: Optional[List[ComplexValue]] = None, - meta: Optional[ResourceMeta] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[GroupSchema]] = None, - ): - """Replace a group. + if operations is not None: body['Operations'] = [v.as_dict() for v in operations] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/preview/scim/v2/Groups/{id}', body=body + + , headers=headers + ) + - Updates the details of a group by replacing the entire group entity. + + + + def update(self + , id: str + , * + , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None): + """Replace a group. + + Updates the details of a group by replacing the entire group entity. + :param id: str Databricks group ID :param display_name: str (optional) @@ -3557,7 +3269,7 @@ def update( :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -3568,49 +3280,48 @@ def update( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. 
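# The workspace GroupsAPI mirrors the account-level surface; a sketch that
# creates a group, adds a member by id, and cleans up. The member id "456"
# is a placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
group = w.groups.create(display_name="sdk-example-group")
w.groups.patch(
    id=group.id,
    operations=[iam.Patch(op=iam.PatchOp.ADD, value={"members": [{"value": "456"}]})],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)
w.groups.delete(id=group.id)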
- - + + """ body = {} - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if members is not None: - body["members"] = [v.as_dict() for v in members] - if meta is not None: - body["meta"] = meta.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) - + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if members is not None: body['members'] = [v.as_dict() for v in members] + if meta is not None: body['meta'] = meta.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/preview/scim/v2/Groups/{id}', body=body + + , headers=headers + ) + + + class PermissionMigrationAPI: """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx""" - + def __init__(self, api_client): self._api = api_client + - def migrate_permissions( - self, - workspace_id: int, - from_workspace_group_name: str, - to_account_group_name: str, - *, - size: Optional[int] = None, - ) -> MigratePermissionsResponse: - """Migrate Permissions. + + + + + + + def migrate_permissions(self + , workspace_id: int, from_workspace_group_name: str, to_account_group_name: str + , * + , size: Optional[int] = None) -> MigratePermissionsResponse: + """Migrate Permissions. + :param workspace_id: int WorkspaceId of the associated workspace where the permission migration will occur. :param from_workspace_group_name: str @@ -3619,27 +3330,24 @@ def migrate_permissions( The name of the account group that permissions will be migrated to. :param size: int (optional) The maximum number of permissions that will be migrated. 
- + :returns: :class:`MigratePermissionsResponse` """ body = {} - if from_workspace_group_name is not None: - body["from_workspace_group_name"] = from_workspace_group_name - if size is not None: - body["size"] = size - if to_account_group_name is not None: - body["to_account_group_name"] = to_account_group_name - if workspace_id is not None: - body["workspace_id"] = workspace_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/permissionmigration", body=body, headers=headers) + if from_workspace_group_name is not None: body['from_workspace_group_name'] = from_workspace_group_name + if size is not None: body['size'] = size + if to_account_group_name is not None: body['to_account_group_name'] = to_account_group_name + if workspace_id is not None: body['workspace_id'] = workspace_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/permissionmigration', body=body + + , headers=headers + ) return MigratePermissionsResponse.from_dict(res) - + + class PermissionsAPI: """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users can manage @@ -3661,71 +3369,88 @@ class PermissionsAPI: the required permissions for specific actions or abilities and other important information, see [Access Control]. Note that to manage access control on service principals, use **[Account Access Control Proxy](:service:accountaccesscontrolproxy)**. - + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html""" - + def __init__(self, api_client): self._api = api_client + - def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions: - """Get object permissions. + + + + + + + def get(self + , request_object_type: str, request_object_id: str + ) -> ObjectPermissions: + """Get object permissions. + Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. - + :returns: :class:`ObjectPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/{request_object_type}/{request_object_id}' + + , headers=headers + ) return ObjectPermissions.from_dict(res) - def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse: - """Get object permission levels. + + + + def get_permission_levels(self + , request_object_type: str, request_object_id: str + ) -> GetPermissionLevelsResponse: + """Get object permission levels. + Gets the permission levels that a user can have on an object. - + :param request_object_type: str The type of the request object. 
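# A sketch of the ucx permission-migration call above; the workspace id and
# group names are placeholders, and reading the migrated count back assumes
# the permissions_migrated field on MigratePermissionsResponse.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.permission_migration.migrate_permissions(
    workspace_id=1234567890,
    from_workspace_group_name="analysts",
    to_account_group_name="analysts-account",
    size=1000,
)
print(resp.permissions_migrated)  # count reported by the server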
Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str - + :returns: :class:`GetPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels' + + , headers=headers + ) return GetPermissionLevelsResponse.from_dict(res) - def set( - self, - request_object_type: str, - request_object_id: str, - *, - access_control_list: Optional[List[AccessControlRequest]] = None, - ) -> ObjectPermissions: - """Set object permissions. + + + + def set(self + , request_object_type: str, request_object_id: str + , * + , access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions: + """Set object permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -3733,34 +3458,32 @@ def set( :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/{request_object_type}/{request_object_id}', body=body + + , headers=headers + ) return ObjectPermissions.from_dict(res) - def update( - self, - request_object_type: str, - request_object_id: str, - *, - access_control_list: Optional[List[AccessControlRequest]] = None, - ) -> ObjectPermissions: - """Update object permissions. + + + + def update(self + , request_object_type: str, request_object_id: str + , * + , access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions: + """Update object permissions. + Updates the permissions on an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -3768,50 +3491,47 @@ def update( :param request_object_id: str The id of the request object. 
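# Inspecting, then replacing, permissions on an object. As documented above,
# set() overwrites all direct grants, so include every grant that should
# remain; the cluster id is a placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
levels = w.permissions.get_permission_levels(
    request_object_type="clusters", request_object_id="0601-182128-dcbte59m")
w.permissions.set(
    request_object_type="clusters",
    request_object_id="0601-182128-dcbte59m",
    access_control_list=[
        iam.AccessControlRequest(group_name="admins",
                                 permission_level=iam.PermissionLevel.CAN_MANAGE)
    ],
)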
:param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/{request_object_type}/{request_object_id}', body=body + + , headers=headers + ) return ObjectPermissions.from_dict(res) - + + class ServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[ServicePrincipalSchema]] = None, - ) -> ServicePrincipal: - """Create a service principal. + - Creates a new service principal in the Databricks workspace. + + + + + def create(self + + , * + , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None) -> ServicePrincipal: + """Create a service principal. + + Creates a new service principal in the Databricks workspace. + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -3821,7 +3541,7 @@ def create( :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -3831,84 +3551,89 @@ def create( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. 
- + :returns: :class:`ServicePrincipal` """ body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/scim/v2/ServicePrincipals", body=body, headers=headers) + if active is not None: body['active'] = active + if application_id is not None: body['applicationId'] = application_id + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if id is not None: body['id'] = id + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/preview/scim/v2/ServicePrincipals', body=body + + , headers=headers + ) return ServicePrincipal.from_dict(res) - def delete(self, id: str): - """Delete a service principal. + + + + def delete(self + , id: str + ): + """Delete a service principal. + Delete a single service principal in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers) + + + - def get(self, id: str) -> ServicePrincipal: + def get(self + , id: str + ) -> ServicePrincipal: """Get service principal details. - + Gets the details for a single service principal define in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. - + :returns: :class:`ServicePrincipal` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}' + + , headers=headers + ) return ServicePrincipal.from_dict(res) - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[ServicePrincipal]: - """List service principals. 
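# Workspace-level service principals share the account-level shape; a short
# create/get round-trip with an illustrative display name.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
sp = w.service_principals.create(display_name="terraform-runner")
fetched = w.service_principals.get(id=sp.id)
assert fetched.application_id == sp.application_id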
+ + + + def list(self + + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[ServicePrincipal]: + """List service principals. + Gets the set of service principals associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -3920,7 +3645,7 @@ def list( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -3928,90 +3653,87 @@ def list( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`ServicePrincipal` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + # deduplicate items that may have been added during iteration seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 100 + query['startIndex'] =1 + if "count" not in query: query['count'] = 10000 while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/ServicePrincipals", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield ServicePrincipal.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update service principal details. + json = self._api.do('GET','/api/2.0/preview/scim/v2/ServicePrincipals', query=query + + , headers=headers + ) + if 'Resources' in json: + for v in json['Resources']: + i = v['id'] + if i in seen: + continue + seen.add(i) + yield ServicePrincipal.from_dict(v) + if 'Resources' not in json or not json['Resources']: + return + query['startIndex'] += len(json['Resources']) + - Partially updates the details of a single service principal in the Databricks workspace. + + + + def patch(self + , id: str + , * + , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Update service principal details. 
+ + Partially updates the details of a single service principal in the Databricks workspace. + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + """ body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) - - def update( - self, - id: str, - *, - active: Optional[bool] = None, - application_id: Optional[str] = None, - display_name: Optional[str] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[ServicePrincipalSchema]] = None, - ): - """Replace service principal. + if operations is not None: body['Operations'] = [v.as_dict() for v in operations] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', body=body + + , headers=headers + ) + - Updates the details of a single service principal. + + + + def update(self + , id: str + , * + , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None): + """Replace service principal. + + Updates the details of a single service principal. + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -4023,7 +3745,7 @@ def update( :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -4031,36 +3753,31 @@ def update( Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. 
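For contrast with `update`, which is a full PUT-style replace, a minimal sketch of a partial SCIM patch follows; it assumes a configured WorkspaceClient, the id is a placeholder, and the enum spelling follows this module's generated names:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()
    w.service_principals.patch(
        id="12345",
        schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES2_0_PATCH_OP],
        operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value="false")],
    )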
- - + + """ body = {} - if active is not None: - body["active"] = active - if application_id is not None: - body["applicationId"] = application_id - if display_name is not None: - body["displayName"] = display_name - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) - + if active is not None: body['active'] = active + if application_id is not None: body['applicationId'] = application_id + if display_name is not None: body['displayName'] = display_name + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', body=body + + , headers=headers + ) + + + class UsersAPI: """User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in Databricks workspace and give them the proper level of @@ -4068,43 +3785,40 @@ class UsersAPI: terminate the user in your identity provider and that user’s account will also be removed from Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - id: Optional[str] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[UserSchema]] = None, - user_name: Optional[str] = None, - ) -> User: - """Create a new user. + + + + + + + def create(self + + , * + , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None) -> User: + """Create a new user. + Creates a new user in the Databricks workspace. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. For example `John Smith`. 
This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -4118,72 +3832,67 @@ def create( The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` """ body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if id is not None: - body["id"] = id - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - if user_name is not None: - body["userName"] = user_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/scim/v2/Users", body=body, headers=headers) + if active is not None: body['active'] = active + if display_name is not None: body['displayName'] = display_name + if emails is not None: body['emails'] = [v.as_dict() for v in emails] + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if id is not None: body['id'] = id + if name is not None: body['name'] = name.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + if user_name is not None: body['userName'] = user_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/preview/scim/v2/Users', body=body + + , headers=headers + ) return User.from_dict(res) - def delete(self, id: str): - """Delete a user. + + + + def delete(self + , id: str + ): + """Delete a user. + Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks workspace. 
- - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/preview/scim/v2/Users/{id}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Users/{id}", headers=headers) - - def get( - self, - id: str, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[GetSortOrder] = None, - start_index: Optional[int] = None, - ) -> User: - """Get user details. + + + + def get(self + , id: str + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[GetSortOrder] = None, start_index: Optional[int] = None) -> User: + """Get user details. + Gets information for a specific user in Databricks workspace. - + :param id: str Unique ID for a user in the Databricks workspace. :param attributes: str (optional) @@ -4197,7 +3906,7 @@ def get( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -4206,77 +3915,76 @@ def get( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Users/{id}", query=query, headers=headers) + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/preview/scim/v2/Users/{id}', query=query + + , headers=headers + ) return User.from_dict(res) + + + + def get_permission_levels(self) -> GetPasswordPermissionLevelsResponse: """Get password permission levels. - + Gets the permission levels that a user can have on an object. 
- + :returns: :class:`GetPasswordPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/permissions/authorization/passwords/permissionLevels' + , headers=headers + ) return GetPasswordPermissionLevelsResponse.from_dict(res) + + + + def get_permissions(self) -> PasswordPermissions: """Get password permissions. - + Gets the permissions of all passwords. Passwords can inherit permissions from their root object. - + :returns: :class:`PasswordPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/permissions/authorization/passwords' + , headers=headers + ) return PasswordPermissions.from_dict(res) - def list( - self, - *, - attributes: Optional[str] = None, - count: Optional[int] = None, - excluded_attributes: Optional[str] = None, - filter: Optional[str] = None, - sort_by: Optional[str] = None, - sort_order: Optional[ListSortOrder] = None, - start_index: Optional[int] = None, - ) -> Iterator[User]: - """List users. + + + + def list(self + + , * + , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[User]: + """List users. + Gets details for all the users associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -4288,7 +3996,7 @@ def list( contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -4297,113 +4005,112 @@ def list( The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. 
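A minimal sketch of the filter and sort parameters described above, assuming a configured WorkspaceClient; the filter value is illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()
    for u in w.users.list(
        filter='userName co "@example.com"',
        attributes="id,userName",
        sort_by="userName",
        sort_order=iam.ListSortOrder.ASCENDING,
    ):
        print(u.id, u.user_name)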
- + :returns: Iterator over :class:`User` """ - + query = {} - if attributes is not None: - query["attributes"] = attributes - if count is not None: - query["count"] = count - if excluded_attributes is not None: - query["excludedAttributes"] = excluded_attributes - if filter is not None: - query["filter"] = filter - if sort_by is not None: - query["sortBy"] = sort_by - if sort_order is not None: - query["sortOrder"] = sort_order.value - if start_index is not None: - query["startIndex"] = start_index - headers = { - "Accept": "application/json", - } - + if attributes is not None: query['attributes'] = attributes + if count is not None: query['count'] = count + if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes + if filter is not None: query['filter'] = filter + if sort_by is not None: query['sortBy'] = sort_by + if sort_order is not None: query['sortOrder'] = sort_order.value + if start_index is not None: query['startIndex'] = start_index + headers = {'Accept': 'application/json',} + + # deduplicate items that may have been added during iteration seen = set() - query["startIndex"] = 1 - if "count" not in query: - query["count"] = 100 + query['startIndex'] =1 + if "count" not in query: query['count'] = 10000 while True: - json = self._api.do("GET", "/api/2.0/preview/scim/v2/Users", query=query, headers=headers) - if "Resources" in json: - for v in json["Resources"]: - i = v["id"] - if i in seen: - continue - seen.add(i) - yield User.from_dict(v) - if "Resources" not in json or not json["Resources"]: - return - query["startIndex"] += len(json["Resources"]) - - def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): - """Update user details. + json = self._api.do('GET','/api/2.0/preview/scim/v2/Users', query=query + + , headers=headers + ) + if 'Resources' in json: + for v in json['Resources']: + i = v['id'] + if i in seen: + continue + seen.add(i) + yield User.from_dict(v) + if 'Resources' not in json or not json['Resources']: + return + query['startIndex'] += len(json['Resources']) + - Partially updates a user resource by applying the supplied operations on specific user attributes. + + + + def patch(self + , id: str + , * + , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + """Update user details. + + Partially updates a user resource by applying the supplied operations on specific user attributes. + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + """ body = {} - if operations is not None: - body["Operations"] = [v.as_dict() for v in operations] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - headers = { - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) - - def set_permissions( - self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None - ) -> PasswordPermissions: - """Set password permissions. 
+ if operations is not None: body['Operations'] = [v.as_dict() for v in operations] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/preview/scim/v2/Users/{id}', body=body + + , headers=headers + ) + + + + + + def set_permissions(self + + , * + , access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions: + """Set password permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT','/api/2.0/permissions/authorization/passwords', body=body + + , headers=headers + ) return PasswordPermissions.from_dict(res) - def update( - self, - id: str, - *, - active: Optional[bool] = None, - display_name: Optional[str] = None, - emails: Optional[List[ComplexValue]] = None, - entitlements: Optional[List[ComplexValue]] = None, - external_id: Optional[str] = None, - groups: Optional[List[ComplexValue]] = None, - name: Optional[Name] = None, - roles: Optional[List[ComplexValue]] = None, - schemas: Optional[List[UserSchema]] = None, - user_name: Optional[str] = None, - ): - """Replace a user. + + + + def update(self + , id: str + , * + , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None): + """Replace a user. + Replaces a user's information with the data supplied in request. - + :param id: str Databricks user ID. :param active: bool (optional) @@ -4412,13 +4119,13 @@ def update( String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -4430,143 +4137,160 @@ def update( The schema of the user. 
:param user_name: str (optional) Email address of the Databricks user. - - + + """ body = {} - if active is not None: - body["active"] = active - if display_name is not None: - body["displayName"] = display_name - if emails is not None: - body["emails"] = [v.as_dict() for v in emails] - if entitlements is not None: - body["entitlements"] = [v.as_dict() for v in entitlements] - if external_id is not None: - body["externalId"] = external_id - if groups is not None: - body["groups"] = [v.as_dict() for v in groups] - if name is not None: - body["name"] = name.as_dict() - if roles is not None: - body["roles"] = [v.as_dict() for v in roles] - if schemas is not None: - body["schemas"] = [v.value for v in schemas] - if user_name is not None: - body["userName"] = user_name - headers = { - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) - - def update_permissions( - self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None - ) -> PasswordPermissions: - """Update password permissions. + if active is not None: body['active'] = active + if display_name is not None: body['displayName'] = display_name + if emails is not None: body['emails'] = [v.as_dict() for v in emails] + if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] + if external_id is not None: body['externalId'] = external_id + if groups is not None: body['groups'] = [v.as_dict() for v in groups] + if name is not None: body['name'] = name.as_dict() + if roles is not None: body['roles'] = [v.as_dict() for v in roles] + if schemas is not None: body['schemas'] = [v.value for v in schemas] + if user_name is not None: body['userName'] = user_name + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/preview/scim/v2/Users/{id}', body=body + + , headers=headers + ) + - Updates the permissions on all passwords. Passwords can inherit permissions from their root object. + + + + def update_permissions(self + + , * + , access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions: + """Update password permissions. + + Updates the permissions on all passwords. Passwords can inherit permissions from their root object. + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/permissions/authorization/passwords', body=body + + , headers=headers + ) return PasswordPermissions.from_dict(res) - + + class WorkspaceAssignmentAPI: """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, workspace_id: int, principal_id: int): - """Delete permissions assignment. + + + + + + + def delete(self + , workspace_id: int, principal_id: int + ): + """Delete permissions assignment. 
+ Deletes the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", - headers=headers, - ) + + + - def get(self, workspace_id: int) -> WorkspacePermissions: + def get(self + , workspace_id: int + ) -> WorkspacePermissions: """List workspace permissions. - + Get an array of workspace permissions for the specified account and workspace. - + :param workspace_id: int The workspace ID. - + :returns: :class:`WorkspacePermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions' + + , headers=headers + ) return WorkspacePermissions.from_dict(res) - def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: - """Get permission assignments. + + + + def list(self + , workspace_id: int + ) -> Iterator[PermissionAssignment]: + """Get permission assignments. + Get the permission assignments for the specified Databricks account and Databricks workspace. - + :param workspace_id: int The workspace ID for the account. - + :returns: Iterator over :class:`PermissionAssignment` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments' + + , headers=headers + ) parsed = PermissionAssignments.from_dict(json).permission_assignments return parsed if parsed is not None else [] + - def update( - self, workspace_id: int, principal_id: int, *, permissions: Optional[List[WorkspacePermission]] = None - ) -> PermissionAssignment: - """Create or update permissions assignment. + + + + def update(self + , workspace_id: int, principal_id: int + , * + , permissions: Optional[List[WorkspacePermission]] = None) -> PermissionAssignment: + """Create or update permissions assignment. + Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID. :param principal_id: int @@ -4577,21 +4301,18 @@ def update( will be ignored. Note that excluding this field, or providing unsupported values, will have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal. 
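A minimal sketch of this account-level assignment, assuming an AccountClient configured with account credentials; both IDs are placeholders:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()
    a.workspace_assignment.update(
        workspace_id=1234567890,
        principal_id=987654321,
        permissions=[iam.WorkspacePermission.USER],
    )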
-
+
         :returns: :class:`PermissionAssignment`
         """
         body = {}
-        if permissions is not None:
-            body["permissions"] = [v.value for v in permissions]
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "PUT",
-            f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}",
-            body=body,
-            headers=headers,
-        )
+        if permissions is not None: body['permissions'] = [v.value for v in permissions]
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}', body=body
+
+        , headers=headers
+        )
         return PermissionAssignment.from_dict(res)
+
+
+
\ No newline at end of file
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 1cb0ac4a7..75aac8f51 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -1,104 +1,95 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 
 from __future__ import annotations
-
-import logging
-import random
-import time
 from dataclasses import dataclass
 from datetime import timedelta
 from enum import Enum
-from typing import Any, Callable, Dict, Iterator, List, Optional
+from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
+import time
+import random
+import logging
+import requests
+import threading
 
-from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated_dict
+from ..errors import OperationTimeout, OperationFailed
+from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter
+from ..oauth import Token
 
-_LOG = logging.getLogger("databricks.sdk")
+_LOG = logging.getLogger('databricks.sdk')
 
+from databricks.sdk.service import compute
+from databricks.sdk.service import compute
 from databricks.sdk.service import compute
 
 # all definitions in this file are in alphabetical order
-
 class AuthenticationMethod(Enum):
-
-    OAUTH = "OAUTH"
-    PAT = "PAT"
-
+
+
+    OAUTH = 'OAUTH'
+    PAT = 'PAT'
 
 @dataclass
 class BaseJob:
     created_time: Optional[int] = None
     """The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970
     UTC)."""
-
+
     creator_user_name: Optional[str] = None
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
-
+
     effective_budget_policy_id: Optional[str] = None
     """The id of the budget policy used by this job for cost attribution purposes. This may be set
     through (in order of precedence): 1. Budget admins through the account or workspace console 2.
     Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
     on accessible budget policies of the run_as identity on job creation or modification."""
-
+
     has_more: Optional[bool] = None
     """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown.
     They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2
     :method:jobs/list requests with `expand_tasks=true`."""
-
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
-
+
     settings: Optional[JobSettings] = None
     """Settings for this job and all of its runs.
These settings can be updated using the `resetJob` method.""" - + + trigger_state: Optional[TriggerStateProto] = None + """State of the trigger associated with the job.""" + def as_dict(self) -> dict: """Serializes the BaseJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_time is not None: - body["created_time"] = self.created_time - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.has_more is not None: - body["has_more"] = self.has_more - if self.job_id is not None: - body["job_id"] = self.job_id - if self.settings: - body["settings"] = self.settings.as_dict() + if self.created_time is not None: body['created_time'] = self.created_time + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_id is not None: body['job_id'] = self.job_id + if self.settings: body['settings'] = self.settings.as_dict() + if self.trigger_state: body['trigger_state'] = self.trigger_state.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the BaseJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_time is not None: - body["created_time"] = self.created_time - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.has_more is not None: - body["has_more"] = self.has_more - if self.job_id is not None: - body["job_id"] = self.job_id - if self.settings: - body["settings"] = self.settings + if self.created_time is not None: body['created_time'] = self.created_time + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_id is not None: body['job_id'] = self.job_id + if self.settings: body['settings'] = self.settings + if self.trigger_state: body['trigger_state'] = self.trigger_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BaseJob: """Deserializes the BaseJob from a dictionary.""" - return cls( - created_time=d.get("created_time", None), - creator_user_name=d.get("creator_user_name", None), - effective_budget_policy_id=d.get("effective_budget_policy_id", None), - has_more=d.get("has_more", None), - job_id=d.get("job_id", None), - settings=_from_dict(d, "settings", JobSettings), - ) + return cls(created_time=d.get('created_time', None), creator_user_name=d.get('creator_user_name', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), has_more=d.get('has_more', None), job_id=d.get('job_id', None), settings=_from_dict(d, 'settings', JobSettings), trigger_state=_from_dict(d, 'trigger_state', TriggerStateProto)) + + @dataclass @@ -109,27 +100,27 @@ class BaseRun: (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. 
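Given the retry semantics above, a hedged helper for telling a retried run from an original attempt; `run` stands for any BaseRun-shaped object and the helper is illustrative, not part of this diff:

    def is_retry(run) -> bool:
        # Original attempts carry their own run_id in original_attempt_run_id;
        # retries point back at the first attempt.
        return (run.original_attempt_run_id is not None
                and run.original_attempt_run_id != run.run_id)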
Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" - + cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + cluster_instance: Optional[ClusterInstance] = None """The cluster used for this run. If the run is specified to use a new cluster, this field is set once the Jobs service has requested a cluster for the run.""" - + cluster_spec: Optional[ClusterSpec] = None """A snapshot of the job’s cluster specification when this run was created.""" - + creator_user_name: Optional[str] = None """The creator user name. This field won’t be included in the response if the user has already been deleted.""" - + description: Optional[str] = None """Description of the run""" - + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -138,18 +129,18 @@ class BaseRun: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" - + execution_duration: Optional[int] = None """The time in milliseconds it took to execute the commands in the JAR or notebook until they completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -159,91 +150,91 @@ class BaseRun: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + has_more: Optional[bool] = None """Indicates if the run has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 :method:jobs/listruns requests with `expand_tasks=true`.""" - + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. 
     If more than 100 job clusters are available, you can paginate through them using
     :method:jobs/getrun."""
-
+
     job_id: Optional[int] = None
     """The canonical identifier of the job that contains this run."""
-
+
     job_parameters: Optional[List[JobParameter]] = None
     """Job-level parameters used in the run"""
-
+
     job_run_id: Optional[int] = None
     """ID of the job run that this run belongs to. For legacy and single-task job runs the field is
     populated with the job run ID. For task runs, the field is populated with the ID of the job run
     that the task run belongs to."""
-
+
     number_in_job: Optional[int] = None
     """A unique identifier for this job run. This is set to the same value as `run_id`."""
-
+
     original_attempt_run_id: Optional[int] = None
     """If this run is a retry of a prior run attempt, this field contains the run_id of the original
     attempt; otherwise, it is the same as the run_id."""
-
+
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
-
+
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
-
+
     repair_history: Optional[List[RepairHistoryItem]] = None
     """The repair history of the run."""
-
+
     run_duration: Optional[int] = None
     """The time in milliseconds it took the job run and all of its repairs to finish."""
-
+
     run_id: Optional[int] = None
     """The canonical identifier of the run. This ID is unique across all runs of all jobs."""
-
+
     run_name: Optional[str] = None
     """An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding."""
-
+
     run_page_url: Optional[str] = None
     """The URL to the detail page of the run."""
-
+
     run_type: Optional[RunType] = None
     """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
     `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
     run. A run created with :method:jobs/submit.
 
     [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow"""
-
+
     schedule: Optional[CronSchedule] = None
     """The cron schedule that triggered this run if it was triggered by the periodic scheduler."""
-
+
     setup_duration: Optional[int] = None
     """The time in milliseconds it took to set up the cluster. For runs that run on new clusters this
     is the cluster creation time, for runs that run on existing clusters this time should be very
     short. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and
     the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs. The total
     duration of a multitask job run is the value of the `run_duration` field."""
-
+
     start_time: Optional[int] = None
     """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970
     UTC). This may not be the time when the job task starts executing, for example, if the job is
     scheduled to run on a new cluster, this is the time the cluster creation call is issued."""
-
+
     state: Optional[RunState] = None
     """Deprecated. Please use the `status` field instead."""
-
+
     status: Optional[RunStatus] = None
     """The current status of the run"""
-
+
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to
     call `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can
     paginate through them using :method:jobs/getrun.
Use the `next_page_token` field at the object root to determine if more results are available.""" - + trigger: Optional[TriggerType] = None """The type of trigger that fired this run. @@ -255,190 +246,92 @@ class BaseRun: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job run.""" - + trigger_info: Optional[TriggerInfo] = None """Additional details about what triggered the run""" - + def as_dict(self) -> dict: """Serializes the BaseRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attempt_number is not None: - body["attempt_number"] = self.attempt_number - if self.cleanup_duration is not None: - body["cleanup_duration"] = self.cleanup_duration - if self.cluster_instance: - body["cluster_instance"] = self.cluster_instance.as_dict() - if self.cluster_spec: - body["cluster_spec"] = self.cluster_spec.as_dict() - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.description is not None: - body["description"] = self.description - if self.effective_performance_target is not None: - body["effective_performance_target"] = self.effective_performance_target.value - if self.end_time is not None: - body["end_time"] = self.end_time - if self.execution_duration is not None: - body["execution_duration"] = self.execution_duration - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.has_more is not None: - body["has_more"] = self.has_more - if self.job_clusters: - body["job_clusters"] = [v.as_dict() for v in self.job_clusters] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = [v.as_dict() for v in self.job_parameters] - if self.job_run_id is not None: - body["job_run_id"] = self.job_run_id - if self.number_in_job is not None: - body["number_in_job"] = self.number_in_job - if self.original_attempt_run_id is not None: - body["original_attempt_run_id"] = self.original_attempt_run_id - if self.overriding_parameters: - body["overriding_parameters"] = self.overriding_parameters.as_dict() - if self.queue_duration is not None: - body["queue_duration"] = self.queue_duration - if self.repair_history: - body["repair_history"] = [v.as_dict() for v in self.repair_history] - if self.run_duration is not None: - body["run_duration"] = self.run_duration - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_page_url is not None: - body["run_page_url"] = self.run_page_url - if self.run_type is not None: - body["run_type"] = self.run_type.value - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.setup_duration is not None: - body["setup_duration"] = self.setup_duration - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state.as_dict() - if self.status: - body["status"] = self.status.as_dict() - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.trigger is not None: - body["trigger"] = self.trigger.value - if self.trigger_info: - body["trigger_info"] = self.trigger_info.as_dict() + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] 
= self.cluster_instance.as_dict() + if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value + if self.end_time is not None: body['end_time'] = self.end_time + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id + if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict() + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history] + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.run_type is not None: body['run_type'] = self.run_type.value + if self.schedule: body['schedule'] = self.schedule.as_dict() + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state.as_dict() + if self.status: body['status'] = self.status.as_dict() + if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] + if self.trigger is not None: body['trigger'] = self.trigger.value + if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the BaseRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.attempt_number is not None: - body["attempt_number"] = self.attempt_number - if self.cleanup_duration is not None: - body["cleanup_duration"] = self.cleanup_duration - if self.cluster_instance: - body["cluster_instance"] = self.cluster_instance - if self.cluster_spec: - body["cluster_spec"] = self.cluster_spec - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.description is not None: - body["description"] = self.description - if self.effective_performance_target is not None: - body["effective_performance_target"] = self.effective_performance_target - if self.end_time is not None: - body["end_time"] = self.end_time - if self.execution_duration is not None: - body["execution_duration"] = self.execution_duration - if self.git_source: - body["git_source"] = self.git_source - if self.has_more is not None: - body["has_more"] = self.has_more - if self.job_clusters: - body["job_clusters"] = self.job_clusters - if self.job_id is not None: - body["job_id"] = self.job_id - if 
self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.job_run_id is not None: - body["job_run_id"] = self.job_run_id - if self.number_in_job is not None: - body["number_in_job"] = self.number_in_job - if self.original_attempt_run_id is not None: - body["original_attempt_run_id"] = self.original_attempt_run_id - if self.overriding_parameters: - body["overriding_parameters"] = self.overriding_parameters - if self.queue_duration is not None: - body["queue_duration"] = self.queue_duration - if self.repair_history: - body["repair_history"] = self.repair_history - if self.run_duration is not None: - body["run_duration"] = self.run_duration - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_page_url is not None: - body["run_page_url"] = self.run_page_url - if self.run_type is not None: - body["run_type"] = self.run_type - if self.schedule: - body["schedule"] = self.schedule - if self.setup_duration is not None: - body["setup_duration"] = self.setup_duration - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state - if self.status: - body["status"] = self.status - if self.tasks: - body["tasks"] = self.tasks - if self.trigger is not None: - body["trigger"] = self.trigger - if self.trigger_info: - body["trigger_info"] = self.trigger_info + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance + if self.cluster_spec: body['cluster_spec'] = self.cluster_spec + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target + if self.end_time is not None: body['end_time'] = self.end_time + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.git_source: body['git_source'] = self.git_source + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id + if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.repair_history: body['repair_history'] = self.repair_history + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.run_type is not None: body['run_type'] = self.run_type + if self.schedule: body['schedule'] = self.schedule + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.start_time is not None: body['start_time'] = 
self.start_time + if self.state: body['state'] = self.state + if self.status: body['status'] = self.status + if self.tasks: body['tasks'] = self.tasks + if self.trigger is not None: body['trigger'] = self.trigger + if self.trigger_info: body['trigger_info'] = self.trigger_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BaseRun: """Deserializes the BaseRun from a dictionary.""" - return cls( - attempt_number=d.get("attempt_number", None), - cleanup_duration=d.get("cleanup_duration", None), - cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance), - cluster_spec=_from_dict(d, "cluster_spec", ClusterSpec), - creator_user_name=d.get("creator_user_name", None), - description=d.get("description", None), - effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), - end_time=d.get("end_time", None), - execution_duration=d.get("execution_duration", None), - git_source=_from_dict(d, "git_source", GitSource), - has_more=d.get("has_more", None), - job_clusters=_repeated_dict(d, "job_clusters", JobCluster), - job_id=d.get("job_id", None), - job_parameters=_repeated_dict(d, "job_parameters", JobParameter), - job_run_id=d.get("job_run_id", None), - number_in_job=d.get("number_in_job", None), - original_attempt_run_id=d.get("original_attempt_run_id", None), - overriding_parameters=_from_dict(d, "overriding_parameters", RunParameters), - queue_duration=d.get("queue_duration", None), - repair_history=_repeated_dict(d, "repair_history", RepairHistoryItem), - run_duration=d.get("run_duration", None), - run_id=d.get("run_id", None), - run_name=d.get("run_name", None), - run_page_url=d.get("run_page_url", None), - run_type=_enum(d, "run_type", RunType), - schedule=_from_dict(d, "schedule", CronSchedule), - setup_duration=d.get("setup_duration", None), - start_time=d.get("start_time", None), - state=_from_dict(d, "state", RunState), - status=_from_dict(d, "status", RunStatus), - tasks=_repeated_dict(d, "tasks", RunTask), - trigger=_enum(d, "trigger", TriggerType), - trigger_info=_from_dict(d, "trigger_info", TriggerInfo), - ) + return cls(attempt_number=d.get('attempt_number', None), cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), creator_user_name=d.get('creator_user_name', None), description=d.get('description', None), effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), has_more=d.get('has_more', None), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), job_run_id=d.get('job_run_id', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), queue_duration=d.get('queue_duration', None), repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem), run_duration=d.get('run_duration', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_page_url=d.get('run_page_url', None), run_type=_enum(d, 'run_type', RunType), schedule=_from_dict(d, 'schedule', CronSchedule), setup_duration=d.get('setup_duration', None), start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), 
status=_from_dict(d, 'status', RunStatus), tasks=_repeated_dict(d, 'tasks', RunTask), trigger=_enum(d, 'trigger', TriggerType), trigger_info=_from_dict(d, 'trigger_info', TriggerInfo)) + + @dataclass @@ -446,32 +339,30 @@ class CancelAllRuns: all_queued_runs: Optional[bool] = None """Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in the workspace are canceled.""" - + job_id: Optional[int] = None """The canonical identifier of the job to cancel all runs of.""" - + def as_dict(self) -> dict: """Serializes the CancelAllRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_queued_runs is not None: - body["all_queued_runs"] = self.all_queued_runs - if self.job_id is not None: - body["job_id"] = self.job_id + if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs + if self.job_id is not None: body['job_id'] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_queued_runs is not None: - body["all_queued_runs"] = self.all_queued_runs - if self.job_id is not None: - body["job_id"] = self.job_id + if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs + if self.job_id is not None: body['job_id'] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CancelAllRuns: """Deserializes the CancelAllRuns from a dictionary.""" - return cls(all_queued_runs=d.get("all_queued_runs", None), job_id=d.get("job_id", None)) + return cls(all_queued_runs=d.get('all_queued_runs', None), job_id=d.get('job_id', None)) + + @dataclass @@ -490,31 +381,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse: """Deserializes the CancelAllRunsResponse from a dictionary.""" return cls() + + @dataclass class CancelRun: run_id: int """This field is required.""" - + def as_dict(self) -> dict: """Serializes the CancelRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the CancelRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CancelRun: """Deserializes the CancelRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) + return cls(run_id=d.get('run_id', None)) + + @dataclass @@ -533,175 +426,151 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelRunResponse: """Deserializes the CancelRunResponse from a dictionary.""" return cls() + + class CleanRoomTaskRunLifeCycleState(Enum): """Copied from elastic-spark-common/api/messages/runs.proto. 
Using the original definition to remove coupling with jobs API definition""" - - BLOCKED = "BLOCKED" - INTERNAL_ERROR = "INTERNAL_ERROR" - PENDING = "PENDING" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - RUN_LIFE_CYCLE_STATE_UNSPECIFIED = "RUN_LIFE_CYCLE_STATE_UNSPECIFIED" - SKIPPED = "SKIPPED" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - WAITING_FOR_RETRY = "WAITING_FOR_RETRY" - + + BLOCKED = 'BLOCKED' + INTERNAL_ERROR = 'INTERNAL_ERROR' + PENDING = 'PENDING' + QUEUED = 'QUEUED' + RUNNING = 'RUNNING' + RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED' + SKIPPED = 'SKIPPED' + TERMINATED = 'TERMINATED' + TERMINATING = 'TERMINATING' + WAITING_FOR_RETRY = 'WAITING_FOR_RETRY' class CleanRoomTaskRunResultState(Enum): """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid cyclic dependency.""" - - CANCELED = "CANCELED" - DISABLED = "DISABLED" - EVICTED = "EVICTED" - EXCLUDED = "EXCLUDED" - FAILED = "FAILED" - MAXIMUM_CONCURRENT_RUNS_REACHED = "MAXIMUM_CONCURRENT_RUNS_REACHED" - RUN_RESULT_STATE_UNSPECIFIED = "RUN_RESULT_STATE_UNSPECIFIED" - SUCCESS = "SUCCESS" - SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES" - TIMEDOUT = "TIMEDOUT" - UPSTREAM_CANCELED = "UPSTREAM_CANCELED" - UPSTREAM_EVICTED = "UPSTREAM_EVICTED" - UPSTREAM_FAILED = "UPSTREAM_FAILED" - + + CANCELED = 'CANCELED' + DISABLED = 'DISABLED' + EVICTED = 'EVICTED' + EXCLUDED = 'EXCLUDED' + FAILED = 'FAILED' + MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED' + RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED' + SUCCESS = 'SUCCESS' + SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES' + TIMEDOUT = 'TIMEDOUT' + UPSTREAM_CANCELED = 'UPSTREAM_CANCELED' + UPSTREAM_EVICTED = 'UPSTREAM_EVICTED' + UPSTREAM_FAILED = 'UPSTREAM_FAILED' @dataclass class CleanRoomTaskRunState: """Stores the run state of the clean rooms notebook task.""" - + life_cycle_state: Optional[CleanRoomTaskRunLifeCycleState] = None """A value indicating the run's current lifecycle state. This field is always available in the response. Note: Additional states might be introduced in future releases.""" - + result_state: Optional[CleanRoomTaskRunResultState] = None """A value indicating the run's result. This field is only available for terminal lifecycle states. 
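For example, a caller could inspect a terminal run like this (an illustrative sketch; the state object is constructed by hand here, whereas in practice it is returned by the API)::

    state = CleanRoomTaskRunState(
        life_cycle_state=CleanRoomTaskRunLifeCycleState.TERMINATED,
        result_state=CleanRoomTaskRunResultState.SUCCESS,
    )
    # result_state is only populated once the lifecycle state is terminal
    if state.result_state is not None:
        print(state.result_state.value)  # "SUCCESS"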
Note: Additional states might be introduced in future releases.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomTaskRunState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.life_cycle_state is not None: - body["life_cycle_state"] = self.life_cycle_state.value - if self.result_state is not None: - body["result_state"] = self.result_state.value + if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value + if self.result_state is not None: body['result_state'] = self.result_state.value return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomTaskRunState into a shallow dictionary of its immediate attributes.""" body = {} - if self.life_cycle_state is not None: - body["life_cycle_state"] = self.life_cycle_state - if self.result_state is not None: - body["result_state"] = self.result_state + if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state + if self.result_state is not None: body['result_state'] = self.result_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomTaskRunState: """Deserializes the CleanRoomTaskRunState from a dictionary.""" - return cls( - life_cycle_state=_enum(d, "life_cycle_state", CleanRoomTaskRunLifeCycleState), - result_state=_enum(d, "result_state", CleanRoomTaskRunResultState), - ) + return cls(life_cycle_state=_enum(d, 'life_cycle_state', CleanRoomTaskRunLifeCycleState), result_state=_enum(d, 'result_state', CleanRoomTaskRunResultState)) + + @dataclass class CleanRoomsNotebookTask: clean_room_name: str """The clean room that the notebook belongs to.""" - + notebook_name: str """Name of the notebook being run.""" - + etag: Optional[str] = None """Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the latest version). 
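For example (an illustrative sketch; the clean room, notebook, and etag values are placeholders)::

    task = CleanRoomsNotebookTask(
        clean_room_name="my_clean_room",
        notebook_name="shared_analysis",
        etag="a1b2c3",  # placeholder; fetch the real checksum as described below
        notebook_base_parameters={"run_date": "2025-06-05"},
    )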
It can be fetched by calling the :method:cleanroomassets/get API.""" - - notebook_base_parameters: Optional[Dict[str, str]] = None + + notebook_base_parameters: Optional[Dict[str,str]] = None """Base parameters to be used for the clean room notebook job.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomsNotebookTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_room_name is not None: - body["clean_room_name"] = self.clean_room_name - if self.etag is not None: - body["etag"] = self.etag - if self.notebook_base_parameters: - body["notebook_base_parameters"] = self.notebook_base_parameters - if self.notebook_name is not None: - body["notebook_name"] = self.notebook_name + if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name + if self.etag is not None: body['etag'] = self.etag + if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomsNotebookTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_room_name is not None: - body["clean_room_name"] = self.clean_room_name - if self.etag is not None: - body["etag"] = self.etag - if self.notebook_base_parameters: - body["notebook_base_parameters"] = self.notebook_base_parameters - if self.notebook_name is not None: - body["notebook_name"] = self.notebook_name + if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name + if self.etag is not None: body['etag'] = self.etag + if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters + if self.notebook_name is not None: body['notebook_name'] = self.notebook_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomsNotebookTask: """Deserializes the CleanRoomsNotebookTask from a dictionary.""" - return cls( - clean_room_name=d.get("clean_room_name", None), - etag=d.get("etag", None), - notebook_base_parameters=d.get("notebook_base_parameters", None), - notebook_name=d.get("notebook_name", None), - ) + return cls(clean_room_name=d.get('clean_room_name', None), etag=d.get('etag', None), notebook_base_parameters=d.get('notebook_base_parameters', None), notebook_name=d.get('notebook_name', None)) + + @dataclass class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput: clean_room_job_run_state: Optional[CleanRoomTaskRunState] = None """The run state of the clean rooms notebook task.""" - + notebook_output: Optional[NotebookOutput] = None """The notebook output for the clean room run""" - + output_schema_info: Optional[OutputSchemaInfo] = None """Information on how to access the output schema for the clean room run""" - + def as_dict(self) -> dict: """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_room_job_run_state: - body["clean_room_job_run_state"] = self.clean_room_job_run_state.as_dict() - if self.notebook_output: - body["notebook_output"] = self.notebook_output.as_dict() - if self.output_schema_info: - body["output_schema_info"] = self.output_schema_info.as_dict() + if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict() + if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict() + if self.output_schema_info: 
body['output_schema_info'] = self.output_schema_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_room_job_run_state: - body["clean_room_job_run_state"] = self.clean_room_job_run_state - if self.notebook_output: - body["notebook_output"] = self.notebook_output - if self.output_schema_info: - body["output_schema_info"] = self.output_schema_info + if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state + if self.notebook_output: body['notebook_output'] = self.notebook_output + if self.output_schema_info: body['output_schema_info'] = self.output_schema_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput: """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary.""" - return cls( - clean_room_job_run_state=_from_dict(d, "clean_room_job_run_state", CleanRoomTaskRunState), - notebook_output=_from_dict(d, "notebook_output", NotebookOutput), - output_schema_info=_from_dict(d, "output_schema_info", OutputSchemaInfo), - ) + return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState), notebook_output=_from_dict(d, 'notebook_output', NotebookOutput), output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo)) + + @dataclass @@ -714,7 +583,7 @@ class ClusterInstance: completes. The response won’t include this field if the identifier is not available yet.""" - + spark_context_id: Optional[str] = None """The canonical identifier for the Spark context used by a run. This field is filled in once the run begins execution. This value can be used to view the Spark UI by browsing to @@ -722,29 +591,27 @@ class ClusterInstance: the run has completed. The response won’t include this field if the identifier is not available yet.""" - + def as_dict(self) -> dict: """Serializes the ClusterInstance into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.spark_context_id is not None: - body["spark_context_id"] = self.spark_context_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id return body def as_shallow_dict(self) -> dict: """Serializes the ClusterInstance into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.spark_context_id is not None: - body["spark_context_id"] = self.spark_context_id + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterInstance: """Deserializes the ClusterInstance from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), spark_context_id=d.get("spark_context_id", None)) + return cls(cluster_id=d.get('cluster_id', None), spark_context_id=d.get('spark_context_id', None)) + + @dataclass @@ -753,103 +620,84 @@ class ClusterSpec: """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops responding. 
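For example, the same task could instead reuse a running cluster or a shared job cluster (an illustrative sketch; the cluster ID and key are placeholders)::

    spec = ClusterSpec(existing_cluster_id="0601-182128-dcbte59m")  # reuse a running cluster
    spec = ClusterSpec(job_cluster_key="main_cluster")  # reuse a cluster from job.settings.job_clusters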
We suggest running jobs and tasks on new clusters for greater reliability""" - + job_cluster_key: Optional[str] = None """If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.""" - + libraries: Optional[List[compute.Library]] = None """An optional list of libraries to be installed on the cluster. The default value is an empty list.""" - + new_cluster: Optional[compute.ClusterSpec] = None """If new_cluster, a description of a new cluster that is created for each run.""" - + def as_dict(self) -> dict: """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.new_cluster: - body["new_cluster"] = self.new_cluster.as_dict() + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.libraries: - body["libraries"] = self.libraries - if self.new_cluster: - body["new_cluster"] = self.new_cluster + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = self.libraries + if self.new_cluster: body['new_cluster'] = self.new_cluster return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterSpec: """Deserializes the ClusterSpec from a dictionary.""" - return cls( - existing_cluster_id=d.get("existing_cluster_id", None), - job_cluster_key=d.get("job_cluster_key", None), - libraries=_repeated_dict(d, "libraries", compute.Library), - new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), - ) + return cls(existing_cluster_id=d.get('existing_cluster_id', None), job_cluster_key=d.get('job_cluster_key', None), libraries=_repeated_dict(d, 'libraries', compute.Library), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec)) + + @dataclass class ComputeConfig: num_gpus: int """Number of GPUs.""" - + gpu_node_pool_id: Optional[str] = None """ID of the GPU pool to use.""" - + gpu_type: Optional[str] = None """GPU type.""" - + def as_dict(self) -> dict: """Serializes the ComputeConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gpu_node_pool_id is not None: - body["gpu_node_pool_id"] = self.gpu_node_pool_id - if self.gpu_type is not None: - body["gpu_type"] = self.gpu_type - if self.num_gpus is not None: - body["num_gpus"] = self.num_gpus + if self.gpu_node_pool_id is not None: body['gpu_node_pool_id'] = self.gpu_node_pool_id + if self.gpu_type is not None: body['gpu_type'] = self.gpu_type + if self.num_gpus is not None: body['num_gpus'] = self.num_gpus return body def as_shallow_dict(self) -> dict: """Serializes
the ComputeConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.gpu_node_pool_id is not None: - body["gpu_node_pool_id"] = self.gpu_node_pool_id - if self.gpu_type is not None: - body["gpu_type"] = self.gpu_type - if self.num_gpus is not None: - body["num_gpus"] = self.num_gpus + if self.gpu_node_pool_id is not None: body['gpu_node_pool_id'] = self.gpu_node_pool_id + if self.gpu_type is not None: body['gpu_type'] = self.gpu_type + if self.num_gpus is not None: body['num_gpus'] = self.num_gpus return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComputeConfig: """Deserializes the ComputeConfig from a dictionary.""" - return cls( - gpu_node_pool_id=d.get("gpu_node_pool_id", None), - gpu_type=d.get("gpu_type", None), - num_gpus=d.get("num_gpus", None), - ) - + return cls(gpu_node_pool_id=d.get('gpu_node_pool_id', None), gpu_type=d.get('gpu_type', None), num_gpus=d.get('num_gpus', None)) + -class Condition(Enum): - ALL_UPDATED = "ALL_UPDATED" - ANY_UPDATED = "ANY_UPDATED" +class Condition(Enum): + + + ALL_UPDATED = 'ALL_UPDATED' + ANY_UPDATED = 'ANY_UPDATED' @dataclass class ConditionTask: @@ -863,41 +711,37 @@ class ConditionTask: The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.""" - + left: str """The left operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + right: str """The right operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + def as_dict(self) -> dict: """Serializes the ConditionTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.left is not None: - body["left"] = self.left - if self.op is not None: - body["op"] = self.op.value - if self.right is not None: - body["right"] = self.right + if self.left is not None: body['left'] = self.left + if self.op is not None: body['op'] = self.op.value + if self.right is not None: body['right'] = self.right return body def as_shallow_dict(self) -> dict: """Serializes the ConditionTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.left is not None: - body["left"] = self.left - if self.op is not None: - body["op"] = self.op - if self.right is not None: - body["right"] = self.right + if self.left is not None: body['left'] = self.left + if self.op is not None: body['op'] = self.op + if self.right is not None: body['right'] = self.right return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ConditionTask: """Deserializes the ConditionTask from a dictionary.""" - return cls(left=d.get("left", None), op=_enum(d, "op", ConditionTaskOp), right=d.get("right", None)) + return cls(left=d.get('left', None), op=_enum(d, 'op', ConditionTaskOp), right=d.get('right', None)) + + class ConditionTaskOp(Enum): @@ -906,85 +750,84 @@ class ConditionTaskOp(Enum): `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to `false`. - + The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. 
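For example, the numeric comparison described above can be expressed as (a minimal sketch; the operands are the same placeholder values used in this docstring)::

    task = ConditionTask(
        left="12.0",
        op=ConditionTaskOp.GREATER_THAN_OR_EQUAL,
        right="12",
    )  # evaluates to true, because the operands are compared as numbers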
If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.""" - - EQUAL_TO = "EQUAL_TO" - GREATER_THAN = "GREATER_THAN" - GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL" - LESS_THAN = "LESS_THAN" - LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL" - NOT_EQUAL = "NOT_EQUAL" - + + EQUAL_TO = 'EQUAL_TO' + GREATER_THAN = 'GREATER_THAN' + GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' + LESS_THAN = 'LESS_THAN' + LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' + NOT_EQUAL = 'NOT_EQUAL' @dataclass class Continuous: pause_status: Optional[PauseStatus] = None """Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.""" - + def as_dict(self) -> dict: """Serializes the Continuous into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status.value + if self.pause_status is not None: body['pause_status'] = self.pause_status.value return body def as_shallow_dict(self) -> dict: """Serializes the Continuous into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status + if self.pause_status is not None: body['pause_status'] = self.pause_status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Continuous: """Deserializes the Continuous from a dictionary.""" - return cls(pause_status=_enum(d, "pause_status", PauseStatus)) + return cls(pause_status=_enum(d, 'pause_status', PauseStatus)) + + @dataclass class CreateJob: access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" - + budget_policy_id: Optional[str] = None """The id of the user specified budget policy to use for this job. If not specified, a default budget policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the budget policy used by this workload.""" - + continuous: Optional[Continuous] = None """An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.""" - + deployment: Optional[JobDeployment] = None """Deployment information for jobs managed by external sources.""" - + description: Optional[str] = None """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" - + edit_mode: Optional[JobEditMode] = None """Edit mode of the job. * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.""" - + environments: Optional[List[JobEnvironment]] = None """A list of task execution environment specifications that can be referenced by serverless tasks of this job. An environment is required to be present for serverless tasks. For serverless notebook tasks, the environment is accessible in the notebook environment panel. For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.""" - + format: Optional[Format] = None """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. 
When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -994,15 +837,15 @@ class CreateJob: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.""" - + max_concurrent_runs: Optional[int] = None """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be able to execute multiple runs of the same job concurrently. This is useful for example if you @@ -1012,17 +855,17 @@ class CreateJob: concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.""" - + name: Optional[str] = None """An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.""" - + notification_settings: Optional[JobNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.""" - + parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" - + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. @@ -1030,210 +873,136 @@ class CreateJob: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + queue: Optional[QueueSettings] = None """The queue settings of the job.""" - + run_as: Optional[JobRunAs] = None """Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" - + schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - - tags: Optional[Dict[str, str]] = None + + tags: Optional[Dict[str,str]] = None """A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.""" - + tasks: Optional[List[Task]] = None """A list of task specifications to be executed by this job. It supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit). Read endpoints return only 100 tasks. 
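For example, the full task list can be collected page by page (a sketch, assuming a ``WorkspaceClient`` named ``w`` and a placeholder job ID; it relies on the pagination described below)::

    job = w.jobs.get(job_id=123)
    tasks = list(job.settings.tasks or [])
    while job.next_page_token:
        job = w.jobs.get(job_id=123, page_token=job.next_page_token)
        tasks.extend(job.settings.tasks or [])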
If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - + trigger: Optional[TriggerSettings] = None """A configuration to trigger a run when certain conditions are met. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this job begin or complete.""" - + def as_dict(self) -> dict: """Serializes the CreateJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous.as_dict() - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode.value - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environments: - body["environments"] = [v.as_dict() for v in self.environments] - if self.format is not None: - body["format"] = self.format.value - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.job_clusters: - body["job_clusters"] = [v.as_dict() for v in self.job_clusters] - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - body["name"] = self.name - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value - if self.queue: - body["queue"] = self.queue.as_dict() - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger.as_dict() - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.continuous: body['continuous'] = self.continuous.as_dict() + if self.deployment: body['deployment'] = self.deployment.as_dict() + if self.description is not None: body['description'] = self.description + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value + if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() + if self.environments: body['environments'] = [v.as_dict() for v in self.environments] + if self.format is not None: body['format'] = 
self.format.value + if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.health: body['health'] = self.health.as_dict() + if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] + if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs + if self.name is not None: body['name'] = self.name + if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.performance_target is not None: body['performance_target'] = self.performance_target.value + if self.queue: body['queue'] = self.queue.as_dict() + if self.run_as: body['run_as'] = self.run_as.as_dict() + if self.schedule: body['schedule'] = self.schedule.as_dict() + if self.tags: body['tags'] = self.tags + if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.trigger: body['trigger'] = self.trigger.as_dict() + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environments: - body["environments"] = self.environments - if self.format is not None: - body["format"] = self.format - if self.git_source: - body["git_source"] = self.git_source - if self.health: - body["health"] = self.health - if self.job_clusters: - body["job_clusters"] = self.job_clusters - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - body["name"] = self.name - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.parameters: - body["parameters"] = self.parameters - if self.performance_target is not None: - body["performance_target"] = self.performance_target - if self.queue: - body["queue"] = self.queue - if self.run_as: - body["run_as"] = self.run_as - if self.schedule: - body["schedule"] = self.schedule - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = self.tasks - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.continuous: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment + if self.description is not None: body['description'] = self.description + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode + if self.email_notifications: body['email_notifications'] 
= self.email_notifications + if self.environments: body['environments'] = self.environments + if self.format is not None: body['format'] = self.format + if self.git_source: body['git_source'] = self.git_source + if self.health: body['health'] = self.health + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs + if self.name is not None: body['name'] = self.name + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.parameters: body['parameters'] = self.parameters + if self.performance_target is not None: body['performance_target'] = self.performance_target + if self.queue: body['queue'] = self.queue + if self.run_as: body['run_as'] = self.run_as + if self.schedule: body['schedule'] = self.schedule + if self.tags: body['tags'] = self.tags + if self.tasks: body['tasks'] = self.tasks + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.trigger: body['trigger'] = self.trigger + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateJob: """Deserializes the CreateJob from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - budget_policy_id=d.get("budget_policy_id", None), - continuous=_from_dict(d, "continuous", Continuous), - deployment=_from_dict(d, "deployment", JobDeployment), - description=d.get("description", None), - edit_mode=_enum(d, "edit_mode", JobEditMode), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environments=_repeated_dict(d, "environments", JobEnvironment), - format=_enum(d, "format", Format), - git_source=_from_dict(d, "git_source", GitSource), - health=_from_dict(d, "health", JobsHealthRules), - job_clusters=_repeated_dict(d, "job_clusters", JobCluster), - max_concurrent_runs=d.get("max_concurrent_runs", None), - name=d.get("name", None), - notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), - parameters=_repeated_dict(d, "parameters", JobParameterDefinition), - performance_target=_enum(d, "performance_target", PerformanceTarget), - queue=_from_dict(d, "queue", QueueSettings), - run_as=_from_dict(d, "run_as", JobRunAs), - schedule=_from_dict(d, "schedule", CronSchedule), - tags=d.get("tags", None), - tasks=_repeated_dict(d, "tasks", Task), - timeout_seconds=d.get("timeout_seconds", None), - trigger=_from_dict(d, "trigger", TriggerSettings), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), budget_policy_id=d.get('budget_policy_id', None), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), description=d.get('description', None), edit_mode=_enum(d, 'edit_mode', JobEditMode), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), format=_enum(d, 'format', Format), git_source=_from_dict(d, 'git_source', GitSource), health=_from_dict(d, 'health', JobsHealthRules), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), max_concurrent_runs=d.get('max_concurrent_runs', None), name=d.get('name', None), notification_settings=_from_dict(d, 
'notification_settings', JobNotificationSettings), parameters=_repeated_dict(d, 'parameters', JobParameterDefinition), performance_target=_enum(d, 'performance_target', PerformanceTarget), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), schedule=_from_dict(d, 'schedule', CronSchedule), tags=d.get('tags', None), tasks=_repeated_dict(d, 'tasks', Task), timeout_seconds=d.get('timeout_seconds', None), trigger=_from_dict(d, 'trigger', TriggerSettings), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) + + @dataclass class CreateResponse: """Job was created successfully""" - + job_id: Optional[int] = None """The canonical identifier for the newly created job.""" - + def as_dict(self) -> dict: """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id + if self.job_id is not None: body['job_id'] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id + if self.job_id is not None: body['job_id'] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" - return cls(job_id=d.get("job_id", None)) + return cls(job_id=d.get('job_id', None)) + + @dataclass @@ -1243,183 +1012,277 @@ class CronSchedule: for details. This field is required. [Cron Trigger]: http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - + timezone_id: str """A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone] for details. This field is required. 
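For example (an illustrative sketch; both values are placeholders, and the expression fires at 30 minutes past every hour)::

    schedule = CronSchedule(
        quartz_cron_expression="0 30 * * * ?",
        timezone_id="Europe/London",
        pause_status=PauseStatus.UNPAUSED,
    )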
[Java TimeZone]: https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html""" - + pause_status: Optional[PauseStatus] = None """Indicate whether this schedule is paused or not.""" - + def as_dict(self) -> dict: """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status.value - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.pause_status is not None: body['pause_status'] = self.pause_status.value + if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status - if self.quartz_cron_expression is not None: - body["quartz_cron_expression"] = self.quartz_cron_expression - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" - return cls( - pause_status=_enum(d, "pause_status", PauseStatus), - quartz_cron_expression=d.get("quartz_cron_expression", None), - timezone_id=d.get("timezone_id", None), - ) + return cls(pause_status=_enum(d, 'pause_status', PauseStatus), quartz_cron_expression=d.get('quartz_cron_expression', None), timezone_id=d.get('timezone_id', None)) + + @dataclass class DashboardPageSnapshot: page_display_name: Optional[str] = None - + widget_error_details: Optional[List[WidgetErrorDetail]] = None - + def as_dict(self) -> dict: """Serializes the DashboardPageSnapshot into a dictionary suitable for use as a JSON request body.""" body = {} - if self.page_display_name is not None: - body["page_display_name"] = self.page_display_name - if self.widget_error_details: - body["widget_error_details"] = [v.as_dict() for v in self.widget_error_details] + if self.page_display_name is not None: body['page_display_name'] = self.page_display_name + if self.widget_error_details: body['widget_error_details'] = [v.as_dict() for v in self.widget_error_details] return body def as_shallow_dict(self) -> dict: """Serializes the DashboardPageSnapshot into a shallow dictionary of its immediate attributes.""" body = {} - if self.page_display_name is not None: - body["page_display_name"] = self.page_display_name - if self.widget_error_details: - body["widget_error_details"] = self.widget_error_details + if self.page_display_name is not None: body['page_display_name'] = self.page_display_name + if self.widget_error_details: body['widget_error_details'] = self.widget_error_details return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardPageSnapshot: """Deserializes the DashboardPageSnapshot from a dictionary.""" - return cls( - page_display_name=d.get("page_display_name", None), - widget_error_details=_repeated_dict(d, "widget_error_details", WidgetErrorDetail), - ) + return 
cls(page_display_name=d.get('page_display_name', None), widget_error_details=_repeated_dict(d, 'widget_error_details', WidgetErrorDetail)) + + @dataclass class DashboardTask: """Configures the Lakeview Dashboard job task type.""" - + dashboard_id: Optional[str] = None """The identifier of the dashboard to refresh.""" - + subscription: Optional[Subscription] = None """Optional: subscription configuration for sending the dashboard snapshot.""" - + warehouse_id: Optional[str] = None """Optional: The warehouse id to execute the dashboard with for the schedule. If not specified, the default warehouse of the dashboard will be used.""" - + def as_dict(self) -> dict: """Serializes the DashboardTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.subscription: - body["subscription"] = self.subscription.as_dict() - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.subscription: body['subscription'] = self.subscription.as_dict() + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the DashboardTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.subscription: - body["subscription"] = self.subscription - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.subscription: body['subscription'] = self.subscription + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardTask: """Deserializes the DashboardTask from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - subscription=_from_dict(d, "subscription", Subscription), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(dashboard_id=d.get('dashboard_id', None), subscription=_from_dict(d, 'subscription', Subscription), warehouse_id=d.get('warehouse_id', None)) + + @dataclass class DashboardTaskOutput: page_snapshots: Optional[List[DashboardPageSnapshot]] = None """Should only be populated for manual PDF download jobs.""" - + def as_dict(self) -> dict: """Serializes the DashboardTaskOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.page_snapshots: - body["page_snapshots"] = [v.as_dict() for v in self.page_snapshots] + if self.page_snapshots: body['page_snapshots'] = [v.as_dict() for v in self.page_snapshots] return body def as_shallow_dict(self) -> dict: """Serializes the DashboardTaskOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.page_snapshots: - body["page_snapshots"] = self.page_snapshots + if self.page_snapshots: body['page_snapshots'] = self.page_snapshots return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardTaskOutput: """Deserializes the DashboardTaskOutput from a dictionary.""" - return cls(page_snapshots=_repeated_dict(d, "page_snapshots", DashboardPageSnapshot)) + return cls(page_snapshots=_repeated_dict(d, 'page_snapshots', DashboardPageSnapshot)) + + + + +@dataclass +class DbtCloudJobRunStep: + """Format of response retrieved from dbt Cloud, for inclusion in output""" + + index: Optional[int] = 
None + """Orders the steps in the job""" + + logs: Optional[str] = None + """Output of the step""" + + name: Optional[str] = None + """Name of the step in the job""" + + status: Optional[DbtCloudRunStatus] = None + """State of the step""" + + def as_dict(self) -> dict: + """Serializes the DbtCloudJobRunStep into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.index is not None: body['index'] = self.index + if self.logs is not None: body['logs'] = self.logs + if self.name is not None: body['name'] = self.name + if self.status is not None: body['status'] = self.status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DbtCloudJobRunStep into a shallow dictionary of its immediate attributes.""" + body = {} + if self.index is not None: body['index'] = self.index + if self.logs is not None: body['logs'] = self.logs + if self.name is not None: body['name'] = self.name + if self.status is not None: body['status'] = self.status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DbtCloudJobRunStep: + """Deserializes the DbtCloudJobRunStep from a dictionary.""" + return cls(index=d.get('index', None), logs=d.get('logs', None), name=d.get('name', None), status=_enum(d, 'status', DbtCloudRunStatus)) + + + + +class DbtCloudRunStatus(Enum): + """Response enumeration from calling the dbt Cloud API, for inclusion in output""" + + CANCELLED = 'CANCELLED' + ERROR = 'ERROR' + QUEUED = 'QUEUED' + RUNNING = 'RUNNING' + STARTING = 'STARTING' + SUCCESS = 'SUCCESS' + +@dataclass +class DbtCloudTask: + connection_resource_name: Optional[str] = None + """The resource name of the UC connection that authenticates the dbt Cloud for this task""" + + dbt_cloud_job_id: Optional[int] = None + """ID of the dbt Cloud job to be triggered""" + + def as_dict(self) -> dict: + """Serializes the DbtCloudTask into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name + if self.dbt_cloud_job_id is not None: body['dbt_cloud_job_id'] = self.dbt_cloud_job_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DbtCloudTask into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name + if self.dbt_cloud_job_id is not None: body['dbt_cloud_job_id'] = self.dbt_cloud_job_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DbtCloudTask: + """Deserializes the DbtCloudTask from a dictionary.""" + return cls(connection_resource_name=d.get('connection_resource_name', None), dbt_cloud_job_id=d.get('dbt_cloud_job_id', None)) + + + + +@dataclass +class DbtCloudTaskOutput: + dbt_cloud_job_run_id: Optional[int] = None + """ID of the job run in dbt Cloud""" + + dbt_cloud_job_run_output: Optional[List[DbtCloudJobRunStep]] = None + """Steps of the job run as received from dbt Cloud""" + + dbt_cloud_job_run_url: Optional[str] = None + """URL where full run details can be viewed""" + + def as_dict(self) -> dict: + """Serializes the DbtCloudTaskOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dbt_cloud_job_run_id is not None: body['dbt_cloud_job_run_id'] = self.dbt_cloud_job_run_id + if self.dbt_cloud_job_run_output: body['dbt_cloud_job_run_output'] = [v.as_dict() for v in self.dbt_cloud_job_run_output] + if self.dbt_cloud_job_run_url is not
None: body['dbt_cloud_job_run_url'] = self.dbt_cloud_job_run_url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DbtCloudTaskOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.dbt_cloud_job_run_id is not None: body['dbt_cloud_job_run_id'] = self.dbt_cloud_job_run_id + if self.dbt_cloud_job_run_output: body['dbt_cloud_job_run_output'] = self.dbt_cloud_job_run_output + if self.dbt_cloud_job_run_url is not None: body['dbt_cloud_job_run_url'] = self.dbt_cloud_job_run_url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DbtCloudTaskOutput: + """Deserializes the DbtCloudTaskOutput from a dictionary.""" + return cls(dbt_cloud_job_run_id=d.get('dbt_cloud_job_run_id', None), dbt_cloud_job_run_output=_repeated_dict(d, 'dbt_cloud_job_run_output', DbtCloudJobRunStep), dbt_cloud_job_run_url=d.get('dbt_cloud_job_run_url', None)) + + @dataclass class DbtOutput: - artifacts_headers: Optional[Dict[str, str]] = None + artifacts_headers: Optional[Dict[str,str]] = None """An optional map of headers to send when retrieving the artifact from the `artifacts_link`.""" - + artifacts_link: Optional[str] = None """A pre-signed URL to download the (compressed) dbt artifacts. This link is valid for a limited time (30 minutes). This information is only available after the run has finished.""" - + def as_dict(self) -> dict: """Serializes the DbtOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifacts_headers: - body["artifacts_headers"] = self.artifacts_headers - if self.artifacts_link is not None: - body["artifacts_link"] = self.artifacts_link + if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers + if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link return body def as_shallow_dict(self) -> dict: """Serializes the DbtOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifacts_headers: - body["artifacts_headers"] = self.artifacts_headers - if self.artifacts_link is not None: - body["artifacts_link"] = self.artifacts_link + if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers + if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DbtOutput: """Deserializes the DbtOutput from a dictionary.""" - return cls(artifacts_headers=d.get("artifacts_headers", None), artifacts_link=d.get("artifacts_link", None)) + return cls(artifacts_headers=d.get('artifacts_headers', None), artifacts_link=d.get('artifacts_link', None)) + + @dataclass @@ -1427,24 +1290,24 @@ class DbtTask: commands: List[str] """A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.""" - + catalog: Optional[str] = None """Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1.""" - + profiles_directory: Optional[str] = None """Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.""" - + project_directory: Optional[str] = None """Path to the project directory. 
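For example, a minimal dbt task might look like this (an illustrative sketch; the commands, directory, schema, and warehouse ID are placeholders)::

    dbt = DbtTask(
        commands=["dbt deps", "dbt run"],  # every command must start with `dbt`
        project_directory="dbt_project",   # placeholder path to the project directory
        schema="analytics",
        warehouse_id="abc123",             # profile/connection details are then generated automatically
    )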
Optional for Git sourced tasks, in which case if no value is provided, the root of the Git repository is used.""" - + schema: Optional[str] = None """Optional schema to write to. This parameter is only used when a warehouse_id is also provided. If not provided, the `default` schema is used.""" - + source: Optional[Source] = None """Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved from the local Databricks workspace. When set to `GIT`, the project will be retrieved @@ -1453,87 +1316,67 @@ class DbtTask: * `WORKSPACE`: Project is located in Databricks workspace. * `GIT`: Project is located in cloud Git provider.""" - + warehouse_id: Optional[str] = None """ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument.""" - + def as_dict(self) -> dict: """Serializes the DbtTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog is not None: - body["catalog"] = self.catalog - if self.commands: - body["commands"] = [v for v in self.commands] - if self.profiles_directory is not None: - body["profiles_directory"] = self.profiles_directory - if self.project_directory is not None: - body["project_directory"] = self.project_directory - if self.schema is not None: - body["schema"] = self.schema - if self.source is not None: - body["source"] = self.source.value - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.catalog is not None: body['catalog'] = self.catalog + if self.commands: body['commands'] = [v for v in self.commands] + if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory + if self.project_directory is not None: body['project_directory'] = self.project_directory + if self.schema is not None: body['schema'] = self.schema + if self.source is not None: body['source'] = self.source.value + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the DbtTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog is not None: - body["catalog"] = self.catalog - if self.commands: - body["commands"] = self.commands - if self.profiles_directory is not None: - body["profiles_directory"] = self.profiles_directory - if self.project_directory is not None: - body["project_directory"] = self.project_directory - if self.schema is not None: - body["schema"] = self.schema - if self.source is not None: - body["source"] = self.source - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.catalog is not None: body['catalog'] = self.catalog + if self.commands: body['commands'] = self.commands + if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory + if self.project_directory is not None: body['project_directory'] = self.project_directory + if self.schema is not None: body['schema'] = self.schema + if self.source is not None: body['source'] = self.source + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DbtTask: """Deserializes the DbtTask from a dictionary.""" - return cls( - catalog=d.get("catalog", None), - commands=d.get("commands", None), - profiles_directory=d.get("profiles_directory", None), - 
project_directory=d.get("project_directory", None), - schema=d.get("schema", None), - source=_enum(d, "source", Source), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(catalog=d.get('catalog', None), commands=d.get('commands', None), profiles_directory=d.get('profiles_directory', None), project_directory=d.get('project_directory', None), schema=d.get('schema', None), source=_enum(d, 'source', Source), warehouse_id=d.get('warehouse_id', None)) + + @dataclass class DeleteJob: job_id: int """The canonical identifier of the job to delete. This field is required.""" - + def as_dict(self) -> dict: """Serializes the DeleteJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id + if self.job_id is not None: body['job_id'] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id + if self.job_id is not None: body['job_id'] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteJob: """Deserializes the DeleteJob from a dictionary.""" - return cls(job_id=d.get("job_id", None)) + return cls(job_id=d.get('job_id', None)) + + @dataclass @@ -1552,31 +1395,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + @dataclass class DeleteRun: run_id: int """ID of the run to delete.""" - + def as_dict(self) -> dict: """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRun: """Deserializes the DeleteRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) + return cls(run_id=d.get('run_id', None)) + + @dataclass @@ -1595,86 +1440,80 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: """Deserializes the DeleteRunResponse from a dictionary.""" return cls() + + @dataclass class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: """Represents a change to the job cluster's settings that would be required for the job clusters to become compliant with their policies.""" - + field: Optional[str] = None """The field where this change would be made, prepended with the job cluster key.""" - + new_value: Optional[str] = None """The new value of this field after enforcing policy compliance (either a number, a boolean, or a string) converted to a string. This is intended to be read by a human. The typed new value of this field can be retrieved by reading the settings field in the API response.""" - + previous_value: Optional[str] = None """The previous value of this field before enforcing policy compliance (either a number, a boolean, or a string) converted to a string. This is intended to be read by a human. 
The type of the field can be retrieved by reading the settings field in the API response.""" - + def as_dict(self) -> dict: """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.field is not None: - body["field"] = self.field - if self.new_value is not None: - body["new_value"] = self.new_value - if self.previous_value is not None: - body["previous_value"] = self.previous_value + if self.field is not None: body['field'] = self.field + if self.new_value is not None: body['new_value'] = self.new_value + if self.previous_value is not None: body['previous_value'] = self.previous_value return body def as_shallow_dict(self) -> dict: """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a shallow dictionary of its immediate attributes.""" body = {} - if self.field is not None: - body["field"] = self.field - if self.new_value is not None: - body["new_value"] = self.new_value - if self.previous_value is not None: - body["previous_value"] = self.previous_value + if self.field is not None: body['field'] = self.field + if self.new_value is not None: body['new_value'] = self.new_value + if self.previous_value is not None: body['previous_value'] = self.previous_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary.""" - return cls( - field=d.get("field", None), new_value=d.get("new_value", None), previous_value=d.get("previous_value", None) - ) + return cls(field=d.get('field', None), new_value=d.get('new_value', None), previous_value=d.get('previous_value', None)) + + @dataclass class EnforcePolicyComplianceRequest: job_id: int """The ID of the job you want to enforce policy compliance on.""" - + validate_only: Optional[bool] = None """If set, previews changes made to the job to comply with its policy, but does not update the job.""" - + def as_dict(self) -> dict: """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only + if self.job_id is not None: body['job_id'] = self.job_id + if self.validate_only is not None: body['validate_only'] = self.validate_only return body def as_shallow_dict(self) -> dict: """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only + if self.job_id is not None: body['job_id'] = self.job_id + if self.validate_only is not None: body['validate_only'] = self.validate_only return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceRequest: """Deserializes the EnforcePolicyComplianceRequest from a dictionary.""" - return cls(job_id=d.get("job_id", None), validate_only=d.get("validate_only", None)) + return cls(job_id=d.get('job_id', None), validate_only=d.get('validate_only', None)) + + @dataclass @@ -1682,79 +1521,72 @@ class EnforcePolicyComplianceResponse: has_changes: Optional[bool] = None """Whether any changes have been made to the job cluster settings for the job to become compliant with its policies.""" - + job_cluster_changes: 
Optional[List[EnforcePolicyComplianceForJobResponseJobClusterSettingsChange]] = None """A list of job cluster changes that have been made to the job’s cluster settings in order for all job clusters to become compliant with their policies.""" - + settings: Optional[JobSettings] = None """Updated job settings after policy enforcement. Policy enforcement only applies to job clusters that are created when running the job (which are specified in new_cluster) and does not apply to existing all-purpose clusters. Updated job settings are derived by applying policy default values to the existing job clusters in order to satisfy policy requirements.""" - + def as_dict(self) -> dict: """Serializes the EnforcePolicyComplianceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.has_changes is not None: - body["has_changes"] = self.has_changes - if self.job_cluster_changes: - body["job_cluster_changes"] = [v.as_dict() for v in self.job_cluster_changes] - if self.settings: - body["settings"] = self.settings.as_dict() + if self.has_changes is not None: body['has_changes'] = self.has_changes + if self.job_cluster_changes: body['job_cluster_changes'] = [v.as_dict() for v in self.job_cluster_changes] + if self.settings: body['settings'] = self.settings.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EnforcePolicyComplianceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.has_changes is not None: - body["has_changes"] = self.has_changes - if self.job_cluster_changes: - body["job_cluster_changes"] = self.job_cluster_changes - if self.settings: - body["settings"] = self.settings + if self.has_changes is not None: body['has_changes'] = self.has_changes + if self.job_cluster_changes: body['job_cluster_changes'] = self.job_cluster_changes + if self.settings: body['settings'] = self.settings return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceResponse: """Deserializes the EnforcePolicyComplianceResponse from a dictionary.""" - return cls( - has_changes=d.get("has_changes", None), - job_cluster_changes=_repeated_dict( - d, "job_cluster_changes", EnforcePolicyComplianceForJobResponseJobClusterSettingsChange - ), - settings=_from_dict(d, "settings", JobSettings), - ) + return cls(has_changes=d.get('has_changes', None), job_cluster_changes=_repeated_dict(d, 'job_cluster_changes', EnforcePolicyComplianceForJobResponseJobClusterSettingsChange), settings=_from_dict(d, 'settings', JobSettings)) + + @dataclass class ExportRunOutput: """Run was exported successfully.""" - + views: Optional[List[ViewItem]] = None """The exported content in HTML format (one for every view item). To extract the HTML notebook from the JSON response, download and run this [Python script]. 
[Python script]: https://docs.databricks.com/en/_static/examples/extract.py""" - + def as_dict(self) -> dict: """Serializes the ExportRunOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.views: - body["views"] = [v.as_dict() for v in self.views] + if self.views: body['views'] = [v.as_dict() for v in self.views] return body def as_shallow_dict(self) -> dict: """Serializes the ExportRunOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.views: - body["views"] = self.views + if self.views: body['views'] = self.views return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExportRunOutput: """Deserializes the ExportRunOutput from a dictionary.""" - return cls(views=_repeated_dict(d, "views", ViewItem)) + return cls(views=_repeated_dict(d, 'views', ViewItem)) + + + + + @dataclass @@ -1762,355 +1594,325 @@ class FileArrivalTriggerConfiguration: url: str """URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.""" - + min_time_between_triggers_seconds: Optional[int] = None """If set, the trigger starts a run only after the specified amount of time passed since the last time the trigger fired. The minimum allowed value is 60 seconds""" - + wait_after_last_change_seconds: Optional[int] = None """If set, the trigger starts a run only after no file activity has occurred for the specified amount of time. This makes it possible to wait for a batch of incoming files to arrive before triggering a run. The minimum allowed value is 60 seconds.""" - + def as_dict(self) -> dict: """Serializes the FileArrivalTriggerConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.min_time_between_triggers_seconds is not None: - body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds - if self.url is not None: - body["url"] = self.url - if self.wait_after_last_change_seconds is not None: - body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds + if self.url is not None: body['url'] = self.url + if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds return body def as_shallow_dict(self) -> dict: """Serializes the FileArrivalTriggerConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.min_time_between_triggers_seconds is not None: - body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds - if self.url is not None: - body["url"] = self.url - if self.wait_after_last_change_seconds is not None: - body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds + if self.url is not None: body['url'] = self.url + if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileArrivalTriggerConfiguration: """Deserializes the FileArrivalTriggerConfiguration from a dictionary.""" - return cls( - min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None), - url=d.get("url", None), - 
wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None), - ) + return cls(min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None), url=d.get('url', None), wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None)) + + + + +@dataclass +class FileArrivalTriggerState: + using_file_events: Optional[bool] = None + """Indicates whether the trigger leverages file events to detect file arrivals.""" + + def as_dict(self) -> dict: + """Serializes the FileArrivalTriggerState into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.using_file_events is not None: body['using_file_events'] = self.using_file_events + return body + + def as_shallow_dict(self) -> dict: + """Serializes the FileArrivalTriggerState into a shallow dictionary of its immediate attributes.""" + body = {} + if self.using_file_events is not None: body['using_file_events'] = self.using_file_events + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> FileArrivalTriggerState: + """Deserializes the FileArrivalTriggerState from a dictionary.""" + return cls(using_file_events=d.get('using_file_events', None)) + + @dataclass class ForEachStats: error_message_stats: Optional[List[ForEachTaskErrorMessageStats]] = None """Sample of the 3 most common error messages that occurred during the iteration.""" - + task_run_stats: Optional[ForEachTaskTaskRunStats] = None """Describes stats of the iteration. Only latest retries are considered.""" - + def as_dict(self) -> dict: """Serializes the ForEachStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error_message_stats: - body["error_message_stats"] = [v.as_dict() for v in self.error_message_stats] - if self.task_run_stats: - body["task_run_stats"] = self.task_run_stats.as_dict() + if self.error_message_stats: body['error_message_stats'] = [v.as_dict() for v in self.error_message_stats] + if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ForEachStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.error_message_stats: - body["error_message_stats"] = self.error_message_stats - if self.task_run_stats: - body["task_run_stats"] = self.task_run_stats + if self.error_message_stats: body['error_message_stats'] = self.error_message_stats + if self.task_run_stats: body['task_run_stats'] = self.task_run_stats return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForEachStats: """Deserializes the ForEachStats from a dictionary.""" - return cls( - error_message_stats=_repeated_dict(d, "error_message_stats", ForEachTaskErrorMessageStats), - task_run_stats=_from_dict(d, "task_run_stats", ForEachTaskTaskRunStats), - ) + return cls(error_message_stats=_repeated_dict(d, 'error_message_stats', ForEachTaskErrorMessageStats), task_run_stats=_from_dict(d, 'task_run_stats', ForEachTaskTaskRunStats)) + + @dataclass class ForEachTask: inputs: str """Array for task to iterate on. This can be a JSON string or a reference to an array parameter.""" - + task: Task """Configuration for the task that will be run for each element in the array""" - + concurrency: Optional[int] = None """An optional maximum allowed number of concurrent runs of the task. 
Set this value if you want to be able to execute multiple runs of the task concurrently.""" - + def as_dict(self) -> dict: """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.concurrency is not None: - body["concurrency"] = self.concurrency - if self.inputs is not None: - body["inputs"] = self.inputs - if self.task: - body["task"] = self.task.as_dict() + if self.concurrency is not None: body['concurrency'] = self.concurrency + if self.inputs is not None: body['inputs'] = self.inputs + if self.task: body['task'] = self.task.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ForEachTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.concurrency is not None: - body["concurrency"] = self.concurrency - if self.inputs is not None: - body["inputs"] = self.inputs - if self.task: - body["task"] = self.task + if self.concurrency is not None: body['concurrency'] = self.concurrency + if self.inputs is not None: body['inputs'] = self.inputs + if self.task: body['task'] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForEachTask: """Deserializes the ForEachTask from a dictionary.""" - return cls( - concurrency=d.get("concurrency", None), inputs=d.get("inputs", None), task=_from_dict(d, "task", Task) - ) + return cls(concurrency=d.get('concurrency', None), inputs=d.get('inputs', None), task=_from_dict(d, 'task', Task)) + + @dataclass class ForEachTaskErrorMessageStats: count: Optional[int] = None """Describes the count of such error messages encountered during the iterations.""" - + error_message: Optional[str] = None """Describes the error message that occurred during the iterations.""" - + termination_category: Optional[str] = None """Describes the termination reason for the error message.""" - + def as_dict(self) -> dict: """Serializes the ForEachTaskErrorMessageStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.count is not None: - body["count"] = self.count - if self.error_message is not None: - body["error_message"] = self.error_message - if self.termination_category is not None: - body["termination_category"] = self.termination_category + if self.count is not None: body['count'] = self.count + if self.error_message is not None: body['error_message'] = self.error_message + if self.termination_category is not None: body['termination_category'] = self.termination_category return body def as_shallow_dict(self) -> dict: """Serializes the ForEachTaskErrorMessageStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.count is not None: - body["count"] = self.count - if self.error_message is not None: - body["error_message"] = self.error_message - if self.termination_category is not None: - body["termination_category"] = self.termination_category + if self.count is not None: body['count'] = self.count + if self.error_message is not None: body['error_message'] = self.error_message + if self.termination_category is not None: body['termination_category'] = self.termination_category return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForEachTaskErrorMessageStats: """Deserializes the ForEachTaskErrorMessageStats from a dictionary.""" - return cls( - count=d.get("count", None), - error_message=d.get("error_message", None), - termination_category=d.get("termination_category", None), - ) + return cls(count=d.get('count', None), error_message=d.get('error_message', None), 
termination_category=d.get('termination_category', None)) + + @dataclass class ForEachTaskTaskRunStats: active_iterations: Optional[int] = None """Describes the iteration runs having an active lifecycle state or an active run sub state.""" - + completed_iterations: Optional[int] = None """Describes the number of failed and succeeded iteration runs.""" - + failed_iterations: Optional[int] = None """Describes the number of failed iteration runs.""" - + scheduled_iterations: Optional[int] = None """Describes the number of iteration runs that have been scheduled.""" - + succeeded_iterations: Optional[int] = None """Describes the number of succeeded iteration runs.""" - + total_iterations: Optional[int] = None """Describes the length of the list of items to iterate over.""" - + def as_dict(self) -> dict: """Serializes the ForEachTaskTaskRunStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active_iterations is not None: - body["active_iterations"] = self.active_iterations - if self.completed_iterations is not None: - body["completed_iterations"] = self.completed_iterations - if self.failed_iterations is not None: - body["failed_iterations"] = self.failed_iterations - if self.scheduled_iterations is not None: - body["scheduled_iterations"] = self.scheduled_iterations - if self.succeeded_iterations is not None: - body["succeeded_iterations"] = self.succeeded_iterations - if self.total_iterations is not None: - body["total_iterations"] = self.total_iterations + if self.active_iterations is not None: body['active_iterations'] = self.active_iterations + if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations + if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations + if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations + if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations + if self.total_iterations is not None: body['total_iterations'] = self.total_iterations return body def as_shallow_dict(self) -> dict: """Serializes the ForEachTaskTaskRunStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.active_iterations is not None: - body["active_iterations"] = self.active_iterations - if self.completed_iterations is not None: - body["completed_iterations"] = self.completed_iterations - if self.failed_iterations is not None: - body["failed_iterations"] = self.failed_iterations - if self.scheduled_iterations is not None: - body["scheduled_iterations"] = self.scheduled_iterations - if self.succeeded_iterations is not None: - body["succeeded_iterations"] = self.succeeded_iterations - if self.total_iterations is not None: - body["total_iterations"] = self.total_iterations + if self.active_iterations is not None: body['active_iterations'] = self.active_iterations + if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations + if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations + if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations + if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations + if self.total_iterations is not None: body['total_iterations'] = self.total_iterations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForEachTaskTaskRunStats: """Deserializes the ForEachTaskTaskRunStats from a 
dictionary.""" - return cls( - active_iterations=d.get("active_iterations", None), - completed_iterations=d.get("completed_iterations", None), - failed_iterations=d.get("failed_iterations", None), - scheduled_iterations=d.get("scheduled_iterations", None), - succeeded_iterations=d.get("succeeded_iterations", None), - total_iterations=d.get("total_iterations", None), - ) + return cls(active_iterations=d.get('active_iterations', None), completed_iterations=d.get('completed_iterations', None), failed_iterations=d.get('failed_iterations', None), scheduled_iterations=d.get('scheduled_iterations', None), succeeded_iterations=d.get('succeeded_iterations', None), total_iterations=d.get('total_iterations', None)) + -class Format(Enum): - - MULTI_TASK = "MULTI_TASK" - SINGLE_TASK = "SINGLE_TASK" +class Format(Enum): + + + MULTI_TASK = 'MULTI_TASK' + SINGLE_TASK = 'SINGLE_TASK' @dataclass class GenAiComputeTask: dl_runtime_image: str """Runtime image""" - + command: Optional[str] = None """Command launcher to run the actual script, e.g. bash, python etc.""" - + compute: Optional[ComputeConfig] = None - + mlflow_experiment_name: Optional[str] = None """Optional string containing the name of the MLflow experiment to log the run to. If name is not found, backend will create the mlflow experiment using the name.""" - + source: Optional[Source] = None """Optional location type of the training script. When set to `WORKSPACE`, the script will be retrieved from the local Databricks workspace. When set to `GIT`, the script will be retrieved from a Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Script is located in Databricks workspace. * `GIT`: Script is located in cloud Git provider.""" - + training_script_path: Optional[str] = None """The training script file path to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.""" - + yaml_parameters: Optional[str] = None """Optional string containing model parameters passed to the training script in yaml format. 
If present, then the content in yaml_parameters_file_path will be ignored.""" - + yaml_parameters_file_path: Optional[str] = None """Optional path to a YAML file containing model parameters passed to the training script.""" - + def as_dict(self) -> dict: """Serializes the GenAiComputeTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.command is not None: - body["command"] = self.command - if self.compute: - body["compute"] = self.compute.as_dict() - if self.dl_runtime_image is not None: - body["dl_runtime_image"] = self.dl_runtime_image - if self.mlflow_experiment_name is not None: - body["mlflow_experiment_name"] = self.mlflow_experiment_name - if self.source is not None: - body["source"] = self.source.value - if self.training_script_path is not None: - body["training_script_path"] = self.training_script_path - if self.yaml_parameters is not None: - body["yaml_parameters"] = self.yaml_parameters - if self.yaml_parameters_file_path is not None: - body["yaml_parameters_file_path"] = self.yaml_parameters_file_path + if self.command is not None: body['command'] = self.command + if self.compute: body['compute'] = self.compute.as_dict() + if self.dl_runtime_image is not None: body['dl_runtime_image'] = self.dl_runtime_image + if self.mlflow_experiment_name is not None: body['mlflow_experiment_name'] = self.mlflow_experiment_name + if self.source is not None: body['source'] = self.source.value + if self.training_script_path is not None: body['training_script_path'] = self.training_script_path + if self.yaml_parameters is not None: body['yaml_parameters'] = self.yaml_parameters + if self.yaml_parameters_file_path is not None: body['yaml_parameters_file_path'] = self.yaml_parameters_file_path return body def as_shallow_dict(self) -> dict: """Serializes the GenAiComputeTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.command is not None: - body["command"] = self.command - if self.compute: - body["compute"] = self.compute - if self.dl_runtime_image is not None: - body["dl_runtime_image"] = self.dl_runtime_image - if self.mlflow_experiment_name is not None: - body["mlflow_experiment_name"] = self.mlflow_experiment_name - if self.source is not None: - body["source"] = self.source - if self.training_script_path is not None: - body["training_script_path"] = self.training_script_path - if self.yaml_parameters is not None: - body["yaml_parameters"] = self.yaml_parameters - if self.yaml_parameters_file_path is not None: - body["yaml_parameters_file_path"] = self.yaml_parameters_file_path + if self.command is not None: body['command'] = self.command + if self.compute: body['compute'] = self.compute + if self.dl_runtime_image is not None: body['dl_runtime_image'] = self.dl_runtime_image + if self.mlflow_experiment_name is not None: body['mlflow_experiment_name'] = self.mlflow_experiment_name + if self.source is not None: body['source'] = self.source + if self.training_script_path is not None: body['training_script_path'] = self.training_script_path + if self.yaml_parameters is not None: body['yaml_parameters'] = self.yaml_parameters + if self.yaml_parameters_file_path is not None: body['yaml_parameters_file_path'] = self.yaml_parameters_file_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenAiComputeTask: """Deserializes the GenAiComputeTask from a dictionary.""" - return cls( - command=d.get("command", None), - compute=_from_dict(d, "compute", ComputeConfig), - dl_runtime_image=d.get("dl_runtime_image", None), - 
mlflow_experiment_name=d.get("mlflow_experiment_name", None), - source=_enum(d, "source", Source), - training_script_path=d.get("training_script_path", None), - yaml_parameters=d.get("yaml_parameters", None), - yaml_parameters_file_path=d.get("yaml_parameters_file_path", None), - ) + return cls(command=d.get('command', None), compute=_from_dict(d, 'compute', ComputeConfig), dl_runtime_image=d.get('dl_runtime_image', None), mlflow_experiment_name=d.get('mlflow_experiment_name', None), source=_enum(d, 'source', Source), training_script_path=d.get('training_script_path', None), yaml_parameters=d.get('yaml_parameters', None), yaml_parameters_file_path=d.get('yaml_parameters_file_path', None)) + + + + + @dataclass class GetJobPermissionLevelsResponse: permission_levels: Optional[List[JobPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetJobPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetJobPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetJobPermissionLevelsResponse: """Deserializes the GetJobPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", JobPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', JobPermissionsDescription)) + + + + + + + + + + + @dataclass @@ -2119,195 +1921,179 @@ class GetPolicyComplianceResponse: """Whether the job is compliant with its policies or not. Jobs could be out of compliance if a policy they are using was updated after the job was last edited and some of its job clusters no longer comply with their updated policies.""" - - violations: Optional[Dict[str, str]] = None + + violations: Optional[Dict[str,str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. An identifier for the job cluster is prepended to the path. 
The values indicate an error message describing the policy validation error.""" - + def as_dict(self) -> dict: """Serializes the GetPolicyComplianceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.violations: - body["violations"] = self.violations + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations return body def as_shallow_dict(self) -> dict: """Serializes the GetPolicyComplianceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.violations: - body["violations"] = self.violations + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPolicyComplianceResponse: """Deserializes the GetPolicyComplianceResponse from a dictionary.""" - return cls(is_compliant=d.get("is_compliant", None), violations=d.get("violations", None)) + return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None)) + + + + + -class GitProvider(Enum): - AWS_CODE_COMMIT = "awsCodeCommit" - AZURE_DEV_OPS_SERVICES = "azureDevOpsServices" - BITBUCKET_CLOUD = "bitbucketCloud" - BITBUCKET_SERVER = "bitbucketServer" - GIT_HUB = "gitHub" - GIT_HUB_ENTERPRISE = "gitHubEnterprise" - GIT_LAB = "gitLab" - GIT_LAB_ENTERPRISE_EDITION = "gitLabEnterpriseEdition" +class GitProvider(Enum): + + + AWS_CODE_COMMIT = 'awsCodeCommit' + AZURE_DEV_OPS_SERVICES = 'azureDevOpsServices' + BITBUCKET_CLOUD = 'bitbucketCloud' + BITBUCKET_SERVER = 'bitbucketServer' + GIT_HUB = 'gitHub' + GIT_HUB_ENTERPRISE = 'gitHubEnterprise' + GIT_LAB = 'gitLab' + GIT_LAB_ENTERPRISE_EDITION = 'gitLabEnterpriseEdition' + @dataclass class GitSnapshot: """Read-only state of the remote repository at the time the job was run. This field is only included on job runs.""" - + used_commit: Optional[str] = None """Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.""" - + def as_dict(self) -> dict: """Serializes the GitSnapshot into a dictionary suitable for use as a JSON request body.""" body = {} - if self.used_commit is not None: - body["used_commit"] = self.used_commit + if self.used_commit is not None: body['used_commit'] = self.used_commit return body def as_shallow_dict(self) -> dict: """Serializes the GitSnapshot into a shallow dictionary of its immediate attributes.""" body = {} - if self.used_commit is not None: - body["used_commit"] = self.used_commit + if self.used_commit is not None: body['used_commit'] = self.used_commit return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GitSnapshot: """Deserializes the GitSnapshot from a dictionary.""" - return cls(used_commit=d.get("used_commit", None)) + return cls(used_commit=d.get('used_commit', None)) + + @dataclass class GitSource: """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. 
However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + git_url: str """URL of the repository to be cloned by this job.""" - + git_provider: GitProvider """Unique identifier of the service used to host the Git repository. The value is case insensitive.""" - + git_branch: Optional[str] = None """Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.""" - + git_commit: Optional[str] = None """Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag.""" - + git_snapshot: Optional[GitSnapshot] = None """Read-only state of the remote repository at the time the job was run. This field is only included on job runs.""" - + git_tag: Optional[str] = None """Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.""" - + job_source: Optional[JobSource] = None """The source of the job specification in the remote repository when the job is source controlled.""" - + def as_dict(self) -> dict: """Serializes the GitSource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.git_branch is not None: - body["git_branch"] = self.git_branch - if self.git_commit is not None: - body["git_commit"] = self.git_commit - if self.git_provider is not None: - body["git_provider"] = self.git_provider.value - if self.git_snapshot: - body["git_snapshot"] = self.git_snapshot.as_dict() - if self.git_tag is not None: - body["git_tag"] = self.git_tag - if self.git_url is not None: - body["git_url"] = self.git_url - if self.job_source: - body["job_source"] = self.job_source.as_dict() + if self.git_branch is not None: body['git_branch'] = self.git_branch + if self.git_commit is not None: body['git_commit'] = self.git_commit + if self.git_provider is not None: body['git_provider'] = self.git_provider.value + if self.git_snapshot: body['git_snapshot'] = self.git_snapshot.as_dict() + if self.git_tag is not None: body['git_tag'] = self.git_tag + if self.git_url is not None: body['git_url'] = self.git_url + if self.job_source: body['job_source'] = self.job_source.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GitSource into a shallow dictionary of its immediate attributes.""" body = {} - if self.git_branch is not None: - body["git_branch"] = self.git_branch - if self.git_commit is not None: - body["git_commit"] = self.git_commit - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_snapshot: - body["git_snapshot"] = self.git_snapshot - if self.git_tag is not None: - body["git_tag"] = self.git_tag - if self.git_url is not None: - body["git_url"] = self.git_url - if self.job_source: - body["job_source"] = self.job_source + if self.git_branch is not None: body['git_branch'] = self.git_branch + if self.git_commit is not None: body['git_commit'] = self.git_commit + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_snapshot: body['git_snapshot'] = self.git_snapshot + if self.git_tag is not None: body['git_tag'] = self.git_tag + if self.git_url is not None: body['git_url'] = self.git_url + if self.job_source: body['job_source'] = self.job_source return body @classmethod def from_dict(cls, d: Dict[str, Any]) 
-> GitSource: """Deserializes the GitSource from a dictionary.""" - return cls( - git_branch=d.get("git_branch", None), - git_commit=d.get("git_commit", None), - git_provider=_enum(d, "git_provider", GitProvider), - git_snapshot=_from_dict(d, "git_snapshot", GitSnapshot), - git_tag=d.get("git_tag", None), - git_url=d.get("git_url", None), - job_source=_from_dict(d, "job_source", JobSource), - ) + return cls(git_branch=d.get('git_branch', None), git_commit=d.get('git_commit', None), git_provider=_enum(d, 'git_provider', GitProvider), git_snapshot=_from_dict(d, 'git_snapshot', GitSnapshot), git_tag=d.get('git_tag', None), git_url=d.get('git_url', None), job_source=_from_dict(d, 'job_source', JobSource)) + + @dataclass class Job: """Job was retrieved successfully.""" - + created_time: Optional[int] = None """The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC).""" - + creator_user_name: Optional[str] = None """The creator user name. This field won’t be included in the response if the user has already been deleted.""" - + effective_budget_policy_id: Optional[str] = None """The id of the budget policy used by this job for cost attribution purposes. This may be set through (in order of precedence): 1. Budget admins through the account or workspace console 2. Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" - + has_more: Optional[bool] = None """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list requests with `expand_tasks=true`.""" - + job_id: Optional[int] = None """The canonical identifier for this job.""" - + next_page_token: Optional[str] = None """A token that can be used to list the next page of array properties.""" - + run_as_user_name: Optional[str] = None """The email of an active workspace user or the application ID of a service principal that the job runs as. This value can be changed by setting the `run_as` field when creating or updating a @@ -2316,176 +2102,133 @@ class Job: By default, `run_as_user_name` is based on the current job settings and is set to the creator of the job if job access control is disabled or to the user with the `is_owner` permission if job access control is enabled.""" - + settings: Optional[JobSettings] = None """Settings for this job and all of its runs. 
These settings can be updated using the `resetJob` method.""" - + + trigger_state: Optional[TriggerStateProto] = None + """State of the trigger associated with the job.""" + def as_dict(self) -> dict: """Serializes the Job into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_time is not None: - body["created_time"] = self.created_time - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.has_more is not None: - body["has_more"] = self.has_more - if self.job_id is not None: - body["job_id"] = self.job_id - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.run_as_user_name is not None: - body["run_as_user_name"] = self.run_as_user_name - if self.settings: - body["settings"] = self.settings.as_dict() + if self.created_time is not None: body['created_time'] = self.created_time + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_id is not None: body['job_id'] = self.job_id + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name + if self.settings: body['settings'] = self.settings.as_dict() + if self.trigger_state: body['trigger_state'] = self.trigger_state.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Job into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_time is not None: - body["created_time"] = self.created_time - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.has_more is not None: - body["has_more"] = self.has_more - if self.job_id is not None: - body["job_id"] = self.job_id - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.run_as_user_name is not None: - body["run_as_user_name"] = self.run_as_user_name - if self.settings: - body["settings"] = self.settings + if self.created_time is not None: body['created_time'] = self.created_time + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.has_more is not None: body['has_more'] = self.has_more + if self.job_id is not None: body['job_id'] = self.job_id + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name + if self.settings: body['settings'] = self.settings + if self.trigger_state: body['trigger_state'] = self.trigger_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Job: """Deserializes the Job from a dictionary.""" - return cls( - created_time=d.get("created_time", None), - creator_user_name=d.get("creator_user_name", None), - effective_budget_policy_id=d.get("effective_budget_policy_id", None), - has_more=d.get("has_more", None), - job_id=d.get("job_id", 
None), - next_page_token=d.get("next_page_token", None), - run_as_user_name=d.get("run_as_user_name", None), - settings=_from_dict(d, "settings", JobSettings), - ) + return cls(created_time=d.get('created_time', None), creator_user_name=d.get('creator_user_name', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), has_more=d.get('has_more', None), job_id=d.get('job_id', None), next_page_token=d.get('next_page_token', None), run_as_user_name=d.get('run_as_user_name', None), settings=_from_dict(d, 'settings', JobSettings), trigger_state=_from_dict(d, 'trigger_state', TriggerStateProto)) + + @dataclass class JobAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[JobPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the JobAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the JobAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobAccessControlRequest: """Deserializes the JobAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", JobPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', JobPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class JobAccessControlResponse: all_permissions: Optional[List[JobPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None 
"""Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the JobAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the JobAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobAccessControlResponse: """Deserializes the JobAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", JobPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', JobPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass @@ -2494,79 +2237,68 @@ class JobCluster: """A unique name for the job cluster. This field is required and must be unique within the job. 
`JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution.""" - + new_cluster: compute.ClusterSpec """If new_cluster, a description of a cluster that is created for each task.""" - + def as_dict(self) -> dict: """Serializes the JobCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.new_cluster: - body["new_cluster"] = self.new_cluster.as_dict() + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the JobCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.new_cluster: - body["new_cluster"] = self.new_cluster + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.new_cluster: body['new_cluster'] = self.new_cluster return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobCluster: """Deserializes the JobCluster from a dictionary.""" - return cls( - job_cluster_key=d.get("job_cluster_key", None), - new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), - ) + return cls(job_cluster_key=d.get('job_cluster_key', None), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec)) + + @dataclass class JobCompliance: job_id: int """Canonical unique identifier for a job.""" - + is_compliant: Optional[bool] = None """Whether this job is in compliance with the latest version of its policy.""" - - violations: Optional[Dict[str, str]] = None + + violations: Optional[Dict[str,str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. An identifier for the job cluster is prepended to the path. 
The values indicate an error message describing the policy validation error.""" - + def as_dict(self) -> dict: """Serializes the JobCompliance into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.job_id is not None: - body["job_id"] = self.job_id - if self.violations: - body["violations"] = self.violations + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.job_id is not None: body['job_id'] = self.job_id + if self.violations: body['violations'] = self.violations return body def as_shallow_dict(self) -> dict: """Serializes the JobCompliance into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_compliant is not None: - body["is_compliant"] = self.is_compliant - if self.job_id is not None: - body["job_id"] = self.job_id - if self.violations: - body["violations"] = self.violations + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.job_id is not None: body['job_id'] = self.job_id + if self.violations: body['violations'] = self.violations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobCompliance: """Deserializes the JobCompliance from a dictionary.""" - return cls( - is_compliant=d.get("is_compliant", None), job_id=d.get("job_id", None), violations=d.get("violations", None) - ) + return cls(is_compliant=d.get('is_compliant', None), job_id=d.get('job_id', None), violations=d.get('violations', None)) + + @dataclass @@ -2575,164 +2307,141 @@ class JobDeployment: """The kind of deployment that manages the job. * `BUNDLE`: The job is managed by Databricks Asset Bundle.""" - + metadata_file_path: Optional[str] = None """Path of the file that contains deployment metadata.""" - + def as_dict(self) -> dict: """Serializes the JobDeployment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.kind is not None: - body["kind"] = self.kind.value - if self.metadata_file_path is not None: - body["metadata_file_path"] = self.metadata_file_path + if self.kind is not None: body['kind'] = self.kind.value + if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path return body def as_shallow_dict(self) -> dict: """Serializes the JobDeployment into a shallow dictionary of its immediate attributes.""" body = {} - if self.kind is not None: - body["kind"] = self.kind - if self.metadata_file_path is not None: - body["metadata_file_path"] = self.metadata_file_path + if self.kind is not None: body['kind'] = self.kind + if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobDeployment: """Deserializes the JobDeployment from a dictionary.""" - return cls(kind=_enum(d, "kind", JobDeploymentKind), metadata_file_path=d.get("metadata_file_path", None)) + return cls(kind=_enum(d, 'kind', JobDeploymentKind), metadata_file_path=d.get('metadata_file_path', None)) + + class JobDeploymentKind(Enum): """* `BUNDLE`: The job is managed by Databricks Asset Bundle.""" - - BUNDLE = "BUNDLE" - + + BUNDLE = 'BUNDLE' class JobEditMode(Enum): """Edit mode of the job. - + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. 
* `EDITABLE`: The job is in an editable state and can be modified.""" - - EDITABLE = "EDITABLE" - UI_LOCKED = "UI_LOCKED" - + + EDITABLE = 'EDITABLE' + UI_LOCKED = 'UI_LOCKED' @dataclass class JobEmailNotifications: no_alert_for_skipped_runs: Optional[bool] = None """If true, do not send email to recipients specified in `on_failure` if the run is skipped. This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.""" - + on_duration_warning_threshold_exceeded: Optional[List[str]] = None """A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.""" - + on_failure: Optional[List[str]] = None """A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED` or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + on_start: Optional[List[str]] = None """A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + on_streaming_backlog_exceeded: Optional[List[str]] = None """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream. Streaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.""" - + on_success: Optional[List[str]] = None """A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state.
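# --- Editor's example (not part of the patch): illustrative use of the
# JobEmailNotifications fields documented above; the addresses are
# placeholders.
from databricks.sdk.service import jobs

notifications = jobs.JobEmailNotifications(
    on_failure=["oncall@example.com"],
    on_success=["team@example.com"],
    no_alert_for_skipped_runs=True,
)
# Unset optional fields are simply omitted from the serialized request body.
assert notifications.as_dict()["on_failure"] == ["oncall@example.com"]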
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + def as_dict(self) -> dict: """Serializes the JobEmailNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs - if self.on_duration_warning_threshold_exceeded: - body["on_duration_warning_threshold_exceeded"] = [v for v in self.on_duration_warning_threshold_exceeded] - if self.on_failure: - body["on_failure"] = [v for v in self.on_failure] - if self.on_start: - body["on_start"] = [v for v in self.on_start] - if self.on_streaming_backlog_exceeded: - body["on_streaming_backlog_exceeded"] = [v for v in self.on_streaming_backlog_exceeded] - if self.on_success: - body["on_success"] = [v for v in self.on_success] + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = [v for v in self.on_duration_warning_threshold_exceeded] + if self.on_failure: body['on_failure'] = [v for v in self.on_failure] + if self.on_start: body['on_start'] = [v for v in self.on_start] + if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded] + if self.on_success: body['on_success'] = [v for v in self.on_success] return body def as_shallow_dict(self) -> dict: """Serializes the JobEmailNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs - if self.on_duration_warning_threshold_exceeded: - body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded - if self.on_failure: - body["on_failure"] = self.on_failure - if self.on_start: - body["on_start"] = self.on_start - if self.on_streaming_backlog_exceeded: - body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded - if self.on_success: - body["on_success"] = self.on_success + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded + if self.on_failure: body['on_failure'] = self.on_failure + if self.on_start: body['on_start'] = self.on_start + if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded + if self.on_success: body['on_success'] = self.on_success return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobEmailNotifications: """Deserializes the JobEmailNotifications from a dictionary.""" - return cls( - no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None), - on_duration_warning_threshold_exceeded=d.get("on_duration_warning_threshold_exceeded", None), - on_failure=d.get("on_failure", None), - on_start=d.get("on_start", None), - on_streaming_backlog_exceeded=d.get("on_streaming_backlog_exceeded", None), - on_success=d.get("on_success", None), - ) + return cls(no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None), on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded', None), on_failure=d.get('on_failure', None), on_start=d.get('on_start', None), 
on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None), on_success=d.get('on_success', None)) + + @dataclass class JobEnvironment: environment_key: str """The key of an environment. It has to be unique within a job.""" - + spec: Optional[compute.Environment] = None """The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook task, and DLT's environment for classic and serverless pipelines. In this minimal environment spec, only pip dependencies are supported.""" - + def as_dict(self) -> dict: """Serializes the JobEnvironment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.environment_key is not None: - body["environment_key"] = self.environment_key - if self.spec: - body["spec"] = self.spec.as_dict() + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.spec: body['spec'] = self.spec.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the JobEnvironment into a shallow dictionary of its immediate attributes.""" body = {} - if self.environment_key is not None: - body["environment_key"] = self.environment_key - if self.spec: - body["spec"] = self.spec + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.spec: body['spec'] = self.spec return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobEnvironment: """Deserializes the JobEnvironment from a dictionary.""" - return cls(environment_key=d.get("environment_key", None), spec=_from_dict(d, "spec", compute.Environment)) + return cls(environment_key=d.get('environment_key', None), spec=_from_dict(d, 'spec', compute.Environment)) + + @dataclass @@ -2740,303 +2449,264 @@ class JobNotificationSettings: no_alert_for_canceled_runs: Optional[bool] = None """If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.""" - + no_alert_for_skipped_runs: Optional[bool] = None """If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.""" - + def as_dict(self) -> dict: """Serializes the JobNotificationSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.no_alert_for_canceled_runs is not None: - body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs + if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs return body def as_shallow_dict(self) -> dict: """Serializes the JobNotificationSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.no_alert_for_canceled_runs is not None: - body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs + if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobNotificationSettings: """Deserializes the JobNotificationSettings from a dictionary.""" - return cls( - 
no_alert_for_canceled_runs=d.get("no_alert_for_canceled_runs", None), - no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None), - ) + return cls(no_alert_for_canceled_runs=d.get('no_alert_for_canceled_runs', None), no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None)) + + @dataclass class JobParameter: default: Optional[str] = None """The optional default value of the parameter""" - + name: Optional[str] = None """The name of the parameter""" - + value: Optional[str] = None """The value used in the run""" - + def as_dict(self) -> dict: """Serializes the JobParameter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default is not None: - body["default"] = self.default - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value + if self.default is not None: body['default'] = self.default + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the JobParameter into a shallow dictionary of its immediate attributes.""" body = {} - if self.default is not None: - body["default"] = self.default - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value + if self.default is not None: body['default'] = self.default + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobParameter: """Deserializes the JobParameter from a dictionary.""" - return cls(default=d.get("default", None), name=d.get("name", None), value=d.get("value", None)) + return cls(default=d.get('default', None), name=d.get('name', None), value=d.get('value', None)) + + @dataclass class JobParameterDefinition: name: str """The name of the defined parameter. 
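# --- Editor's example (not part of the patch): the JobParameter round-trip
# shown above; values are illustrative. from_dict() tolerates missing keys
# and leaves the corresponding fields as None.
from databricks.sdk.service import jobs

p = jobs.JobParameter.from_dict({"name": "env", "default": "dev", "value": "prod"})
assert (p.name, p.default, p.value) == ("env", "dev", "prod")
assert jobs.JobParameter.from_dict({}).name is None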
May only contain alphanumeric characters, `_`, `-`, and `.`""" - + default: str """Default value of the parameter.""" - + def as_dict(self) -> dict: """Serializes the JobParameterDefinition into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default is not None: - body["default"] = self.default - if self.name is not None: - body["name"] = self.name + if self.default is not None: body['default'] = self.default + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the JobParameterDefinition into a shallow dictionary of its immediate attributes.""" body = {} - if self.default is not None: - body["default"] = self.default - if self.name is not None: - body["name"] = self.name + if self.default is not None: body['default'] = self.default + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobParameterDefinition: """Deserializes the JobParameterDefinition from a dictionary.""" - return cls(default=d.get("default", None), name=d.get("name", None)) + return cls(default=d.get('default', None), name=d.get('name', None)) + + @dataclass class JobPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[JobPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the JobPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the JobPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermission: """Deserializes the JobPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", JobPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', JobPermissionLevel)) + + class JobPermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = "CAN_MANAGE" - CAN_MANAGE_RUN = "CAN_MANAGE_RUN" - CAN_VIEW = "CAN_VIEW" - IS_OWNER = "IS_OWNER" - + + CAN_MANAGE = 'CAN_MANAGE' + CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' + CAN_VIEW = 'CAN_VIEW' + IS_OWNER = 'IS_OWNER' @dataclass class JobPermissions: 
access_control_list: Optional[List[JobAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the JobPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the JobPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermissions: """Deserializes the JobPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class JobPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[JobPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the JobPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the JobPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsDescription: """Deserializes the JobPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), permission_level=_enum(d, "permission_level", JobPermissionLevel) - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', JobPermissionLevel)) + + @dataclass class JobPermissionsRequest: access_control_list: Optional[List[JobAccessControlRequest]] 
= None - + job_id: Optional[str] = None """The job for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the JobPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.job_id is not None: - body["job_id"] = self.job_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.job_id is not None: body['job_id'] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.job_id is not None: - body["job_id"] = self.job_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.job_id is not None: body['job_id'] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsRequest: """Deserializes the JobPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - job_id=d.get("job_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), job_id=d.get('job_id', None)) + + @dataclass class JobRunAs: """Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. - + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" - + service_principal_name: Optional[str] = None """Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.""" - + user_name: Optional[str] = None """The email of an active workspace user. Non-admin users can only set this field to their own email.""" - + def as_dict(self) -> dict: """Serializes the JobRunAs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the JobRunAs into a shallow dictionary of its immediate attributes.""" body = {} - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobRunAs: """Deserializes the JobRunAs from a dictionary.""" - return cls(service_principal_name=d.get("service_principal_name", None), user_name=d.get("user_name", None)) + return cls(service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass @@ -3045,38 +2715,38 @@ class JobSettings: """The id of the user specified budget policy to use for this job. 
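# --- Editor's example (not part of the patch): JobRunAs takes exactly one
# identity, per its docstring above; the application ID here is a fake
# placeholder. Setting both fields is rejected server-side.
from databricks.sdk.service import jobs

run_as = jobs.JobRunAs(service_principal_name="00000000-0000-0000-0000-000000000000")
assert "user_name" not in run_as.as_dict()  # unset fields are omitted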
If not specified, a default budget policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the budget policy used by this workload.""" - + continuous: Optional[Continuous] = None """An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.""" - + deployment: Optional[JobDeployment] = None """Deployment information for jobs managed by external sources.""" - + description: Optional[str] = None """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" - + edit_mode: Optional[JobEditMode] = None """Edit mode of the job. * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.""" - + environments: Optional[List[JobEnvironment]] = None """A list of task execution environment specifications that can be referenced by serverless tasks of this job. An environment is required to be present for serverless tasks. For serverless notebook tasks, the environment is accessible in the notebook environment panel. For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.""" - + format: Optional[Format] = None """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -3086,15 +2756,15 @@ class JobSettings: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.""" - + max_concurrent_runs: Optional[int] = None """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be able to execute multiple runs of the same job concurrently. This is useful for example if you @@ -3104,17 +2774,17 @@ class JobSettings: concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.""" - + name: Optional[str] = None """An optional name for the job. 
The maximum length is 4096 bytes in UTF-8 encoding.""" - + notification_settings: Optional[JobNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.""" - + parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" - + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. @@ -3122,190 +2792,119 @@ class JobSettings: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + queue: Optional[QueueSettings] = None """The queue settings of the job.""" - + run_as: Optional[JobRunAs] = None """Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" - + schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - - tags: Optional[Dict[str, str]] = None + + tags: Optional[Dict[str,str]] = None """A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.""" - + tasks: Optional[List[Task]] = None """A list of task specifications to be executed by this job. It supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit). Read endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - + trigger: Optional[TriggerSettings] = None """A configuration to trigger a run when certain conditions are met. 
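# --- Editor's example (not part of the patch): a minimal JobSettings using
# only scalar fields documented above; every value shown is illustrative.
from databricks.sdk.service import jobs

settings = jobs.JobSettings(
    name="nightly-etl",
    max_concurrent_runs=1,
    tags={"team": "data-eng"},
    timeout_seconds=3600,
)
assert settings.as_dict() == {
    "name": "nightly-etl",
    "max_concurrent_runs": 1,
    "tags": {"team": "data-eng"},
    "timeout_seconds": 3600,
}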
The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this job begin or complete.""" - + def as_dict(self) -> dict: """Serializes the JobSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous.as_dict() - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode.value - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environments: - body["environments"] = [v.as_dict() for v in self.environments] - if self.format is not None: - body["format"] = self.format.value - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.job_clusters: - body["job_clusters"] = [v.as_dict() for v in self.job_clusters] - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - body["name"] = self.name - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value - if self.queue: - body["queue"] = self.queue.as_dict() - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger.as_dict() - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.continuous: body['continuous'] = self.continuous.as_dict() + if self.deployment: body['deployment'] = self.deployment.as_dict() + if self.description is not None: body['description'] = self.description + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value + if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() + if self.environments: body['environments'] = [v.as_dict() for v in self.environments] + if self.format is not None: body['format'] = self.format.value + if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.health: body['health'] = self.health.as_dict() + if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] + if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs + if self.name is not None: body['name'] = self.name + if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.performance_target is not None: body['performance_target'] = 
self.performance_target.value + if self.queue: body['queue'] = self.queue.as_dict() + if self.run_as: body['run_as'] = self.run_as.as_dict() + if self.schedule: body['schedule'] = self.schedule.as_dict() + if self.tags: body['tags'] = self.tags + if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.trigger: body['trigger'] = self.trigger.as_dict() + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the JobSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environments: - body["environments"] = self.environments - if self.format is not None: - body["format"] = self.format - if self.git_source: - body["git_source"] = self.git_source - if self.health: - body["health"] = self.health - if self.job_clusters: - body["job_clusters"] = self.job_clusters - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - body["name"] = self.name - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.parameters: - body["parameters"] = self.parameters - if self.performance_target is not None: - body["performance_target"] = self.performance_target - if self.queue: - body["queue"] = self.queue - if self.run_as: - body["run_as"] = self.run_as - if self.schedule: - body["schedule"] = self.schedule - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = self.tasks - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.continuous: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment + if self.description is not None: body['description'] = self.description + if self.edit_mode is not None: body['edit_mode'] = self.edit_mode + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environments: body['environments'] = self.environments + if self.format is not None: body['format'] = self.format + if self.git_source: body['git_source'] = self.git_source + if self.health: body['health'] = self.health + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs + if self.name is not None: body['name'] = self.name + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.parameters: body['parameters'] = self.parameters + if self.performance_target is not None: body['performance_target'] = self.performance_target + if self.queue: body['queue'] = self.queue + if self.run_as: body['run_as'] = self.run_as 
+ if self.schedule: body['schedule'] = self.schedule + if self.tags: body['tags'] = self.tags + if self.tasks: body['tasks'] = self.tasks + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.trigger: body['trigger'] = self.trigger + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSettings: """Deserializes the JobSettings from a dictionary.""" - return cls( - budget_policy_id=d.get("budget_policy_id", None), - continuous=_from_dict(d, "continuous", Continuous), - deployment=_from_dict(d, "deployment", JobDeployment), - description=d.get("description", None), - edit_mode=_enum(d, "edit_mode", JobEditMode), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environments=_repeated_dict(d, "environments", JobEnvironment), - format=_enum(d, "format", Format), - git_source=_from_dict(d, "git_source", GitSource), - health=_from_dict(d, "health", JobsHealthRules), - job_clusters=_repeated_dict(d, "job_clusters", JobCluster), - max_concurrent_runs=d.get("max_concurrent_runs", None), - name=d.get("name", None), - notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), - parameters=_repeated_dict(d, "parameters", JobParameterDefinition), - performance_target=_enum(d, "performance_target", PerformanceTarget), - queue=_from_dict(d, "queue", QueueSettings), - run_as=_from_dict(d, "run_as", JobRunAs), - schedule=_from_dict(d, "schedule", CronSchedule), - tags=d.get("tags", None), - tasks=_repeated_dict(d, "tasks", Task), - timeout_seconds=d.get("timeout_seconds", None), - trigger=_from_dict(d, "trigger", TriggerSettings), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) + return cls(budget_policy_id=d.get('budget_policy_id', None), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), description=d.get('description', None), edit_mode=_enum(d, 'edit_mode', JobEditMode), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), format=_enum(d, 'format', Format), git_source=_from_dict(d, 'git_source', GitSource), health=_from_dict(d, 'health', JobsHealthRules), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), max_concurrent_runs=d.get('max_concurrent_runs', None), name=d.get('name', None), notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings), parameters=_repeated_dict(d, 'parameters', JobParameterDefinition), performance_target=_enum(d, 'performance_target', PerformanceTarget), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), schedule=_from_dict(d, 'schedule', CronSchedule), tags=d.get('tags', None), tasks=_repeated_dict(d, 'tasks', Task), timeout_seconds=d.get('timeout_seconds', None), trigger=_from_dict(d, 'trigger', TriggerSettings), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) + + @dataclass class JobSource: """The source of the job specification in the remote repository when the job is source controlled.""" - + job_config_path: str """Path of the job YAML file that contains the job specification.""" - + import_from_git_branch: str """Name of the branch which the job is imported from.""" - + dirty_state: Optional[JobSourceDirtyState] = None """Dirty state indicates the job is not fully synced 
with the job specification in the remote repository. @@ -3315,56 +2914,47 @@ class JobSource: `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced.""" - + def as_dict(self) -> dict: """Serializes the JobSource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dirty_state is not None: - body["dirty_state"] = self.dirty_state.value - if self.import_from_git_branch is not None: - body["import_from_git_branch"] = self.import_from_git_branch - if self.job_config_path is not None: - body["job_config_path"] = self.job_config_path + if self.dirty_state is not None: body['dirty_state'] = self.dirty_state.value + if self.import_from_git_branch is not None: body['import_from_git_branch'] = self.import_from_git_branch + if self.job_config_path is not None: body['job_config_path'] = self.job_config_path return body def as_shallow_dict(self) -> dict: """Serializes the JobSource into a shallow dictionary of its immediate attributes.""" body = {} - if self.dirty_state is not None: - body["dirty_state"] = self.dirty_state - if self.import_from_git_branch is not None: - body["import_from_git_branch"] = self.import_from_git_branch - if self.job_config_path is not None: - body["job_config_path"] = self.job_config_path + if self.dirty_state is not None: body['dirty_state'] = self.dirty_state + if self.import_from_git_branch is not None: body['import_from_git_branch'] = self.import_from_git_branch + if self.job_config_path is not None: body['job_config_path'] = self.job_config_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSource: """Deserializes the JobSource from a dictionary.""" - return cls( - dirty_state=_enum(d, "dirty_state", JobSourceDirtyState), - import_from_git_branch=d.get("import_from_git_branch", None), - job_config_path=d.get("job_config_path", None), - ) + return cls(dirty_state=_enum(d, 'dirty_state', JobSourceDirtyState), import_from_git_branch=d.get('import_from_git_branch', None), job_config_path=d.get('job_config_path', None)) + + class JobSourceDirtyState(Enum): """Dirty state indicates the job is not fully synced with the job specification in the remote repository. - + Possible values are: * `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced. * `DISCONNECTED`: The job is temporarily disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced.""" - - DISCONNECTED = "DISCONNECTED" - NOT_SYNCED = "NOT_SYNCED" - + + DISCONNECTED = 'DISCONNECTED' + NOT_SYNCED = 'NOT_SYNCED' class JobsHealthMetric(Enum): """Specifies the health metric that is being evaluated for a particular health rule. - + * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag @@ -3372,19 +2962,17 @@ class JobsHealthMetric(Enum): of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams.
This metric is in Public Preview.""" - - RUN_DURATION_SECONDS = "RUN_DURATION_SECONDS" - STREAMING_BACKLOG_BYTES = "STREAMING_BACKLOG_BYTES" - STREAMING_BACKLOG_FILES = "STREAMING_BACKLOG_FILES" - STREAMING_BACKLOG_RECORDS = "STREAMING_BACKLOG_RECORDS" - STREAMING_BACKLOG_SECONDS = "STREAMING_BACKLOG_SECONDS" - + + RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS' + STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES' + STREAMING_BACKLOG_FILES = 'STREAMING_BACKLOG_FILES' + STREAMING_BACKLOG_RECORDS = 'STREAMING_BACKLOG_RECORDS' + STREAMING_BACKLOG_SECONDS = 'STREAMING_BACKLOG_SECONDS' class JobsHealthOperator(Enum): """Specifies the operator used to compare the health metric value with the specified threshold.""" - - GREATER_THAN = "GREATER_THAN" - + + GREATER_THAN = 'GREATER_THAN' @dataclass class JobsHealthRule: @@ -3398,221 +2986,192 @@ class JobsHealthRule: of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Public Preview.""" - + op: JobsHealthOperator """Specifies the operator used to compare the health metric value with the specified threshold.""" - + value: int """Specifies the threshold value that the health metric should obey to satisfy the health rule.""" - + def as_dict(self) -> dict: """Serializes the JobsHealthRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metric is not None: - body["metric"] = self.metric.value - if self.op is not None: - body["op"] = self.op.value - if self.value is not None: - body["value"] = self.value + if self.metric is not None: body['metric'] = self.metric.value + if self.op is not None: body['op'] = self.op.value + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the JobsHealthRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.metric is not None: - body["metric"] = self.metric - if self.op is not None: - body["op"] = self.op - if self.value is not None: - body["value"] = self.value + if self.metric is not None: body['metric'] = self.metric + if self.op is not None: body['op'] = self.op + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobsHealthRule: """Deserializes the JobsHealthRule from a dictionary.""" - return cls( - metric=_enum(d, "metric", JobsHealthMetric), - op=_enum(d, "op", JobsHealthOperator), - value=d.get("value", None), - ) + return cls(metric=_enum(d, 'metric', JobsHealthMetric), op=_enum(d, 'op', JobsHealthOperator), value=d.get('value', None)) + + @dataclass class JobsHealthRules: """An optional set of health rules that can be defined for this job.""" - + rules: Optional[List[JobsHealthRule]] = None - + def as_dict(self) -> dict: """Serializes the JobsHealthRules into a dictionary suitable for use as a JSON request body.""" body = {} - if self.rules: - body["rules"] = [v.as_dict() for v in self.rules] + if self.rules: body['rules'] = [v.as_dict() for v in self.rules] return body def as_shallow_dict(self) -> dict: """Serializes the JobsHealthRules into a shallow dictionary of its immediate attributes.""" body = {} - if self.rules: - body["rules"] = self.rules + if self.rules: body['rules'] = self.rules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobsHealthRules: """Deserializes the JobsHealthRules from a dictionary.""" - return cls(rules=_repeated_dict(d, 
"rules", JobsHealthRule)) + return cls(rules=_repeated_dict(d, 'rules', JobsHealthRule)) + + @dataclass class ListJobComplianceForPolicyResponse: jobs: Optional[List[JobCompliance]] = None """A list of jobs and their policy compliance statuses.""" - + next_page_token: Optional[str] = None """This field represents the pagination token to retrieve the next page of results. If this field is not in the response, it means no further results for the request.""" - + prev_page_token: Optional[str] = None """This field represents the pagination token to retrieve the previous page of results. If this field is not in the response, it means no further results for the request.""" - + def as_dict(self) -> dict: """Serializes the ListJobComplianceForPolicyResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.jobs: - body["jobs"] = [v.as_dict() for v in self.jobs] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token + if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListJobComplianceForPolicyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.jobs: - body["jobs"] = self.jobs - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token + if self.jobs: body['jobs'] = self.jobs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListJobComplianceForPolicyResponse: """Deserializes the ListJobComplianceForPolicyResponse from a dictionary.""" - return cls( - jobs=_repeated_dict(d, "jobs", JobCompliance), - next_page_token=d.get("next_page_token", None), - prev_page_token=d.get("prev_page_token", None), - ) + return cls(jobs=_repeated_dict(d, 'jobs', JobCompliance), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) + + + + + + + + @dataclass class ListJobsResponse: """List of jobs was retrieved successfully.""" - + has_more: Optional[bool] = None """If true, additional jobs matching the provided filter are available for listing.""" - + jobs: Optional[List[BaseJob]] = None """The list of jobs. 
Only included in the response if there are jobs to list.""" - + next_page_token: Optional[str] = None """A token that can be used to list the next page of jobs (if applicable).""" - + prev_page_token: Optional[str] = None """A token that can be used to list the previous page of jobs (if applicable).""" - + def as_dict(self) -> dict: """Serializes the ListJobsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.has_more is not None: - body["has_more"] = self.has_more - if self.jobs: - body["jobs"] = [v.as_dict() for v in self.jobs] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token + if self.has_more is not None: body['has_more'] = self.has_more + if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListJobsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.has_more is not None: - body["has_more"] = self.has_more - if self.jobs: - body["jobs"] = self.jobs - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token + if self.has_more is not None: body['has_more'] = self.has_more + if self.jobs: body['jobs'] = self.jobs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListJobsResponse: """Deserializes the ListJobsResponse from a dictionary.""" - return cls( - has_more=d.get("has_more", None), - jobs=_repeated_dict(d, "jobs", BaseJob), - next_page_token=d.get("next_page_token", None), - prev_page_token=d.get("prev_page_token", None), - ) + return cls(has_more=d.get('has_more', None), jobs=_repeated_dict(d, 'jobs', BaseJob), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) + + + + + @dataclass class ListRunsResponse: """List of runs was retrieved successfully.""" - + has_more: Optional[bool] = None """If true, additional runs matching the provided filter are available for listing.""" - + next_page_token: Optional[str] = None """A token that can be used to list the next page of runs (if applicable).""" - + prev_page_token: Optional[str] = None """A token that can be used to list the previous page of runs (if applicable).""" - + runs: Optional[List[BaseRun]] = None """A list of runs, from most recently started to least. 
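# --- Editor's example (not part of the patch): token-based pagination over
# ListJobsResponse as described above. `fetch_jobs_page` is a hypothetical
# callable standing in for the HTTP GET against the jobs/list endpoint.
from typing import Callable, Iterator, Optional
from databricks.sdk.service import jobs

def iter_all_jobs(fetch_jobs_page: Callable[..., dict]) -> Iterator[jobs.BaseJob]:
    token: Optional[str] = None
    while True:
        page = jobs.ListJobsResponse.from_dict(fetch_jobs_page(page_token=token))
        yield from page.jobs or []
        token = page.next_page_token
        if not token:  # an absent token means there are no further pages
            break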
Only included in the response if there are runs to list.""" - + def as_dict(self) -> dict: """Serializes the ListRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.has_more is not None: - body["has_more"] = self.has_more - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token - if self.runs: - body["runs"] = [v.as_dict() for v in self.runs] + if self.has_more is not None: body['has_more'] = self.has_more + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.runs: body['runs'] = [v.as_dict() for v in self.runs] return body def as_shallow_dict(self) -> dict: """Serializes the ListRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.has_more is not None: - body["has_more"] = self.has_more - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.prev_page_token is not None: - body["prev_page_token"] = self.prev_page_token - if self.runs: - body["runs"] = self.runs + if self.has_more is not None: body['has_more'] = self.has_more + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.runs: body['runs'] = self.runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListRunsResponse: """Deserializes the ListRunsResponse from a dictionary.""" - return cls( - has_more=d.get("has_more", None), - next_page_token=d.get("next_page_token", None), - prev_page_token=d.get("prev_page_token", None), - runs=_repeated_dict(d, "runs", BaseRun), - ) + return cls(has_more=d.get('has_more', None), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None), runs=_repeated_dict(d, 'runs', BaseRun)) + + @dataclass @@ -3623,32 +3182,30 @@ class NotebookOutput: Databricks restricts this API to return the first 5 MB of the value. For a larger result, your job can store the results in a cloud storage service. 
This field is absent if `dbutils.notebook.exit()` was never called.""" - + truncated: Optional[bool] = None """Whether or not the result was truncated.""" - + def as_dict(self) -> dict: """Serializes the NotebookOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.result is not None: - body["result"] = self.result - if self.truncated is not None: - body["truncated"] = self.truncated + if self.result is not None: body['result'] = self.result + if self.truncated is not None: body['truncated'] = self.truncated return body def as_shallow_dict(self) -> dict: """Serializes the NotebookOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.result is not None: - body["result"] = self.result - if self.truncated is not None: - body["truncated"] = self.truncated + if self.result is not None: body['result'] = self.result + if self.truncated is not None: body['truncated'] = self.truncated return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NotebookOutput: """Deserializes the NotebookOutput from a dictionary.""" - return cls(result=d.get("result", None), truncated=d.get("truncated", None)) + return cls(result=d.get('result', None), truncated=d.get('truncated', None)) + + @dataclass @@ -3657,8 +3214,8 @@ class NotebookTask: """The path of the notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.""" - - base_parameters: Optional[Dict[str, str]] = None + + base_parameters: Optional[Dict[str,str]] = None """Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/run Now with parameters specified, the two parameters maps are merged. If the same key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used. Use @@ -3673,434 +3230,360 @@ [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets""" - + source: Optional[Source] = None """Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Notebook is located in Databricks workspace. * `GIT`: Notebook is located in cloud Git provider.""" - + warehouse_id: Optional[str] = None """Optional `warehouse_id` to run the notebook on a SQL warehouse. Classic SQL warehouses are NOT supported; please use serverless or pro SQL warehouses.
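# --- Editor's example (not part of the patch): a NotebookTask per the fields
# above; the path and parameters are illustrative. With source=WORKSPACE the
# notebook path must be absolute.
from databricks.sdk.service import jobs

nb = jobs.NotebookTask(
    notebook_path="/Workspace/etl/ingest",
    base_parameters={"run_date": "2025-06-05"},
    source=jobs.Source.WORKSPACE,
)
assert nb.as_dict()["source"] == "WORKSPACE"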
Note that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.""" - + def as_dict(self) -> dict: """Serializes the NotebookTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.base_parameters: - body["base_parameters"] = self.base_parameters - if self.notebook_path is not None: - body["notebook_path"] = self.notebook_path - if self.source is not None: - body["source"] = self.source.value - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.base_parameters: body['base_parameters'] = self.base_parameters + if self.notebook_path is not None: body['notebook_path'] = self.notebook_path + if self.source is not None: body['source'] = self.source.value + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the NotebookTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.base_parameters: - body["base_parameters"] = self.base_parameters - if self.notebook_path is not None: - body["notebook_path"] = self.notebook_path - if self.source is not None: - body["source"] = self.source - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.base_parameters: body['base_parameters'] = self.base_parameters + if self.notebook_path is not None: body['notebook_path'] = self.notebook_path + if self.source is not None: body['source'] = self.source + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NotebookTask: """Deserializes the NotebookTask from a dictionary.""" - return cls( - base_parameters=d.get("base_parameters", None), - notebook_path=d.get("notebook_path", None), - source=_enum(d, "source", Source), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(base_parameters=d.get('base_parameters', None), notebook_path=d.get('notebook_path', None), source=_enum(d, 'source', Source), warehouse_id=d.get('warehouse_id', None)) + + @dataclass class OutputSchemaInfo: """Stores the catalog name, schema name, and the output schema expiration time for the clean room run.""" - + catalog_name: Optional[str] = None - + expiration_time: Optional[int] = None """The expiration time for the output schema as a Unix timestamp in milliseconds.""" - + schema_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.schema_name is not None: body['schema_name'] = self.schema_name return body def as_shallow_dict(self) -> dict: """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.expiration_time 


 @dataclass
 class OutputSchemaInfo:
     """Stores the catalog name, schema name, and the output schema expiration time for the clean
     room run."""
-
+
     catalog_name: Optional[str] = None
-
+
     expiration_time: Optional[int] = None
     """The expiration time for the output schema as a Unix timestamp in milliseconds."""
-
+
     schema_name: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None:
-            body["catalog_name"] = self.catalog_name
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.schema_name is not None:
-            body["schema_name"] = self.schema_name
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OutputSchemaInfo:
         """Deserializes the OutputSchemaInfo from a dictionary."""
-        return cls(
-            catalog_name=d.get("catalog_name", None),
-            expiration_time=d.get("expiration_time", None),
-            schema_name=d.get("schema_name", None),
-        )
-
+        return cls(catalog_name=d.get('catalog_name', None), expiration_time=d.get('expiration_time', None), schema_name=d.get('schema_name', None))
+


-class PauseStatus(Enum):
-    PAUSED = "PAUSED"
-    UNPAUSED = "UNPAUSED"
+class PauseStatus(Enum):
+
+
+    PAUSED = 'PAUSED'
+    UNPAUSED = 'UNPAUSED'


 class PerformanceTarget(Enum):
     """PerformanceTarget defines how performant (lower latency) or cost efficient the execution of
     run on serverless compute should be. The performance mode on the job or pipeline should map to
     a performance setting that is passed to Cluster Manager (see cluster-common
     PerformanceTarget)."""
-
-    PERFORMANCE_OPTIMIZED = "PERFORMANCE_OPTIMIZED"
-    STANDARD = "STANDARD"
-
+
+    PERFORMANCE_OPTIMIZED = 'PERFORMANCE_OPTIMIZED'
+    STANDARD = 'STANDARD'

 @dataclass
 class PeriodicTriggerConfiguration:
     interval: int
     """The interval at which the trigger should run."""
-
+
     unit: PeriodicTriggerConfigurationTimeUnit
     """The unit of time for the interval."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PeriodicTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.interval is not None:
-            body["interval"] = self.interval
-        if self.unit is not None:
-            body["unit"] = self.unit.value
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PeriodicTriggerConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.interval is not None:
-            body["interval"] = self.interval
-        if self.unit is not None:
-            body["unit"] = self.unit
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PeriodicTriggerConfiguration:
         """Deserializes the PeriodicTriggerConfiguration from a dictionary."""
-        return cls(interval=d.get("interval", None), unit=_enum(d, "unit", PeriodicTriggerConfigurationTimeUnit))
-
+        return cls(interval=d.get('interval', None), unit=_enum(d, 'unit', PeriodicTriggerConfigurationTimeUnit))
+


-class PeriodicTriggerConfigurationTimeUnit(Enum):
-    DAYS = "DAYS"
-    HOURS = "HOURS"
-    WEEKS = "WEEKS"
+class PeriodicTriggerConfigurationTimeUnit(Enum):
+
+
+    DAYS = 'DAYS'
+    HOURS = 'HOURS'
+    WEEKS = 'WEEKS'
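The interval/unit pair above is easiest to read with a concrete value; note that as_dict() flattens the enum via .value. A sketch (the 4-hour cadence is an arbitrary choice):

    from databricks.sdk.service.jobs import (PeriodicTriggerConfiguration,
                                             PeriodicTriggerConfigurationTimeUnit)

    # A trigger that fires every 4 hours.
    cfg = PeriodicTriggerConfiguration(interval=4,
                                       unit=PeriodicTriggerConfigurationTimeUnit.HOURS)
    assert cfg.as_dict() == {'interval': 4, 'unit': 'HOURS'}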


 @dataclass
 class PipelineParams:
     full_refresh: Optional[bool] = None
     """If true, triggers a full refresh on the delta live table."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PipelineParams into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineParams into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PipelineParams:
         """Deserializes the PipelineParams from a dictionary."""
-        return cls(full_refresh=d.get("full_refresh", None))
+        return cls(full_refresh=d.get('full_refresh', None))
+
+


 @dataclass
 class PipelineTask:
     pipeline_id: str
     """The full name of the pipeline task to execute."""
-
+
     full_refresh: Optional[bool] = None
     """If true, triggers a full refresh on the delta live table."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PipelineTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PipelineTask:
         """Deserializes the PipelineTask from a dictionary."""
-        return cls(full_refresh=d.get("full_refresh", None), pipeline_id=d.get("pipeline_id", None))
+        return cls(full_refresh=d.get('full_refresh', None), pipeline_id=d.get('pipeline_id', None))
+
+
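PipelineParams and PipelineTask carry the same full_refresh switch at the run-parameter and task level respectively; a quick sketch of the task-level shape (the pipeline ID is a placeholder):

    from databricks.sdk.service.jobs import PipelineTask

    # Requests a full refresh of the (hypothetical) pipeline on every run.
    task = PipelineTask(pipeline_id='0123-456789-abcdef', full_refresh=True)
    assert task.as_dict() == {'full_refresh': True, 'pipeline_id': '0123-456789-abcdef'}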


 @dataclass
 class PowerBiModel:
     authentication_method: Optional[AuthenticationMethod] = None
     """How the published Power BI model authenticates to Databricks"""
-
+
     model_name: Optional[str] = None
     """The name of the Power BI model"""
-
+
     overwrite_existing: Optional[bool] = None
     """Whether to overwrite existing Power BI models"""
-
+
     storage_mode: Optional[StorageMode] = None
     """The default storage mode of the Power BI model"""
-
+
     workspace_name: Optional[str] = None
     """The name of the Power BI workspace of the model"""
-
+
     def as_dict(self) -> dict:
         """Serializes the PowerBiModel into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authentication_method is not None:
-            body["authentication_method"] = self.authentication_method.value
-        if self.model_name is not None:
-            body["model_name"] = self.model_name
-        if self.overwrite_existing is not None:
-            body["overwrite_existing"] = self.overwrite_existing
-        if self.storage_mode is not None:
-            body["storage_mode"] = self.storage_mode.value
-        if self.workspace_name is not None:
-            body["workspace_name"] = self.workspace_name
+        if self.authentication_method is not None: body['authentication_method'] = self.authentication_method.value
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.overwrite_existing is not None: body['overwrite_existing'] = self.overwrite_existing
+        if self.storage_mode is not None: body['storage_mode'] = self.storage_mode.value
+        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PowerBiModel into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authentication_method is not None:
-            body["authentication_method"] = self.authentication_method
-        if self.model_name is not None:
-            body["model_name"] = self.model_name
-        if self.overwrite_existing is not None:
-            body["overwrite_existing"] = self.overwrite_existing
-        if self.storage_mode is not None:
-            body["storage_mode"] = self.storage_mode
-        if self.workspace_name is not None:
-            body["workspace_name"] = self.workspace_name
+        if self.authentication_method is not None: body['authentication_method'] = self.authentication_method
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.overwrite_existing is not None: body['overwrite_existing'] = self.overwrite_existing
+        if self.storage_mode is not None: body['storage_mode'] = self.storage_mode
+        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PowerBiModel:
         """Deserializes the PowerBiModel from a dictionary."""
-        return cls(
-            authentication_method=_enum(d, "authentication_method", AuthenticationMethod),
-            model_name=d.get("model_name", None),
-            overwrite_existing=d.get("overwrite_existing", None),
-            storage_mode=_enum(d, "storage_mode", StorageMode),
-            workspace_name=d.get("workspace_name", None),
-        )
+        return cls(authentication_method=_enum(d, 'authentication_method', AuthenticationMethod), model_name=d.get('model_name', None), overwrite_existing=d.get('overwrite_existing', None), storage_mode=_enum(d, 'storage_mode', StorageMode), workspace_name=d.get('workspace_name', None))
+
+


 @dataclass
 class PowerBiTable:
     catalog: Optional[str] = None
     """The catalog name in Databricks"""
-
+
     name: Optional[str] = None
     """The table name in Databricks"""
-
+
     schema: Optional[str] = None
     """The schema name in Databricks"""
-
+
     storage_mode: Optional[StorageMode] = None
     """The Power BI storage mode of the table"""
-
+
     def as_dict(self) -> dict:
         """Serializes the PowerBiTable into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.storage_mode is not None:
-            body["storage_mode"] = self.storage_mode.value
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.name is not None: body['name'] = self.name
+        if self.schema is not None: body['schema'] = self.schema
+        if self.storage_mode is not None: body['storage_mode'] = self.storage_mode.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PowerBiTable into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.storage_mode is not None:
-            body["storage_mode"] = self.storage_mode
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.name is not None: body['name'] = self.name
+        if self.schema is not None: body['schema'] = self.schema
+        if self.storage_mode is not None: body['storage_mode'] = self.storage_mode
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PowerBiTable:
         """Deserializes the PowerBiTable from a dictionary."""
-        return cls(
-            catalog=d.get("catalog", None),
-            name=d.get("name", None),
-            schema=d.get("schema", None),
-            storage_mode=_enum(d, "storage_mode", StorageMode),
-        )
+        return cls(catalog=d.get('catalog', None), name=d.get('name', None), schema=d.get('schema', None), storage_mode=_enum(d, 'storage_mode', StorageMode))
+
+


 @dataclass
 class PowerBiTask:
     connection_resource_name: Optional[str] = None
     """The resource name of the UC connection to authenticate from Databricks to Power BI"""
-
+
     power_bi_model: Optional[PowerBiModel] = None
     """The semantic model to update"""
-
+
     refresh_after_update: Optional[bool] = None
     """Whether the model should be refreshed after the update"""
-
+
     tables: Optional[List[PowerBiTable]] = None
     """The tables to be exported to Power BI"""
-
+
     warehouse_id: Optional[str] = None
     """The SQL warehouse ID to use as the Power BI data source"""
-
+
     def as_dict(self) -> dict:
         """Serializes the PowerBiTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connection_resource_name is not None:
-            body["connection_resource_name"] = self.connection_resource_name
-        if self.power_bi_model:
-            body["power_bi_model"] = self.power_bi_model.as_dict()
-        if self.refresh_after_update is not None:
-            body["refresh_after_update"] = self.refresh_after_update
-        if self.tables:
-            body["tables"] = [v.as_dict() for v in self.tables]
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name
+        if self.power_bi_model: body['power_bi_model'] = self.power_bi_model.as_dict()
+        if self.refresh_after_update is not None: body['refresh_after_update'] = self.refresh_after_update
+        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PowerBiTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_resource_name is not None:
-            body["connection_resource_name"] = self.connection_resource_name
-        if self.power_bi_model:
-            body["power_bi_model"] = self.power_bi_model
-        if self.refresh_after_update is not None:
-            body["refresh_after_update"] = self.refresh_after_update
-        if self.tables:
-            body["tables"] = self.tables
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name
+        if self.power_bi_model: body['power_bi_model'] = self.power_bi_model
+        if self.refresh_after_update is not None: body['refresh_after_update'] = self.refresh_after_update
+        if self.tables: body['tables'] = self.tables
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PowerBiTask:
         """Deserializes the PowerBiTask from a dictionary."""
-        return cls(
-            connection_resource_name=d.get("connection_resource_name", None),
-            power_bi_model=_from_dict(d, "power_bi_model", PowerBiModel),
-            refresh_after_update=d.get("refresh_after_update", None),
-            tables=_repeated_dict(d, "tables", PowerBiTable),
-            warehouse_id=d.get("warehouse_id", None),
-        )
+        return cls(connection_resource_name=d.get('connection_resource_name', None), power_bi_model=_from_dict(d, 'power_bi_model', PowerBiModel), refresh_after_update=d.get('refresh_after_update', None), tables=_repeated_dict(d, 'tables', PowerBiTable), warehouse_id=d.get('warehouse_id', None))
+
+
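The three Power BI dataclasses nest: PowerBiTask aggregates one PowerBiModel plus a list of PowerBiTable entries, and as_dict() recurses into them while as_shallow_dict() does not. A sketch with invented names:

    from databricks.sdk.service.jobs import PowerBiModel, PowerBiTable, PowerBiTask

    task = PowerBiTask(
        connection_resource_name='my-uc-connection',  # hypothetical UC connection
        power_bi_model=PowerBiModel(model_name='sales', workspace_name='analytics'),
        tables=[PowerBiTable(catalog='main', schema='retail', name='orders')],
        refresh_after_update=True,
    )
    deep = task.as_dict()             # nested objects become plain dicts
    shallow = task.as_shallow_dict()  # nested objects stay as dataclass instances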


 @dataclass
 class PythonWheelTask:
     package_name: str
     """Name of the package to execute"""
-
+
     entry_point: str
     """Named entry point to use, if it does not exist in the metadata of the package it executes the
     function from the package directly using `$packageName.$entryPoint()`"""
-
-    named_parameters: Optional[Dict[str, str]] = None
+
+    named_parameters: Optional[Dict[str,str]] = None
     """Command-line parameters passed to Python wheel task in the form of `["--name=task",
     "--data=dbfs:/path/to/data.json"]`. Leave it empty if `parameters` is not null."""
-
+
     parameters: Optional[List[str]] = None
     """Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is
     not null."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PythonWheelTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.entry_point is not None:
-            body["entry_point"] = self.entry_point
-        if self.named_parameters:
-            body["named_parameters"] = self.named_parameters
-        if self.package_name is not None:
-            body["package_name"] = self.package_name
-        if self.parameters:
-            body["parameters"] = [v for v in self.parameters]
+        if self.entry_point is not None: body['entry_point'] = self.entry_point
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.package_name is not None: body['package_name'] = self.package_name
+        if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PythonWheelTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.entry_point is not None:
-            body["entry_point"] = self.entry_point
-        if self.named_parameters:
-            body["named_parameters"] = self.named_parameters
-        if self.package_name is not None:
-            body["package_name"] = self.package_name
-        if self.parameters:
-            body["parameters"] = self.parameters
+        if self.entry_point is not None: body['entry_point'] = self.entry_point
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.package_name is not None: body['package_name'] = self.package_name
+        if self.parameters: body['parameters'] = self.parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PythonWheelTask:
         """Deserializes the PythonWheelTask from a dictionary."""
-        return cls(
-            entry_point=d.get("entry_point", None),
-            named_parameters=d.get("named_parameters", None),
-            package_name=d.get("package_name", None),
-            parameters=d.get("parameters", None),
-        )
+        return cls(entry_point=d.get('entry_point', None), named_parameters=d.get('named_parameters', None), package_name=d.get('package_name', None), parameters=d.get('parameters', None))
+
+
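The PythonWheelTask docstrings encode a mutual-exclusion rule: populate either parameters or named_parameters, never both. A sketch of the named form (package and entry point are made up):

    from databricks.sdk.service.jobs import PythonWheelTask

    task = PythonWheelTask(
        package_name='my_wheel',  # hypothetical wheel
        entry_point='main',       # falls back to my_wheel.main() if no metadata entry point
        named_parameters={'name': 'task', 'data': 'dbfs:/path/to/data.json'},
    )
    # parameters is left unset because named_parameters is provided.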

 @dataclass
@@ -4111,33 +3594,31 @@ class QueueDetails:
     queued due to reaching the per-job limit of concurrent job runs. *
     `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
     active run job tasks."""
-
+
     message: Optional[str] = None
     """A descriptive message with the queuing details. This field is unstructured, and its exact
     format is subject to change."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QueueDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.code is not None:
-            body["code"] = self.code.value
-        if self.message is not None:
-            body["message"] = self.message
+        if self.code is not None: body['code'] = self.code.value
+        if self.message is not None: body['message'] = self.message
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueueDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.code is not None:
-            body["code"] = self.code
-        if self.message is not None:
-            body["message"] = self.message
+        if self.code is not None: body['code'] = self.code
+        if self.message is not None: body['message'] = self.message
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueueDetails:
         """Deserializes the QueueDetails from a dictionary."""
-        return cls(code=_enum(d, "code", QueueDetailsCodeCode), message=d.get("message", None))
+        return cls(code=_enum(d, 'code', QueueDetailsCodeCode), message=d.get('message', None))
+
+


 class QueueDetailsCodeCode(Enum):
@@ -4146,35 +3627,34 @@ class QueueDetailsCodeCode(Enum):
     queued due to reaching the per-job limit of concurrent job runs. *
     `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
     active run job tasks."""
-
-    ACTIVE_RUNS_LIMIT_REACHED = "ACTIVE_RUNS_LIMIT_REACHED"
-    ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = "ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED"
-    MAX_CONCURRENT_RUNS_REACHED = "MAX_CONCURRENT_RUNS_REACHED"
-
+
+    ACTIVE_RUNS_LIMIT_REACHED = 'ACTIVE_RUNS_LIMIT_REACHED'
+    ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = 'ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED'
+    MAX_CONCURRENT_RUNS_REACHED = 'MAX_CONCURRENT_RUNS_REACHED'

 @dataclass
 class QueueSettings:
     enabled: bool
     """If true, enable queueing for the job. This is a required field."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QueueSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None:
-            body["enabled"] = self.enabled
+        if self.enabled is not None: body['enabled'] = self.enabled
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueueSettings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None:
-            body["enabled"] = self.enabled
+        if self.enabled is not None: body['enabled'] = self.enabled
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueueSettings:
         """Deserializes the QueueSettings from a dictionary."""
-        return cls(enabled=d.get("enabled", None))
+        return cls(enabled=d.get('enabled', None))
+
+
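QueueDetails is response-only while QueueSettings is the writable half; both serialize with the same pattern, the enum again passing through .value. A sketch decoding an invented payload:

    from databricks.sdk.service.jobs import (QueueDetails, QueueDetailsCodeCode,
                                             QueueSettings)

    assert QueueSettings(enabled=True).as_dict() == {'enabled': True}
    details = QueueDetails.from_dict({'code': 'MAX_CONCURRENT_RUNS_REACHED',
                                      'message': 'queued behind active runs'})
    assert details.code is QueueDetailsCodeCode.MAX_CONCURRENT_RUNS_REACHED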


 @dataclass
@@ -4187,101 +3667,77 @@ class RepairHistoryItem:
     * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
     `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
     optimized cluster performance."""
-
+
     end_time: Optional[int] = None
     """The end time of the (repaired) run."""
-
+
     id: Optional[int] = None
     """The ID of the repair. Only returned for the items that represent a repair in `repair_history`."""
-
+
     start_time: Optional[int] = None
     """The start time of the (repaired) run."""
-
+
     state: Optional[RunState] = None
     """Deprecated. Please use the `status` field instead."""
-
+
     status: Optional[RunStatus] = None
     """The current status of the run"""
-
+
     task_run_ids: Optional[List[int]] = None
     """The run IDs of the task runs that ran as part of this repair history item."""
-
+
     type: Optional[RepairHistoryItemType] = None
     """The repair history item type. Indicates whether a run is the original run or a repair run."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepairHistoryItem into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.effective_performance_target is not None:
-            body["effective_performance_target"] = self.effective_performance_target.value
-        if self.end_time is not None:
-            body["end_time"] = self.end_time
-        if self.id is not None:
-            body["id"] = self.id
-        if self.start_time is not None:
-            body["start_time"] = self.start_time
-        if self.state:
-            body["state"] = self.state.as_dict()
-        if self.status:
-            body["status"] = self.status.as_dict()
-        if self.task_run_ids:
-            body["task_run_ids"] = [v for v in self.task_run_ids]
-        if self.type is not None:
-            body["type"] = self.type.value
+        if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.id is not None: body['id'] = self.id
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
+        if self.task_run_ids: body['task_run_ids'] = [v for v in self.task_run_ids]
+        if self.type is not None: body['type'] = self.type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepairHistoryItem into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.effective_performance_target is not None:
-            body["effective_performance_target"] = self.effective_performance_target
-        if self.end_time is not None:
-            body["end_time"] = self.end_time
-        if self.id is not None:
-            body["id"] = self.id
-        if self.start_time is not None:
-            body["start_time"] = self.start_time
-        if self.state:
-            body["state"] = self.state
-        if self.status:
-            body["status"] = self.status
-        if self.task_run_ids:
-            body["task_run_ids"] = self.task_run_ids
-        if self.type is not None:
-            body["type"] = self.type
+        if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.id is not None: body['id'] = self.id
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.task_run_ids: body['task_run_ids'] = self.task_run_ids
+        if self.type is not None: body['type'] = self.type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem:
         """Deserializes the RepairHistoryItem from a dictionary."""
-        return cls(
-            effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget),
-            end_time=d.get("end_time", None),
-            id=d.get("id", None),
-            start_time=d.get("start_time", None),
-            state=_from_dict(d, "state", RunState),
-            status=_from_dict(d, "status", RunStatus),
-            task_run_ids=d.get("task_run_ids", None),
-            type=_enum(d, "type", RepairHistoryItemType),
-        )
+        return cls(effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), id=d.get('id', None), start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), status=_from_dict(d, 'status', RunStatus), task_run_ids=d.get('task_run_ids', None), type=_enum(d, 'type', RepairHistoryItemType))
+
+


 class RepairHistoryItemType(Enum):
     """The repair history item type. Indicates whether a run is the original run or a repair run."""
-
-    ORIGINAL = "ORIGINAL"
-    REPAIR = "REPAIR"
-
+
+    ORIGINAL = 'ORIGINAL'
+    REPAIR = 'REPAIR'
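RepairHistoryItemType is what distinguishes the original run from its repairs when repair_history is read back. A decoding sketch with invented IDs and timestamps:

    from databricks.sdk.service.jobs import RepairHistoryItem, RepairHistoryItemType

    item = RepairHistoryItem.from_dict({'type': 'REPAIR', 'id': 7,
                                        'start_time': 1717570000000,
                                        'task_run_ids': [111, 222]})
    assert item.type is RepairHistoryItemType.REPAIR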

 @dataclass
 class RepairRun:
     run_id: int
     """The job run ID of the run to repair. The run must not be in progress."""
-
+
     dbt_commands: Optional[List[str]] = None
     """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
     deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`"""
-
+
     jar_params: Optional[List[str]] = None
     """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe",
     "35"]`. The parameters are used to invoke the main function of the main class specified in the
@@ -4292,15 +3748,15 @@ class RepairRun:
     Use [Task parameter variables] to set parameters containing information about job runs.

     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    job_parameters: Optional[Dict[str, str]] = None
+
+    job_parameters: Optional[Dict[str,str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
-
+
     latest_repair_id: Optional[int] = None
     """The ID of the latest repair. This parameter is not required when repairing a run for the first
     time, but must be provided on subsequent requests to repair the same run."""
-
-    notebook_params: Optional[Dict[str, str]] = None
+
+    notebook_params: Optional[Dict[str,str]] = None
     """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
     "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
     [dbutils.widgets.get] function.
@@ -4316,7 +3772,7 @@ class RepairRun:

     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
-
+
     performance_target: Optional[PerformanceTarget] = None
     """The performance mode on a serverless job. The performance target determines the level of compute
     performance or cost-efficiency for the run. This field overrides the performance target defined
@@ -4325,12 +3781,12 @@ class RepairRun:
     * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
     `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
     optimized cluster performance."""
-
+
     pipeline_params: Optional[PipelineParams] = None
     """Controls whether the pipeline should perform a full refresh"""
-
-    python_named_params: Optional[Dict[str, str]] = None
-
+
+    python_named_params: Optional[Dict[str,str]] = None
+
     python_params: Optional[List[str]] = None
     """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe",
     "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon
@@ -4346,18 +3802,18 @@ class RepairRun:
     emojis.

     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
+
     rerun_all_failed_tasks: Optional[bool] = None
     """If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be
     used."""
-
+
     rerun_dependent_tasks: Optional[bool] = None
     """If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were
     previously successful. Can be also used in combination with `rerun_all_failed_tasks`."""
-
+
     rerun_tasks: Optional[List[str]] = None
     """The task keys of the task runs to repair."""
-
+
     spark_submit_params: Optional[List[str]] = None
     """A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
     ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit
@@ -4374,164 +3830,118 @@ class RepairRun:
     emojis.

     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
-
-    sql_params: Optional[Dict[str, str]] = None
+
+    sql_params: Optional[Dict[str,str]] = None
     """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
     doe", "age": "35"}`. The SQL alert task does not support custom parameters."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepairRun into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbt_commands:
-            body["dbt_commands"] = [v for v in self.dbt_commands]
-        if self.jar_params:
-            body["jar_params"] = [v for v in self.jar_params]
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.latest_repair_id is not None:
-            body["latest_repair_id"] = self.latest_repair_id
-        if self.notebook_params:
-            body["notebook_params"] = self.notebook_params
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target.value
-        if self.pipeline_params:
-            body["pipeline_params"] = self.pipeline_params.as_dict()
-        if self.python_named_params:
-            body["python_named_params"] = self.python_named_params
-        if self.python_params:
-            body["python_params"] = [v for v in self.python_params]
-        if self.rerun_all_failed_tasks is not None:
-            body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks
-        if self.rerun_dependent_tasks is not None:
-            body["rerun_dependent_tasks"] = self.rerun_dependent_tasks
-        if self.rerun_tasks:
-            body["rerun_tasks"] = [v for v in self.rerun_tasks]
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.spark_submit_params:
-            body["spark_submit_params"] = [v for v in self.spark_submit_params]
-        if self.sql_params:
-            body["sql_params"] = self.sql_params
+        if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands]
+        if self.jar_params: body['jar_params'] = [v for v in self.jar_params]
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = [v for v in self.python_params]
+        if self.rerun_all_failed_tasks is not None: body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks
+        if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks
+        if self.rerun_tasks: body['rerun_tasks'] = [v for v in self.rerun_tasks]
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params]
+        if self.sql_params: body['sql_params'] = self.sql_params
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepairRun into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbt_commands:
-            body["dbt_commands"] = self.dbt_commands
-        if self.jar_params:
-            body["jar_params"] = self.jar_params
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.latest_repair_id is not None:
-            body["latest_repair_id"] = self.latest_repair_id
-        if self.notebook_params:
-            body["notebook_params"] = self.notebook_params
-        if self.performance_target is not None:
-            body["performance_target"] = self.performance_target
-        if self.pipeline_params:
-            body["pipeline_params"] = self.pipeline_params
-        if self.python_named_params:
-            body["python_named_params"] = self.python_named_params
-        if self.python_params:
-            body["python_params"] = self.python_params
-        if self.rerun_all_failed_tasks is not None:
-            body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks
-        if self.rerun_dependent_tasks is not None:
-            body["rerun_dependent_tasks"] = self.rerun_dependent_tasks
-        if self.rerun_tasks:
-            body["rerun_tasks"] = self.rerun_tasks
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.spark_submit_params:
-            body["spark_submit_params"] = self.spark_submit_params
-        if self.sql_params:
-            body["sql_params"] = self.sql_params
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.performance_target is not None: body['performance_target'] = self.performance_target
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.rerun_all_failed_tasks is not None: body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks
+        if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks
+        if self.rerun_tasks: body['rerun_tasks'] = self.rerun_tasks
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepairRun:
         """Deserializes the RepairRun from a dictionary."""
-        return cls(
-            dbt_commands=d.get("dbt_commands", None),
-            jar_params=d.get("jar_params", None),
-            job_parameters=d.get("job_parameters", None),
-            latest_repair_id=d.get("latest_repair_id", None),
-            notebook_params=d.get("notebook_params", None),
-            performance_target=_enum(d, "performance_target", PerformanceTarget),
-            pipeline_params=_from_dict(d, "pipeline_params", PipelineParams),
-            python_named_params=d.get("python_named_params", None),
-            python_params=d.get("python_params", None),
-            rerun_all_failed_tasks=d.get("rerun_all_failed_tasks", None),
-            rerun_dependent_tasks=d.get("rerun_dependent_tasks", None),
-            rerun_tasks=d.get("rerun_tasks", None),
-            run_id=d.get("run_id", None),
-            spark_submit_params=d.get("spark_submit_params", None),
-            sql_params=d.get("sql_params", None),
-        )
+        return cls(dbt_commands=d.get('dbt_commands', None), jar_params=d.get('jar_params', None), job_parameters=d.get('job_parameters', None), latest_repair_id=d.get('latest_repair_id', None), notebook_params=d.get('notebook_params', None), performance_target=_enum(d, 'performance_target', PerformanceTarget), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), rerun_all_failed_tasks=d.get('rerun_all_failed_tasks', None), rerun_dependent_tasks=d.get('rerun_dependent_tasks', None), rerun_tasks=d.get('rerun_tasks', None), run_id=d.get('run_id', None), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None))
+
+


 @dataclass
 class RepairRunResponse:
     """Run repair was initiated."""
-
+
     repair_id: Optional[int] = None
     """The ID of the repair. Must be provided in subsequent repairs using the `latest_repair_id` field
     to ensure sequential repairs."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepairRunResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.repair_id is not None:
-            body["repair_id"] = self.repair_id
+        if self.repair_id is not None: body['repair_id'] = self.repair_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepairRunResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.repair_id is not None:
-            body["repair_id"] = self.repair_id
+        if self.repair_id is not None: body['repair_id'] = self.repair_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepairRunResponse:
         """Deserializes the RepairRunResponse from a dictionary."""
-        return cls(repair_id=d.get("repair_id", None))
+        return cls(repair_id=d.get('repair_id', None))
+
+
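RepairRun and RepairRunResponse are designed to chain: each response's repair_id becomes the next request's latest_repair_id for the same run. A hedged sketch through the workspace client (IDs and task keys are placeholders):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()  # assumes credentials are configured in the environment
    w.jobs.repair_run(run_id=12345, rerun_tasks=['ingest'])
    # A second repair of run 12345 must pass the repair_id returned above
    # as latest_repair_id, per the field docstrings.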


 @dataclass
 class ResetJob:
     job_id: int
     """The canonical identifier of the job to reset. This field is required."""
-
+
     new_settings: JobSettings
     """The new settings of the job. These settings completely replace the old settings. Changes to
     the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other fields
     are applied to future runs only."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ResetJob into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.new_settings:
-            body["new_settings"] = self.new_settings.as_dict()
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResetJob into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None:
-            body["job_id"] = self.job_id
-        if self.new_settings:
-            body["new_settings"] = self.new_settings
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.new_settings: body['new_settings'] = self.new_settings
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResetJob:
         """Deserializes the ResetJob from a dictionary."""
-        return cls(job_id=d.get("job_id", None), new_settings=_from_dict(d, "new_settings", JobSettings))
+        return cls(job_id=d.get('job_id', None), new_settings=_from_dict(d, 'new_settings', JobSettings))
+
+
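ResetJob is a full replacement, not a patch: per the docstring above, new_settings overwrites every field of the job's existing settings. A sketch (job ID and name are placeholders):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.jobs import JobSettings

    w = WorkspaceClient()
    # Anything omitted from new_settings is dropped from the job.
    w.jobs.reset(job_id=12345, new_settings=JobSettings(name='nightly-etl'))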

 @dataclass
@@ -4550,314 +3960,281 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> ResetResponse:
         """Deserializes the ResetResponse from a dictionary."""
         return cls()
+
+


 @dataclass
 class ResolvedConditionTaskValues:
     left: Optional[str] = None
-
+
     right: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedConditionTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.left is not None:
-            body["left"] = self.left
-        if self.right is not None:
-            body["right"] = self.right
+        if self.left is not None: body['left'] = self.left
+        if self.right is not None: body['right'] = self.right
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedConditionTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.left is not None:
-            body["left"] = self.left
-        if self.right is not None:
-            body["right"] = self.right
+        if self.left is not None: body['left'] = self.left
+        if self.right is not None: body['right'] = self.right
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedConditionTaskValues:
         """Deserializes the ResolvedConditionTaskValues from a dictionary."""
-        return cls(left=d.get("left", None), right=d.get("right", None))
+        return cls(left=d.get('left', None), right=d.get('right', None))
+
+


 @dataclass
 class ResolvedDbtTaskValues:
     commands: Optional[List[str]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedDbtTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.commands:
-            body["commands"] = [v for v in self.commands]
+        if self.commands: body['commands'] = [v for v in self.commands]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedDbtTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.commands:
-            body["commands"] = self.commands
+        if self.commands: body['commands'] = self.commands
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedDbtTaskValues:
         """Deserializes the ResolvedDbtTaskValues from a dictionary."""
-        return cls(commands=d.get("commands", None))
+        return cls(commands=d.get('commands', None))
+
+


 @dataclass
 class ResolvedNotebookTaskValues:
-    base_parameters: Optional[Dict[str, str]] = None
-
+    base_parameters: Optional[Dict[str,str]] = None
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedNotebookTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.base_parameters:
-            body["base_parameters"] = self.base_parameters
+        if self.base_parameters: body['base_parameters'] = self.base_parameters
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedNotebookTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.base_parameters:
-            body["base_parameters"] = self.base_parameters
+        if self.base_parameters: body['base_parameters'] = self.base_parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedNotebookTaskValues:
         """Deserializes the ResolvedNotebookTaskValues from a dictionary."""
-        return cls(base_parameters=d.get("base_parameters", None))
+        return cls(base_parameters=d.get('base_parameters', None))
+
+


 @dataclass
 class ResolvedParamPairValues:
-    parameters: Optional[Dict[str, str]] = None
-
+    parameters: Optional[Dict[str,str]] = None
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedParamPairValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters:
-            body["parameters"] = self.parameters
+        if self.parameters: body['parameters'] = self.parameters
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedParamPairValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters:
-            body["parameters"] = self.parameters
+        if self.parameters: body['parameters'] = self.parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedParamPairValues:
         """Deserializes the ResolvedParamPairValues from a dictionary."""
-        return cls(parameters=d.get("parameters", None))
+        return cls(parameters=d.get('parameters', None))
+
+


 @dataclass
 class ResolvedPythonWheelTaskValues:
-    named_parameters: Optional[Dict[str, str]] = None
-
+    named_parameters: Optional[Dict[str,str]] = None
+
     parameters: Optional[List[str]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedPythonWheelTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.named_parameters:
-            body["named_parameters"] = self.named_parameters
-        if self.parameters:
-            body["parameters"] = [v for v in self.parameters]
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedPythonWheelTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.named_parameters:
-            body["named_parameters"] = self.named_parameters
-        if self.parameters:
-            body["parameters"] = self.parameters
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.parameters: body['parameters'] = self.parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedPythonWheelTaskValues:
         """Deserializes the ResolvedPythonWheelTaskValues from a dictionary."""
-        return cls(named_parameters=d.get("named_parameters", None), parameters=d.get("parameters", None))
+        return cls(named_parameters=d.get('named_parameters', None), parameters=d.get('parameters', None))
+
+


 @dataclass
 class ResolvedRunJobTaskValues:
-    job_parameters: Optional[Dict[str, str]] = None
-
-    parameters: Optional[Dict[str, str]] = None
-
+    job_parameters: Optional[Dict[str,str]] = None
+
+    parameters: Optional[Dict[str,str]] = None
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedRunJobTaskValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.parameters:
-            body["parameters"] = self.parameters
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.parameters: body['parameters'] = self.parameters
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedRunJobTaskValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_parameters:
-            body["job_parameters"] = self.job_parameters
-        if self.parameters:
-            body["parameters"] = self.parameters
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.parameters: body['parameters'] = self.parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedRunJobTaskValues:
         """Deserializes the ResolvedRunJobTaskValues from a dictionary."""
-        return cls(job_parameters=d.get("job_parameters", None), parameters=d.get("parameters", None))
+        return cls(job_parameters=d.get('job_parameters', None), parameters=d.get('parameters', None))
+
+


 @dataclass
 class ResolvedStringParamsValues:
     parameters: Optional[List[str]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedStringParamsValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters:
-            body["parameters"] = [v for v in self.parameters]
+        if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedStringParamsValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters:
-            body["parameters"] = self.parameters
+        if self.parameters: body['parameters'] = self.parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedStringParamsValues:
         """Deserializes the ResolvedStringParamsValues from a dictionary."""
-        return cls(parameters=d.get("parameters", None))
+        return cls(parameters=d.get('parameters', None))
+
+


 @dataclass
 class ResolvedValues:
     condition_task: Optional[ResolvedConditionTaskValues] = None
-
+
     dbt_task: Optional[ResolvedDbtTaskValues] = None
-
+
     notebook_task: Optional[ResolvedNotebookTaskValues] = None
-
+
     python_wheel_task: Optional[ResolvedPythonWheelTaskValues] = None
-
+
     run_job_task: Optional[ResolvedRunJobTaskValues] = None
-
+
     simulation_task: Optional[ResolvedParamPairValues] = None
-
+
     spark_jar_task: Optional[ResolvedStringParamsValues] = None
-
+
     spark_python_task: Optional[ResolvedStringParamsValues] = None
-
+
     spark_submit_task: Optional[ResolvedStringParamsValues] = None
-
+
     sql_task: Optional[ResolvedParamPairValues] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ResolvedValues into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition_task:
-            body["condition_task"] = self.condition_task.as_dict()
-        if self.dbt_task:
-            body["dbt_task"] = self.dbt_task.as_dict()
-        if self.notebook_task:
-            body["notebook_task"] = self.notebook_task.as_dict()
-        if self.python_wheel_task:
-            body["python_wheel_task"] = self.python_wheel_task.as_dict()
-        if self.run_job_task:
-            body["run_job_task"] = self.run_job_task.as_dict()
-        if self.simulation_task:
-            body["simulation_task"] = self.simulation_task.as_dict()
-        if self.spark_jar_task:
-            body["spark_jar_task"] = self.spark_jar_task.as_dict()
-        if self.spark_python_task:
-            body["spark_python_task"] = self.spark_python_task.as_dict()
-        if self.spark_submit_task:
-            body["spark_submit_task"] = self.spark_submit_task.as_dict()
-        if self.sql_task:
-            body["sql_task"] = self.sql_task.as_dict()
+        if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
+        if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
+        if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict()
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict()
+        if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict()
+        if self.simulation_task: body['simulation_task'] = self.simulation_task.as_dict()
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict()
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict()
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict()
+        if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ResolvedValues into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition_task:
-            body["condition_task"] = self.condition_task
-        if self.dbt_task:
-            body["dbt_task"] = self.dbt_task
-        if self.notebook_task:
-            body["notebook_task"] = self.notebook_task
-        if self.python_wheel_task:
-            body["python_wheel_task"] = self.python_wheel_task
-        if self.run_job_task:
-            body["run_job_task"] = self.run_job_task
-        if self.simulation_task:
-            body["simulation_task"] = self.simulation_task
-        if self.spark_jar_task:
-            body["spark_jar_task"] = self.spark_jar_task
-        if self.spark_python_task:
-            body["spark_python_task"] = self.spark_python_task
-        if self.spark_submit_task:
-            body["spark_submit_task"] = self.spark_submit_task
-        if self.sql_task:
-            body["sql_task"] = self.sql_task
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.simulation_task: body['simulation_task'] = self.simulation_task
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ResolvedValues:
         """Deserializes the ResolvedValues from a dictionary."""
-        return cls(
-            condition_task=_from_dict(d, "condition_task", ResolvedConditionTaskValues),
-            dbt_task=_from_dict(d, "dbt_task", ResolvedDbtTaskValues),
-            notebook_task=_from_dict(d, "notebook_task", ResolvedNotebookTaskValues),
-            python_wheel_task=_from_dict(d, "python_wheel_task", ResolvedPythonWheelTaskValues),
-            run_job_task=_from_dict(d, "run_job_task", ResolvedRunJobTaskValues),
-            simulation_task=_from_dict(d, "simulation_task", ResolvedParamPairValues),
-            spark_jar_task=_from_dict(d, "spark_jar_task", ResolvedStringParamsValues),
-            spark_python_task=_from_dict(d, "spark_python_task", ResolvedStringParamsValues),
-            spark_submit_task=_from_dict(d, "spark_submit_task", ResolvedStringParamsValues),
-            sql_task=_from_dict(d, "sql_task", ResolvedParamPairValues),
-        )
+        return cls(condition_task=_from_dict(d, 'condition_task', ResolvedConditionTaskValues), dbt_task=_from_dict(d, 'dbt_task', ResolvedDbtTaskValues), notebook_task=_from_dict(d, 'notebook_task', ResolvedNotebookTaskValues), python_wheel_task=_from_dict(d, 'python_wheel_task', ResolvedPythonWheelTaskValues), run_job_task=_from_dict(d, 'run_job_task', ResolvedRunJobTaskValues), simulation_task=_from_dict(d, 'simulation_task', ResolvedParamPairValues), spark_jar_task=_from_dict(d, 'spark_jar_task', ResolvedStringParamsValues), spark_python_task=_from_dict(d, 'spark_python_task', ResolvedStringParamsValues), spark_submit_task=_from_dict(d, 'spark_submit_task', ResolvedStringParamsValues), sql_task=_from_dict(d, 'sql_task', ResolvedParamPairValues))
+
+
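ResolvedValues gathers the post-resolution parameters for every task flavor; typically only the field matching the task's type is populated. Decoding an invented payload shows the nesting:

    from databricks.sdk.service.jobs import ResolvedValues

    resolved = ResolvedValues.from_dict(
        {'notebook_task': {'base_parameters': {'env': 'dev', 'run_date': '2025-06-05'}}})
    assert resolved.notebook_task.base_parameters['env'] == 'dev'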


 @dataclass
 class Run:
     """Run was retrieved successfully"""
-
+
     attempt_number: Optional[int] = None
     """The sequence number of this run attempt for a triggered job run. The initial attempt of a run
     has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy
     (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""
-
+
     cleanup_duration: Optional[int] = None
     """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts.
     The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the
     `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. The total
     duration of a multitask job run is the value of the `run_duration` field."""
-
+
     cluster_instance: Optional[ClusterInstance] = None
     """The cluster used for this run. If the run is specified to use a new cluster, this field is set
     once the Jobs service has requested a cluster for the run."""
-
+
     cluster_spec: Optional[ClusterSpec] = None
     """A snapshot of the job’s cluster specification when this run was created."""
-
+
     creator_user_name: Optional[str] = None
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
-
+
     description: Optional[str] = None
     """Description of the run"""
-
+
     effective_performance_target: Optional[PerformanceTarget] = None
     """The actual performance target used by the serverless run during execution. This can differ from
     the client-set performance target on the request depending on whether the performance mode is
@@ -4866,18 +4243,18 @@ class Run:
     * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
     `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
     optimized cluster performance."""
-
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
-
+
     execution_duration: Optional[int] = None
     """The time in milliseconds it took to execute the commands in the JAR or notebook until they
     completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration
     of a task run is the sum of the `setup_duration`, `execution_duration`, and the
     `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The total
     duration of a multitask job run is the value of the `run_duration` field."""
-
+
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.

     Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
@@ -4887,97 +4264,97 @@ class Run:
     Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
     are used, `git_source` must be defined on the job."""
-
+
     has_more: Optional[bool] = None
     """Indicates if the run has more array properties (`tasks`, `job_clusters`) that are not shown.
     They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
     :method:jobs/listruns requests with `expand_tasks=true`."""
-
+
     iterations: Optional[List[RunTask]] = None
     """Only populated by for-each iterations. The parent for-each task is located in tasks array."""
-
+
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
     task settings. If more than 100 job clusters are available, you can paginate through them using
     :method:jobs/getrun."""
-
+
     job_id: Optional[int] = None
     """The canonical identifier of the job that contains this run."""
-
+
     job_parameters: Optional[List[JobParameter]] = None
     """Job-level parameters used in the run"""
-
+
     job_run_id: Optional[int] = None
     """ID of the job run that this run belongs to. For legacy and single-task job runs the field is
     populated with the job run ID. For task runs, the field is populated with the ID of the job run
     that the task run belongs to."""
-
+
     next_page_token: Optional[str] = None
     """A token that can be used to list the next page of array properties."""
-
+
     number_in_job: Optional[int] = None
     """A unique identifier for this job run. This is set to the same value as `run_id`."""
-
+
     original_attempt_run_id: Optional[int] = None
     """If this run is a retry of a prior run attempt, this field contains the run_id of the original
     attempt; otherwise, it is the same as the run_id."""
-
+
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
-
+
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
-
+
     repair_history: Optional[List[RepairHistoryItem]] = None
     """The repair history of the run."""
-
+
     run_duration: Optional[int] = None
     """The time in milliseconds it took the job run and all of its repairs to finish."""
-
+
     run_id: Optional[int] = None
     """The canonical identifier of the run. This ID is unique across all runs of all jobs."""
-
+
     run_name: Optional[str] = None
     """An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding."""
-
+
     run_page_url: Optional[str] = None
     """The URL to the detail page of the run."""
-
+
     run_type: Optional[RunType] = None
     """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. *
     `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit
     run. A run created with :method:jobs/submit.

     [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow"""
-
+
     schedule: Optional[CronSchedule] = None
     """The cron schedule that triggered this run if it was triggered by the periodic scheduler."""
-
+
     setup_duration: Optional[int] = None
     """The time in milliseconds it took to set up the cluster. For runs that run on new clusters this
     is the cluster creation time, for runs that run on existing clusters this time should be very short.
The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + start_time: Optional[int] = None """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC). This may not be the time when the job task starts executing, for example, if the job is scheduled to run on a new cluster, this is the time the cluster creation call is issued.""" - + state: Optional[RunState] = None """Deprecated. Please use the `status` field instead.""" - + status: Optional[RunStatus] = None """The current status of the run""" - + tasks: Optional[List[RunTask]] = None """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object root to determine if more results are available.""" - + trigger: Optional[TriggerType] = None """The type of trigger that fired this run. @@ -4989,200 +4366,96 @@ class Run: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by a user to manually restart a continuous job run.""" - + trigger_info: Optional[TriggerInfo] = None """Additional details about what triggered the run""" - + def as_dict(self) -> dict: """Serializes the Run into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attempt_number is not None: - body["attempt_number"] = self.attempt_number - if self.cleanup_duration is not None: - body["cleanup_duration"] = self.cleanup_duration - if self.cluster_instance: - body["cluster_instance"] = self.cluster_instance.as_dict() - if self.cluster_spec: - body["cluster_spec"] = self.cluster_spec.as_dict() - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.description is not None: - body["description"] = self.description - if self.effective_performance_target is not None: - body["effective_performance_target"] = self.effective_performance_target.value - if self.end_time is not None: - body["end_time"] = self.end_time - if self.execution_duration is not None: - body["execution_duration"] = self.execution_duration - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.has_more is not None: - body["has_more"] = self.has_more - if self.iterations: - body["iterations"] = [v.as_dict() for v in self.iterations] - if self.job_clusters: - body["job_clusters"] = [v.as_dict() for v in self.job_clusters] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = [v.as_dict() for v in self.job_parameters] - if self.job_run_id is not None: - body["job_run_id"] = self.job_run_id - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.number_in_job is not None: - body["number_in_job"] = self.number_in_job - if self.original_attempt_run_id is not None: - body["original_attempt_run_id"] = self.original_attempt_run_id - if self.overriding_parameters: - body["overriding_parameters"] = self.overriding_parameters.as_dict() - if self.queue_duration is not None: - body["queue_duration"] = self.queue_duration - if self.repair_history: -
body["repair_history"] = [v.as_dict() for v in self.repair_history] - if self.run_duration is not None: - body["run_duration"] = self.run_duration - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_page_url is not None: - body["run_page_url"] = self.run_page_url - if self.run_type is not None: - body["run_type"] = self.run_type.value - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.setup_duration is not None: - body["setup_duration"] = self.setup_duration - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state.as_dict() - if self.status: - body["status"] = self.status.as_dict() - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.trigger is not None: - body["trigger"] = self.trigger.value - if self.trigger_info: - body["trigger_info"] = self.trigger_info.as_dict() + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() + if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value + if self.end_time is not None: body['end_time'] = self.end_time + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.has_more is not None: body['has_more'] = self.has_more + if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations] + if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id + if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict() + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history] + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.run_type is not None: body['run_type'] = self.run_type.value + if self.schedule: body['schedule'] = self.schedule.as_dict() + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state.as_dict() + if self.status: body['status'] = self.status.as_dict() + if self.tasks: 
body['tasks'] = [v.as_dict() for v in self.tasks] + if self.trigger is not None: body['trigger'] = self.trigger.value + if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Run into a shallow dictionary of its immediate attributes.""" body = {} - if self.attempt_number is not None: - body["attempt_number"] = self.attempt_number - if self.cleanup_duration is not None: - body["cleanup_duration"] = self.cleanup_duration - if self.cluster_instance: - body["cluster_instance"] = self.cluster_instance - if self.cluster_spec: - body["cluster_spec"] = self.cluster_spec - if self.creator_user_name is not None: - body["creator_user_name"] = self.creator_user_name - if self.description is not None: - body["description"] = self.description - if self.effective_performance_target is not None: - body["effective_performance_target"] = self.effective_performance_target - if self.end_time is not None: - body["end_time"] = self.end_time - if self.execution_duration is not None: - body["execution_duration"] = self.execution_duration - if self.git_source: - body["git_source"] = self.git_source - if self.has_more is not None: - body["has_more"] = self.has_more - if self.iterations: - body["iterations"] = self.iterations - if self.job_clusters: - body["job_clusters"] = self.job_clusters - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.job_run_id is not None: - body["job_run_id"] = self.job_run_id - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.number_in_job is not None: - body["number_in_job"] = self.number_in_job - if self.original_attempt_run_id is not None: - body["original_attempt_run_id"] = self.original_attempt_run_id - if self.overriding_parameters: - body["overriding_parameters"] = self.overriding_parameters - if self.queue_duration is not None: - body["queue_duration"] = self.queue_duration - if self.repair_history: - body["repair_history"] = self.repair_history - if self.run_duration is not None: - body["run_duration"] = self.run_duration - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_page_url is not None: - body["run_page_url"] = self.run_page_url - if self.run_type is not None: - body["run_type"] = self.run_type - if self.schedule: - body["schedule"] = self.schedule - if self.setup_duration is not None: - body["setup_duration"] = self.setup_duration - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state - if self.status: - body["status"] = self.status - if self.tasks: - body["tasks"] = self.tasks - if self.trigger is not None: - body["trigger"] = self.trigger - if self.trigger_info: - body["trigger_info"] = self.trigger_info + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance + if self.cluster_spec: body['cluster_spec'] = self.cluster_spec + if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.description is not None: body['description'] = self.description + if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target + 
if self.end_time is not None: body['end_time'] = self.end_time + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.git_source: body['git_source'] = self.git_source + if self.has_more is not None: body['has_more'] = self.has_more + if self.iterations: body['iterations'] = self.iterations + if self.job_clusters: body['job_clusters'] = self.job_clusters + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id + if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.repair_history: body['repair_history'] = self.repair_history + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.run_type is not None: body['run_type'] = self.run_type + if self.schedule: body['schedule'] = self.schedule + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state + if self.status: body['status'] = self.status + if self.tasks: body['tasks'] = self.tasks + if self.trigger is not None: body['trigger'] = self.trigger + if self.trigger_info: body['trigger_info'] = self.trigger_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Run: """Deserializes the Run from a dictionary.""" - return cls( - attempt_number=d.get("attempt_number", None), - cleanup_duration=d.get("cleanup_duration", None), - cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance), - cluster_spec=_from_dict(d, "cluster_spec", ClusterSpec), - creator_user_name=d.get("creator_user_name", None), - description=d.get("description", None), - effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), - end_time=d.get("end_time", None), - execution_duration=d.get("execution_duration", None), - git_source=_from_dict(d, "git_source", GitSource), - has_more=d.get("has_more", None), - iterations=_repeated_dict(d, "iterations", RunTask), - job_clusters=_repeated_dict(d, "job_clusters", JobCluster), - job_id=d.get("job_id", None), - job_parameters=_repeated_dict(d, "job_parameters", JobParameter), - job_run_id=d.get("job_run_id", None), - next_page_token=d.get("next_page_token", None), - number_in_job=d.get("number_in_job", None), - original_attempt_run_id=d.get("original_attempt_run_id", None), - overriding_parameters=_from_dict(d, "overriding_parameters", RunParameters), - queue_duration=d.get("queue_duration", None), - repair_history=_repeated_dict(d, "repair_history", RepairHistoryItem), - run_duration=d.get("run_duration", None), - run_id=d.get("run_id", None), - run_name=d.get("run_name", None), - run_page_url=d.get("run_page_url", None), - run_type=_enum(d, "run_type", RunType), - schedule=_from_dict(d, "schedule", CronSchedule), - 
setup_duration=d.get("setup_duration", None), - start_time=d.get("start_time", None), - state=_from_dict(d, "state", RunState), - status=_from_dict(d, "status", RunStatus), - tasks=_repeated_dict(d, "tasks", RunTask), - trigger=_enum(d, "trigger", TriggerType), - trigger_info=_from_dict(d, "trigger_info", TriggerInfo), - ) + return cls(attempt_number=d.get('attempt_number', None), cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), creator_user_name=d.get('creator_user_name', None), description=d.get('description', None), effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), has_more=d.get('has_more', None), iterations=_repeated_dict(d, 'iterations', RunTask), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), job_run_id=d.get('job_run_id', None), next_page_token=d.get('next_page_token', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), queue_duration=d.get('queue_duration', None), repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem), run_duration=d.get('run_duration', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_page_url=d.get('run_page_url', None), run_type=_enum(d, 'run_type', RunType), schedule=_from_dict(d, 'schedule', CronSchedule), setup_duration=d.get('setup_duration', None), start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), status=_from_dict(d, 'status', RunStatus), tasks=_repeated_dict(d, 'tasks', RunTask), trigger=_enum(d, 'trigger', TriggerType), trigger_info=_from_dict(d, 'trigger_info', TriggerInfo)) + + @dataclass @@ -5197,161 +4470,138 @@ class RunConditionTask: The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.""" - + left: str """The left operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + right: str """The right operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + outcome: Optional[str] = None """The condition expression evaluation result. Filled in if the task was successfully completed. 
Can be `"true"` or `"false"`""" - + def as_dict(self) -> dict: """Serializes the RunConditionTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.left is not None: - body["left"] = self.left - if self.op is not None: - body["op"] = self.op.value - if self.outcome is not None: - body["outcome"] = self.outcome - if self.right is not None: - body["right"] = self.right + if self.left is not None: body['left'] = self.left + if self.op is not None: body['op'] = self.op.value + if self.outcome is not None: body['outcome'] = self.outcome + if self.right is not None: body['right'] = self.right return body def as_shallow_dict(self) -> dict: """Serializes the RunConditionTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.left is not None: - body["left"] = self.left - if self.op is not None: - body["op"] = self.op - if self.outcome is not None: - body["outcome"] = self.outcome - if self.right is not None: - body["right"] = self.right + if self.left is not None: body['left'] = self.left + if self.op is not None: body['op'] = self.op + if self.outcome is not None: body['outcome'] = self.outcome + if self.right is not None: body['right'] = self.right return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunConditionTask: """Deserializes the RunConditionTask from a dictionary.""" - return cls( - left=d.get("left", None), - op=_enum(d, "op", ConditionTaskOp), - outcome=d.get("outcome", None), - right=d.get("right", None), - ) + return cls(left=d.get('left', None), op=_enum(d, 'op', ConditionTaskOp), outcome=d.get('outcome', None), right=d.get('right', None)) + + @dataclass class RunForEachTask: inputs: str """Array for task to iterate on. This can be a JSON string or a reference to an array parameter.""" - + task: Task """Configuration for the task that will be run for each element in the array""" - + concurrency: Optional[int] = None """An optional maximum allowed number of concurrent runs of the task. Set this value if you want to be able to execute multiple runs of the task concurrently.""" - + stats: Optional[ForEachStats] = None """Read only field. 
Populated for GetRun and ListRuns RPC calls and stores the execution stats of a for-each task""" - + def as_dict(self) -> dict: """Serializes the RunForEachTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.concurrency is not None: - body["concurrency"] = self.concurrency - if self.inputs is not None: - body["inputs"] = self.inputs - if self.stats: - body["stats"] = self.stats.as_dict() - if self.task: - body["task"] = self.task.as_dict() + if self.concurrency is not None: body['concurrency'] = self.concurrency + if self.inputs is not None: body['inputs'] = self.inputs + if self.stats: body['stats'] = self.stats.as_dict() + if self.task: body['task'] = self.task.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunForEachTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.concurrency is not None: - body["concurrency"] = self.concurrency - if self.inputs is not None: - body["inputs"] = self.inputs - if self.stats: - body["stats"] = self.stats - if self.task: - body["task"] = self.task + if self.concurrency is not None: body['concurrency'] = self.concurrency + if self.inputs is not None: body['inputs'] = self.inputs + if self.stats: body['stats'] = self.stats + if self.task: body['task'] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunForEachTask: """Deserializes the RunForEachTask from a dictionary.""" - return cls( - concurrency=d.get("concurrency", None), - inputs=d.get("inputs", None), - stats=_from_dict(d, "stats", ForEachStats), - task=_from_dict(d, "task", Task), - ) + return cls(concurrency=d.get('concurrency', None), inputs=d.get('inputs', None), stats=_from_dict(d, 'stats', ForEachStats), task=_from_dict(d, 'task', Task)) + + class RunIf(Enum): """An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`.
- + Possible values are: * `ALL_SUCCESS`: All dependencies have executed and succeeded * `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: All dependencies have failed""" - - ALL_DONE = "ALL_DONE" - ALL_FAILED = "ALL_FAILED" - ALL_SUCCESS = "ALL_SUCCESS" - AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED" - AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS" - NONE_FAILED = "NONE_FAILED" - + + ALL_DONE = 'ALL_DONE' + ALL_FAILED = 'ALL_FAILED' + ALL_SUCCESS = 'ALL_SUCCESS' + AT_LEAST_ONE_FAILED = 'AT_LEAST_ONE_FAILED' + AT_LEAST_ONE_SUCCESS = 'AT_LEAST_ONE_SUCCESS' + NONE_FAILED = 'NONE_FAILED' @dataclass class RunJobOutput: run_id: Optional[int] = None """The run id of the triggered job run""" - + def as_dict(self) -> dict: """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput: """Deserializes the RunJobOutput from a dictionary.""" - return cls(run_id=d.get("run_id", None)) + return cls(run_id=d.get('run_id', None)) + + @dataclass class RunJobTask: job_id: int """ID of the job to trigger.""" - + dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - + jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -5362,11 +4612,11 @@ class RunJobTask: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str, str]] = None + + job_parameters: Optional[Dict[str,str]] = None """Job-level parameters used to trigger the job.""" - - notebook_params: Optional[Dict[str, str]] = None + + notebook_params: Optional[Dict[str,str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. @@ -5382,12 +4632,12 @@ class RunJobTask: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str, str]] = None - + + python_named_params: Optional[Dict[str,str]] = None + python_params: Optional[List[str]] = None """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to Python file as command-line parameters. 
If specified upon @@ -5403,7 +4653,7 @@ class RunJobTask: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -5420,76 +4670,47 @@ class RunJobTask: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str, str]] = None + + sql_params: Optional[Dict[str,str]] = None """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - + def as_dict(self) -> dict: """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_commands: - body["dbt_commands"] = [v for v in self.dbt_commands] - if self.jar_params: - body["jar_params"] = [v for v in self.jar_params] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params.as_dict() - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = [v for v in self.python_params] - if self.spark_submit_params: - body["spark_submit_params"] = [v for v in self.spark_submit_params] - if self.sql_params: - body["sql_params"] = self.sql_params + if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands] + if self.jar_params: body['jar_params'] = [v for v in self.jar_params] + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = [v for v in self.python_params] + if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params] + if self.sql_params: body['sql_params'] = self.sql_params return body def as_shallow_dict(self) -> dict: """Serializes the RunJobTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_commands: - body["dbt_commands"] = self.dbt_commands - if self.jar_params: - body["jar_params"] = self.jar_params - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = self.python_params - if self.spark_submit_params: - body["spark_submit_params"] = self.spark_submit_params - if self.sql_params: - body["sql_params"] = self.sql_params + if self.dbt_commands: body['dbt_commands'] = self.dbt_commands + if self.jar_params: body['jar_params'] = self.jar_params + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: 
body['job_parameters'] = self.job_parameters + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = self.python_params + if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params + if self.sql_params: body['sql_params'] = self.sql_params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunJobTask: """Deserializes the RunJobTask from a dictionary.""" - return cls( - dbt_commands=d.get("dbt_commands", None), - jar_params=d.get("jar_params", None), - job_id=d.get("job_id", None), - job_parameters=d.get("job_parameters", None), - notebook_params=d.get("notebook_params", None), - pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), - python_named_params=d.get("python_named_params", None), - python_params=d.get("python_params", None), - spark_submit_params=d.get("spark_submit_params", None), - sql_params=d.get("sql_params", None), - ) + return cls(dbt_commands=d.get('dbt_commands', None), jar_params=d.get('jar_params', None), job_id=d.get('job_id', None), job_parameters=d.get('job_parameters', None), notebook_params=d.get('notebook_params', None), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None)) + + class RunLifeCycleState(Enum): @@ -5504,39 +4725,37 @@ class RunLifeCycleState(Enum): long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry.""" - - BLOCKED = "BLOCKED" - INTERNAL_ERROR = "INTERNAL_ERROR" - PENDING = "PENDING" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - SKIPPED = "SKIPPED" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - WAITING_FOR_RETRY = "WAITING_FOR_RETRY" - + + BLOCKED = 'BLOCKED' + INTERNAL_ERROR = 'INTERNAL_ERROR' + PENDING = 'PENDING' + QUEUED = 'QUEUED' + RUNNING = 'RUNNING' + SKIPPED = 'SKIPPED' + TERMINATED = 'TERMINATED' + TERMINATING = 'TERMINATING' + WAITING_FOR_RETRY = 'WAITING_FOR_RETRY' class RunLifecycleStateV2State(Enum): """The current state of the run.""" - - BLOCKED = "BLOCKED" - PENDING = "PENDING" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - WAITING = "WAITING" - + + BLOCKED = 'BLOCKED' + PENDING = 'PENDING' + QUEUED = 'QUEUED' + RUNNING = 'RUNNING' + TERMINATED = 'TERMINATED' + TERMINATING = 'TERMINATING' + WAITING = 'WAITING' @dataclass class RunNow: job_id: int """The ID of the job to be executed""" - + dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - + idempotency_token: Optional[str] = None """An optional token to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing @@ -5550,7 +4769,7 @@ class RunNow: For more information, see [How to ensure idempotency for jobs]. 
[How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - + jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -5561,11 +4780,11 @@ class RunNow: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str, str]] = None + + job_parameters: Optional[Dict[str,str]] = None """Job-level parameters used in the run. for example `"param": "overriding_val"`""" - - notebook_params: Optional[Dict[str, str]] = None + + notebook_params: Optional[Dict[str,str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. @@ -5581,11 +4800,11 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - + only: Optional[List[str]] = None """A list of task keys to run inside of the job. If this field is not provided, all tasks in the job will be run.""" - + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined @@ -5594,12 +4813,12 @@ class RunNow: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str, str]] = None - + + python_named_params: Optional[Dict[str,str]] = None + python_params: Optional[List[str]] = None """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon @@ -5615,10 +4834,10 @@ class RunNow: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + queue: Optional[QueueSettings] = None """The queue settings of the run.""" - + spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -5635,154 +4854,113 @@ class RunNow: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str, str]] = None + + sql_params: Optional[Dict[str,str]] = None """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. 
The SQL alert task does not support custom parameters.""" - + def as_dict(self) -> dict: """Serializes the RunNow into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_commands: - body["dbt_commands"] = [v for v in self.dbt_commands] - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.jar_params: - body["jar_params"] = [v for v in self.jar_params] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.only: - body["only"] = [v for v in self.only] - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params.as_dict() - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = [v for v in self.python_params] - if self.queue: - body["queue"] = self.queue.as_dict() - if self.spark_submit_params: - body["spark_submit_params"] = [v for v in self.spark_submit_params] - if self.sql_params: - body["sql_params"] = self.sql_params + if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands] + if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token + if self.jar_params: body['jar_params'] = [v for v in self.jar_params] + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.only: body['only'] = [v for v in self.only] + if self.performance_target is not None: body['performance_target'] = self.performance_target.value + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = [v for v in self.python_params] + if self.queue: body['queue'] = self.queue.as_dict() + if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params] + if self.sql_params: body['sql_params'] = self.sql_params return body def as_shallow_dict(self) -> dict: """Serializes the RunNow into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_commands: - body["dbt_commands"] = self.dbt_commands - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.jar_params: - body["jar_params"] = self.jar_params - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.only: - body["only"] = self.only - if self.performance_target is not None: - body["performance_target"] = self.performance_target - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = self.python_params - if self.queue: - body["queue"] = self.queue - if self.spark_submit_params: - body["spark_submit_params"] = self.spark_submit_params - if self.sql_params: - body["sql_params"] = self.sql_params + if self.dbt_commands: body['dbt_commands'] = self.dbt_commands + if 
self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token + if self.jar_params: body['jar_params'] = self.jar_params + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_parameters: body['job_parameters'] = self.job_parameters + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.only: body['only'] = self.only + if self.performance_target is not None: body['performance_target'] = self.performance_target + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = self.python_params + if self.queue: body['queue'] = self.queue + if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params + if self.sql_params: body['sql_params'] = self.sql_params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunNow: """Deserializes the RunNow from a dictionary.""" - return cls( - dbt_commands=d.get("dbt_commands", None), - idempotency_token=d.get("idempotency_token", None), - jar_params=d.get("jar_params", None), - job_id=d.get("job_id", None), - job_parameters=d.get("job_parameters", None), - notebook_params=d.get("notebook_params", None), - only=d.get("only", None), - performance_target=_enum(d, "performance_target", PerformanceTarget), - pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), - python_named_params=d.get("python_named_params", None), - python_params=d.get("python_params", None), - queue=_from_dict(d, "queue", QueueSettings), - spark_submit_params=d.get("spark_submit_params", None), - sql_params=d.get("sql_params", None), - ) + return cls(dbt_commands=d.get('dbt_commands', None), idempotency_token=d.get('idempotency_token', None), jar_params=d.get('jar_params', None), job_id=d.get('job_id', None), job_parameters=d.get('job_parameters', None), notebook_params=d.get('notebook_params', None), only=d.get('only', None), performance_target=_enum(d, 'performance_target', PerformanceTarget), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), queue=_from_dict(d, 'queue', QueueSettings), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None)) + + @dataclass class RunNowResponse: """Run was started successfully.""" - + number_in_job: Optional[int] = None """A unique identifier for this job run. 
This is set to the same value as `run_id`.""" - + run_id: Optional[int] = None """The globally unique ID of the newly triggered run.""" - + def as_dict(self) -> dict: """Serializes the RunNowResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.number_in_job is not None: - body["number_in_job"] = self.number_in_job - if self.run_id is not None: - body["run_id"] = self.run_id + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the RunNowResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.number_in_job is not None: - body["number_in_job"] = self.number_in_job - if self.run_id is not None: - body["run_id"] = self.run_id + if self.number_in_job is not None: body['number_in_job'] = self.number_in_job + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunNowResponse: """Deserializes the RunNowResponse from a dictionary.""" - return cls(number_in_job=d.get("number_in_job", None), run_id=d.get("run_id", None)) + return cls(number_in_job=d.get('number_in_job', None), run_id=d.get('run_id', None)) + + @dataclass class RunOutput: """Run output was retrieved successfully.""" - + clean_rooms_notebook_output: Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None """The output of a clean rooms notebook task, if available""" - + dashboard_output: Optional[DashboardTaskOutput] = None """The output of a dashboard task, if available""" - + + dbt_cloud_output: Optional[DbtCloudTaskOutput] = None + dbt_output: Optional[DbtOutput] = None """The output of a dbt task, if available.""" - + error: Optional[str] = None """An error message indicating why a task failed or why output is not available. The message is unstructured, and its exact format is subject to change.""" - + error_trace: Optional[str] = None """If there was an error executing the run, this field contains any available stack traces.""" - + info: Optional[str] = None - + logs: Optional[str] = None """The output from tasks that write to standard streams (stdout/stderr) such as spark_jar_task, spark_python_task, python_wheel_task. @@ -5790,13 +4968,13 @@ class RunOutput: It's not supported for the notebook_task, pipeline_task or spark_submit_task. Databricks restricts this API to return the last 5 MB of these logs.""" - + logs_truncated: Optional[bool] = None """Whether the logs are truncated.""" - + metadata: Optional[Run] = None """All details of the run except for its output.""" - + notebook_output: Optional[NotebookOutput] = None """The output of a notebook task, if available. A notebook task that terminates (either successfully or with a failure) without calling `dbutils.notebook.exit()` is considered to have @@ -5805,90 +4983,55 @@ class RunOutput: field to configure log storage for the job cluster. 
[ClusterLogConf]: https://docs.databricks.com/dev-tools/api/latest/clusters.html#clusterlogconf""" - + run_job_output: Optional[RunJobOutput] = None """The output of a run job task, if available""" - + sql_output: Optional[SqlOutput] = None """The output of a SQL task, if available.""" - + def as_dict(self) -> dict: """Serializes the RunOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms_notebook_output: - body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output.as_dict() - if self.dashboard_output: - body["dashboard_output"] = self.dashboard_output.as_dict() - if self.dbt_output: - body["dbt_output"] = self.dbt_output.as_dict() - if self.error is not None: - body["error"] = self.error - if self.error_trace is not None: - body["error_trace"] = self.error_trace - if self.info is not None: - body["info"] = self.info - if self.logs is not None: - body["logs"] = self.logs - if self.logs_truncated is not None: - body["logs_truncated"] = self.logs_truncated - if self.metadata: - body["metadata"] = self.metadata.as_dict() - if self.notebook_output: - body["notebook_output"] = self.notebook_output.as_dict() - if self.run_job_output: - body["run_job_output"] = self.run_job_output.as_dict() - if self.sql_output: - body["sql_output"] = self.sql_output.as_dict() + if self.clean_rooms_notebook_output: body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict() + if self.dashboard_output: body['dashboard_output'] = self.dashboard_output.as_dict() + if self.dbt_cloud_output: body['dbt_cloud_output'] = self.dbt_cloud_output.as_dict() + if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict() + if self.error is not None: body['error'] = self.error + if self.error_trace is not None: body['error_trace'] = self.error_trace + if self.info is not None: body['info'] = self.info + if self.logs is not None: body['logs'] = self.logs + if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated + if self.metadata: body['metadata'] = self.metadata.as_dict() + if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict() + if self.run_job_output: body['run_job_output'] = self.run_job_output.as_dict() + if self.sql_output: body['sql_output'] = self.sql_output.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms_notebook_output: - body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output - if self.dashboard_output: - body["dashboard_output"] = self.dashboard_output - if self.dbt_output: - body["dbt_output"] = self.dbt_output - if self.error is not None: - body["error"] = self.error - if self.error_trace is not None: - body["error_trace"] = self.error_trace - if self.info is not None: - body["info"] = self.info - if self.logs is not None: - body["logs"] = self.logs - if self.logs_truncated is not None: - body["logs_truncated"] = self.logs_truncated - if self.metadata: - body["metadata"] = self.metadata - if self.notebook_output: - body["notebook_output"] = self.notebook_output - if self.run_job_output: - body["run_job_output"] = self.run_job_output - if self.sql_output: - body["sql_output"] = self.sql_output + if self.clean_rooms_notebook_output: body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output + if self.dashboard_output: body['dashboard_output'] = self.dashboard_output + if self.dbt_cloud_output: body['dbt_cloud_output'] = 
self.dbt_cloud_output + if self.dbt_output: body['dbt_output'] = self.dbt_output + if self.error is not None: body['error'] = self.error + if self.error_trace is not None: body['error_trace'] = self.error_trace + if self.info is not None: body['info'] = self.info + if self.logs is not None: body['logs'] = self.logs + if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated + if self.metadata: body['metadata'] = self.metadata + if self.notebook_output: body['notebook_output'] = self.notebook_output + if self.run_job_output: body['run_job_output'] = self.run_job_output + if self.sql_output: body['sql_output'] = self.sql_output return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunOutput: """Deserializes the RunOutput from a dictionary.""" - return cls( - clean_rooms_notebook_output=_from_dict( - d, "clean_rooms_notebook_output", CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput - ), - dashboard_output=_from_dict(d, "dashboard_output", DashboardTaskOutput), - dbt_output=_from_dict(d, "dbt_output", DbtOutput), - error=d.get("error", None), - error_trace=d.get("error_trace", None), - info=d.get("info", None), - logs=d.get("logs", None), - logs_truncated=d.get("logs_truncated", None), - metadata=_from_dict(d, "metadata", Run), - notebook_output=_from_dict(d, "notebook_output", NotebookOutput), - run_job_output=_from_dict(d, "run_job_output", RunJobOutput), - sql_output=_from_dict(d, "sql_output", SqlOutput), - ) + return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output', CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput), dashboard_output=_from_dict(d, 'dashboard_output', DashboardTaskOutput), dbt_cloud_output=_from_dict(d, 'dbt_cloud_output', DbtCloudTaskOutput), dbt_output=_from_dict(d, 'dbt_output', DbtOutput), error=d.get('error', None), error_trace=d.get('error_trace', None), info=d.get('info', None), logs=d.get('logs', None), logs_truncated=d.get('logs_truncated', None), metadata=_from_dict(d, 'metadata', Run), notebook_output=_from_dict(d, 'notebook_output', NotebookOutput), run_job_output=_from_dict(d, 'run_job_output', RunJobOutput), sql_output=_from_dict(d, 'sql_output', SqlOutput)) + + @dataclass @@ -5896,7 +5039,7 @@ class RunParameters: dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - + jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -5907,8 +5050,8 @@ class RunParameters: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - notebook_params: Optional[Dict[str, str]] = None + + notebook_params: Optional[Dict[str,str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. 
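# --- Editor's illustrative aside (not part of the generated patch): a minimal
# sketch of how the notebook_params documented above are supplied when
# triggering a run through the SDK. The job ID and parameter values are
# hypothetical; `run_now` returns a waiter, so `.result()` blocks until the
# run reaches a terminal state.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # assumes workspace authentication is already configured
run = w.jobs.run_now(
    job_id=123,  # hypothetical job ID
    notebook_params={"name": "john doe", "age": "35"},  # read in the notebook via dbutils.widgets.get("name")
).result()
print(run.run_id, run.run_page_url)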
@@ -5924,12 +5067,12 @@ class RunParameters: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str, str]] = None - + + python_named_params: Optional[Dict[str,str]] = None + python_params: Optional[List[str]] = None """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon @@ -5945,7 +5088,7 @@ class RunParameters: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -5962,66 +5105,43 @@ class RunParameters: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str, str]] = None + + sql_params: Optional[Dict[str,str]] = None """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - + def as_dict(self) -> dict: """Serializes the RunParameters into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_commands: - body["dbt_commands"] = [v for v in self.dbt_commands] - if self.jar_params: - body["jar_params"] = [v for v in self.jar_params] - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params.as_dict() - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = [v for v in self.python_params] - if self.spark_submit_params: - body["spark_submit_params"] = [v for v in self.spark_submit_params] - if self.sql_params: - body["sql_params"] = self.sql_params + if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands] + if self.jar_params: body['jar_params'] = [v for v in self.jar_params] + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = [v for v in self.python_params] + if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params] + if self.sql_params: body['sql_params'] = self.sql_params return body def as_shallow_dict(self) -> dict: """Serializes the RunParameters into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_commands: - body["dbt_commands"] = self.dbt_commands - if self.jar_params: - body["jar_params"] = self.jar_params - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = self.python_params - if self.spark_submit_params: - body["spark_submit_params"] = 
self.spark_submit_params - if self.sql_params: - body["sql_params"] = self.sql_params + if self.dbt_commands: body['dbt_commands'] = self.dbt_commands + if self.jar_params: body['jar_params'] = self.jar_params + if self.notebook_params: body['notebook_params'] = self.notebook_params + if self.pipeline_params: body['pipeline_params'] = self.pipeline_params + if self.python_named_params: body['python_named_params'] = self.python_named_params + if self.python_params: body['python_params'] = self.python_params + if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params + if self.sql_params: body['sql_params'] = self.sql_params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunParameters: """Deserializes the RunParameters from a dictionary.""" - return cls( - dbt_commands=d.get("dbt_commands", None), - jar_params=d.get("jar_params", None), - notebook_params=d.get("notebook_params", None), - pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), - python_named_params=d.get("python_named_params", None), - python_params=d.get("python_params", None), - spark_submit_params=d.get("spark_submit_params", None), - sql_params=d.get("sql_params", None), - ) + return cls(dbt_commands=d.get('dbt_commands', None), jar_params=d.get('jar_params', None), notebook_params=d.get('notebook_params', None), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None)) + + class RunResultState(Enum): @@ -6034,184 +5154,164 @@ class RunResultState(Enum): successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`: The run was skipped because it was disabled explicitly by the user.""" - - CANCELED = "CANCELED" - DISABLED = "DISABLED" - EXCLUDED = "EXCLUDED" - FAILED = "FAILED" - MAXIMUM_CONCURRENT_RUNS_REACHED = "MAXIMUM_CONCURRENT_RUNS_REACHED" - SUCCESS = "SUCCESS" - SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES" - TIMEDOUT = "TIMEDOUT" - UPSTREAM_CANCELED = "UPSTREAM_CANCELED" - UPSTREAM_FAILED = "UPSTREAM_FAILED" - + + CANCELED = 'CANCELED' + DISABLED = 'DISABLED' + EXCLUDED = 'EXCLUDED' + FAILED = 'FAILED' + MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED' + SUCCESS = 'SUCCESS' + SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES' + TIMEDOUT = 'TIMEDOUT' + UPSTREAM_CANCELED = 'UPSTREAM_CANCELED' + UPSTREAM_FAILED = 'UPSTREAM_FAILED' @dataclass class RunState: """The current state of the run.""" - + life_cycle_state: Optional[RunLifeCycleState] = None """A value indicating the run's current lifecycle state. This field is always available in the response. Note: Additional states might be introduced in future releases.""" - + queue_reason: Optional[str] = None """The reason indicating why the run was queued.""" - + result_state: Optional[RunResultState] = None """A value indicating the run's result. This field is only available for terminal lifecycle states. Note: Additional states might be introduced in future releases.""" - + state_message: Optional[str] = None """A descriptive message for the current state. 
This field is unstructured, and its exact format is subject to change.""" - + user_cancelled_or_timedout: Optional[bool] = None """A value indicating whether a run was canceled manually by a user or by the scheduler because the run timed out.""" - + def as_dict(self) -> dict: """Serializes the RunState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.life_cycle_state is not None: - body["life_cycle_state"] = self.life_cycle_state.value - if self.queue_reason is not None: - body["queue_reason"] = self.queue_reason - if self.result_state is not None: - body["result_state"] = self.result_state.value - if self.state_message is not None: - body["state_message"] = self.state_message - if self.user_cancelled_or_timedout is not None: - body["user_cancelled_or_timedout"] = self.user_cancelled_or_timedout + if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value + if self.queue_reason is not None: body['queue_reason'] = self.queue_reason + if self.result_state is not None: body['result_state'] = self.result_state.value + if self.state_message is not None: body['state_message'] = self.state_message + if self.user_cancelled_or_timedout is not None: body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout return body def as_shallow_dict(self) -> dict: """Serializes the RunState into a shallow dictionary of its immediate attributes.""" body = {} - if self.life_cycle_state is not None: - body["life_cycle_state"] = self.life_cycle_state - if self.queue_reason is not None: - body["queue_reason"] = self.queue_reason - if self.result_state is not None: - body["result_state"] = self.result_state - if self.state_message is not None: - body["state_message"] = self.state_message - if self.user_cancelled_or_timedout is not None: - body["user_cancelled_or_timedout"] = self.user_cancelled_or_timedout + if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state + if self.queue_reason is not None: body['queue_reason'] = self.queue_reason + if self.result_state is not None: body['result_state'] = self.result_state + if self.state_message is not None: body['state_message'] = self.state_message + if self.user_cancelled_or_timedout is not None: body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunState: """Deserializes the RunState from a dictionary.""" - return cls( - life_cycle_state=_enum(d, "life_cycle_state", RunLifeCycleState), - queue_reason=d.get("queue_reason", None), - result_state=_enum(d, "result_state", RunResultState), - state_message=d.get("state_message", None), - user_cancelled_or_timedout=d.get("user_cancelled_or_timedout", None), - ) + return cls(life_cycle_state=_enum(d, 'life_cycle_state', RunLifeCycleState), queue_reason=d.get('queue_reason', None), result_state=_enum(d, 'result_state', RunResultState), state_message=d.get('state_message', None), user_cancelled_or_timedout=d.get('user_cancelled_or_timedout', None)) + + @dataclass class RunStatus: """The current status of the run""" - + queue_details: Optional[QueueDetails] = None """If the run was queued, details about the reason for queuing the run.""" - + state: Optional[RunLifecycleStateV2State] = None """The current state of the run.""" - + termination_details: Optional[TerminationDetails] = None """If the run is in a TERMINATING or TERMINATED state, details about the reason for terminating the run.""" - + def as_dict(self) -> dict: """Serializes the 
RunStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.queue_details: - body["queue_details"] = self.queue_details.as_dict() - if self.state is not None: - body["state"] = self.state.value - if self.termination_details: - body["termination_details"] = self.termination_details.as_dict() + if self.queue_details: body['queue_details'] = self.queue_details.as_dict() + if self.state is not None: body['state'] = self.state.value + if self.termination_details: body['termination_details'] = self.termination_details.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.queue_details: - body["queue_details"] = self.queue_details - if self.state is not None: - body["state"] = self.state - if self.termination_details: - body["termination_details"] = self.termination_details + if self.queue_details: body['queue_details'] = self.queue_details + if self.state is not None: body['state'] = self.state + if self.termination_details: body['termination_details'] = self.termination_details return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunStatus: """Deserializes the RunStatus from a dictionary.""" - return cls( - queue_details=_from_dict(d, "queue_details", QueueDetails), - state=_enum(d, "state", RunLifecycleStateV2State), - termination_details=_from_dict(d, "termination_details", TerminationDetails), - ) + return cls(queue_details=_from_dict(d, 'queue_details', QueueDetails), state=_enum(d, 'state', RunLifecycleStateV2State), termination_details=_from_dict(d, 'termination_details', TerminationDetails)) + + @dataclass class RunTask: """Used when outputting a child run, in GetRun or ListRuns.""" - + task_key: str """A unique name for the task. This field is used to refer to this task from other tasks. This field is required and must be unique within its parent job. On Update or Reset, this field is used to reference the tasks to be updated or reset.""" - + attempt_number: Optional[int] = None """The sequence number of this run attempt for a triggered job run. The initial attempt of a run has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" - + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" - + cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + cluster_instance: Optional[ClusterInstance] = None """The cluster used for this run. 
If the run is specified to use a new cluster, this field is set once the Jobs service has requested a cluster for the run.""" - + condition_task: Optional[RunConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present. The condition task does not require a cluster to execute and does not support retries or notifications.""" - + dashboard_task: Optional[DashboardTask] = None """The task refreshes a dashboard and sends a snapshot to subscribers.""" - + + dbt_cloud_task: Optional[DbtCloudTask] = None + """Task type for dbt cloud""" + dbt_task: Optional[DbtTask] = None """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" - + depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete successfully before executing this task. The key is `task_key`, and the value is the name assigned to the dependent task.""" - + description: Optional[str] = None """An optional description for this task.""" - + disabled: Optional[bool] = None """Deprecated, field was never used in production.""" - + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -6220,37 +5320,37 @@ class RunTask: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. The default behavior is to not send any emails.""" - + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" - + environment_key: Optional[str] = None """The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.""" - + execution_duration: Optional[int] = None """The time in milliseconds it took to execute the commands in the JAR or notebook until they completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + existing_cluster_id: Optional[str] = None """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops responding. 
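An example value might look like `"existing_cluster_id": "1234-567890-abcde123"` (an illustrative cluster ID, not a real one).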
We suggest running jobs and tasks on new clusters for greater reliability"""
-
+
    for_each_task: Optional[RunForEachTask] = None
    """The task executes a nested task for every input provided when the `for_each_task` field is
    present."""
-
+
    gen_ai_compute_task: Optional[GenAiComputeTask] = None
-
+
    git_source: Optional[GitSource] = None
    """An optional specification for a remote Git repository containing the source code used by tasks.
    Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
@@ -6258,70 +5358,70 @@ class RunTask:
    However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. Note:
    dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
    used, `git_source` must be defined on the job."""
-
+
    job_cluster_key: Optional[str] = None
    """If job_cluster_key, this task is executed reusing the cluster specified in
    `job.settings.job_clusters`."""
-
+
    libraries: Optional[List[compute.Library]] = None
    """An optional list of libraries to be installed on the cluster. The default value is an empty
    list."""
-
+
    new_cluster: Optional[compute.ClusterSpec] = None
    """If new_cluster, a description of a new cluster that is created for each run."""
-
+
    notebook_task: Optional[NotebookTask] = None
    """The task runs a notebook when the `notebook_task` field is present."""
-
+
    notification_settings: Optional[TaskNotificationSettings] = None
    """Optional notification settings that are used when sending notifications to each of the
    `email_notifications` and `webhook_notifications` for this task run."""
-
+
    pipeline_task: Optional[PipelineTask] = None
    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
    configured to use triggered mode are supported."""
-
+
    power_bi_task: Optional[PowerBiTask] = None
    """The task triggers a Power BI semantic model update when the `power_bi_task` field is present."""
-
+
    python_wheel_task: Optional[PythonWheelTask] = None
    """The task runs a Python wheel when the `python_wheel_task` field is present."""
-
+
    queue_duration: Optional[int] = None
    """The time in milliseconds that the run has spent in the queue."""
-
+
    resolved_values: Optional[ResolvedValues] = None
    """Parameter values including resolved references"""
-
+
    run_duration: Optional[int] = None
    """The time in milliseconds it took the job run and all of its repairs to finish."""
-
+
    run_id: Optional[int] = None
    """The ID of the task run."""
-
+
    run_if: Optional[RunIf] = None
    """An optional value indicating the condition that determines whether the task should be run once
    its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. See
    :method:jobs/create for a list of possible values."""
-
+
    run_job_task: Optional[RunJobTask] = None
    """The task triggers another job when the `run_job_task` field is present."""
-
+
    run_page_url: Optional[str] = None
-
+
    setup_duration: Optional[int] = None
    """The time in milliseconds it took to set up the cluster. For runs that run on new clusters this
    is the cluster creation time, for runs that run on existing clusters this time should be very
    short. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and
    the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs.
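For example, a single-task run with a `setup_duration` of 60000, an `execution_duration` of 300000, and a `cleanup_duration` of 10000 has a total task run duration of 370000 milliseconds.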
The total duration of a multitask job run is the value of the `run_duration` field.""" - + spark_jar_task: Optional[SparkJarTask] = None """The task runs a JAR when the `spark_jar_task` field is present.""" - + spark_python_task: Optional[SparkPythonTask] = None """The task runs a Python file when the `spark_python_task` field is present.""" - + spark_submit_task: Optional[SparkSubmitTask] = None """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute. @@ -6338,283 +5438,150 @@ class RunTask: to leave some room for off-heap usage. The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" - + sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.""" - + start_time: Optional[int] = None """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC). This may not be the time when the job task starts executing, for example, if the job is scheduled to run on a new cluster, this is the time the cluster creation call is issued.""" - + state: Optional[RunState] = None """Deprecated. Please use the `status` field instead.""" - + status: Optional[RunStatus] = None """The current status of the run""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. A value of `0` means no timeout.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when the run begins or completes. The default behavior is to not send any system notifications. Task webhooks respect the task notification settings.""" - + def as_dict(self) -> dict: """Serializes the RunTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attempt_number is not None: - body["attempt_number"] = self.attempt_number - if self.clean_rooms_notebook_task: - body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict() - if self.cleanup_duration is not None: - body["cleanup_duration"] = self.cleanup_duration - if self.cluster_instance: - body["cluster_instance"] = self.cluster_instance.as_dict() - if self.condition_task: - body["condition_task"] = self.condition_task.as_dict() - if self.dashboard_task: - body["dashboard_task"] = self.dashboard_task.as_dict() - if self.dbt_task: - body["dbt_task"] = self.dbt_task.as_dict() - if self.depends_on: - body["depends_on"] = [v.as_dict() for v in self.depends_on] - if self.description is not None: - body["description"] = self.description - if self.disabled is not None: - body["disabled"] = self.disabled - if self.effective_performance_target is not None: - body["effective_performance_target"] = self.effective_performance_target.value - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.end_time is not None: - body["end_time"] = self.end_time - if self.environment_key is not None: - body["environment_key"] = self.environment_key - if self.execution_duration is not None: - body["execution_duration"] = self.execution_duration - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.for_each_task: - body["for_each_task"] = self.for_each_task.as_dict() - if self.gen_ai_compute_task: - body["gen_ai_compute_task"] = self.gen_ai_compute_task.as_dict() - if 
self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.new_cluster: - body["new_cluster"] = self.new_cluster.as_dict() - if self.notebook_task: - body["notebook_task"] = self.notebook_task.as_dict() - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.pipeline_task: - body["pipeline_task"] = self.pipeline_task.as_dict() - if self.power_bi_task: - body["power_bi_task"] = self.power_bi_task.as_dict() - if self.python_wheel_task: - body["python_wheel_task"] = self.python_wheel_task.as_dict() - if self.queue_duration is not None: - body["queue_duration"] = self.queue_duration - if self.resolved_values: - body["resolved_values"] = self.resolved_values.as_dict() - if self.run_duration is not None: - body["run_duration"] = self.run_duration - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_if is not None: - body["run_if"] = self.run_if.value - if self.run_job_task: - body["run_job_task"] = self.run_job_task.as_dict() - if self.run_page_url is not None: - body["run_page_url"] = self.run_page_url - if self.setup_duration is not None: - body["setup_duration"] = self.setup_duration - if self.spark_jar_task: - body["spark_jar_task"] = self.spark_jar_task.as_dict() - if self.spark_python_task: - body["spark_python_task"] = self.spark_python_task.as_dict() - if self.spark_submit_task: - body["spark_submit_task"] = self.spark_submit_task.as_dict() - if self.sql_task: - body["sql_task"] = self.sql_task.as_dict() - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state.as_dict() - if self.status: - body["status"] = self.status.as_dict() - if self.task_key is not None: - body["task_key"] = self.task_key - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() + if self.condition_task: body['condition_task'] = self.condition_task.as_dict() + if self.dashboard_task: body['dashboard_task'] = self.dashboard_task.as_dict() + if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task.as_dict() + if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() + if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] + if self.description is not None: body['description'] = self.description + if self.disabled is not None: body['disabled'] = self.disabled + if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value + if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() + if self.end_time is not None: body['end_time'] = self.end_time + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.existing_cluster_id is not 
None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict() + if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task.as_dict() + if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() + if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict() + if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict() + if self.power_bi_task: body['power_bi_task'] = self.power_bi_task.as_dict() + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.resolved_values: body['resolved_values'] = self.resolved_values.as_dict() + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_if is not None: body['run_if'] = self.run_if.value + if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() + if self.sql_task: body['sql_task'] = self.sql_task.as_dict() + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state.as_dict() + if self.status: body['status'] = self.status.as_dict() + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.attempt_number is not None: - body["attempt_number"] = self.attempt_number - if self.clean_rooms_notebook_task: - body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task - if self.cleanup_duration is not None: - body["cleanup_duration"] = self.cleanup_duration - if self.cluster_instance: - body["cluster_instance"] = self.cluster_instance - if self.condition_task: - body["condition_task"] = self.condition_task - if self.dashboard_task: - body["dashboard_task"] = self.dashboard_task - if self.dbt_task: - body["dbt_task"] = self.dbt_task - if self.depends_on: - body["depends_on"] = self.depends_on - if self.description is not None: - body["description"] = self.description - if self.disabled is not None: - body["disabled"] = self.disabled - if self.effective_performance_target is not None: - body["effective_performance_target"] = self.effective_performance_target - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.end_time is not None: - body["end_time"] = self.end_time - if self.environment_key is not 
None: - body["environment_key"] = self.environment_key - if self.execution_duration is not None: - body["execution_duration"] = self.execution_duration - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.for_each_task: - body["for_each_task"] = self.for_each_task - if self.gen_ai_compute_task: - body["gen_ai_compute_task"] = self.gen_ai_compute_task - if self.git_source: - body["git_source"] = self.git_source - if self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.libraries: - body["libraries"] = self.libraries - if self.new_cluster: - body["new_cluster"] = self.new_cluster - if self.notebook_task: - body["notebook_task"] = self.notebook_task - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.pipeline_task: - body["pipeline_task"] = self.pipeline_task - if self.power_bi_task: - body["power_bi_task"] = self.power_bi_task - if self.python_wheel_task: - body["python_wheel_task"] = self.python_wheel_task - if self.queue_duration is not None: - body["queue_duration"] = self.queue_duration - if self.resolved_values: - body["resolved_values"] = self.resolved_values - if self.run_duration is not None: - body["run_duration"] = self.run_duration - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_if is not None: - body["run_if"] = self.run_if - if self.run_job_task: - body["run_job_task"] = self.run_job_task - if self.run_page_url is not None: - body["run_page_url"] = self.run_page_url - if self.setup_duration is not None: - body["setup_duration"] = self.setup_duration - if self.spark_jar_task: - body["spark_jar_task"] = self.spark_jar_task - if self.spark_python_task: - body["spark_python_task"] = self.spark_python_task - if self.spark_submit_task: - body["spark_submit_task"] = self.spark_submit_task - if self.sql_task: - body["sql_task"] = self.sql_task - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state - if self.status: - body["status"] = self.status - if self.task_key is not None: - body["task_key"] = self.task_key - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications + if self.attempt_number is not None: body['attempt_number'] = self.attempt_number + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task + if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration + if self.cluster_instance: body['cluster_instance'] = self.cluster_instance + if self.condition_task: body['condition_task'] = self.condition_task + if self.dashboard_task: body['dashboard_task'] = self.dashboard_task + if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task + if self.dbt_task: body['dbt_task'] = self.dbt_task + if self.depends_on: body['depends_on'] = self.depends_on + if self.description is not None: body['description'] = self.description + if self.disabled is not None: body['disabled'] = self.disabled + if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.end_time is not None: body['end_time'] = self.end_time + if self.environment_key is not None: body['environment_key'] = self.environment_key + if 
self.execution_duration is not None: body['execution_duration'] = self.execution_duration + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task + if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task + if self.git_source: body['git_source'] = self.git_source + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = self.libraries + if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.notebook_task: body['notebook_task'] = self.notebook_task + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task + if self.power_bi_task: body['power_bi_task'] = self.power_bi_task + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task + if self.queue_duration is not None: body['queue_duration'] = self.queue_duration + if self.resolved_values: body['resolved_values'] = self.resolved_values + if self.run_duration is not None: body['run_duration'] = self.run_duration + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_if is not None: body['run_if'] = self.run_if + if self.run_job_task: body['run_job_task'] = self.run_job_task + if self.run_page_url is not None: body['run_page_url'] = self.run_page_url + if self.setup_duration is not None: body['setup_duration'] = self.setup_duration + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task + if self.sql_task: body['sql_task'] = self.sql_task + if self.start_time is not None: body['start_time'] = self.start_time + if self.state: body['state'] = self.state + if self.status: body['status'] = self.status + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunTask: """Deserializes the RunTask from a dictionary.""" - return cls( - attempt_number=d.get("attempt_number", None), - clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask), - cleanup_duration=d.get("cleanup_duration", None), - cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance), - condition_task=_from_dict(d, "condition_task", RunConditionTask), - dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), - dbt_task=_from_dict(d, "dbt_task", DbtTask), - depends_on=_repeated_dict(d, "depends_on", TaskDependency), - description=d.get("description", None), - disabled=d.get("disabled", None), - effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - end_time=d.get("end_time", None), - environment_key=d.get("environment_key", None), - execution_duration=d.get("execution_duration", None), - existing_cluster_id=d.get("existing_cluster_id", None), - for_each_task=_from_dict(d, "for_each_task", RunForEachTask), - gen_ai_compute_task=_from_dict(d, "gen_ai_compute_task", GenAiComputeTask), - git_source=_from_dict(d, "git_source", GitSource), - 
job_cluster_key=d.get("job_cluster_key", None), - libraries=_repeated_dict(d, "libraries", compute.Library), - new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), - notebook_task=_from_dict(d, "notebook_task", NotebookTask), - notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings), - pipeline_task=_from_dict(d, "pipeline_task", PipelineTask), - power_bi_task=_from_dict(d, "power_bi_task", PowerBiTask), - python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask), - queue_duration=d.get("queue_duration", None), - resolved_values=_from_dict(d, "resolved_values", ResolvedValues), - run_duration=d.get("run_duration", None), - run_id=d.get("run_id", None), - run_if=_enum(d, "run_if", RunIf), - run_job_task=_from_dict(d, "run_job_task", RunJobTask), - run_page_url=d.get("run_page_url", None), - setup_duration=d.get("setup_duration", None), - spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask), - spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask), - spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask), - sql_task=_from_dict(d, "sql_task", SqlTask), - start_time=d.get("start_time", None), - state=_from_dict(d, "state", RunState), - status=_from_dict(d, "status", RunStatus), - task_key=d.get("task_key", None), - timeout_seconds=d.get("timeout_seconds", None), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) + return cls(attempt_number=d.get('attempt_number', None), clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', CleanRoomsNotebookTask), cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), condition_task=_from_dict(d, 'condition_task', RunConditionTask), dashboard_task=_from_dict(d, 'dashboard_task', DashboardTask), dbt_cloud_task=_from_dict(d, 'dbt_cloud_task', DbtCloudTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), disabled=d.get('disabled', None), effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), end_time=d.get('end_time', None), environment_key=d.get('environment_key', None), execution_duration=d.get('execution_duration', None), existing_cluster_id=d.get('existing_cluster_id', None), for_each_task=_from_dict(d, 'for_each_task', RunForEachTask), gen_ai_compute_task=_from_dict(d, 'gen_ai_compute_task', GenAiComputeTask), git_source=_from_dict(d, 'git_source', GitSource), job_cluster_key=d.get('job_cluster_key', None), libraries=_repeated_dict(d, 'libraries', compute.Library), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec), notebook_task=_from_dict(d, 'notebook_task', NotebookTask), notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings), pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask), power_bi_task=_from_dict(d, 'power_bi_task', PowerBiTask), python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask), queue_duration=d.get('queue_duration', None), resolved_values=_from_dict(d, 'resolved_values', ResolvedValues), run_duration=d.get('run_duration', None), run_id=d.get('run_id', None), run_if=_enum(d, 'run_if', RunIf), run_job_task=_from_dict(d, 'run_job_task', RunJobTask), run_page_url=d.get('run_page_url', None), setup_duration=d.get('setup_duration', None), spark_jar_task=_from_dict(d, 
'spark_jar_task', SparkJarTask), spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask), spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask), sql_task=_from_dict(d, 'sql_task', SqlTask), start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), status=_from_dict(d, 'status', RunStatus), task_key=d.get('task_key', None), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) + + class RunType(Enum): """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with :method:jobs/submit. - + [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow""" - - JOB_RUN = "JOB_RUN" - SUBMIT_RUN = "SUBMIT_RUN" - WORKFLOW_RUN = "WORKFLOW_RUN" - + + JOB_RUN = 'JOB_RUN' + SUBMIT_RUN = 'SUBMIT_RUN' + WORKFLOW_RUN = 'WORKFLOW_RUN' class Source(Enum): """Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\ @@ -6624,69 +5591,57 @@ class Source(Enum): * `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in cloud Git provider.""" - - GIT = "GIT" - WORKSPACE = "WORKSPACE" - + + GIT = 'GIT' + WORKSPACE = 'WORKSPACE' @dataclass class SparkJarTask: jar_uri: Optional[str] = None """Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.""" - + main_class_name: Optional[str] = None """The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.""" - + parameters: Optional[List[str]] = None """Parameters passed to the main method. Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + run_as_repl: Optional[bool] = None """Deprecated. 
A value of `false` is no longer supported.""" - + def as_dict(self) -> dict: """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.jar_uri is not None: - body["jar_uri"] = self.jar_uri - if self.main_class_name is not None: - body["main_class_name"] = self.main_class_name - if self.parameters: - body["parameters"] = [v for v in self.parameters] - if self.run_as_repl is not None: - body["run_as_repl"] = self.run_as_repl + if self.jar_uri is not None: body['jar_uri'] = self.jar_uri + if self.main_class_name is not None: body['main_class_name'] = self.main_class_name + if self.parameters: body['parameters'] = [v for v in self.parameters] + if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl return body def as_shallow_dict(self) -> dict: """Serializes the SparkJarTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.jar_uri is not None: - body["jar_uri"] = self.jar_uri - if self.main_class_name is not None: - body["main_class_name"] = self.main_class_name - if self.parameters: - body["parameters"] = self.parameters - if self.run_as_repl is not None: - body["run_as_repl"] = self.run_as_repl + if self.jar_uri is not None: body['jar_uri'] = self.jar_uri + if self.main_class_name is not None: body['main_class_name'] = self.main_class_name + if self.parameters: body['parameters'] = self.parameters + if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkJarTask: """Deserializes the SparkJarTask from a dictionary.""" - return cls( - jar_uri=d.get("jar_uri", None), - main_class_name=d.get("main_class_name", None), - parameters=d.get("parameters", None), - run_as_repl=d.get("run_as_repl", None), - ) + return cls(jar_uri=d.get('jar_uri', None), main_class_name=d.get('main_class_name', None), parameters=d.get('parameters', None), run_as_repl=d.get('run_as_repl', None)) + + @dataclass @@ -6696,14 +5651,14 @@ class SparkPythonTask: workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.""" - + parameters: Optional[List[str]] = None """Command line parameters passed to the Python file. Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + source: Optional[Source] = None """Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved from the local Databricks workspace or cloud location (if the `python_file` @@ -6712,37 +5667,29 @@ class SparkPythonTask: * `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem URI. 
* `GIT`: The Python file is located in a remote Git repository.""" - + def as_dict(self) -> dict: """Serializes the SparkPythonTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: - body["parameters"] = [v for v in self.parameters] - if self.python_file is not None: - body["python_file"] = self.python_file - if self.source is not None: - body["source"] = self.source.value + if self.parameters: body['parameters'] = [v for v in self.parameters] + if self.python_file is not None: body['python_file'] = self.python_file + if self.source is not None: body['source'] = self.source.value return body def as_shallow_dict(self) -> dict: """Serializes the SparkPythonTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: - body["parameters"] = self.parameters - if self.python_file is not None: - body["python_file"] = self.python_file - if self.source is not None: - body["source"] = self.source + if self.parameters: body['parameters'] = self.parameters + if self.python_file is not None: body['python_file'] = self.python_file + if self.source is not None: body['source'] = self.source return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkPythonTask: """Deserializes the SparkPythonTask from a dictionary.""" - return cls( - parameters=d.get("parameters", None), - python_file=d.get("python_file", None), - source=_enum(d, "source", Source), - ) + return cls(parameters=d.get('parameters', None), python_file=d.get('python_file', None), source=_enum(d, 'source', Source)) + + @dataclass @@ -6753,25 +5700,25 @@ class SparkSubmitTask: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + def as_dict(self) -> dict: """Serializes the SparkSubmitTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: - body["parameters"] = [v for v in self.parameters] + if self.parameters: body['parameters'] = [v for v in self.parameters] return body def as_shallow_dict(self) -> dict: """Serializes the SparkSubmitTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: - body["parameters"] = self.parameters + if self.parameters: body['parameters'] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkSubmitTask: """Deserializes the SparkSubmitTask from a dictionary.""" - return cls(parameters=d.get("parameters", None)) + return cls(parameters=d.get('parameters', None)) + + @dataclass @@ -6781,340 +5728,279 @@ class SqlAlertOutput: * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions""" - + output_link: Optional[str] = None """The link to find the output results.""" - + query_text: Optional[str] = None """The text of the SQL query. 
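For example, `query_text` might contain `SELECT 1` or the full text of the saved query that backs the alert.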
Can Run permission of the SQL query associated with the SQL alert is required to view this field.""" - + sql_statements: Optional[List[SqlStatementOutput]] = None """Information about SQL statements executed in the run.""" - + warehouse_id: Optional[str] = None """The canonical identifier of the SQL warehouse.""" - + def as_dict(self) -> dict: """Serializes the SqlAlertOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_state is not None: - body["alert_state"] = self.alert_state.value - if self.output_link is not None: - body["output_link"] = self.output_link - if self.query_text is not None: - body["query_text"] = self.query_text - if self.sql_statements: - body["sql_statements"] = [v.as_dict() for v in self.sql_statements] - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.alert_state is not None: body['alert_state'] = self.alert_state.value + if self.output_link is not None: body['output_link'] = self.output_link + if self.query_text is not None: body['query_text'] = self.query_text + if self.sql_statements: body['sql_statements'] = [v.as_dict() for v in self.sql_statements] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlAlertOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_state is not None: - body["alert_state"] = self.alert_state - if self.output_link is not None: - body["output_link"] = self.output_link - if self.query_text is not None: - body["query_text"] = self.query_text - if self.sql_statements: - body["sql_statements"] = self.sql_statements - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.alert_state is not None: body['alert_state'] = self.alert_state + if self.output_link is not None: body['output_link'] = self.output_link + if self.query_text is not None: body['query_text'] = self.query_text + if self.sql_statements: body['sql_statements'] = self.sql_statements + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlAlertOutput: """Deserializes the SqlAlertOutput from a dictionary.""" - return cls( - alert_state=_enum(d, "alert_state", SqlAlertState), - output_link=d.get("output_link", None), - query_text=d.get("query_text", None), - sql_statements=_repeated_dict(d, "sql_statements", SqlStatementOutput), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(alert_state=_enum(d, 'alert_state', SqlAlertState), output_link=d.get('output_link', None), query_text=d.get('query_text', None), sql_statements=_repeated_dict(d, 'sql_statements', SqlStatementOutput), warehouse_id=d.get('warehouse_id', None)) + + class SqlAlertState(Enum): """The state of the SQL alert. - + * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions""" - - OK = "OK" - TRIGGERED = "TRIGGERED" - UNKNOWN = "UNKNOWN" - + + OK = 'OK' + TRIGGERED = 'TRIGGERED' + UNKNOWN = 'UNKNOWN' @dataclass class SqlDashboardOutput: warehouse_id: Optional[str] = None """The canonical identifier of the SQL warehouse.""" - + widgets: Optional[List[SqlDashboardWidgetOutput]] = None """Widgets executed in the run. 
Only SQL query based widgets are listed.""" - + def as_dict(self) -> dict: """Serializes the SqlDashboardOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - if self.widgets: - body["widgets"] = [v.as_dict() for v in self.widgets] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets] return body def as_shallow_dict(self) -> dict: """Serializes the SqlDashboardOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - if self.widgets: - body["widgets"] = self.widgets + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.widgets: body['widgets'] = self.widgets return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlDashboardOutput: """Deserializes the SqlDashboardOutput from a dictionary.""" - return cls( - warehouse_id=d.get("warehouse_id", None), widgets=_repeated_dict(d, "widgets", SqlDashboardWidgetOutput) - ) + return cls(warehouse_id=d.get('warehouse_id', None), widgets=_repeated_dict(d, 'widgets', SqlDashboardWidgetOutput)) + + @dataclass class SqlDashboardWidgetOutput: end_time: Optional[int] = None """Time (in epoch milliseconds) when execution of the SQL widget ends.""" - + error: Optional[SqlOutputError] = None """The information about the error when execution fails.""" - + output_link: Optional[str] = None """The link to find the output results.""" - + start_time: Optional[int] = None """Time (in epoch milliseconds) when execution of the SQL widget starts.""" - + status: Optional[SqlDashboardWidgetOutputStatus] = None """The execution status of the SQL widget.""" - + widget_id: Optional[str] = None """The canonical identifier of the SQL widget.""" - + widget_title: Optional[str] = None """The title of the SQL widget.""" - + def as_dict(self) -> dict: """Serializes the SqlDashboardWidgetOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.error: - body["error"] = self.error.as_dict() - if self.output_link is not None: - body["output_link"] = self.output_link - if self.start_time is not None: - body["start_time"] = self.start_time - if self.status is not None: - body["status"] = self.status.value - if self.widget_id is not None: - body["widget_id"] = self.widget_id - if self.widget_title is not None: - body["widget_title"] = self.widget_title + if self.end_time is not None: body['end_time'] = self.end_time + if self.error: body['error'] = self.error.as_dict() + if self.output_link is not None: body['output_link'] = self.output_link + if self.start_time is not None: body['start_time'] = self.start_time + if self.status is not None: body['status'] = self.status.value + if self.widget_id is not None: body['widget_id'] = self.widget_id + if self.widget_title is not None: body['widget_title'] = self.widget_title return body def as_shallow_dict(self) -> dict: """Serializes the SqlDashboardWidgetOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.error: - body["error"] = self.error - if self.output_link is not None: - body["output_link"] = self.output_link - if self.start_time is not None: - body["start_time"] = self.start_time - if self.status is 
not None: - body["status"] = self.status - if self.widget_id is not None: - body["widget_id"] = self.widget_id - if self.widget_title is not None: - body["widget_title"] = self.widget_title + if self.end_time is not None: body['end_time'] = self.end_time + if self.error: body['error'] = self.error + if self.output_link is not None: body['output_link'] = self.output_link + if self.start_time is not None: body['start_time'] = self.start_time + if self.status is not None: body['status'] = self.status + if self.widget_id is not None: body['widget_id'] = self.widget_id + if self.widget_title is not None: body['widget_title'] = self.widget_title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlDashboardWidgetOutput: """Deserializes the SqlDashboardWidgetOutput from a dictionary.""" - return cls( - end_time=d.get("end_time", None), - error=_from_dict(d, "error", SqlOutputError), - output_link=d.get("output_link", None), - start_time=d.get("start_time", None), - status=_enum(d, "status", SqlDashboardWidgetOutputStatus), - widget_id=d.get("widget_id", None), - widget_title=d.get("widget_title", None), - ) - + return cls(end_time=d.get('end_time', None), error=_from_dict(d, 'error', SqlOutputError), output_link=d.get('output_link', None), start_time=d.get('start_time', None), status=_enum(d, 'status', SqlDashboardWidgetOutputStatus), widget_id=d.get('widget_id', None), widget_title=d.get('widget_title', None)) + -class SqlDashboardWidgetOutputStatus(Enum): - CANCELLED = "CANCELLED" - FAILED = "FAILED" - PENDING = "PENDING" - RUNNING = "RUNNING" - SUCCESS = "SUCCESS" +class SqlDashboardWidgetOutputStatus(Enum): + + + CANCELLED = 'CANCELLED' + FAILED = 'FAILED' + PENDING = 'PENDING' + RUNNING = 'RUNNING' + SUCCESS = 'SUCCESS' @dataclass class SqlOutput: alert_output: Optional[SqlAlertOutput] = None """The output of a SQL alert task, if available.""" - + dashboard_output: Optional[SqlDashboardOutput] = None """The output of a SQL dashboard task, if available.""" - + query_output: Optional[SqlQueryOutput] = None """The output of a SQL query task, if available.""" - + def as_dict(self) -> dict: """Serializes the SqlOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_output: - body["alert_output"] = self.alert_output.as_dict() - if self.dashboard_output: - body["dashboard_output"] = self.dashboard_output.as_dict() - if self.query_output: - body["query_output"] = self.query_output.as_dict() + if self.alert_output: body['alert_output'] = self.alert_output.as_dict() + if self.dashboard_output: body['dashboard_output'] = self.dashboard_output.as_dict() + if self.query_output: body['query_output'] = self.query_output.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SqlOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_output: - body["alert_output"] = self.alert_output - if self.dashboard_output: - body["dashboard_output"] = self.dashboard_output - if self.query_output: - body["query_output"] = self.query_output + if self.alert_output: body['alert_output'] = self.alert_output + if self.dashboard_output: body['dashboard_output'] = self.dashboard_output + if self.query_output: body['query_output'] = self.query_output return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlOutput: """Deserializes the SqlOutput from a dictionary.""" - return cls( - alert_output=_from_dict(d, "alert_output", SqlAlertOutput), - dashboard_output=_from_dict(d, "dashboard_output", 
SqlDashboardOutput), - query_output=_from_dict(d, "query_output", SqlQueryOutput), - ) + return cls(alert_output=_from_dict(d, 'alert_output', SqlAlertOutput), dashboard_output=_from_dict(d, 'dashboard_output', SqlDashboardOutput), query_output=_from_dict(d, 'query_output', SqlQueryOutput)) + + @dataclass class SqlOutputError: message: Optional[str] = None """The error message when execution fails.""" - + def as_dict(self) -> dict: """Serializes the SqlOutputError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message + if self.message is not None: body['message'] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the SqlOutputError into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message + if self.message is not None: body['message'] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlOutputError: """Deserializes the SqlOutputError from a dictionary.""" - return cls(message=d.get("message", None)) + return cls(message=d.get('message', None)) + + @dataclass class SqlQueryOutput: endpoint_id: Optional[str] = None - + output_link: Optional[str] = None """The link to find the output results.""" - + query_text: Optional[str] = None """The text of the SQL query. Can Run permission of the SQL query is required to view this field.""" - + sql_statements: Optional[List[SqlStatementOutput]] = None """Information about SQL statements executed in the run.""" - + warehouse_id: Optional[str] = None """The canonical identifier of the SQL warehouse.""" - + def as_dict(self) -> dict: """Serializes the SqlQueryOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoint_id is not None: - body["endpoint_id"] = self.endpoint_id - if self.output_link is not None: - body["output_link"] = self.output_link - if self.query_text is not None: - body["query_text"] = self.query_text - if self.sql_statements: - body["sql_statements"] = [v.as_dict() for v in self.sql_statements] - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id + if self.output_link is not None: body['output_link'] = self.output_link + if self.query_text is not None: body['query_text'] = self.query_text + if self.sql_statements: body['sql_statements'] = [v.as_dict() for v in self.sql_statements] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlQueryOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoint_id is not None: - body["endpoint_id"] = self.endpoint_id - if self.output_link is not None: - body["output_link"] = self.output_link - if self.query_text is not None: - body["query_text"] = self.query_text - if self.sql_statements: - body["sql_statements"] = self.sql_statements - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id + if self.output_link is not None: body['output_link'] = self.output_link + if self.query_text is not None: body['query_text'] = self.query_text + if self.sql_statements: body['sql_statements'] = self.sql_statements + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) 
-> SqlQueryOutput: """Deserializes the SqlQueryOutput from a dictionary.""" - return cls( - endpoint_id=d.get("endpoint_id", None), - output_link=d.get("output_link", None), - query_text=d.get("query_text", None), - sql_statements=_repeated_dict(d, "sql_statements", SqlStatementOutput), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(endpoint_id=d.get('endpoint_id', None), output_link=d.get('output_link', None), query_text=d.get('query_text', None), sql_statements=_repeated_dict(d, 'sql_statements', SqlStatementOutput), warehouse_id=d.get('warehouse_id', None)) + + @dataclass class SqlStatementOutput: lookup_key: Optional[str] = None """A key that can be used to look up query details.""" - + def as_dict(self) -> dict: """Serializes the SqlStatementOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.lookup_key is not None: - body["lookup_key"] = self.lookup_key + if self.lookup_key is not None: body['lookup_key'] = self.lookup_key return body def as_shallow_dict(self) -> dict: """Serializes the SqlStatementOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.lookup_key is not None: - body["lookup_key"] = self.lookup_key + if self.lookup_key is not None: body['lookup_key'] = self.lookup_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlStatementOutput: """Deserializes the SqlStatementOutput from a dictionary.""" - return cls(lookup_key=d.get("lookup_key", None)) + return cls(lookup_key=d.get('lookup_key', None)) + + @dataclass @@ -7123,162 +6009,126 @@ class SqlTask: """The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.""" - + alert: Optional[SqlTaskAlert] = None """If alert, indicates that this job must refresh a SQL alert.""" - + dashboard: Optional[SqlTaskDashboard] = None """If dashboard, indicates that this job must refresh a SQL dashboard.""" - + file: Optional[SqlTaskFile] = None """If file, indicates that this job runs a SQL file in a remote Git repository.""" - - parameters: Optional[Dict[str, str]] = None + + parameters: Optional[Dict[str,str]] = None """Parameters to be used for each run of this job. 
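For example: `"parameters": {"name": "john doe", "age": "35"}`.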
The SQL alert task does not support custom parameters.""" - + query: Optional[SqlTaskQuery] = None """If query, indicates that this job must execute a SQL query.""" - + def as_dict(self) -> dict: """Serializes the SqlTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert: - body["alert"] = self.alert.as_dict() - if self.dashboard: - body["dashboard"] = self.dashboard.as_dict() - if self.file: - body["file"] = self.file.as_dict() - if self.parameters: - body["parameters"] = self.parameters - if self.query: - body["query"] = self.query.as_dict() - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.alert: body['alert'] = self.alert.as_dict() + if self.dashboard: body['dashboard'] = self.dashboard.as_dict() + if self.file: body['file'] = self.file.as_dict() + if self.parameters: body['parameters'] = self.parameters + if self.query: body['query'] = self.query.as_dict() + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert: - body["alert"] = self.alert - if self.dashboard: - body["dashboard"] = self.dashboard - if self.file: - body["file"] = self.file - if self.parameters: - body["parameters"] = self.parameters - if self.query: - body["query"] = self.query - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.alert: body['alert'] = self.alert + if self.dashboard: body['dashboard'] = self.dashboard + if self.file: body['file'] = self.file + if self.parameters: body['parameters'] = self.parameters + if self.query: body['query'] = self.query + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTask: """Deserializes the SqlTask from a dictionary.""" - return cls( - alert=_from_dict(d, "alert", SqlTaskAlert), - dashboard=_from_dict(d, "dashboard", SqlTaskDashboard), - file=_from_dict(d, "file", SqlTaskFile), - parameters=d.get("parameters", None), - query=_from_dict(d, "query", SqlTaskQuery), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(alert=_from_dict(d, 'alert', SqlTaskAlert), dashboard=_from_dict(d, 'dashboard', SqlTaskDashboard), file=_from_dict(d, 'file', SqlTaskFile), parameters=d.get('parameters', None), query=_from_dict(d, 'query', SqlTaskQuery), warehouse_id=d.get('warehouse_id', None)) + + @dataclass class SqlTaskAlert: alert_id: str """The canonical identifier of the SQL alert.""" - + pause_subscriptions: Optional[bool] = None """If true, the alert notifications are not sent to subscribers.""" - + subscriptions: Optional[List[SqlTaskSubscription]] = None """If specified, alert notifications are sent to subscribers.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.pause_subscriptions is not None: - body["pause_subscriptions"] = self.pause_subscriptions - if self.subscriptions: - body["subscriptions"] = [v.as_dict() for v in self.subscriptions] + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions + if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> 
dict: """Serializes the SqlTaskAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.pause_subscriptions is not None: - body["pause_subscriptions"] = self.pause_subscriptions - if self.subscriptions: - body["subscriptions"] = self.subscriptions + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions + if self.subscriptions: body['subscriptions'] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskAlert: """Deserializes the SqlTaskAlert from a dictionary.""" - return cls( - alert_id=d.get("alert_id", None), - pause_subscriptions=d.get("pause_subscriptions", None), - subscriptions=_repeated_dict(d, "subscriptions", SqlTaskSubscription), - ) + return cls(alert_id=d.get('alert_id', None), pause_subscriptions=d.get('pause_subscriptions', None), subscriptions=_repeated_dict(d, 'subscriptions', SqlTaskSubscription)) + + @dataclass class SqlTaskDashboard: dashboard_id: str """The canonical identifier of the SQL dashboard.""" - + custom_subject: Optional[str] = None """Subject of the email sent to subscribers of this task.""" - + pause_subscriptions: Optional[bool] = None """If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.""" - + subscriptions: Optional[List[SqlTaskSubscription]] = None """If specified, dashboard snapshots are sent to subscriptions.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskDashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.pause_subscriptions is not None: - body["pause_subscriptions"] = self.pause_subscriptions - if self.subscriptions: - body["subscriptions"] = [v.as_dict() for v in self.subscriptions] + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions + if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskDashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.pause_subscriptions is not None: - body["pause_subscriptions"] = self.pause_subscriptions - if self.subscriptions: - body["subscriptions"] = self.subscriptions + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions + if self.subscriptions: body['subscriptions'] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskDashboard: """Deserializes the SqlTaskDashboard from a dictionary.""" - return cls( - custom_subject=d.get("custom_subject", None), - dashboard_id=d.get("dashboard_id", None), - pause_subscriptions=d.get("pause_subscriptions", None), - subscriptions=_repeated_dict(d, 
"subscriptions", SqlTaskSubscription), - ) + return cls(custom_subject=d.get('custom_subject', None), dashboard_id=d.get('dashboard_id', None), pause_subscriptions=d.get('pause_subscriptions', None), subscriptions=_repeated_dict(d, 'subscriptions', SqlTaskSubscription)) + + @dataclass @@ -7286,7 +6136,7 @@ class SqlTaskFile: path: str """Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.""" - + source: Optional[Source] = None """Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved from the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a @@ -7295,54 +6145,52 @@ class SqlTaskFile: * `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in cloud Git provider.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskFile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.source is not None: - body["source"] = self.source.value + if self.path is not None: body['path'] = self.path + if self.source is not None: body['source'] = self.source.value return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskFile into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.source is not None: - body["source"] = self.source + if self.path is not None: body['path'] = self.path + if self.source is not None: body['source'] = self.source return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskFile: """Deserializes the SqlTaskFile from a dictionary.""" - return cls(path=d.get("path", None), source=_enum(d, "source", Source)) + return cls(path=d.get('path', None), source=_enum(d, 'source', Source)) + + @dataclass class SqlTaskQuery: query_id: str """The canonical identifier of the SQL query.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.query_id is not None: - body["query_id"] = self.query_id + if self.query_id is not None: body['query_id'] = self.query_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.query_id is not None: - body["query_id"] = self.query_id + if self.query_id is not None: body['query_id'] = self.query_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskQuery: """Deserializes the SqlTaskQuery from a dictionary.""" - return cls(query_id=d.get("query_id", None)) + return cls(query_id=d.get('query_id', None)) + + @dataclass @@ -7351,57 +6199,55 @@ class SqlTaskSubscription: """The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. You cannot set both destination_id and user_name for subscription notifications.""" - + user_name: Optional[str] = None """The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. 
You cannot set both destination_id and user_name for subscription notifications.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskSubscription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id - if self.user_name is not None: - body["user_name"] = self.user_name + if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskSubscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id - if self.user_name is not None: - body["user_name"] = self.user_name + if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskSubscription: """Deserializes the SqlTaskSubscription from a dictionary.""" - return cls(destination_id=d.get("destination_id", None), user_name=d.get("user_name", None)) - + return cls(destination_id=d.get('destination_id', None), user_name=d.get('user_name', None)) + -class StorageMode(Enum): - DIRECT_QUERY = "DIRECT_QUERY" - DUAL = "DUAL" - IMPORT = "IMPORT" +class StorageMode(Enum): + + + DIRECT_QUERY = 'DIRECT_QUERY' + DUAL = 'DUAL' + IMPORT = 'IMPORT' @dataclass class SubmitRun: access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" - + budget_policy_id: Optional[str] = None """The user specified id of the budget policy to use for this one-time run. If not specified, the run will not be attributed to any budget policy.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the run begins or completes.""" - + environments: Optional[List[JobEnvironment]] = None """A list of task execution environment specifications that can be referenced by tasks of this run.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -7411,10 +6257,10 @@ class SubmitRun: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + idempotency_token: Optional[str] = None """An optional token that can be used to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID @@ -7428,141 +6274,100 @@ class SubmitRun: For more information, see [How to ensure idempotency for jobs].
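(A hedged sketch of the retry-safe submission pattern this docstring describes, using the SDK's usual WorkspaceClient entry point; the notebook path and cluster ID are placeholders:)

import uuid

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
token = str(uuid.uuid4())  # resubmitting with the same token returns the same run

run = w.jobs.submit(
    run_name="one-time-example",
    idempotency_token=token,
    tasks=[
        jobs.SubmitTask(
            task_key="example",
            existing_cluster_id="<cluster-id>",  # placeholder
            notebook_task=jobs.NotebookTask(notebook_path="/Shared/example"),  # placeholder
        )
    ],
).result()  # blocks until the run reaches a terminal state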
[How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - + notification_settings: Optional[JobNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this run.""" - + queue: Optional[QueueSettings] = None """The queue settings of the one-time run.""" - + run_as: Optional[JobRunAs] = None """Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who submits the request.""" - + run_name: Optional[str] = None """An optional name for the run. The default value is `Untitled`.""" - + tasks: Optional[List[SubmitTask]] = None - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when the run begins or completes.""" - + def as_dict(self) -> dict: """Serializes the SubmitRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environments: - body["environments"] = [v.as_dict() for v in self.environments] - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.queue: - body["queue"] = self.queue.as_dict() - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.run_name is not None: - body["run_name"] = self.run_name - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() + if self.environments: body['environments'] = [v.as_dict() for v in self.environments] + if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.health: body['health'] = self.health.as_dict() + if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token + if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() + if self.queue: body['queue'] = self.queue.as_dict() + if self.run_as: body['run_as'] = self.run_as.as_dict() + if self.run_name is not None: body['run_name'] = self.run_name + if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SubmitRun into a shallow dictionary of its 
immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environments: - body["environments"] = self.environments - if self.git_source: - body["git_source"] = self.git_source - if self.health: - body["health"] = self.health - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.queue: - body["queue"] = self.queue - if self.run_as: - body["run_as"] = self.run_as - if self.run_name is not None: - body["run_name"] = self.run_name - if self.tasks: - body["tasks"] = self.tasks - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environments: body['environments'] = self.environments + if self.git_source: body['git_source'] = self.git_source + if self.health: body['health'] = self.health + if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.queue: body['queue'] = self.queue + if self.run_as: body['run_as'] = self.run_as + if self.run_name is not None: body['run_name'] = self.run_name + if self.tasks: body['tasks'] = self.tasks + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubmitRun: """Deserializes the SubmitRun from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - budget_policy_id=d.get("budget_policy_id", None), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environments=_repeated_dict(d, "environments", JobEnvironment), - git_source=_from_dict(d, "git_source", GitSource), - health=_from_dict(d, "health", JobsHealthRules), - idempotency_token=d.get("idempotency_token", None), - notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), - queue=_from_dict(d, "queue", QueueSettings), - run_as=_from_dict(d, "run_as", JobRunAs), - run_name=d.get("run_name", None), - tasks=_repeated_dict(d, "tasks", SubmitTask), - timeout_seconds=d.get("timeout_seconds", None), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), budget_policy_id=d.get('budget_policy_id', None), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), git_source=_from_dict(d, 'git_source', GitSource), health=_from_dict(d, 'health', JobsHealthRules), idempotency_token=d.get('idempotency_token', None), notification_settings=_from_dict(d, 'notification_settings', 
JobNotificationSettings), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), run_name=d.get('run_name', None), tasks=_repeated_dict(d, 'tasks', SubmitTask), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) + + @dataclass class SubmitRunResponse: """Run was created and started successfully.""" - + run_id: Optional[int] = None """The canonical identifier for the newly submitted run.""" - + def as_dict(self) -> dict: """Serializes the SubmitRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the SubmitRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubmitRunResponse: """Deserializes the SubmitRunResponse from a dictionary.""" - return cls(run_id=d.get("run_id", None)) + return cls(run_id=d.get('run_id', None)) + + @dataclass @@ -7571,92 +6376,95 @@ class SubmitTask: """A unique name for the task. This field is used to refer to this task from other tasks. This field is required and must be unique within its parent job. On Update or Reset, this field is used to reference the tasks to be updated or reset.""" - + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" - + condition_task: Optional[ConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present. The condition task does not require a cluster to execute and does not support retries or notifications.""" - + dashboard_task: Optional[DashboardTask] = None """The task refreshes a dashboard and sends a snapshot to subscribers.""" - + + dbt_cloud_task: Optional[DbtCloudTask] = None + """Task type for dbt cloud""" + dbt_task: Optional[DbtTask] = None """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" - + depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete successfully before executing this task. The key is `task_key`, and the value is the name assigned to the dependent task.""" - + description: Optional[str] = None """An optional description for this task.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. The default behavior is to not send any emails.""" - + environment_key: Optional[str] = None """The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.""" - + existing_cluster_id: Optional[str] = None """If existing_cluster_id, the ID of an existing cluster that is used for all runs. 
When running jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops responding. We suggest running jobs and tasks on new clusters for greater reliability.""" - + for_each_task: Optional[ForEachTask] = None """The task executes a nested task for every input provided when the `for_each_task` field is present.""" - + gen_ai_compute_task: Optional[GenAiComputeTask] = None - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + libraries: Optional[List[compute.Library]] = None """An optional list of libraries to be installed on the cluster. The default value is an empty list.""" - + new_cluster: Optional[compute.ClusterSpec] = None """If new_cluster, a description of a new cluster that is created for each run.""" - + notebook_task: Optional[NotebookTask] = None """The task runs a notebook when the `notebook_task` field is present.""" - + notification_settings: Optional[TaskNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task run.""" - + pipeline_task: Optional[PipelineTask] = None """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.""" - + power_bi_task: Optional[PowerBiTask] = None """The task triggers a Power BI semantic model update when the `power_bi_task` field is present.""" - + python_wheel_task: Optional[PythonWheelTask] = None """The task runs a Python wheel when the `python_wheel_task` field is present.""" - + run_if: Optional[RunIf] = None """An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. See :method:jobs/create for a list of possible values.""" - + run_job_task: Optional[RunJobTask] = None """The task triggers another job when the `run_job_task` field is present.""" - + spark_jar_task: Optional[SparkJarTask] = None """The task runs a JAR when the `spark_jar_task` field is present.""" - + spark_python_task: Optional[SparkPythonTask] = None """The task runs a Python file when the `spark_python_task` field is present.""" - + spark_submit_task: Optional[SparkSubmitTask] = None """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute. @@ -7673,217 +6481,128 @@ class SubmitTask: to leave some room for off-heap usage. The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" - + sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. A value of `0` means no timeout.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when the run begins or completes. The default behavior is to not send any system notifications.
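(A sketch of a two-task graph built from the fields above; notebook paths are placeholders:)

# "report" runs only after "ingest" succeeds; run_if defaults to ALL_SUCCESS
# and is spelled out here only for clarity.
ingest = jobs.SubmitTask(
    task_key="ingest",
    notebook_task=jobs.NotebookTask(notebook_path="/Shared/ingest"),
)
report = jobs.SubmitTask(
    task_key="report",
    depends_on=[jobs.TaskDependency(task_key="ingest")],
    run_if=jobs.RunIf.ALL_SUCCESS,
    notebook_task=jobs.NotebookTask(notebook_path="/Shared/report"),
)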
Task webhooks respect the task notification settings.""" - + def as_dict(self) -> dict: """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms_notebook_task: - body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict() - if self.condition_task: - body["condition_task"] = self.condition_task.as_dict() - if self.dashboard_task: - body["dashboard_task"] = self.dashboard_task.as_dict() - if self.dbt_task: - body["dbt_task"] = self.dbt_task.as_dict() - if self.depends_on: - body["depends_on"] = [v.as_dict() for v in self.depends_on] - if self.description is not None: - body["description"] = self.description - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environment_key is not None: - body["environment_key"] = self.environment_key - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.for_each_task: - body["for_each_task"] = self.for_each_task.as_dict() - if self.gen_ai_compute_task: - body["gen_ai_compute_task"] = self.gen_ai_compute_task.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.new_cluster: - body["new_cluster"] = self.new_cluster.as_dict() - if self.notebook_task: - body["notebook_task"] = self.notebook_task.as_dict() - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.pipeline_task: - body["pipeline_task"] = self.pipeline_task.as_dict() - if self.power_bi_task: - body["power_bi_task"] = self.power_bi_task.as_dict() - if self.python_wheel_task: - body["python_wheel_task"] = self.python_wheel_task.as_dict() - if self.run_if is not None: - body["run_if"] = self.run_if.value - if self.run_job_task: - body["run_job_task"] = self.run_job_task.as_dict() - if self.spark_jar_task: - body["spark_jar_task"] = self.spark_jar_task.as_dict() - if self.spark_python_task: - body["spark_python_task"] = self.spark_python_task.as_dict() - if self.spark_submit_task: - body["spark_submit_task"] = self.spark_submit_task.as_dict() - if self.sql_task: - body["sql_task"] = self.sql_task.as_dict() - if self.task_key is not None: - body["task_key"] = self.task_key - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() + if self.condition_task: body['condition_task'] = self.condition_task.as_dict() + if self.dashboard_task: body['dashboard_task'] = self.dashboard_task.as_dict() + if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task.as_dict() + if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() + if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] + if self.description is not None: body['description'] = self.description + if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict() + if self.gen_ai_compute_task: body['gen_ai_compute_task'] = 
self.gen_ai_compute_task.as_dict() + if self.health: body['health'] = self.health.as_dict() + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() + if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict() + if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict() + if self.power_bi_task: body['power_bi_task'] = self.power_bi_task.as_dict() + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() + if self.run_if is not None: body['run_if'] = self.run_if.value + if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() + if self.sql_task: body['sql_task'] = self.sql_task.as_dict() + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SubmitTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms_notebook_task: - body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task - if self.condition_task: - body["condition_task"] = self.condition_task - if self.dashboard_task: - body["dashboard_task"] = self.dashboard_task - if self.dbt_task: - body["dbt_task"] = self.dbt_task - if self.depends_on: - body["depends_on"] = self.depends_on - if self.description is not None: - body["description"] = self.description - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environment_key is not None: - body["environment_key"] = self.environment_key - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.for_each_task: - body["for_each_task"] = self.for_each_task - if self.gen_ai_compute_task: - body["gen_ai_compute_task"] = self.gen_ai_compute_task - if self.health: - body["health"] = self.health - if self.libraries: - body["libraries"] = self.libraries - if self.new_cluster: - body["new_cluster"] = self.new_cluster - if self.notebook_task: - body["notebook_task"] = self.notebook_task - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.pipeline_task: - body["pipeline_task"] = self.pipeline_task - if self.power_bi_task: - body["power_bi_task"] = self.power_bi_task - if self.python_wheel_task: - body["python_wheel_task"] = self.python_wheel_task - if self.run_if is not None: - body["run_if"] = self.run_if - if self.run_job_task: - body["run_job_task"] = self.run_job_task - if self.spark_jar_task: - body["spark_jar_task"] = self.spark_jar_task - if self.spark_python_task: - body["spark_python_task"] = self.spark_python_task - if self.spark_submit_task: - body["spark_submit_task"] = self.spark_submit_task - if self.sql_task: - body["sql_task"] = self.sql_task - if self.task_key is not None: - body["task_key"] = self.task_key - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if 
self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task + if self.condition_task: body['condition_task'] = self.condition_task + if self.dashboard_task: body['dashboard_task'] = self.dashboard_task + if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task + if self.dbt_task: body['dbt_task'] = self.dbt_task + if self.depends_on: body['depends_on'] = self.depends_on + if self.description is not None: body['description'] = self.description + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task + if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task + if self.health: body['health'] = self.health + if self.libraries: body['libraries'] = self.libraries + if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.notebook_task: body['notebook_task'] = self.notebook_task + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task + if self.power_bi_task: body['power_bi_task'] = self.power_bi_task + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task + if self.run_if is not None: body['run_if'] = self.run_if + if self.run_job_task: body['run_job_task'] = self.run_job_task + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task + if self.sql_task: body['sql_task'] = self.sql_task + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubmitTask: """Deserializes the SubmitTask from a dictionary.""" - return cls( - clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask), - condition_task=_from_dict(d, "condition_task", ConditionTask), - dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), - dbt_task=_from_dict(d, "dbt_task", DbtTask), - depends_on=_repeated_dict(d, "depends_on", TaskDependency), - description=d.get("description", None), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environment_key=d.get("environment_key", None), - existing_cluster_id=d.get("existing_cluster_id", None), - for_each_task=_from_dict(d, "for_each_task", ForEachTask), - gen_ai_compute_task=_from_dict(d, "gen_ai_compute_task", GenAiComputeTask), - health=_from_dict(d, "health", JobsHealthRules), - libraries=_repeated_dict(d, "libraries", compute.Library), - new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), - notebook_task=_from_dict(d, "notebook_task", NotebookTask), - notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings), - pipeline_task=_from_dict(d, "pipeline_task", PipelineTask), - power_bi_task=_from_dict(d, "power_bi_task", PowerBiTask), - python_wheel_task=_from_dict(d, 
"python_wheel_task", PythonWheelTask), - run_if=_enum(d, "run_if", RunIf), - run_job_task=_from_dict(d, "run_job_task", RunJobTask), - spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask), - spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask), - spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask), - sql_task=_from_dict(d, "sql_task", SqlTask), - task_key=d.get("task_key", None), - timeout_seconds=d.get("timeout_seconds", None), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) + return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', CleanRoomsNotebookTask), condition_task=_from_dict(d, 'condition_task', ConditionTask), dashboard_task=_from_dict(d, 'dashboard_task', DashboardTask), dbt_cloud_task=_from_dict(d, 'dbt_cloud_task', DbtCloudTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environment_key=d.get('environment_key', None), existing_cluster_id=d.get('existing_cluster_id', None), for_each_task=_from_dict(d, 'for_each_task', ForEachTask), gen_ai_compute_task=_from_dict(d, 'gen_ai_compute_task', GenAiComputeTask), health=_from_dict(d, 'health', JobsHealthRules), libraries=_repeated_dict(d, 'libraries', compute.Library), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec), notebook_task=_from_dict(d, 'notebook_task', NotebookTask), notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings), pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask), power_bi_task=_from_dict(d, 'power_bi_task', PowerBiTask), python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask), run_if=_enum(d, 'run_if', RunIf), run_job_task=_from_dict(d, 'run_job_task', RunJobTask), spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask), spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask), spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask), sql_task=_from_dict(d, 'sql_task', SqlTask), task_key=d.get('task_key', None), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) + + @dataclass class Subscription: custom_subject: Optional[str] = None """Optional: Allows users to specify a custom subject line on the email sent to subscribers.""" - + paused: Optional[bool] = None """When true, the subscription will not send emails.""" - + subscribers: Optional[List[SubscriptionSubscriber]] = None """The list of subscribers to send the snapshot of the dashboard to.""" - + def as_dict(self) -> dict: """Serializes the Subscription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.paused is not None: - body["paused"] = self.paused - if self.subscribers: - body["subscribers"] = [v.as_dict() for v in self.subscribers] + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.paused is not None: body['paused'] = self.paused + if self.subscribers: body['subscribers'] = [v.as_dict() for v in self.subscribers] return body def as_shallow_dict(self) -> dict: """Serializes the Subscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_subject is not None: - body["custom_subject"] = 
self.custom_subject - if self.paused is not None: - body["paused"] = self.paused - if self.subscribers: - body["subscribers"] = self.subscribers + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.paused is not None: body['paused'] = self.paused + if self.subscribers: body['subscribers'] = self.subscribers return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Subscription: """Deserializes the Subscription from a dictionary.""" - return cls( - custom_subject=d.get("custom_subject", None), - paused=d.get("paused", None), - subscribers=_repeated_dict(d, "subscribers", SubscriptionSubscriber), - ) + return cls(custom_subject=d.get('custom_subject', None), paused=d.get('paused', None), subscribers=_repeated_dict(d, 'subscribers', SubscriptionSubscriber)) + + @dataclass @@ -7891,88 +6610,75 @@ class SubscriptionSubscriber: destination_id: Optional[str] = None """A snapshot of the dashboard will be sent to the destination when the `destination_id` field is present.""" - + user_name: Optional[str] = None """A snapshot of the dashboard will be sent to the user's email when the `user_name` field is present.""" - + def as_dict(self) -> dict: """Serializes the SubscriptionSubscriber into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id - if self.user_name is not None: - body["user_name"] = self.user_name + if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the SubscriptionSubscriber into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id - if self.user_name is not None: - body["user_name"] = self.user_name + if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriber: """Deserializes the SubscriptionSubscriber from a dictionary.""" - return cls(destination_id=d.get("destination_id", None), user_name=d.get("user_name", None)) + return cls(destination_id=d.get('destination_id', None), user_name=d.get('user_name', None)) + + @dataclass class TableUpdateTriggerConfiguration: condition: Optional[Condition] = None """The table(s) condition based on which to trigger a job run.""" - + min_time_between_triggers_seconds: Optional[int] = None """If set, the trigger starts a run only after the specified amount of time has passed since the last time the trigger fired. The minimum allowed value is 60 seconds.""" - + table_names: Optional[List[str]] = None """A list of Delta tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.""" - + wait_after_last_change_seconds: Optional[int] = None """If set, the trigger starts a run only after no table updates have occurred for the specified time and can be used to wait for a series of table updates before triggering a run. 
The minimum allowed value is 60 seconds.""" - + def as_dict(self) -> dict: """Serializes the TableUpdateTriggerConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition is not None: - body["condition"] = self.condition.value - if self.min_time_between_triggers_seconds is not None: - body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds - if self.table_names: - body["table_names"] = [v for v in self.table_names] - if self.wait_after_last_change_seconds is not None: - body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + if self.condition is not None: body['condition'] = self.condition.value + if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds + if self.table_names: body['table_names'] = [v for v in self.table_names] + if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds return body def as_shallow_dict(self) -> dict: """Serializes the TableUpdateTriggerConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition is not None: - body["condition"] = self.condition - if self.min_time_between_triggers_seconds is not None: - body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds - if self.table_names: - body["table_names"] = self.table_names - if self.wait_after_last_change_seconds is not None: - body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds + if self.condition is not None: body['condition'] = self.condition + if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds + if self.table_names: body['table_names'] = self.table_names + if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableUpdateTriggerConfiguration: """Deserializes the TableUpdateTriggerConfiguration from a dictionary.""" - return cls( - condition=_enum(d, "condition", Condition), - min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None), - table_names=d.get("table_names", None), - wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None), - ) + return cls(condition=_enum(d, 'condition', Condition), min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None), table_names=d.get('table_names', None), wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None)) + + @dataclass @@ -7981,100 +6687,103 @@ class Task: """A unique name for the task. This field is used to refer to this task from other tasks. This field is required and must be unique within its parent job. On Update or Reset, this field is used to reference the tasks to be updated or reset.""" - + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" - + condition_task: Optional[ConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present. 
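(A sketch of the trigger and subscription shapes just defined; table names, timings, and IDs are illustrative:)

# Fire when any listed table changes, debounced so a burst of commits
# produces a single run.
trigger = jobs.TableUpdateTriggerConfiguration(
    condition=jobs.Condition.ANY_UPDATED,
    table_names=["main.sales.orders", "main.sales.customers"],
    min_time_between_triggers_seconds=60,
    wait_after_last_change_seconds=120,
)

# A dashboard-task subscription that emails one user and one configured
# notification destination, with a custom subject line.
sub = jobs.Subscription(
    custom_subject="Daily sales snapshot",
    subscribers=[
        jobs.SubscriptionSubscriber(user_name="user@example.com"),
        jobs.SubscriptionSubscriber(destination_id="<destination-id>"),  # placeholder
    ],
)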
The condition task does not require a cluster to execute and does not support retries or notifications.""" - + dashboard_task: Optional[DashboardTask] = None """The task refreshes a dashboard and sends a snapshot to subscribers.""" - + + dbt_cloud_task: Optional[DbtCloudTask] = None + """Task type for dbt cloud""" + dbt_task: Optional[DbtTask] = None """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" - + depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete before executing this task. The task will run only if the `run_if` condition is true. The key is `task_key`, and the value is the name assigned to the dependent task.""" - + description: Optional[str] = None """An optional description for this task.""" - + disable_auto_optimization: Optional[bool] = None """An option to disable auto optimization in serverless.""" - + email_notifications: Optional[TaskEmailNotifications] = None """An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.""" - + environment_key: Optional[str] = None """The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.""" - + existing_cluster_id: Optional[str] = None """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops responding. We suggest running jobs and tasks on new clusters for greater reliability.""" - + for_each_task: Optional[ForEachTask] = None """The task executes a nested task for every input provided when the `for_each_task` field is present.""" - + gen_ai_compute_task: Optional[GenAiComputeTask] = None - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + job_cluster_key: Optional[str] = None """If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.""" - + libraries: Optional[List[compute.Library]] = None """An optional list of libraries to be installed on the cluster. The default value is an empty list.""" - + max_retries: Optional[int] = None """An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR` `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never retry.""" - + min_retry_interval_millis: Optional[int] = None """An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run.
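(A sketch of how the retry knobs combine on a task; the values are illustrative:)

# Retry up to 3 times, wait at least two minutes between attempts, and also
# retry when the task times out instead of failing outright.
flaky = jobs.Task(
    task_key="flaky-step",
    max_retries=3,
    min_retry_interval_millis=120_000,
    retry_on_timeout=True,
    notebook_task=jobs.NotebookTask(notebook_path="/Shared/flaky"),  # placeholder
)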
The default behavior is that unsuccessful runs are immediately retried.""" - + new_cluster: Optional[compute.ClusterSpec] = None """If new_cluster, a description of a new cluster that is created for each run.""" - + notebook_task: Optional[NotebookTask] = None """The task runs a notebook when the `notebook_task` field is present.""" - + notification_settings: Optional[TaskNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task.""" - + pipeline_task: Optional[PipelineTask] = None """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.""" - + power_bi_task: Optional[PowerBiTask] = None """The task triggers a Power BI semantic model update when the `power_bi_task` field is present.""" - + python_wheel_task: Optional[PythonWheelTask] = None """The task runs a Python wheel when the `python_wheel_task` field is present.""" - + retry_on_timeout: Optional[bool] = None """An optional policy to specify whether to retry a job when it times out. The default behavior is to not retry on timeout.""" - + run_if: Optional[RunIf] = None """An optional value specifying the condition determining whether the task is run once its dependencies have been completed. @@ -8083,16 +6792,16 @@ class Task: one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: All dependencies have failed""" - + run_job_task: Optional[RunJobTask] = None """The task triggers another job when the `run_job_task` field is present.""" - + spark_jar_task: Optional[SparkJarTask] = None """The task runs a JAR when the `spark_jar_task` field is present.""" - + spark_python_task: Optional[SparkPythonTask] = None """The task runs a Python file when the `spark_python_task` field is present.""" - + spark_submit_task: Optional[SparkSubmitTask] = None """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute. @@ -8109,231 +6818,133 @@ class Task: to leave some room for off-heap usage. The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" - + sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. A value of `0` means no timeout.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this task begin or complete.
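(A sketch of the wire-format round trip implemented by the serializers below:)

t = jobs.Task(
    task_key="ingest",
    notebook_task=jobs.NotebookTask(notebook_path="/Shared/ingest"),  # placeholder
)
payload = t.as_dict()                    # nested dataclasses become plain dicts
restored = jobs.Task.from_dict(payload)  # and back again
assert restored.task_key == t.task_key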
The default behavior is to not send any system notifications.""" - + def as_dict(self) -> dict: """Serializes the Task into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms_notebook_task: - body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict() - if self.condition_task: - body["condition_task"] = self.condition_task.as_dict() - if self.dashboard_task: - body["dashboard_task"] = self.dashboard_task.as_dict() - if self.dbt_task: - body["dbt_task"] = self.dbt_task.as_dict() - if self.depends_on: - body["depends_on"] = [v.as_dict() for v in self.depends_on] - if self.description is not None: - body["description"] = self.description - if self.disable_auto_optimization is not None: - body["disable_auto_optimization"] = self.disable_auto_optimization - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environment_key is not None: - body["environment_key"] = self.environment_key - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.for_each_task: - body["for_each_task"] = self.for_each_task.as_dict() - if self.gen_ai_compute_task: - body["gen_ai_compute_task"] = self.gen_ai_compute_task.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.max_retries is not None: - body["max_retries"] = self.max_retries - if self.min_retry_interval_millis is not None: - body["min_retry_interval_millis"] = self.min_retry_interval_millis - if self.new_cluster: - body["new_cluster"] = self.new_cluster.as_dict() - if self.notebook_task: - body["notebook_task"] = self.notebook_task.as_dict() - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.pipeline_task: - body["pipeline_task"] = self.pipeline_task.as_dict() - if self.power_bi_task: - body["power_bi_task"] = self.power_bi_task.as_dict() - if self.python_wheel_task: - body["python_wheel_task"] = self.python_wheel_task.as_dict() - if self.retry_on_timeout is not None: - body["retry_on_timeout"] = self.retry_on_timeout - if self.run_if is not None: - body["run_if"] = self.run_if.value - if self.run_job_task: - body["run_job_task"] = self.run_job_task.as_dict() - if self.spark_jar_task: - body["spark_jar_task"] = self.spark_jar_task.as_dict() - if self.spark_python_task: - body["spark_python_task"] = self.spark_python_task.as_dict() - if self.spark_submit_task: - body["spark_submit_task"] = self.spark_submit_task.as_dict() - if self.sql_task: - body["sql_task"] = self.sql_task.as_dict() - if self.task_key is not None: - body["task_key"] = self.task_key - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() + if self.condition_task: body['condition_task'] = self.condition_task.as_dict() + if self.dashboard_task: body['dashboard_task'] = self.dashboard_task.as_dict() + if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task.as_dict() + if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() + if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] + if 
self.description is not None: body['description'] = self.description + if self.disable_auto_optimization is not None: body['disable_auto_optimization'] = self.disable_auto_optimization + if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict() + if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task.as_dict() + if self.health: body['health'] = self.health.as_dict() + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.max_retries is not None: body['max_retries'] = self.max_retries + if self.min_retry_interval_millis is not None: body['min_retry_interval_millis'] = self.min_retry_interval_millis + if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() + if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict() + if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict() + if self.power_bi_task: body['power_bi_task'] = self.power_bi_task.as_dict() + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() + if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout + if self.run_if is not None: body['run_if'] = self.run_if.value + if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() + if self.sql_task: body['sql_task'] = self.sql_task.as_dict() + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Task into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms_notebook_task: - body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task - if self.condition_task: - body["condition_task"] = self.condition_task - if self.dashboard_task: - body["dashboard_task"] = self.dashboard_task - if self.dbt_task: - body["dbt_task"] = self.dbt_task - if self.depends_on: - body["depends_on"] = self.depends_on - if self.description is not None: - body["description"] = self.description - if self.disable_auto_optimization is not None: - body["disable_auto_optimization"] = self.disable_auto_optimization - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environment_key is not None: - body["environment_key"] = self.environment_key - if self.existing_cluster_id is not None: - body["existing_cluster_id"] = self.existing_cluster_id - if self.for_each_task: - body["for_each_task"] = self.for_each_task - if self.gen_ai_compute_task: - body["gen_ai_compute_task"] = self.gen_ai_compute_task - if self.health: - body["health"] = self.health - if 
self.job_cluster_key is not None: - body["job_cluster_key"] = self.job_cluster_key - if self.libraries: - body["libraries"] = self.libraries - if self.max_retries is not None: - body["max_retries"] = self.max_retries - if self.min_retry_interval_millis is not None: - body["min_retry_interval_millis"] = self.min_retry_interval_millis - if self.new_cluster: - body["new_cluster"] = self.new_cluster - if self.notebook_task: - body["notebook_task"] = self.notebook_task - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.pipeline_task: - body["pipeline_task"] = self.pipeline_task - if self.power_bi_task: - body["power_bi_task"] = self.power_bi_task - if self.python_wheel_task: - body["python_wheel_task"] = self.python_wheel_task - if self.retry_on_timeout is not None: - body["retry_on_timeout"] = self.retry_on_timeout - if self.run_if is not None: - body["run_if"] = self.run_if - if self.run_job_task: - body["run_job_task"] = self.run_job_task - if self.spark_jar_task: - body["spark_jar_task"] = self.spark_jar_task - if self.spark_python_task: - body["spark_python_task"] = self.spark_python_task - if self.spark_submit_task: - body["spark_submit_task"] = self.spark_submit_task - if self.sql_task: - body["sql_task"] = self.sql_task - if self.task_key is not None: - body["task_key"] = self.task_key - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications + if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task + if self.condition_task: body['condition_task'] = self.condition_task + if self.dashboard_task: body['dashboard_task'] = self.dashboard_task + if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task + if self.dbt_task: body['dbt_task'] = self.dbt_task + if self.depends_on: body['depends_on'] = self.depends_on + if self.description is not None: body['description'] = self.description + if self.disable_auto_optimization is not None: body['disable_auto_optimization'] = self.disable_auto_optimization + if self.email_notifications: body['email_notifications'] = self.email_notifications + if self.environment_key is not None: body['environment_key'] = self.environment_key + if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id + if self.for_each_task: body['for_each_task'] = self.for_each_task + if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task + if self.health: body['health'] = self.health + if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key + if self.libraries: body['libraries'] = self.libraries + if self.max_retries is not None: body['max_retries'] = self.max_retries + if self.min_retry_interval_millis is not None: body['min_retry_interval_millis'] = self.min_retry_interval_millis + if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.notebook_task: body['notebook_task'] = self.notebook_task + if self.notification_settings: body['notification_settings'] = self.notification_settings + if self.pipeline_task: body['pipeline_task'] = self.pipeline_task + if self.power_bi_task: body['power_bi_task'] = self.power_bi_task + if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task + if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout + if self.run_if is not None: body['run_if'] = self.run_if + 
if self.run_job_task: body['run_job_task'] = self.run_job_task + if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task + if self.spark_python_task: body['spark_python_task'] = self.spark_python_task + if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task + if self.sql_task: body['sql_task'] = self.sql_task + if self.task_key is not None: body['task_key'] = self.task_key + if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds + if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Task: """Deserializes the Task from a dictionary.""" - return cls( - clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask), - condition_task=_from_dict(d, "condition_task", ConditionTask), - dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), - dbt_task=_from_dict(d, "dbt_task", DbtTask), - depends_on=_repeated_dict(d, "depends_on", TaskDependency), - description=d.get("description", None), - disable_auto_optimization=d.get("disable_auto_optimization", None), - email_notifications=_from_dict(d, "email_notifications", TaskEmailNotifications), - environment_key=d.get("environment_key", None), - existing_cluster_id=d.get("existing_cluster_id", None), - for_each_task=_from_dict(d, "for_each_task", ForEachTask), - gen_ai_compute_task=_from_dict(d, "gen_ai_compute_task", GenAiComputeTask), - health=_from_dict(d, "health", JobsHealthRules), - job_cluster_key=d.get("job_cluster_key", None), - libraries=_repeated_dict(d, "libraries", compute.Library), - max_retries=d.get("max_retries", None), - min_retry_interval_millis=d.get("min_retry_interval_millis", None), - new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), - notebook_task=_from_dict(d, "notebook_task", NotebookTask), - notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings), - pipeline_task=_from_dict(d, "pipeline_task", PipelineTask), - power_bi_task=_from_dict(d, "power_bi_task", PowerBiTask), - python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask), - retry_on_timeout=d.get("retry_on_timeout", None), - run_if=_enum(d, "run_if", RunIf), - run_job_task=_from_dict(d, "run_job_task", RunJobTask), - spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask), - spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask), - spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask), - sql_task=_from_dict(d, "sql_task", SqlTask), - task_key=d.get("task_key", None), - timeout_seconds=d.get("timeout_seconds", None), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) + return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', CleanRoomsNotebookTask), condition_task=_from_dict(d, 'condition_task', ConditionTask), dashboard_task=_from_dict(d, 'dashboard_task', DashboardTask), dbt_cloud_task=_from_dict(d, 'dbt_cloud_task', DbtCloudTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), disable_auto_optimization=d.get('disable_auto_optimization', None), email_notifications=_from_dict(d, 'email_notifications', TaskEmailNotifications), environment_key=d.get('environment_key', None), existing_cluster_id=d.get('existing_cluster_id', None), for_each_task=_from_dict(d, 'for_each_task', ForEachTask), gen_ai_compute_task=_from_dict(d, 
'gen_ai_compute_task', GenAiComputeTask), health=_from_dict(d, 'health', JobsHealthRules), job_cluster_key=d.get('job_cluster_key', None), libraries=_repeated_dict(d, 'libraries', compute.Library), max_retries=d.get('max_retries', None), min_retry_interval_millis=d.get('min_retry_interval_millis', None), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec), notebook_task=_from_dict(d, 'notebook_task', NotebookTask), notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings), pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask), power_bi_task=_from_dict(d, 'power_bi_task', PowerBiTask), python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask), retry_on_timeout=d.get('retry_on_timeout', None), run_if=_enum(d, 'run_if', RunIf), run_job_task=_from_dict(d, 'run_job_task', RunJobTask), spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask), spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask), spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask), sql_task=_from_dict(d, 'sql_task', SqlTask), task_key=d.get('task_key', None), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) + + @dataclass class TaskDependency: task_key: str """The name of the task this task depends on.""" - + outcome: Optional[str] = None """Can only be specified on condition task dependencies. The outcome of the dependent task that must be met for this task to run.""" - + def as_dict(self) -> dict: """Serializes the TaskDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.outcome is not None: - body["outcome"] = self.outcome - if self.task_key is not None: - body["task_key"] = self.task_key + if self.outcome is not None: body['outcome'] = self.outcome + if self.task_key is not None: body['task_key'] = self.task_key return body def as_shallow_dict(self) -> dict: """Serializes the TaskDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.outcome is not None: - body["outcome"] = self.outcome - if self.task_key is not None: - body["task_key"] = self.task_key + if self.outcome is not None: body['outcome'] = self.outcome + if self.task_key is not None: body['task_key'] = self.task_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TaskDependency: """Deserializes the TaskDependency from a dictionary.""" - return cls(outcome=d.get("outcome", None), task_key=d.get("task_key", None)) + return cls(outcome=d.get('outcome', None), task_key=d.get('task_key', None)) + + @dataclass @@ -8341,81 +6952,64 @@ class TaskEmailNotifications: no_alert_for_skipped_runs: Optional[bool] = None """If true, do not send email to recipients specified in `on_failure` if the run is skipped. This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.""" - + on_duration_warning_threshold_exceeded: Optional[List[str]] = None """A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.""" - + on_failure: Optional[List[str]] = None """A list of email addresses to be notified when a run unsuccessfully completes. 
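Before moving on, a short hypothetical sketch of how the `TaskDependency` above combines with `run_if`; `ALL_SUCCESS` is assumed to be a member of the `RunIf` enum defined earlier in this module, and the task keys are placeholders.

from databricks.sdk.service import jobs

# Sketch: `publish` runs only when the upstream condition task's
# outcome is "true" and all dependencies succeeded.
dep = jobs.TaskDependency(task_key="check_freshness", outcome="true")
publish = jobs.Task(task_key="publish", depends_on=[dep], run_if=jobs.RunIf.ALL_SUCCESS)
print(publish.as_dict()["depends_on"])  # -> [{'outcome': 'true', 'task_key': 'check_freshness'}]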
A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED` or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + on_start: Optional[List[str]] = None """A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + on_streaming_backlog_exceeded: Optional[List[str]] = None """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream. Streaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.""" - + on_success: Optional[List[str]] = None """A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + def as_dict(self) -> dict: """Serializes the TaskEmailNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs - if self.on_duration_warning_threshold_exceeded: - body["on_duration_warning_threshold_exceeded"] = [v for v in self.on_duration_warning_threshold_exceeded] - if self.on_failure: - body["on_failure"] = [v for v in self.on_failure] - if self.on_start: - body["on_start"] = [v for v in self.on_start] - if self.on_streaming_backlog_exceeded: - body["on_streaming_backlog_exceeded"] = [v for v in self.on_streaming_backlog_exceeded] - if self.on_success: - body["on_success"] = [v for v in self.on_success] + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = [v for v in self.on_duration_warning_threshold_exceeded] + if self.on_failure: body['on_failure'] = [v for v in self.on_failure] + if self.on_start: body['on_start'] = [v for v in self.on_start] + if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded] + if self.on_success: body['on_success'] = [v for v in self.on_success] return body def as_shallow_dict(self) -> dict: """Serializes the TaskEmailNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if 
self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded + if self.on_failure: body['on_failure'] = self.on_failure + if self.on_start: body['on_start'] = self.on_start + if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded + if self.on_success: body['on_success'] = self.on_success return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TaskEmailNotifications: """Deserializes the TaskEmailNotifications from a dictionary.""" - return cls( - no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None), - on_duration_warning_threshold_exceeded=d.get("on_duration_warning_threshold_exceeded", None), - on_failure=d.get("on_failure", None), - on_start=d.get("on_start", None), - on_streaming_backlog_exceeded=d.get("on_streaming_backlog_exceeded", None), - on_success=d.get("on_success", None), - ) + return cls(no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None), on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded', None), on_failure=d.get('on_failure', None), on_start=d.get('on_start', None), on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None), on_success=d.get('on_success', None)) + + @dataclass @@ -8424,45 +7018,37 @@ class TaskNotificationSettings: """If true, do not send notifications to recipients specified in `on_start` for the retried runs and do not send notifications to recipients specified in `on_failure` until the last retry of the run.""" - + no_alert_for_canceled_runs: Optional[bool] = None """If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.""" - + no_alert_for_skipped_runs: Optional[bool] = None """If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.""" - + def as_dict(self) -> dict: """Serializes the TaskNotificationSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_on_last_attempt is not None: - body["alert_on_last_attempt"] = self.alert_on_last_attempt - if self.no_alert_for_canceled_runs is not None: - body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs + if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt + if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs return body def as_shallow_dict(self) -> dict: """Serializes the TaskNotificationSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_on_last_attempt is not None: - body["alert_on_last_attempt"] = self.alert_on_last_attempt - if self.no_alert_for_canceled_runs is not None: - body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs - if self.no_alert_for_skipped_runs is not None: - body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs + if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt + if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = 
self.no_alert_for_skipped_runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TaskNotificationSettings: """Deserializes the TaskNotificationSettings from a dictionary.""" - return cls( - alert_on_last_attempt=d.get("alert_on_last_attempt", None), - no_alert_for_canceled_runs=d.get("no_alert_for_canceled_runs", None), - no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None), - ) + return cls(alert_on_last_attempt=d.get('alert_on_last_attempt', None), no_alert_for_canceled_runs=d.get('no_alert_for_canceled_runs', None), no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None)) + + class TerminationCodeCode(Enum): @@ -8500,35 +7086,34 @@ class TerminationCodeCode(Enum): run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. - + [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now""" - - BUDGET_POLICY_LIMIT_EXCEEDED = "BUDGET_POLICY_LIMIT_EXCEEDED" - CANCELED = "CANCELED" - CLOUD_FAILURE = "CLOUD_FAILURE" - CLUSTER_ERROR = "CLUSTER_ERROR" - CLUSTER_REQUEST_LIMIT_EXCEEDED = "CLUSTER_REQUEST_LIMIT_EXCEEDED" - DISABLED = "DISABLED" - DRIVER_ERROR = "DRIVER_ERROR" - FEATURE_DISABLED = "FEATURE_DISABLED" - INTERNAL_ERROR = "INTERNAL_ERROR" - INVALID_CLUSTER_REQUEST = "INVALID_CLUSTER_REQUEST" - INVALID_RUN_CONFIGURATION = "INVALID_RUN_CONFIGURATION" - LIBRARY_INSTALLATION_ERROR = "LIBRARY_INSTALLATION_ERROR" - MAX_CONCURRENT_RUNS_EXCEEDED = "MAX_CONCURRENT_RUNS_EXCEEDED" - MAX_JOB_QUEUE_SIZE_EXCEEDED = "MAX_JOB_QUEUE_SIZE_EXCEEDED" - MAX_SPARK_CONTEXTS_EXCEEDED = "MAX_SPARK_CONTEXTS_EXCEEDED" - REPOSITORY_CHECKOUT_FAILED = "REPOSITORY_CHECKOUT_FAILED" - RESOURCE_NOT_FOUND = "RESOURCE_NOT_FOUND" - RUN_EXECUTION_ERROR = "RUN_EXECUTION_ERROR" - SKIPPED = "SKIPPED" - STORAGE_ACCESS_ERROR = "STORAGE_ACCESS_ERROR" - SUCCESS = "SUCCESS" - SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES" - UNAUTHORIZED_ERROR = "UNAUTHORIZED_ERROR" - USER_CANCELED = "USER_CANCELED" - WORKSPACE_RUN_LIMIT_EXCEEDED = "WORKSPACE_RUN_LIMIT_EXCEEDED" - + + BUDGET_POLICY_LIMIT_EXCEEDED = 'BUDGET_POLICY_LIMIT_EXCEEDED' + CANCELED = 'CANCELED' + CLOUD_FAILURE = 'CLOUD_FAILURE' + CLUSTER_ERROR = 'CLUSTER_ERROR' + CLUSTER_REQUEST_LIMIT_EXCEEDED = 'CLUSTER_REQUEST_LIMIT_EXCEEDED' + DISABLED = 'DISABLED' + DRIVER_ERROR = 'DRIVER_ERROR' + FEATURE_DISABLED = 'FEATURE_DISABLED' + INTERNAL_ERROR = 'INTERNAL_ERROR' + INVALID_CLUSTER_REQUEST = 'INVALID_CLUSTER_REQUEST' + INVALID_RUN_CONFIGURATION = 'INVALID_RUN_CONFIGURATION' + LIBRARY_INSTALLATION_ERROR = 'LIBRARY_INSTALLATION_ERROR' + MAX_CONCURRENT_RUNS_EXCEEDED = 'MAX_CONCURRENT_RUNS_EXCEEDED' + MAX_JOB_QUEUE_SIZE_EXCEEDED = 'MAX_JOB_QUEUE_SIZE_EXCEEDED' + MAX_SPARK_CONTEXTS_EXCEEDED = 'MAX_SPARK_CONTEXTS_EXCEEDED' + REPOSITORY_CHECKOUT_FAILED = 'REPOSITORY_CHECKOUT_FAILED' + RESOURCE_NOT_FOUND = 'RESOURCE_NOT_FOUND' + RUN_EXECUTION_ERROR = 'RUN_EXECUTION_ERROR' + SKIPPED = 'SKIPPED' + STORAGE_ACCESS_ERROR = 'STORAGE_ACCESS_ERROR' + SUCCESS = 'SUCCESS' + SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES' + UNAUTHORIZED_ERROR = 'UNAUTHORIZED_ERROR' + USER_CANCELED = 'USER_CANCELED' + WORKSPACE_RUN_LIMIT_EXCEEDED = 'WORKSPACE_RUN_LIMIT_EXCEEDED' @dataclass class TerminationDetails: @@ -8569,11 +7154,11 @@ class TerminationDetails: limit. 
* `DISABLED`: The run was never executed because it was disabled explicitly by the user. [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now""" - + message: Optional[str] = None """A descriptive message with the termination details. This field is unstructured and the format might change.""" - + type: Optional[TerminationTypeType] = None """* `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the Databricks platform. Please look at the [status page] or contact support if the issue persists. @@ -8582,37 +7167,29 @@ class TerminationDetails: provider. [status page]: https://status.databricks.com/""" - + def as_dict(self) -> dict: """Serializes the TerminationDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.code is not None: - body["code"] = self.code.value - if self.message is not None: - body["message"] = self.message - if self.type is not None: - body["type"] = self.type.value + if self.code is not None: body['code'] = self.code.value + if self.message is not None: body['message'] = self.message + if self.type is not None: body['type'] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the TerminationDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.code is not None: - body["code"] = self.code - if self.message is not None: - body["message"] = self.message - if self.type is not None: - body["type"] = self.type + if self.code is not None: body['code'] = self.code + if self.message is not None: body['message'] = self.message + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TerminationDetails: """Deserializes the TerminationDetails from a dictionary.""" - return cls( - code=_enum(d, "code", TerminationCodeCode), - message=d.get("message", None), - type=_enum(d, "type", TerminationTypeType), - ) + return cls(code=_enum(d, 'code', TerminationCodeCode), message=d.get('message', None), type=_enum(d, 'type', TerminationTypeType)) + + class TerminationTypeType(Enum): @@ -8621,103 +7198,112 @@ class TerminationTypeType(Enum): * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud provider. 
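As a hypothetical sketch of consuming these termination types, assuming the finished run exposes `status.termination_details` (defined elsewhere in this module) and using a placeholder run id:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
run = w.jobs.get_run(run_id=42)  # placeholder run id
details = run.status.termination_details if run.status else None
if details and details.type == jobs.TerminationTypeType.CLOUD_FAILURE:
    # Transient cloud-side failure: a retry may succeed.
    print(f"{details.code}: {details.message}")
elif details and details.type == jobs.TerminationTypeType.CLIENT_ERROR:
    # User input or job configuration needs fixing before retrying.
    print(f"{details.code}: {details.message}")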
- + [status page]: https://status.databricks.com/""" - - CLIENT_ERROR = "CLIENT_ERROR" - CLOUD_FAILURE = "CLOUD_FAILURE" - INTERNAL_ERROR = "INTERNAL_ERROR" - SUCCESS = "SUCCESS" - + + CLIENT_ERROR = 'CLIENT_ERROR' + CLOUD_FAILURE = 'CLOUD_FAILURE' + INTERNAL_ERROR = 'INTERNAL_ERROR' + SUCCESS = 'SUCCESS' @dataclass class TriggerInfo: """Additional details about what triggered the run""" - + run_id: Optional[int] = None """The run id of the Run Job task run""" - + def as_dict(self) -> dict: """Serializes the TriggerInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the TriggerInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TriggerInfo: """Deserializes the TriggerInfo from a dictionary.""" - return cls(run_id=d.get("run_id", None)) + return cls(run_id=d.get('run_id', None)) + + @dataclass class TriggerSettings: file_arrival: Optional[FileArrivalTriggerConfiguration] = None """File arrival trigger settings.""" - + pause_status: Optional[PauseStatus] = None """Whether this trigger is paused or not.""" - + periodic: Optional[PeriodicTriggerConfiguration] = None """Periodic trigger settings.""" - + table: Optional[TableUpdateTriggerConfiguration] = None """Old table trigger settings name. Deprecated in favor of `table_update`.""" - + table_update: Optional[TableUpdateTriggerConfiguration] = None - + def as_dict(self) -> dict: """Serializes the TriggerSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_arrival: - body["file_arrival"] = self.file_arrival.as_dict() - if self.pause_status is not None: - body["pause_status"] = self.pause_status.value - if self.periodic: - body["periodic"] = self.periodic.as_dict() - if self.table: - body["table"] = self.table.as_dict() - if self.table_update: - body["table_update"] = self.table_update.as_dict() + if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict() + if self.pause_status is not None: body['pause_status'] = self.pause_status.value + if self.periodic: body['periodic'] = self.periodic.as_dict() + if self.table: body['table'] = self.table.as_dict() + if self.table_update: body['table_update'] = self.table_update.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TriggerSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_arrival: - body["file_arrival"] = self.file_arrival - if self.pause_status is not None: - body["pause_status"] = self.pause_status - if self.periodic: - body["periodic"] = self.periodic - if self.table: - body["table"] = self.table - if self.table_update: - body["table_update"] = self.table_update + if self.file_arrival: body['file_arrival'] = self.file_arrival + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.periodic: body['periodic'] = self.periodic + if self.table: body['table'] = self.table + if self.table_update: body['table_update'] = self.table_update return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings: """Deserializes the TriggerSettings from a dictionary.""" - return cls( - file_arrival=_from_dict(d, "file_arrival", 
FileArrivalTriggerConfiguration), - pause_status=_enum(d, "pause_status", PauseStatus), - periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration), - table=_from_dict(d, "table", TableUpdateTriggerConfiguration), - table_update=_from_dict(d, "table_update", TableUpdateTriggerConfiguration), - ) + return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration), pause_status=_enum(d, 'pause_status', PauseStatus), periodic=_from_dict(d, 'periodic', PeriodicTriggerConfiguration), table=_from_dict(d, 'table', TableUpdateTriggerConfiguration), table_update=_from_dict(d, 'table_update', TableUpdateTriggerConfiguration)) + + + + +@dataclass +class TriggerStateProto: + file_arrival: Optional[FileArrivalTriggerState] = None + + def as_dict(self) -> dict: + """Serializes the TriggerStateProto into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TriggerStateProto into a shallow dictionary of its immediate attributes.""" + body = {} + if self.file_arrival: body['file_arrival'] = self.file_arrival + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TriggerStateProto: + """Deserializes the TriggerStateProto from a dictionary.""" + return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerState)) + + class TriggerType(Enum): """The type of trigger that fired this run. - + * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs when you trigger a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a @@ -8726,24 +7312,23 @@ class TriggerType(Enum): Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by a user to manually restart a continuous job run.""" - - FILE_ARRIVAL = "FILE_ARRIVAL" - ONE_TIME = "ONE_TIME" - PERIODIC = "PERIODIC" - RETRY = "RETRY" - RUN_JOB_TASK = "RUN_JOB_TASK" - TABLE = "TABLE" - + + FILE_ARRIVAL = 'FILE_ARRIVAL' + ONE_TIME = 'ONE_TIME' + PERIODIC = 'PERIODIC' + RETRY = 'RETRY' + RUN_JOB_TASK = 'RUN_JOB_TASK' + TABLE = 'TABLE' @dataclass class UpdateJob: job_id: int """The canonical identifier of the job to update. This field is required.""" - + fields_to_remove: Optional[List[str]] = None """Remove top-level fields in the job settings. Removing nested fields is not supported, except for tasks and job clusters (`tasks/task_1`). This field is optional.""" - + new_settings: Optional[JobSettings] = None """The new settings for the job. @@ -8755,37 +7340,29 @@ class UpdateJob: Changes to the field `JobSettings.timeout_seconds` are applied to active runs.
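For instance, a hypothetical usage sketch of the matching `update` call (the job id is a placeholder), setting a new value and removing a top-level field in one request:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()
# Sketch: raise the timeout and drop the cron schedule in a single update.
w.jobs.update(
    job_id=123,  # placeholder job id
    new_settings=jobs.JobSettings(timeout_seconds=7200),
    fields_to_remove=["schedule"],
)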
Changes to other fields are applied to future runs only.""" - + def as_dict(self) -> dict: """Serializes the UpdateJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fields_to_remove: - body["fields_to_remove"] = [v for v in self.fields_to_remove] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings.as_dict() + if self.fields_to_remove: body['fields_to_remove'] = [v for v in self.fields_to_remove] + if self.job_id is not None: body['job_id'] = self.job_id + if self.new_settings: body['new_settings'] = self.new_settings.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.fields_to_remove: - body["fields_to_remove"] = self.fields_to_remove - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings + if self.fields_to_remove: body['fields_to_remove'] = self.fields_to_remove + if self.job_id is not None: body['job_id'] = self.job_id + if self.new_settings: body['new_settings'] = self.new_settings return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateJob: """Deserializes the UpdateJob from a dictionary.""" - return cls( - fields_to_remove=d.get("fields_to_remove", None), - job_id=d.get("job_id", None), - new_settings=_from_dict(d, "new_settings", JobSettings), - ) + return cls(fields_to_remove=d.get('fields_to_remove', None), job_id=d.get('job_id', None), new_settings=_from_dict(d, 'new_settings', JobSettings)) + + @dataclass @@ -8804,86 +7381,82 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() + + @dataclass class ViewItem: content: Optional[str] = None """Content of the view.""" - + name: Optional[str] = None """Name of the view item. In the case of code view, it would be the notebook’s name. In the case of dashboard view, it would be the dashboard’s name.""" - + type: Optional[ViewType] = None """Type of the view item.""" - + def as_dict(self) -> dict: """Serializes the ViewItem into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.name is not None: - body["name"] = self.name - if self.type is not None: - body["type"] = self.type.value + if self.content is not None: body['content'] = self.content + if self.name is not None: body['name'] = self.name + if self.type is not None: body['type'] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the ViewItem into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.name is not None: - body["name"] = self.name - if self.type is not None: - body["type"] = self.type + if self.content is not None: body['content'] = self.content + if self.name is not None: body['name'] = self.name + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ViewItem: """Deserializes the ViewItem from a dictionary.""" - return cls(content=d.get("content", None), name=d.get("name", None), type=_enum(d, "type", ViewType)) + return cls(content=d.get('content', None), name=d.get('name', None), type=_enum(d, 'type', ViewType)) + + class ViewType(Enum): """* `NOTEBOOK`: Notebook view item. 
* `DASHBOARD`: Dashboard view item.""" - - DASHBOARD = "DASHBOARD" - NOTEBOOK = "NOTEBOOK" - + + DASHBOARD = 'DASHBOARD' + NOTEBOOK = 'NOTEBOOK' class ViewsToExport(Enum): """* `CODE`: Code view of the notebook. * `DASHBOARDS`: All dashboard views of the notebook. * `ALL`: All views of the notebook.""" - - ALL = "ALL" - CODE = "CODE" - DASHBOARDS = "DASHBOARDS" - + + ALL = 'ALL' + CODE = 'CODE' + DASHBOARDS = 'DASHBOARDS' @dataclass class Webhook: id: str - + def as_dict(self) -> dict: """Serializes the Webhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the Webhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Webhook: """Deserializes the Webhook from a dictionary.""" - return cls(id=d.get("id", None)) + return cls(id=d.get('id', None)) + + @dataclass @@ -8892,15 +7465,15 @@ class WebhookNotifications: """An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 destinations can be specified for the `on_duration_warning_threshold_exceeded` property.""" - + on_failure: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the `on_failure` property.""" - + on_start: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when the run starts. A maximum of 3 destinations can be specified for the `on_start` property.""" - + on_streaming_backlog_exceeded: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream. Streaming backlog thresholds can be set in the `health` field using the @@ -8908,221 +7481,200 @@ class WebhookNotifications: `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes. A maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.""" - + on_success: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when the run completes successfully. 
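For illustration, a sketch wiring notification destinations into `WebhookNotifications`; the destination ID is a placeholder for one created in the workspace.

from databricks.sdk.service import jobs

# Sketch: each property accepts at most 3 destinations.
hooks = jobs.WebhookNotifications(
    on_failure=[jobs.Webhook(id="<notification-destination-id>")],
    on_success=[jobs.Webhook(id="<notification-destination-id>")],
)
print(hooks.as_dict())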
A maximum of 3 destinations can be specified for the `on_success` property.""" - + def as_dict(self) -> dict: """Serializes the WebhookNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.on_duration_warning_threshold_exceeded: - body["on_duration_warning_threshold_exceeded"] = [ - v.as_dict() for v in self.on_duration_warning_threshold_exceeded - ] - if self.on_failure: - body["on_failure"] = [v.as_dict() for v in self.on_failure] - if self.on_start: - body["on_start"] = [v.as_dict() for v in self.on_start] - if self.on_streaming_backlog_exceeded: - body["on_streaming_backlog_exceeded"] = [v.as_dict() for v in self.on_streaming_backlog_exceeded] - if self.on_success: - body["on_success"] = [v.as_dict() for v in self.on_success] + if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = [v.as_dict() for v in self.on_duration_warning_threshold_exceeded] + if self.on_failure: body['on_failure'] = [v.as_dict() for v in self.on_failure] + if self.on_start: body['on_start'] = [v.as_dict() for v in self.on_start] + if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = [v.as_dict() for v in self.on_streaming_backlog_exceeded] + if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success] return body def as_shallow_dict(self) -> dict: """Serializes the WebhookNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.on_duration_warning_threshold_exceeded: - body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded - if self.on_failure: - body["on_failure"] = self.on_failure - if self.on_start: - body["on_start"] = self.on_start - if self.on_streaming_backlog_exceeded: - body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded - if self.on_success: - body["on_success"] = self.on_success + if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded + if self.on_failure: body['on_failure'] = self.on_failure + if self.on_start: body['on_start'] = self.on_start + if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded + if self.on_success: body['on_success'] = self.on_success return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WebhookNotifications: """Deserializes the WebhookNotifications from a dictionary.""" - return cls( - on_duration_warning_threshold_exceeded=_repeated_dict(d, "on_duration_warning_threshold_exceeded", Webhook), - on_failure=_repeated_dict(d, "on_failure", Webhook), - on_start=_repeated_dict(d, "on_start", Webhook), - on_streaming_backlog_exceeded=_repeated_dict(d, "on_streaming_backlog_exceeded", Webhook), - on_success=_repeated_dict(d, "on_success", Webhook), - ) + return cls(on_duration_warning_threshold_exceeded=_repeated_dict(d, 'on_duration_warning_threshold_exceeded', Webhook), on_failure=_repeated_dict(d, 'on_failure', Webhook), on_start=_repeated_dict(d, 'on_start', Webhook), on_streaming_backlog_exceeded=_repeated_dict(d, 'on_streaming_backlog_exceeded', Webhook), on_success=_repeated_dict(d, 'on_success', Webhook)) + + @dataclass class WidgetErrorDetail: message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the WidgetErrorDetail into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message + if self.message is not 
None: body['message'] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the WidgetErrorDetail into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message + if self.message is not None: body['message'] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WidgetErrorDetail: """Deserializes the WidgetErrorDetail from a dictionary.""" - return cls(message=d.get("message", None)) + return cls(message=d.get('message', None)) + + + + class JobsAPI: """The Jobs API allows you to create, edit, and delete jobs. - + You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java applications. - + You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs. - + [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets""" - + def __init__(self, api_client): self._api = api_client - - def wait_get_run_job_terminated_or_skipped( - self, run_id: int, timeout=timedelta(minutes=20), callback: Optional[Callable[[Run], None]] = None - ) -> Run: - deadline = time.time() + timeout.total_seconds() - target_states = ( - RunLifeCycleState.TERMINATED, - RunLifeCycleState.SKIPPED, - ) - failure_states = (RunLifeCycleState.INTERNAL_ERROR,) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get_run(run_id=run_id) - status = poll.state.life_cycle_state - status_message = f"current status: {status}" - if poll.state: - status_message = poll.state.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach TERMINATED or SKIPPED, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"run_id={run_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def cancel_all_runs(self, *, all_queued_runs: Optional[bool] = None, job_id: Optional[int] = None): + + + + + + def wait_get_run_job_terminated_or_skipped(self, run_id: int, + timeout=timedelta(minutes=20), callback: Optional[Callable[[Run], None]] = None) -> Run: + deadline = time.time() + timeout.total_seconds() + target_states = (RunLifeCycleState.TERMINATED, RunLifeCycleState.SKIPPED, ) + failure_states = (RunLifeCycleState.INTERNAL_ERROR, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get_run(run_id=run_id) + status = poll.state.life_cycle_state + status_message = f'current status: {status}' + if poll.state: + status_message = poll.state.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach TERMINATED or SKIPPED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"run_id={run_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + + def cancel_all_runs(self + + , * + , all_queued_runs: Optional[bool] = None, job_id: Optional[int] = None): """Cancel all runs of a job. - + Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. - + :param all_queued_runs: bool (optional) Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in the workspace are canceled. :param job_id: int (optional) The canonical identifier of the job to cancel all runs of. - - + + """ body = {} - if all_queued_runs is not None: - body["all_queued_runs"] = all_queued_runs - if job_id is not None: - body["job_id"] = job_id - headers = { - "Content-Type": "application/json", - } + if all_queued_runs is not None: body['all_queued_runs'] = all_queued_runs + if job_id is not None: body['job_id'] = job_id + headers = {'Content-Type': 'application/json',} + + self._api.do('POST','/api/2.2/jobs/runs/cancel-all', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.2/jobs/runs/cancel-all", body=body, headers=headers) + + + - def cancel_run(self, run_id: int) -> Wait[Run]: + def cancel_run(self + , run_id: int + ) -> Wait[Run]: """Cancel a run. - + Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when this request completes. - + :param run_id: int This field is required. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. 
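A usage sketch for the waiter returned here (the run id is a placeholder):

from datetime import timedelta
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Sketch: request cancellation, then block until the run reaches
# TERMINATED or SKIPPED (or the deadline passes).
run = w.jobs.cancel_run(run_id=42).result(timeout=timedelta(minutes=5))
# Equivalent shorthand: run = w.jobs.cancel_run_and_wait(run_id=42)
print(run.state.life_cycle_state)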
""" body = {} - if run_id is not None: - body["run_id"] = run_id - headers = { - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.2/jobs/runs/cancel", body=body, headers=headers) - return Wait( - self.wait_get_run_job_terminated_or_skipped, - response=CancelRunResponse.from_dict(op_response), - run_id=run_id, - ) + if run_id is not None: body['run_id'] = run_id + headers = {'Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.2/jobs/runs/cancel', body=body + + , headers=headers + ) + return Wait(self.wait_get_run_job_terminated_or_skipped + , response = CancelRunResponse.from_dict(op_response) + , run_id=run_id) - def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run: + + def cancel_run_and_wait(self + , run_id: int + , + timeout=timedelta(minutes=20)) -> Run: return self.cancel_run(run_id=run_id).result(timeout=timeout) + + + - def create( - self, - *, - access_control_list: Optional[List[JobAccessControlRequest]] = None, - budget_policy_id: Optional[str] = None, - continuous: Optional[Continuous] = None, - deployment: Optional[JobDeployment] = None, - description: Optional[str] = None, - edit_mode: Optional[JobEditMode] = None, - email_notifications: Optional[JobEmailNotifications] = None, - environments: Optional[List[JobEnvironment]] = None, - format: Optional[Format] = None, - git_source: Optional[GitSource] = None, - health: Optional[JobsHealthRules] = None, - job_clusters: Optional[List[JobCluster]] = None, - max_concurrent_runs: Optional[int] = None, - name: Optional[str] = None, - notification_settings: Optional[JobNotificationSettings] = None, - parameters: Optional[List[JobParameterDefinition]] = None, - performance_target: Optional[PerformanceTarget] = None, - queue: Optional[QueueSettings] = None, - run_as: Optional[JobRunAs] = None, - schedule: Optional[CronSchedule] = None, - tags: Optional[Dict[str, str]] = None, - tasks: Optional[List[Task]] = None, - timeout_seconds: Optional[int] = None, - trigger: Optional[TriggerSettings] = None, - webhook_notifications: Optional[WebhookNotifications] = None, - ) -> CreateResponse: + def create(self + + , * + , access_control_list: Optional[List[JobAccessControlRequest]] = None, budget_policy_id: Optional[str] = None, continuous: Optional[Continuous] = None, deployment: Optional[JobDeployment] = None, description: Optional[str] = None, edit_mode: Optional[JobEditMode] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, format: Optional[Format] = None, git_source: Optional[GitSource] = None, health: Optional[JobsHealthRules] = None, job_clusters: Optional[List[JobCluster]] = None, max_concurrent_runs: Optional[int] = None, name: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, parameters: Optional[List[JobParameterDefinition]] = None, performance_target: Optional[PerformanceTarget] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, schedule: Optional[CronSchedule] = None, tags: Optional[Dict[str,str]] = None, tasks: Optional[List[Task]] = None, timeout_seconds: Optional[int] = None, trigger: Optional[TriggerSettings] = None, webhook_notifications: Optional[WebhookNotifications] = None) -> CreateResponse: """Create a new job. - + Create a new job. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. 
:param budget_policy_id: str (optional) @@ -9138,7 +7690,7 @@ def create( An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. - + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified. :param email_notifications: :class:`JobEmailNotifications` (optional) @@ -9155,10 +7707,10 @@ def create( :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -9185,7 +7737,7 @@ def create( :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -9194,7 +7746,7 @@ def create( :param run_as: :class:`JobRunAs` (optional) Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. - + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -9217,219 +7769,234 @@ def create( `runNow`. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. 
- + :returns: :class:`CreateResponse` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - if budget_policy_id is not None: - body["budget_policy_id"] = budget_policy_id - if continuous is not None: - body["continuous"] = continuous.as_dict() - if deployment is not None: - body["deployment"] = deployment.as_dict() - if description is not None: - body["description"] = description - if edit_mode is not None: - body["edit_mode"] = edit_mode.value - if email_notifications is not None: - body["email_notifications"] = email_notifications.as_dict() - if environments is not None: - body["environments"] = [v.as_dict() for v in environments] - if format is not None: - body["format"] = format.value - if git_source is not None: - body["git_source"] = git_source.as_dict() - if health is not None: - body["health"] = health.as_dict() - if job_clusters is not None: - body["job_clusters"] = [v.as_dict() for v in job_clusters] - if max_concurrent_runs is not None: - body["max_concurrent_runs"] = max_concurrent_runs - if name is not None: - body["name"] = name - if notification_settings is not None: - body["notification_settings"] = notification_settings.as_dict() - if parameters is not None: - body["parameters"] = [v.as_dict() for v in parameters] - if performance_target is not None: - body["performance_target"] = performance_target.value - if queue is not None: - body["queue"] = queue.as_dict() - if run_as is not None: - body["run_as"] = run_as.as_dict() - if schedule is not None: - body["schedule"] = schedule.as_dict() - if tags is not None: - body["tags"] = tags - if tasks is not None: - body["tasks"] = [v.as_dict() for v in tasks] - if timeout_seconds is not None: - body["timeout_seconds"] = timeout_seconds - if trigger is not None: - body["trigger"] = trigger.as_dict() - if webhook_notifications is not None: - body["webhook_notifications"] = webhook_notifications.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.2/jobs/create", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id + if continuous is not None: body['continuous'] = continuous.as_dict() + if deployment is not None: body['deployment'] = deployment.as_dict() + if description is not None: body['description'] = description + if edit_mode is not None: body['edit_mode'] = edit_mode.value + if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict() + if environments is not None: body['environments'] = [v.as_dict() for v in environments] + if format is not None: body['format'] = format.value + if git_source is not None: body['git_source'] = git_source.as_dict() + if health is not None: body['health'] = health.as_dict() + if job_clusters is not None: body['job_clusters'] = [v.as_dict() for v in job_clusters] + if max_concurrent_runs is not None: body['max_concurrent_runs'] = max_concurrent_runs + if name is not None: body['name'] = name + if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict() + if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters] + if performance_target is not None: body['performance_target'] = performance_target.value + if queue is not None: body['queue'] = queue.as_dict() + if run_as is not None: 
body['run_as'] = run_as.as_dict() + if schedule is not None: body['schedule'] = schedule.as_dict() + if tags is not None: body['tags'] = tags + if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks] + if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds + if trigger is not None: body['trigger'] = trigger.as_dict() + if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.2/jobs/create', body=body + + , headers=headers + ) return CreateResponse.from_dict(res) - def delete(self, job_id: int): - """Delete a job. + + + + def delete(self + , job_id: int + ): + """Delete a job. + Deletes a job. - + :param job_id: int The canonical identifier of the job to delete. This field is required. - - + + """ body = {} - if job_id is not None: - body["job_id"] = job_id - headers = { - "Content-Type": "application/json", - } + if job_id is not None: body['job_id'] = job_id + headers = {'Content-Type': 'application/json',} + + self._api.do('POST','/api/2.2/jobs/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.2/jobs/delete", body=body, headers=headers) + + + - def delete_run(self, run_id: int): + def delete_run(self + , run_id: int + ): """Delete a job run. - + Deletes a non-active run. Returns an error if the run is active. - + :param run_id: int ID of the run to delete. - - + + """ body = {} - if run_id is not None: - body["run_id"] = run_id - headers = { - "Content-Type": "application/json", - } + if run_id is not None: body['run_id'] = run_id + headers = {'Content-Type': 'application/json',} + + self._api.do('POST','/api/2.2/jobs/runs/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.2/jobs/runs/delete", body=body, headers=headers) + + + - def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput: + def export_run(self + , run_id: int + , * + , views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput: """Export and retrieve a job run. - + Export and retrieve the job run task. - + :param run_id: int The canonical identifier for the run. This field is required. :param views_to_export: :class:`ViewsToExport` (optional) Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE. - + :returns: :class:`ExportRunOutput` """ - + query = {} - if run_id is not None: - query["run_id"] = run_id - if views_to_export is not None: - query["views_to_export"] = views_to_export.value - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.2/jobs/runs/export", query=query, headers=headers) + if run_id is not None: query['run_id'] = run_id + if views_to_export is not None: query['views_to_export'] = views_to_export.value + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.2/jobs/runs/export', query=query + + , headers=headers + ) return ExportRunOutput.from_dict(res) - def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job: - """Get a single job. + + + + def get(self + , job_id: int + , * + , page_token: Optional[str] = None) -> Job: + """Get a single job. + Retrieves the details for a single job. - + Large arrays in the results will be paginated when they exceed 100 elements. 
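
For illustration only, a minimal sketch of the create/delete round trip behind the endpoints shown above, assuming an authenticated `WorkspaceClient` named `w`; the job name, notebook path, and cluster ID are placeholders, not values from this patch:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # POST /api/2.2/jobs/create returns a CreateResponse carrying the new job_id.
    created = w.jobs.create(
        name="example-job",  # placeholder
        tasks=[
            jobs.Task(
                task_key="main",
                notebook_task=jobs.NotebookTask(notebook_path="/Users/someone@example.com/demo"),
                existing_cluster_id="0000-000000-abcdefgh",  # placeholder cluster ID
            )
        ],
    )

    # POST /api/2.2/jobs/delete takes the canonical job ID.
    w.jobs.delete(job_id=created.job_id)
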
A request for a single job will return all properties for that job, and the first 100 elements of array properties (`tasks`, `job_clusters`, `environments` and `parameters`). Use the `next_page_token` field to check for more results and pass its value as the `page_token` in subsequent requests. If any array properties have more than 100 elements, additional results will be returned on subsequent requests. Arrays without additional results will be empty on later pages. - + :param job_id: int The canonical identifier of the job to retrieve information about. This field is required. :param page_token: str (optional) Use `next_page_token` returned from the previous GetJob response to request the next page of the job's array properties. - + :returns: :class:`Job` """ - + query = {} - if job_id is not None: - query["job_id"] = job_id - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.2/jobs/get", query=query, headers=headers) + if job_id is not None: query['job_id'] = job_id + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.2/jobs/get', query=query + + , headers=headers + ) return Job.from_dict(res) - def get_permission_levels(self, job_id: str) -> GetJobPermissionLevelsResponse: - """Get job permission levels. + + + + def get_permission_levels(self + , job_id: str + ) -> GetJobPermissionLevelsResponse: + """Get job permission levels. + Gets the permission levels that a user can have on an object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`GetJobPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/jobs/{job_id}/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/jobs/{job_id}/permissionLevels' + + , headers=headers + ) return GetJobPermissionLevelsResponse.from_dict(res) - def get_permissions(self, job_id: str) -> JobPermissions: - """Get job permissions. + + + + def get_permissions(self + , job_id: str + ) -> JobPermissions: + """Get job permissions. + Gets the permissions of a job. Jobs can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`JobPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/jobs/{job_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/jobs/{job_id}' + + , headers=headers + ) return JobPermissions.from_dict(res) - def get_run( - self, - run_id: int, - *, - include_history: Optional[bool] = None, - include_resolved_values: Optional[bool] = None, - page_token: Optional[str] = None, - ) -> Run: - """Get a single job run. + + + + def get_run(self + , run_id: int + , * + , include_history: Optional[bool] = None, include_resolved_values: Optional[bool] = None, page_token: Optional[str] = None) -> Run: + """Get a single job run. + Retrieves the metadata of a run. - + Large arrays in the results will be paginated when they exceed 100 elements. A request for a single run will return all properties for that run, and the first 100 elements of array properties (`tasks`, `job_clusters`, `job_parameters` and `repair_history`). 
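
As a hedged sketch of the array-property pagination described above (the job ID is a placeholder, and `w` is an authenticated `WorkspaceClient`), the caller follows `next_page_token` until it is empty:

    job = w.jobs.get(job_id=123)  # placeholder job ID
    tasks = list(job.settings.tasks or [])
    # Array properties longer than 100 elements spill onto later pages.
    while job.next_page_token:
        job = w.jobs.get(job_id=123, page_token=job.next_page_token)
        tasks.extend(job.settings.tasks or [])
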
Use the next_page_token field to check for more results and pass its value as the page_token in subsequent requests. If any array properties have more than 100 elements, additional results will be returned on subsequent requests. Arrays without additional results will be empty on later pages. - + :param run_id: int The canonical identifier of the run for which to retrieve the metadata. This field is required. :param include_history: bool (optional) @@ -9439,67 +8006,69 @@ def get_run( :param page_token: str (optional) Use `next_page_token` returned from the previous GetRun response to request the next page of the run's array properties. - + :returns: :class:`Run` """ - + query = {} - if include_history is not None: - query["include_history"] = include_history - if include_resolved_values is not None: - query["include_resolved_values"] = include_resolved_values - if page_token is not None: - query["page_token"] = page_token - if run_id is not None: - query["run_id"] = run_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.2/jobs/runs/get", query=query, headers=headers) + if include_history is not None: query['include_history'] = include_history + if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values + if page_token is not None: query['page_token'] = page_token + if run_id is not None: query['run_id'] = run_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.2/jobs/runs/get', query=query + + , headers=headers + ) return Run.from_dict(res) - def get_run_output(self, run_id: int) -> RunOutput: - """Get the output for a single run. + + + + def get_run_output(self + , run_id: int + ) -> RunOutput: + """Get the output for a single run. + Retrieve the output and metadata of a single task run. When a notebook task returns a value through the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks restricts this API to returning the first 5 MB of the output. To return a larger result, you can store job results in a cloud storage service. - + This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you want to reference them beyond 60 days, you must save old run results before they expire. - + :param run_id: int The canonical identifier for the run. - + :returns: :class:`RunOutput` """ - + query = {} - if run_id is not None: - query["run_id"] = run_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.2/jobs/runs/get-output", query=query, headers=headers) + if run_id is not None: query['run_id'] = run_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.2/jobs/runs/get-output', query=query + + , headers=headers + ) return RunOutput.from_dict(res) - def list( - self, - *, - expand_tasks: Optional[bool] = None, - limit: Optional[int] = None, - name: Optional[str] = None, - offset: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[BaseJob]: - """List jobs. + + + + def list(self + + , * + , expand_tasks: Optional[bool] = None, limit: Optional[int] = None, name: Optional[str] = None, offset: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[BaseJob]: + """List jobs. + Retrieves a list of jobs. - + :param expand_tasks: bool (optional) Whether to include task and cluster details in the response.
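
To make the run-output contract above concrete, a brief sketch under the same `w` assumption; the run ID is a placeholder, and `notebook_output` is only populated for notebook tasks:

    # In the notebook task itself: dbutils.notebook.exit("42")
    output = w.jobs.get_run_output(run_id=456)  # ID of the task run, not the parent job run
    if output.notebook_output is not None:
        print(output.notebook_output.result)  # the API returns at most the first 5 MB
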
Note that only the first 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters. @@ -9514,52 +8083,45 @@ def list( :param page_token: str (optional) Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or previous page of jobs respectively. - + :returns: Iterator over :class:`BaseJob` """ - + query = {} - if expand_tasks is not None: - query["expand_tasks"] = expand_tasks - if limit is not None: - query["limit"] = limit - if name is not None: - query["name"] = name - if offset is not None: - query["offset"] = offset - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if expand_tasks is not None: query['expand_tasks'] = expand_tasks + if limit is not None: query['limit'] = limit + if name is not None: query['name'] = name + if offset is not None: query['offset'] = offset + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.2/jobs/list", query=query, headers=headers) - if "jobs" in json: - for v in json["jobs"]: - yield BaseJob.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_runs( - self, - *, - active_only: Optional[bool] = None, - completed_only: Optional[bool] = None, - expand_tasks: Optional[bool] = None, - job_id: Optional[int] = None, - limit: Optional[int] = None, - offset: Optional[int] = None, - page_token: Optional[str] = None, - run_type: Optional[RunType] = None, - start_time_from: Optional[int] = None, - start_time_to: Optional[int] = None, - ) -> Iterator[BaseRun]: - """List job runs. + json = self._api.do('GET','/api/2.2/jobs/list', query=query + + , headers=headers + ) + if 'jobs' in json: + for v in json['jobs']: + yield BaseJob.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - List runs in descending order by start time. + + + + def list_runs(self + + , * + , active_only: Optional[bool] = None, completed_only: Optional[bool] = None, expand_tasks: Optional[bool] = None, job_id: Optional[int] = None, limit: Optional[int] = None, offset: Optional[int] = None, page_token: Optional[str] = None, run_type: Optional[RunType] = None, start_time_from: Optional[int] = None, start_time_to: Optional[int] = None) -> Iterator[BaseRun]: + """List job runs. + + List runs in descending order by start time. + :param active_only: bool (optional) If active_only is `true`, only active runs are included in the results; otherwise, lists both active and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or `TERMINATING`. @@ -9589,68 +8151,51 @@ def list_runs( :param start_time_to: int (optional) Show runs that started _at or before_ this value. The value must be a UTC timestamp in milliseconds. Can be combined with _start_time_from_ to filter by a time range. 
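
Note that the generated `list` method wraps this page loop in an iterator, so callers never handle `page_token` directly; a minimal sketch, again assuming an authenticated `w`:

    # Lazily pages through GET /api/2.2/jobs/list under the hood.
    for base_job in w.jobs.list(expand_tasks=False, limit=25):
        print(base_job.job_id, base_job.settings.name)
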
- + :returns: Iterator over :class:`BaseRun` """ - + query = {} - if active_only is not None: - query["active_only"] = active_only - if completed_only is not None: - query["completed_only"] = completed_only - if expand_tasks is not None: - query["expand_tasks"] = expand_tasks - if job_id is not None: - query["job_id"] = job_id - if limit is not None: - query["limit"] = limit - if offset is not None: - query["offset"] = offset - if page_token is not None: - query["page_token"] = page_token - if run_type is not None: - query["run_type"] = run_type.value - if start_time_from is not None: - query["start_time_from"] = start_time_from - if start_time_to is not None: - query["start_time_to"] = start_time_to - headers = { - "Accept": "application/json", - } - + if active_only is not None: query['active_only'] = active_only + if completed_only is not None: query['completed_only'] = completed_only + if expand_tasks is not None: query['expand_tasks'] = expand_tasks + if job_id is not None: query['job_id'] = job_id + if limit is not None: query['limit'] = limit + if offset is not None: query['offset'] = offset + if page_token is not None: query['page_token'] = page_token + if run_type is not None: query['run_type'] = run_type.value + if start_time_from is not None: query['start_time_from'] = start_time_from + if start_time_to is not None: query['start_time_to'] = start_time_to + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.2/jobs/runs/list", query=query, headers=headers) - if "runs" in json: - for v in json["runs"]: - yield BaseRun.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def repair_run( - self, - run_id: int, - *, - dbt_commands: Optional[List[str]] = None, - jar_params: Optional[List[str]] = None, - job_parameters: Optional[Dict[str, str]] = None, - latest_repair_id: Optional[int] = None, - notebook_params: Optional[Dict[str, str]] = None, - performance_target: Optional[PerformanceTarget] = None, - pipeline_params: Optional[PipelineParams] = None, - python_named_params: Optional[Dict[str, str]] = None, - python_params: Optional[List[str]] = None, - rerun_all_failed_tasks: Optional[bool] = None, - rerun_dependent_tasks: Optional[bool] = None, - rerun_tasks: Optional[List[str]] = None, - spark_submit_params: Optional[List[str]] = None, - sql_params: Optional[Dict[str, str]] = None, - ) -> Wait[Run]: - """Repair a job run. + json = self._api.do('GET','/api/2.2/jobs/runs/list', query=query + + , headers=headers + ) + if 'runs' in json: + for v in json['runs']: + yield BaseRun.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def repair_run(self + , run_id: int + , * + , dbt_commands: Optional[List[str]] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, latest_repair_id: Optional[int] = None, notebook_params: Optional[Dict[str,str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, rerun_all_failed_tasks: Optional[bool] = None, rerun_dependent_tasks: Optional[bool] = None, rerun_tasks: Optional[List[str]] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None) -> Wait[Run]: + """Repair a job run. 
+ Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job and task settings, and can be viewed in the history for the original job run. - + :param run_id: int The job run ID of the run to repair. The run must not be in progress. :param dbt_commands: List[str] (optional) @@ -9662,9 +8207,9 @@ def repair_run( task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` @@ -9675,23 +8220,23 @@ def repair_run( A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -9703,15 +8248,15 @@ def repair_run( The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param rerun_all_failed_tasks: bool (optional) If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used. @@ -9726,154 +8271,102 @@ def repair_run( as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). 
Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. """ body = {} - if dbt_commands is not None: - body["dbt_commands"] = [v for v in dbt_commands] - if jar_params is not None: - body["jar_params"] = [v for v in jar_params] - if job_parameters is not None: - body["job_parameters"] = job_parameters - if latest_repair_id is not None: - body["latest_repair_id"] = latest_repair_id - if notebook_params is not None: - body["notebook_params"] = notebook_params - if performance_target is not None: - body["performance_target"] = performance_target.value - if pipeline_params is not None: - body["pipeline_params"] = pipeline_params.as_dict() - if python_named_params is not None: - body["python_named_params"] = python_named_params - if python_params is not None: - body["python_params"] = [v for v in python_params] - if rerun_all_failed_tasks is not None: - body["rerun_all_failed_tasks"] = rerun_all_failed_tasks - if rerun_dependent_tasks is not None: - body["rerun_dependent_tasks"] = rerun_dependent_tasks - if rerun_tasks is not None: - body["rerun_tasks"] = [v for v in rerun_tasks] - if run_id is not None: - body["run_id"] = run_id - if spark_submit_params is not None: - body["spark_submit_params"] = [v for v in spark_submit_params] - if sql_params is not None: - body["sql_params"] = sql_params - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.2/jobs/runs/repair", body=body, headers=headers) - return Wait( - self.wait_get_run_job_terminated_or_skipped, - response=RepairRunResponse.from_dict(op_response), - run_id=run_id, - ) - - def repair_run_and_wait( - self, - run_id: int, - *, - dbt_commands: Optional[List[str]] = None, - jar_params: Optional[List[str]] = None, - job_parameters: Optional[Dict[str, str]] = None, - latest_repair_id: Optional[int] = None, - notebook_params: Optional[Dict[str, str]] = None, - performance_target: Optional[PerformanceTarget] = None, - pipeline_params: Optional[PipelineParams] = None, - python_named_params: Optional[Dict[str, str]] = None, - python_params: Optional[List[str]] = None, - rerun_all_failed_tasks: Optional[bool] = None, - rerun_dependent_tasks: Optional[bool] = None, - rerun_tasks: Optional[List[str]] = None, - spark_submit_params: Optional[List[str]] = None, - sql_params: Optional[Dict[str, str]] = None, - timeout=timedelta(minutes=20), - ) -> Run: - return self.repair_run( - dbt_commands=dbt_commands, - jar_params=jar_params, - job_parameters=job_parameters, - latest_repair_id=latest_repair_id, - notebook_params=notebook_params, - performance_target=performance_target, - pipeline_params=pipeline_params, - python_named_params=python_named_params, - python_params=python_params, - rerun_all_failed_tasks=rerun_all_failed_tasks, - rerun_dependent_tasks=rerun_dependent_tasks, - rerun_tasks=rerun_tasks, - run_id=run_id, - spark_submit_params=spark_submit_params, - sql_params=sql_params, - ).result(timeout=timeout) - - def reset(self, job_id: int, new_settings: JobSettings): + if 
dbt_commands is not None: body['dbt_commands'] = [v for v in dbt_commands] + if jar_params is not None: body['jar_params'] = [v for v in jar_params] + if job_parameters is not None: body['job_parameters'] = job_parameters + if latest_repair_id is not None: body['latest_repair_id'] = latest_repair_id + if notebook_params is not None: body['notebook_params'] = notebook_params + if performance_target is not None: body['performance_target'] = performance_target.value + if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict() + if python_named_params is not None: body['python_named_params'] = python_named_params + if python_params is not None: body['python_params'] = [v for v in python_params] + if rerun_all_failed_tasks is not None: body['rerun_all_failed_tasks'] = rerun_all_failed_tasks + if rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = rerun_dependent_tasks + if rerun_tasks is not None: body['rerun_tasks'] = [v for v in rerun_tasks] + if run_id is not None: body['run_id'] = run_id + if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params] + if sql_params is not None: body['sql_params'] = sql_params + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.2/jobs/runs/repair', body=body + + , headers=headers + ) + return Wait(self.wait_get_run_job_terminated_or_skipped + , response = RepairRunResponse.from_dict(op_response) + , run_id=run_id) + + + def repair_run_and_wait(self + , run_id: int + , * + , dbt_commands: Optional[List[str]] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, latest_repair_id: Optional[int] = None, notebook_params: Optional[Dict[str,str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, rerun_all_failed_tasks: Optional[bool] = None, rerun_dependent_tasks: Optional[bool] = None, rerun_tasks: Optional[List[str]] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None, + timeout=timedelta(minutes=20)) -> Run: + return self.repair_run(dbt_commands=dbt_commands, jar_params=jar_params, job_parameters=job_parameters, latest_repair_id=latest_repair_id, notebook_params=notebook_params, performance_target=performance_target, pipeline_params=pipeline_params, python_named_params=python_named_params, python_params=python_params, rerun_all_failed_tasks=rerun_all_failed_tasks, rerun_dependent_tasks=rerun_dependent_tasks, rerun_tasks=rerun_tasks, run_id=run_id, spark_submit_params=spark_submit_params, sql_params=sql_params).result(timeout=timeout) + + + + + def reset(self + , job_id: int, new_settings: JobSettings + ): """Update all job settings (reset). - + Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update job settings partially. - + :param job_id: int The canonical identifier of the job to reset. This field is required. :param new_settings: :class:`JobSettings` The new settings of the job. These settings completely replace the old settings. - + Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. 
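
A sketch of the repair flow generated above: rerun only the failed tasks of a terminated run and block on the returned waiter (the run ID is a placeholder; `repair_run_and_wait` simply chains `.result()` as shown):

    run = w.jobs.repair_run_and_wait(
        run_id=789,  # placeholder: must be a terminated, not in-progress, run
        rerun_all_failed_tasks=True,
    )
    print(run.run_page_url)
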
- - + + """ body = {} - if job_id is not None: - body["job_id"] = job_id - if new_settings is not None: - body["new_settings"] = new_settings.as_dict() - headers = { - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.2/jobs/reset", body=body, headers=headers) - - def run_now( - self, - job_id: int, - *, - dbt_commands: Optional[List[str]] = None, - idempotency_token: Optional[str] = None, - jar_params: Optional[List[str]] = None, - job_parameters: Optional[Dict[str, str]] = None, - notebook_params: Optional[Dict[str, str]] = None, - only: Optional[List[str]] = None, - performance_target: Optional[PerformanceTarget] = None, - pipeline_params: Optional[PipelineParams] = None, - python_named_params: Optional[Dict[str, str]] = None, - python_params: Optional[List[str]] = None, - queue: Optional[QueueSettings] = None, - spark_submit_params: Optional[List[str]] = None, - sql_params: Optional[Dict[str, str]] = None, - ) -> Wait[Run]: - """Trigger a new job run. + if job_id is not None: body['job_id'] = job_id + if new_settings is not None: body['new_settings'] = new_settings.as_dict() + headers = {'Content-Type': 'application/json',} + + self._api.do('POST','/api/2.2/jobs/reset', body=body + + , headers=headers + ) + - Run a job and return the `run_id` of the triggered run. + + + + def run_now(self + , job_id: int + , * + , dbt_commands: Optional[List[str]] = None, idempotency_token: Optional[str] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, notebook_params: Optional[Dict[str,str]] = None, only: Optional[List[str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, queue: Optional[QueueSettings] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None) -> Wait[Run]: + """Trigger a new job run. + + Run a job and return the `run_id` of the triggered run. + :param job_id: int The ID of the job to be executed :param dbt_commands: List[str] (optional) @@ -9883,14 +8376,14 @@ def run_now( An optional token to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. - + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param jar_params: List[str] (optional) A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. @@ -9898,9 +8391,9 @@ def run_now( task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. 
for example `"param": "overriding_val"` @@ -9908,16 +8401,16 @@ def run_now( A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param only: List[str] (optional) @@ -9927,7 +8420,7 @@ def run_now( The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -9939,15 +8432,15 @@ def run_now( The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param queue: :class:`QueueSettings` (optional) The queue settings of the run. @@ -9957,150 +8450,99 @@ def run_now( as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. 
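
The waiter pattern documented above, sketched with placeholder values; `run_now` returns immediately with a `Wait[Run]`, and `.result()` polls until the run terminates or is skipped:

    from datetime import timedelta

    waiter = w.jobs.run_now(job_id=123, job_parameters={"env": "staging"})  # placeholders
    print(waiter.response.run_id)  # the RunNowResponse is available at once
    run = waiter.result(timeout=timedelta(minutes=30))
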
""" body = {} - if dbt_commands is not None: - body["dbt_commands"] = [v for v in dbt_commands] - if idempotency_token is not None: - body["idempotency_token"] = idempotency_token - if jar_params is not None: - body["jar_params"] = [v for v in jar_params] - if job_id is not None: - body["job_id"] = job_id - if job_parameters is not None: - body["job_parameters"] = job_parameters - if notebook_params is not None: - body["notebook_params"] = notebook_params - if only is not None: - body["only"] = [v for v in only] - if performance_target is not None: - body["performance_target"] = performance_target.value - if pipeline_params is not None: - body["pipeline_params"] = pipeline_params.as_dict() - if python_named_params is not None: - body["python_named_params"] = python_named_params - if python_params is not None: - body["python_params"] = [v for v in python_params] - if queue is not None: - body["queue"] = queue.as_dict() - if spark_submit_params is not None: - body["spark_submit_params"] = [v for v in spark_submit_params] - if sql_params is not None: - body["sql_params"] = sql_params - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.2/jobs/run-now", body=body, headers=headers) - return Wait( - self.wait_get_run_job_terminated_or_skipped, - response=RunNowResponse.from_dict(op_response), - run_id=op_response["run_id"], - ) - - def run_now_and_wait( - self, - job_id: int, - *, - dbt_commands: Optional[List[str]] = None, - idempotency_token: Optional[str] = None, - jar_params: Optional[List[str]] = None, - job_parameters: Optional[Dict[str, str]] = None, - notebook_params: Optional[Dict[str, str]] = None, - only: Optional[List[str]] = None, - performance_target: Optional[PerformanceTarget] = None, - pipeline_params: Optional[PipelineParams] = None, - python_named_params: Optional[Dict[str, str]] = None, - python_params: Optional[List[str]] = None, - queue: Optional[QueueSettings] = None, - spark_submit_params: Optional[List[str]] = None, - sql_params: Optional[Dict[str, str]] = None, - timeout=timedelta(minutes=20), - ) -> Run: - return self.run_now( - dbt_commands=dbt_commands, - idempotency_token=idempotency_token, - jar_params=jar_params, - job_id=job_id, - job_parameters=job_parameters, - notebook_params=notebook_params, - only=only, - performance_target=performance_target, - pipeline_params=pipeline_params, - python_named_params=python_named_params, - python_params=python_params, - queue=queue, - spark_submit_params=spark_submit_params, - sql_params=sql_params, - ).result(timeout=timeout) - - def set_permissions( - self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None - ) -> JobPermissions: + if dbt_commands is not None: body['dbt_commands'] = [v for v in dbt_commands] + if idempotency_token is not None: body['idempotency_token'] = idempotency_token + if jar_params is not None: body['jar_params'] = [v for v in jar_params] + if job_id is not None: body['job_id'] = job_id + if job_parameters is not None: body['job_parameters'] = job_parameters + if notebook_params is not None: body['notebook_params'] = notebook_params + if only is not None: body['only'] = [v for v in only] + if performance_target is not None: body['performance_target'] = performance_target.value + if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict() + if python_named_params is not None: body['python_named_params'] = python_named_params + if python_params is not None: 
body['python_params'] = [v for v in python_params] + if queue is not None: body['queue'] = queue.as_dict() + if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params] + if sql_params is not None: body['sql_params'] = sql_params + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.2/jobs/run-now', body=body + + , headers=headers + ) + return Wait(self.wait_get_run_job_terminated_or_skipped + , response = RunNowResponse.from_dict(op_response) + , run_id=op_response['run_id']) + + + def run_now_and_wait(self + , job_id: int + , * + , dbt_commands: Optional[List[str]] = None, idempotency_token: Optional[str] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, notebook_params: Optional[Dict[str,str]] = None, only: Optional[List[str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, queue: Optional[QueueSettings] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None, + timeout=timedelta(minutes=20)) -> Run: + return self.run_now(dbt_commands=dbt_commands, idempotency_token=idempotency_token, jar_params=jar_params, job_id=job_id, job_parameters=job_parameters, notebook_params=notebook_params, only=only, performance_target=performance_target, pipeline_params=pipeline_params, python_named_params=python_named_params, python_params=python_params, queue=queue, spark_submit_params=spark_submit_params, sql_params=sql_params).result(timeout=timeout) + + + + + def set_permissions(self + , job_id: str + , * + , access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions: """Set job permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. 
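
A hedged sketch of the permission replacement described above; the group name is a placeholder, and note that the permissions endpoints take `job_id` as a string:

    from databricks.sdk.service import jobs

    w.jobs.set_permissions(
        job_id="123",  # placeholder, string-typed for /api/2.0/permissions
        access_control_list=[
            jobs.JobAccessControlRequest(
                group_name="data-engineers",  # placeholder principal
                permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN,
            )
        ],
    )
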
:param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/jobs/{job_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/jobs/{job_id}', body=body + + , headers=headers + ) return JobPermissions.from_dict(res) - def submit( - self, - *, - access_control_list: Optional[List[JobAccessControlRequest]] = None, - budget_policy_id: Optional[str] = None, - email_notifications: Optional[JobEmailNotifications] = None, - environments: Optional[List[JobEnvironment]] = None, - git_source: Optional[GitSource] = None, - health: Optional[JobsHealthRules] = None, - idempotency_token: Optional[str] = None, - notification_settings: Optional[JobNotificationSettings] = None, - queue: Optional[QueueSettings] = None, - run_as: Optional[JobRunAs] = None, - run_name: Optional[str] = None, - tasks: Optional[List[SubmitTask]] = None, - timeout_seconds: Optional[int] = None, - webhook_notifications: Optional[WebhookNotifications] = None, - ) -> Wait[Run]: - """Create and trigger a one-time run. + + + + def submit(self + + , * + , access_control_list: Optional[List[JobAccessControlRequest]] = None, budget_policy_id: Optional[str] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, health: Optional[JobsHealthRules] = None, idempotency_token: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, run_name: Optional[str] = None, tasks: Optional[List[SubmitTask]] = None, timeout_seconds: Optional[int] = None, webhook_notifications: Optional[WebhookNotifications] = None) -> Wait[Run]: + """Create and trigger a one-time run. + Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param budget_policy_id: str (optional) @@ -10113,10 +8555,10 @@ def submit( :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -10125,14 +8567,14 @@ def submit( An optional token that can be used to guarantee the idempotency of job run requests. 
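
A minimal one-time-run sketch under the same assumptions (all names and IDs are placeholders); runs submitted this way do not appear in the jobs UI, so the returned run object is the only handle on them:

    run = w.jobs.submit_and_wait(
        run_name="ad-hoc-check",  # placeholder
        tasks=[
            jobs.SubmitTask(
                task_key="main",
                notebook_task=jobs.NotebookTask(notebook_path="/Users/someone@example.com/demo"),
                existing_cluster_id="0000-000000-abcdefgh",  # placeholder
            )
        ],
    )
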
If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. - + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param notification_settings: :class:`JobNotificationSettings` (optional) Optional notification settings that are used when sending notifications to each of the @@ -10149,96 +8591,56 @@ def submit( An optional timeout applied to each run of this job. A value of `0` means no timeout. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when the run begins or completes. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - if budget_policy_id is not None: - body["budget_policy_id"] = budget_policy_id - if email_notifications is not None: - body["email_notifications"] = email_notifications.as_dict() - if environments is not None: - body["environments"] = [v.as_dict() for v in environments] - if git_source is not None: - body["git_source"] = git_source.as_dict() - if health is not None: - body["health"] = health.as_dict() - if idempotency_token is not None: - body["idempotency_token"] = idempotency_token - if notification_settings is not None: - body["notification_settings"] = notification_settings.as_dict() - if queue is not None: - body["queue"] = queue.as_dict() - if run_as is not None: - body["run_as"] = run_as.as_dict() - if run_name is not None: - body["run_name"] = run_name - if tasks is not None: - body["tasks"] = [v.as_dict() for v in tasks] - if timeout_seconds is not None: - body["timeout_seconds"] = timeout_seconds - if webhook_notifications is not None: - body["webhook_notifications"] = webhook_notifications.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.2/jobs/runs/submit", body=body, headers=headers) - return Wait( - self.wait_get_run_job_terminated_or_skipped, - response=SubmitRunResponse.from_dict(op_response), - run_id=op_response["run_id"], - ) - - def submit_and_wait( - self, - *, - access_control_list: Optional[List[JobAccessControlRequest]] = None, - budget_policy_id: Optional[str] = None, - email_notifications: Optional[JobEmailNotifications] = None, - environments: Optional[List[JobEnvironment]] = None, - git_source: Optional[GitSource] = None, - health: Optional[JobsHealthRules] = None, - idempotency_token: Optional[str] = None, - notification_settings: Optional[JobNotificationSettings] = None, - queue: Optional[QueueSettings] = None, - run_as: Optional[JobRunAs] = None, - run_name: Optional[str] = None, - tasks: Optional[List[SubmitTask]] = None, - timeout_seconds: Optional[int] = None, - webhook_notifications: Optional[WebhookNotifications] = None, - timeout=timedelta(minutes=20), - ) -> Run: - return self.submit( - access_control_list=access_control_list, - budget_policy_id=budget_policy_id, - 
email_notifications=email_notifications, - environments=environments, - git_source=git_source, - health=health, - idempotency_token=idempotency_token, - notification_settings=notification_settings, - queue=queue, - run_as=run_as, - run_name=run_name, - tasks=tasks, - timeout_seconds=timeout_seconds, - webhook_notifications=webhook_notifications, - ).result(timeout=timeout) - - def update( - self, job_id: int, *, fields_to_remove: Optional[List[str]] = None, new_settings: Optional[JobSettings] = None - ): + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id + if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict() + if environments is not None: body['environments'] = [v.as_dict() for v in environments] + if git_source is not None: body['git_source'] = git_source.as_dict() + if health is not None: body['health'] = health.as_dict() + if idempotency_token is not None: body['idempotency_token'] = idempotency_token + if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict() + if queue is not None: body['queue'] = queue.as_dict() + if run_as is not None: body['run_as'] = run_as.as_dict() + if run_name is not None: body['run_name'] = run_name + if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks] + if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds + if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.2/jobs/runs/submit', body=body + + , headers=headers + ) + return Wait(self.wait_get_run_job_terminated_or_skipped + , response = SubmitRunResponse.from_dict(op_response) + , run_id=op_response['run_id']) + + + def submit_and_wait(self + + , * + , access_control_list: Optional[List[JobAccessControlRequest]] = None, budget_policy_id: Optional[str] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, health: Optional[JobsHealthRules] = None, idempotency_token: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, run_name: Optional[str] = None, tasks: Optional[List[SubmitTask]] = None, timeout_seconds: Optional[int] = None, webhook_notifications: Optional[WebhookNotifications] = None, + timeout=timedelta(minutes=20)) -> Run: + return self.submit(access_control_list=access_control_list, budget_policy_id=budget_policy_id, email_notifications=email_notifications, environments=environments, git_source=git_source, health=health, idempotency_token=idempotency_token, notification_settings=notification_settings, queue=queue, run_as=run_as, run_name=run_name, tasks=tasks, timeout_seconds=timeout_seconds, webhook_notifications=webhook_notifications).result(timeout=timeout) + + + + + def update(self + , job_id: int + , * + , fields_to_remove: Optional[List[str]] = None, new_settings: Optional[JobSettings] = None): """Update job settings partially. - + Add, update, or remove specific settings of an existing job. Use the [_Reset_ endpoint](:method:jobs/reset) to overwrite all job settings. - + :param job_id: int The canonical identifier of the job to update. 
This field is required. :param fields_to_remove: List[str] (optional) @@ -10246,132 +8648,154 @@ def update( tasks and job clusters (`tasks/task_1`). This field is optional. :param new_settings: :class:`JobSettings` (optional) The new settings for the job. - + Top-level fields specified in `new_settings` are completely replaced, except for arrays which are merged. That is, new and existing entries are completely replaced based on the respective key fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept. - + Partially updating nested fields is not supported. - + Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. - - + + """ body = {} - if fields_to_remove is not None: - body["fields_to_remove"] = [v for v in fields_to_remove] - if job_id is not None: - body["job_id"] = job_id - if new_settings is not None: - body["new_settings"] = new_settings.as_dict() - headers = { - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.2/jobs/update", body=body, headers=headers) - - def update_permissions( - self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None - ) -> JobPermissions: - """Update job permissions. + if fields_to_remove is not None: body['fields_to_remove'] = [v for v in fields_to_remove] + if job_id is not None: body['job_id'] = job_id + if new_settings is not None: body['new_settings'] = new_settings.as_dict() + headers = {'Content-Type': 'application/json',} + + self._api.do('POST','/api/2.2/jobs/update', body=body + + , headers=headers + ) + - Updates the permissions on a job. Jobs can inherit permissions from their root object. + + + + def update_permissions(self + , job_id: str + , * + , access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions: + """Update job permissions. + + Updates the permissions on a job. Jobs can inherit permissions from their root object. + :param job_id: str The job for which to get or manage permissions. :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/permissions/jobs/{job_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/jobs/{job_id}', body=body + + , headers=headers + ) return JobPermissions.from_dict(res) - + + class PolicyComplianceForJobsAPI: """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace. This API currently only supports compliance controls for cluster policies. - + A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last edited. The job is considered out of compliance if any of its clusters no longer comply with their updated policies. - + The get and list compliance APIs allow you to view the policy compliance status of a job. 
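
Returning to the partial-update endpoint earlier in this hunk, a brief sketch (IDs and field names are placeholders); unspecified top-level fields are left untouched, and `fields_to_remove` drops settings by path:

    w.jobs.update(
        job_id=123,  # placeholder
        new_settings=jobs.JobSettings(timeout_seconds=3600),
        fields_to_remove=["schedule"],  # e.g. drop the cron schedule
    )
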
The enforce compliance API allows you to update a job so that it becomes compliant with all of its policies.""" - + def __init__(self, api_client): self._api = api_client + - def enforce_compliance( - self, job_id: int, *, validate_only: Optional[bool] = None - ) -> EnforcePolicyComplianceResponse: - """Enforce job policy compliance. + + + + + + + def enforce_compliance(self + , job_id: int + , * + , validate_only: Optional[bool] = None) -> EnforcePolicyComplianceResponse: + """Enforce job policy compliance. + Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) are compliant with the current versions of their respective cluster policies. All-purpose clusters used in the job will not be updated. - + :param job_id: int The ID of the job you want to enforce policy compliance on. :param validate_only: bool (optional) If set, previews changes made to the job to comply with its policy, but does not update the job. - + :returns: :class:`EnforcePolicyComplianceResponse` """ body = {} - if job_id is not None: - body["job_id"] = job_id - if validate_only is not None: - body["validate_only"] = validate_only - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/policies/jobs/enforce-compliance", body=body, headers=headers) + if job_id is not None: body['job_id'] = job_id + if validate_only is not None: body['validate_only'] = validate_only + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/policies/jobs/enforce-compliance', body=body + + , headers=headers + ) return EnforcePolicyComplianceResponse.from_dict(res) - def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse: - """Get job policy compliance. + + + + def get_compliance(self + , job_id: int + ) -> GetPolicyComplianceResponse: + """Get job policy compliance. + Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and some of its job clusters no longer comply with their updated policies. - + :param job_id: int The ID of the job whose compliance status you are requesting. - + :returns: :class:`GetPolicyComplianceResponse` """ - + query = {} - if job_id is not None: - query["job_id"] = job_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/policies/jobs/get-compliance", query=query, headers=headers) + if job_id is not None: query['job_id'] = job_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/policies/jobs/get-compliance', query=query + + , headers=headers + ) return GetPolicyComplianceResponse.from_dict(res) - def list_compliance( - self, policy_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[JobCompliance]: - """List job policy compliance. + + + + def list_compliance(self + , policy_id: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[JobCompliance]: + """List job policy compliance. + Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and its job clusters no longer comply with the updated policy. - + :param policy_id: str Canonical unique identifier for the cluster policy. 
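
A sketch tying the compliance endpoints together, with placeholder IDs: list the jobs governed by a policy, preview the enforcement diff with `validate_only`, then apply it:

    for c in w.policy_compliance_for_jobs.list_compliance(policy_id="ABC123"):  # placeholder
        if c.is_compliant:
            continue
        preview = w.policy_compliance_for_jobs.enforce_compliance(job_id=c.job_id, validate_only=True)
        if preview.has_changes:
            w.policy_compliance_for_jobs.enforce_compliance(job_id=c.job_id)
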
:param page_size: int (optional) @@ -10380,26 +8804,30 @@ def list_compliance( :param page_token: str (optional) A page token that can be used to navigate to the next page or previous page as returned by `next_page_token` or `prev_page_token`. - + :returns: Iterator over :class:`JobCompliance` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - if policy_id is not None: - query["policy_id"] = policy_id - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + if policy_id is not None: query['policy_id'] = policy_id + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/policies/jobs/list-compliance", query=query, headers=headers) - if "jobs" in json: - for v in json["jobs"]: - yield JobCompliance.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] + json = self._api.do('GET','/api/2.0/policies/jobs/list-compliance', query=query + + , headers=headers + ) + if 'jobs' in json: + for v in json['jobs']: + yield JobCompliance.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + \ No newline at end of file diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 6d3a8815b..156e8b91e 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -1,719 +1,663 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AddExchangeForListingRequest: listing_id: str - + exchange_id: str - + def as_dict(self) -> dict: """Serializes the AddExchangeForListingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.listing_id is not None: body['listing_id'] = self.listing_id return body def as_shallow_dict(self) -> dict: """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.listing_id is not None: 
body['listing_id'] = self.listing_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AddExchangeForListingRequest: """Deserializes the AddExchangeForListingRequest from a dictionary.""" - return cls(exchange_id=d.get("exchange_id", None), listing_id=d.get("listing_id", None)) + return cls(exchange_id=d.get('exchange_id', None), listing_id=d.get('listing_id', None)) + + @dataclass class AddExchangeForListingResponse: exchange_for_listing: Optional[ExchangeListing] = None - + def as_dict(self) -> dict: """Serializes the AddExchangeForListingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange_for_listing: - body["exchange_for_listing"] = self.exchange_for_listing.as_dict() + if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AddExchangeForListingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange_for_listing: - body["exchange_for_listing"] = self.exchange_for_listing + if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AddExchangeForListingResponse: """Deserializes the AddExchangeForListingResponse from a dictionary.""" - return cls(exchange_for_listing=_from_dict(d, "exchange_for_listing", ExchangeListing)) + return cls(exchange_for_listing=_from_dict(d, 'exchange_for_listing', ExchangeListing)) + + class AssetType(Enum): + + + ASSET_TYPE_APP = 'ASSET_TYPE_APP' + ASSET_TYPE_DATA_TABLE = 'ASSET_TYPE_DATA_TABLE' + ASSET_TYPE_GIT_REPO = 'ASSET_TYPE_GIT_REPO' + ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA' + ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL' + ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK' + ASSET_TYPE_PARTNER_INTEGRATION = 'ASSET_TYPE_PARTNER_INTEGRATION' + - ASSET_TYPE_APP = "ASSET_TYPE_APP" - ASSET_TYPE_DATA_TABLE = "ASSET_TYPE_DATA_TABLE" - ASSET_TYPE_GIT_REPO = "ASSET_TYPE_GIT_REPO" - ASSET_TYPE_MEDIA = "ASSET_TYPE_MEDIA" - ASSET_TYPE_MODEL = "ASSET_TYPE_MODEL" - ASSET_TYPE_NOTEBOOK = "ASSET_TYPE_NOTEBOOK" - ASSET_TYPE_PARTNER_INTEGRATION = "ASSET_TYPE_PARTNER_INTEGRATION" @dataclass class BatchGetListingsResponse: listings: Optional[List[Listing]] = None - + def as_dict(self) -> dict: """Serializes the BatchGetListingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listings: - body["listings"] = [v.as_dict() for v in self.listings] + if self.listings: body['listings'] = [v.as_dict() for v in self.listings] return body def as_shallow_dict(self) -> dict: """Serializes the BatchGetListingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.listings: - body["listings"] = self.listings + if self.listings: body['listings'] = self.listings return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BatchGetListingsResponse: """Deserializes the BatchGetListingsResponse from a dictionary.""" - return cls(listings=_repeated_dict(d, "listings", Listing)) + return cls(listings=_repeated_dict(d, 'listings', Listing)) + + + + + @dataclass class BatchGetProvidersResponse: providers: Optional[List[ProviderInfo]] = None - + def as_dict(self) -> dict: """Serializes the BatchGetProvidersResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.providers: - body["providers"] = [v.as_dict() for v in self.providers] + if self.providers: body['providers'] = [v.as_dict() for v in 
self.providers] return body def as_shallow_dict(self) -> dict: """Serializes the BatchGetProvidersResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.providers: - body["providers"] = self.providers + if self.providers: body['providers'] = self.providers return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BatchGetProvidersResponse: """Deserializes the BatchGetProvidersResponse from a dictionary.""" - return cls(providers=_repeated_dict(d, "providers", ProviderInfo)) - + return cls(providers=_repeated_dict(d, 'providers', ProviderInfo)) + -class Category(Enum): - ADVERTISING_AND_MARKETING = "ADVERTISING_AND_MARKETING" - CLIMATE_AND_ENVIRONMENT = "CLIMATE_AND_ENVIRONMENT" - COMMERCE = "COMMERCE" - DEMOGRAPHICS = "DEMOGRAPHICS" - ECONOMICS = "ECONOMICS" - EDUCATION = "EDUCATION" - ENERGY = "ENERGY" - FINANCIAL = "FINANCIAL" - GAMING = "GAMING" - GEOSPATIAL = "GEOSPATIAL" - HEALTH = "HEALTH" - LOOKUP_TABLES = "LOOKUP_TABLES" - MANUFACTURING = "MANUFACTURING" - MEDIA = "MEDIA" - OTHER = "OTHER" - PUBLIC_SECTOR = "PUBLIC_SECTOR" - RETAIL = "RETAIL" - SCIENCE_AND_RESEARCH = "SCIENCE_AND_RESEARCH" - SECURITY = "SECURITY" - SPORTS = "SPORTS" - TRANSPORTATION_AND_LOGISTICS = "TRANSPORTATION_AND_LOGISTICS" - TRAVEL_AND_TOURISM = "TRAVEL_AND_TOURISM" +class Category(Enum): + + + ADVERTISING_AND_MARKETING = 'ADVERTISING_AND_MARKETING' + CLIMATE_AND_ENVIRONMENT = 'CLIMATE_AND_ENVIRONMENT' + COMMERCE = 'COMMERCE' + DEMOGRAPHICS = 'DEMOGRAPHICS' + ECONOMICS = 'ECONOMICS' + EDUCATION = 'EDUCATION' + ENERGY = 'ENERGY' + FINANCIAL = 'FINANCIAL' + GAMING = 'GAMING' + GEOSPATIAL = 'GEOSPATIAL' + HEALTH = 'HEALTH' + LOOKUP_TABLES = 'LOOKUP_TABLES' + MANUFACTURING = 'MANUFACTURING' + MEDIA = 'MEDIA' + OTHER = 'OTHER' + PUBLIC_SECTOR = 'PUBLIC_SECTOR' + RETAIL = 'RETAIL' + SCIENCE_AND_RESEARCH = 'SCIENCE_AND_RESEARCH' + SECURITY = 'SECURITY' + SPORTS = 'SPORTS' + TRANSPORTATION_AND_LOGISTICS = 'TRANSPORTATION_AND_LOGISTICS' + TRAVEL_AND_TOURISM = 'TRAVEL_AND_TOURISM' @dataclass class ConsumerTerms: version: str - + def as_dict(self) -> dict: """Serializes the ConsumerTerms into a dictionary suitable for use as a JSON request body.""" body = {} - if self.version is not None: - body["version"] = self.version + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ConsumerTerms into a shallow dictionary of its immediate attributes.""" body = {} - if self.version is not None: - body["version"] = self.version + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ConsumerTerms: """Deserializes the ConsumerTerms from a dictionary.""" - return cls(version=d.get("version", None)) + return cls(version=d.get('version', None)) + + @dataclass class ContactInfo: """contact info for the consumer requesting data or performing a listing installation""" - + company: Optional[str] = None - + email: Optional[str] = None - + first_name: Optional[str] = None - + last_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ContactInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.company is not None: - body["company"] = self.company - if self.email is not None: - body["email"] = self.email - if self.first_name is not None: - body["first_name"] = self.first_name - if self.last_name is not None: - body["last_name"] = self.last_name + if self.company is not None: body['company'] = self.company 
+ if self.email is not None: body['email'] = self.email + if self.first_name is not None: body['first_name'] = self.first_name + if self.last_name is not None: body['last_name'] = self.last_name return body def as_shallow_dict(self) -> dict: """Serializes the ContactInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.company is not None: - body["company"] = self.company - if self.email is not None: - body["email"] = self.email - if self.first_name is not None: - body["first_name"] = self.first_name - if self.last_name is not None: - body["last_name"] = self.last_name + if self.company is not None: body['company'] = self.company + if self.email is not None: body['email'] = self.email + if self.first_name is not None: body['first_name'] = self.first_name + if self.last_name is not None: body['last_name'] = self.last_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ContactInfo: """Deserializes the ContactInfo from a dictionary.""" - return cls( - company=d.get("company", None), - email=d.get("email", None), - first_name=d.get("first_name", None), - last_name=d.get("last_name", None), - ) - + return cls(company=d.get('company', None), email=d.get('email', None), first_name=d.get('first_name', None), last_name=d.get('last_name', None)) + -class Cost(Enum): - FREE = "FREE" - PAID = "PAID" +class Cost(Enum): + + + FREE = 'FREE' + PAID = 'PAID' @dataclass class CreateExchangeFilterRequest: filter: ExchangeFilter - + def as_dict(self) -> dict: """Serializes the CreateExchangeFilterRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter: - body["filter"] = self.filter.as_dict() + if self.filter: body['filter'] = self.filter.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter: - body["filter"] = self.filter + if self.filter: body['filter'] = self.filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterRequest: """Deserializes the CreateExchangeFilterRequest from a dictionary.""" - return cls(filter=_from_dict(d, "filter", ExchangeFilter)) + return cls(filter=_from_dict(d, 'filter', ExchangeFilter)) + + @dataclass class CreateExchangeFilterResponse: filter_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateExchangeFilterResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter_id is not None: - body["filter_id"] = self.filter_id + if self.filter_id is not None: body['filter_id'] = self.filter_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeFilterResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter_id is not None: - body["filter_id"] = self.filter_id + if self.filter_id is not None: body['filter_id'] = self.filter_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterResponse: """Deserializes the CreateExchangeFilterResponse from a dictionary.""" - return cls(filter_id=d.get("filter_id", None)) + return cls(filter_id=d.get('filter_id', None)) + + @dataclass class CreateExchangeRequest: exchange: Exchange - + def as_dict(self) -> dict: """Serializes the CreateExchangeRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange: - body["exchange"] = self.exchange.as_dict() + if self.exchange: body['exchange'] = 
self.exchange.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange: - body["exchange"] = self.exchange + if self.exchange: body['exchange'] = self.exchange return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeRequest: """Deserializes the CreateExchangeRequest from a dictionary.""" - return cls(exchange=_from_dict(d, "exchange", Exchange)) + return cls(exchange=_from_dict(d, 'exchange', Exchange)) + + @dataclass class CreateExchangeResponse: exchange_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateExchangeResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeResponse: """Deserializes the CreateExchangeResponse from a dictionary.""" - return cls(exchange_id=d.get("exchange_id", None)) + return cls(exchange_id=d.get('exchange_id', None)) + + @dataclass class CreateFileRequest: file_parent: FileParent - + marketplace_file_type: MarketplaceFileType - + mime_type: str - + display_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateFileRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.file_parent: - body["file_parent"] = self.file_parent.as_dict() - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type.value - if self.mime_type is not None: - body["mime_type"] = self.mime_type + if self.display_name is not None: body['display_name'] = self.display_name + if self.file_parent: body['file_parent'] = self.file_parent.as_dict() + if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type.value + if self.mime_type is not None: body['mime_type'] = self.mime_type return body def as_shallow_dict(self) -> dict: """Serializes the CreateFileRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.file_parent: - body["file_parent"] = self.file_parent - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type - if self.mime_type is not None: - body["mime_type"] = self.mime_type + if self.display_name is not None: body['display_name'] = self.display_name + if self.file_parent: body['file_parent'] = self.file_parent + if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type + if self.mime_type is not None: body['mime_type'] = self.mime_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateFileRequest: """Deserializes the CreateFileRequest from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - file_parent=_from_dict(d, "file_parent", FileParent), - marketplace_file_type=_enum(d, 
"marketplace_file_type", MarketplaceFileType), - mime_type=d.get("mime_type", None), - ) + return cls(display_name=d.get('display_name', None), file_parent=_from_dict(d, 'file_parent', FileParent), marketplace_file_type=_enum(d, 'marketplace_file_type', MarketplaceFileType), mime_type=d.get('mime_type', None)) + + @dataclass class CreateFileResponse: file_info: Optional[FileInfo] = None - + upload_url: Optional[str] = None """Pre-signed POST URL to blob storage""" - + def as_dict(self) -> dict: """Serializes the CreateFileResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_info: - body["file_info"] = self.file_info.as_dict() - if self.upload_url is not None: - body["upload_url"] = self.upload_url + if self.file_info: body['file_info'] = self.file_info.as_dict() + if self.upload_url is not None: body['upload_url'] = self.upload_url return body def as_shallow_dict(self) -> dict: """Serializes the CreateFileResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_info: - body["file_info"] = self.file_info - if self.upload_url is not None: - body["upload_url"] = self.upload_url + if self.file_info: body['file_info'] = self.file_info + if self.upload_url is not None: body['upload_url'] = self.upload_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateFileResponse: """Deserializes the CreateFileResponse from a dictionary.""" - return cls(file_info=_from_dict(d, "file_info", FileInfo), upload_url=d.get("upload_url", None)) + return cls(file_info=_from_dict(d, 'file_info', FileInfo), upload_url=d.get('upload_url', None)) + + @dataclass class CreateInstallationRequest: accepted_consumer_terms: Optional[ConsumerTerms] = None - + catalog_name: Optional[str] = None - + listing_id: Optional[str] = None - + recipient_type: Optional[DeltaSharingRecipientType] = None - + repo_detail: Optional[RepoInstallation] = None """for git repo installations""" - + share_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateInstallationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type.value - if self.repo_detail: - body["repo_detail"] = self.repo_detail.as_dict() - if self.share_name is not None: - body["share_name"] = self.share_name + if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms.as_dict() + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value + if self.repo_detail: body['repo_detail'] = self.repo_detail.as_dict() + if self.share_name is not None: body['share_name'] = self.share_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - 
if self.recipient_type is not None:
-            body["recipient_type"] = self.recipient_type
-        if self.repo_detail:
-            body["repo_detail"] = self.repo_detail
-        if self.share_name is not None:
-            body["share_name"] = self.share_name
+        if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_detail: body['repo_detail'] = self.repo_detail
+        if self.share_name is not None: body['share_name'] = self.share_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateInstallationRequest:
         """Deserializes the CreateInstallationRequest from a dictionary."""
-        return cls(
-            accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms),
-            catalog_name=d.get("catalog_name", None),
-            listing_id=d.get("listing_id", None),
-            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
-            repo_detail=_from_dict(d, "repo_detail", RepoInstallation),
-            share_name=d.get("share_name", None),
-        )
+        return cls(accepted_consumer_terms=_from_dict(d, 'accepted_consumer_terms', ConsumerTerms), catalog_name=d.get('catalog_name', None), listing_id=d.get('listing_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), repo_detail=_from_dict(d, 'repo_detail', RepoInstallation), share_name=d.get('share_name', None))
+
+


@dataclass
class CreateListingRequest:
    listing: Listing
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateListingRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.listing:
-            body["listing"] = self.listing.as_dict()
+        if self.listing: body['listing'] = self.listing.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.listing:
-            body["listing"] = self.listing
+        if self.listing: body['listing'] = self.listing
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateListingRequest:
        """Deserializes the CreateListingRequest from a dictionary."""
-        return cls(listing=_from_dict(d, "listing", Listing))
+        return cls(listing=_from_dict(d, 'listing', Listing))
+
+


@dataclass
class CreateListingResponse:
    listing_id: Optional[str] = None
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateListingResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateListingResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateListingResponse:
        """Deserializes the CreateListingResponse from a dictionary."""
-        return cls(listing_id=d.get("listing_id", None))
+        return cls(listing_id=d.get('listing_id', None))
+
+


@dataclass
class CreatePersonalizationRequest:
    """Data request messages also create a lead (maybe)"""
-
+    
    intended_use: str
-
+    
    accepted_consumer_terms: ConsumerTerms
-
+    
    comment: Optional[str] = None
-
+    
    company: Optional[str] = None
-
+    
first_name: Optional[str] = None - + is_from_lighthouse: Optional[bool] = None - + last_name: Optional[str] = None - + listing_id: Optional[str] = None - + recipient_type: Optional[DeltaSharingRecipientType] = None - + def as_dict(self) -> dict: """Serializes the CreatePersonalizationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.company is not None: - body["company"] = self.company - if self.first_name is not None: - body["first_name"] = self.first_name - if self.intended_use is not None: - body["intended_use"] = self.intended_use - if self.is_from_lighthouse is not None: - body["is_from_lighthouse"] = self.is_from_lighthouse - if self.last_name is not None: - body["last_name"] = self.last_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type.value + if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms.as_dict() + if self.comment is not None: body['comment'] = self.comment + if self.company is not None: body['company'] = self.company + if self.first_name is not None: body['first_name'] = self.first_name + if self.intended_use is not None: body['intended_use'] = self.intended_use + if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse + if self.last_name is not None: body['last_name'] = self.last_name + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms - if self.comment is not None: - body["comment"] = self.comment - if self.company is not None: - body["company"] = self.company - if self.first_name is not None: - body["first_name"] = self.first_name - if self.intended_use is not None: - body["intended_use"] = self.intended_use - if self.is_from_lighthouse is not None: - body["is_from_lighthouse"] = self.is_from_lighthouse - if self.last_name is not None: - body["last_name"] = self.last_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type + if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms + if self.comment is not None: body['comment'] = self.comment + if self.company is not None: body['company'] = self.company + if self.first_name is not None: body['first_name'] = self.first_name + if self.intended_use is not None: body['intended_use'] = self.intended_use + if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse + if self.last_name is not None: body['last_name'] = self.last_name + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequest: """Deserializes the CreatePersonalizationRequest from a dictionary.""" - return cls( - 
accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms), - comment=d.get("comment", None), - company=d.get("company", None), - first_name=d.get("first_name", None), - intended_use=d.get("intended_use", None), - is_from_lighthouse=d.get("is_from_lighthouse", None), - last_name=d.get("last_name", None), - listing_id=d.get("listing_id", None), - recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), - ) + return cls(accepted_consumer_terms=_from_dict(d, 'accepted_consumer_terms', ConsumerTerms), comment=d.get('comment', None), company=d.get('company', None), first_name=d.get('first_name', None), intended_use=d.get('intended_use', None), is_from_lighthouse=d.get('is_from_lighthouse', None), last_name=d.get('last_name', None), listing_id=d.get('listing_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType)) + + @dataclass class CreatePersonalizationRequestResponse: id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreatePersonalizationRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the CreatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequestResponse: """Deserializes the CreatePersonalizationRequestResponse from a dictionary.""" - return cls(id=d.get("id", None)) + return cls(id=d.get('id', None)) + + @dataclass class CreateProviderRequest: provider: ProviderInfo - + def as_dict(self) -> dict: """Serializes the CreateProviderRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.provider: - body["provider"] = self.provider.as_dict() + if self.provider: body['provider'] = self.provider.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.provider: - body["provider"] = self.provider + if self.provider: body['provider'] = self.provider return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateProviderRequest: """Deserializes the CreateProviderRequest from a dictionary.""" - return cls(provider=_from_dict(d, "provider", ProviderInfo)) + return cls(provider=_from_dict(d, 'provider', ProviderInfo)) + + @dataclass class CreateProviderResponse: id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateProviderResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the CreateProviderResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateProviderResponse: """Deserializes the CreateProviderResponse from a dictionary.""" - return cls(id=d.get("id", None)) - + return cls(id=d.get('id', None)) + -class DataRefresh(Enum): - DAILY = "DAILY" - HOURLY = "HOURLY" - MINUTE = "MINUTE" - MONTHLY = "MONTHLY" - NONE = "NONE" - QUARTERLY = "QUARTERLY" 
- SECOND = "SECOND" - WEEKLY = "WEEKLY" - YEARLY = "YEARLY" +class DataRefresh(Enum): + + + DAILY = 'DAILY' + HOURLY = 'HOURLY' + MINUTE = 'MINUTE' + MONTHLY = 'MONTHLY' + NONE = 'NONE' + QUARTERLY = 'QUARTERLY' + SECOND = 'SECOND' + WEEKLY = 'WEEKLY' + YEARLY = 'YEARLY' @dataclass class DataRefreshInfo: interval: int - + unit: DataRefresh - + def as_dict(self) -> dict: """Serializes the DataRefreshInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.interval is not None: - body["interval"] = self.interval - if self.unit is not None: - body["unit"] = self.unit.value + if self.interval is not None: body['interval'] = self.interval + if self.unit is not None: body['unit'] = self.unit.value return body def as_shallow_dict(self) -> dict: """Serializes the DataRefreshInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.interval is not None: - body["interval"] = self.interval - if self.unit is not None: - body["unit"] = self.unit + if self.interval is not None: body['interval'] = self.interval + if self.unit is not None: body['unit'] = self.unit return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataRefreshInfo: """Deserializes the DataRefreshInfo from a dictionary.""" - return cls(interval=d.get("interval", None), unit=_enum(d, "unit", DataRefresh)) + return cls(interval=d.get('interval', None), unit=_enum(d, 'unit', DataRefresh)) + + + + + @dataclass @@ -732,6 +676,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteExchangeFilterResponse: """Deserializes the DeleteExchangeFilterResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -750,6 +699,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteExchangeResponse: """Deserializes the DeleteExchangeResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -768,6 +722,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteFileResponse: """Deserializes the DeleteFileResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -786,6 +745,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteInstallationResponse: """Deserializes the DeleteInstallationResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -804,6 +768,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteListingResponse: """Deserializes the DeleteListingResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -822,1215 +791,1069 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteProviderResponse: """Deserializes the DeleteProviderResponse from a dictionary.""" return cls() + -class DeltaSharingRecipientType(Enum): - - DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS = "DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS" - DELTA_SHARING_RECIPIENT_TYPE_OPEN = "DELTA_SHARING_RECIPIENT_TYPE_OPEN" +class DeltaSharingRecipientType(Enum): + + + DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS = 'DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS' + DELTA_SHARING_RECIPIENT_TYPE_OPEN = 'DELTA_SHARING_RECIPIENT_TYPE_OPEN' @dataclass class Exchange: name: str - + comment: Optional[str] = None - + created_at: Optional[int] = None - + created_by: Optional[str] = None - + filters: Optional[List[ExchangeFilter]] = None - + id: Optional[str] = None - + linked_listings: Optional[List[ExchangeListing]] = None - + updated_at: Optional[int] = None - + updated_by: Optional[str] = None - + def as_dict(self) -> 
dict: """Serializes the Exchange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.filters: - body["filters"] = [v.as_dict() for v in self.filters] - if self.id is not None: - body["id"] = self.id - if self.linked_listings: - body["linked_listings"] = [v.as_dict() for v in self.linked_listings] - if self.name is not None: - body["name"] = self.name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.filters: body['filters'] = [v.as_dict() for v in self.filters] + if self.id is not None: body['id'] = self.id + if self.linked_listings: body['linked_listings'] = [v.as_dict() for v in self.linked_listings] + if self.name is not None: body['name'] = self.name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the Exchange into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.filters: - body["filters"] = self.filters - if self.id is not None: - body["id"] = self.id - if self.linked_listings: - body["linked_listings"] = self.linked_listings - if self.name is not None: - body["name"] = self.name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.filters: body['filters'] = self.filters + if self.id is not None: body['id'] = self.id + if self.linked_listings: body['linked_listings'] = self.linked_listings + if self.name is not None: body['name'] = self.name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Exchange: """Deserializes the Exchange from a dictionary.""" - return cls( - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - filters=_repeated_dict(d, "filters", ExchangeFilter), - id=d.get("id", None), - linked_listings=_repeated_dict(d, "linked_listings", ExchangeListing), - name=d.get("name", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), filters=_repeated_dict(d, 'filters', ExchangeFilter), id=d.get('id', None), linked_listings=_repeated_dict(d, 'linked_listings', ExchangeListing), name=d.get('name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + 
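
# An illustrative round-trip of the serialization helpers generated above.
# `Exchange`, `as_dict`, and `from_dict` are exactly as defined in this
# module; the field values below are hypothetical.
exchange = Exchange(name="demo-exchange", comment="partner metastores", id="ex-123")

payload = exchange.as_dict()
# -> {'comment': 'partner metastores', 'id': 'ex-123', 'name': 'demo-exchange'}
# Fields left as None (created_at, filters, linked_listings, ...) are omitted.

restored = Exchange.from_dict(payload)
assert restored == exchange  # @dataclass equality makes the round trip checkable
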
@dataclass class ExchangeFilter: exchange_id: str - + filter_value: str - + filter_type: ExchangeFilterType - + created_at: Optional[int] = None - + created_by: Optional[str] = None - + id: Optional[str] = None - + name: Optional[str] = None - + updated_at: Optional[int] = None - + updated_by: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ExchangeFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.filter_type is not None: - body["filter_type"] = self.filter_type.value - if self.filter_value is not None: - body["filter_value"] = self.filter_value - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.filter_type is not None: body['filter_type'] = self.filter_type.value + if self.filter_value is not None: body['filter_value'] = self.filter_value + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the ExchangeFilter into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.filter_type is not None: - body["filter_type"] = self.filter_type - if self.filter_value is not None: - body["filter_value"] = self.filter_value - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.filter_type is not None: body['filter_type'] = self.filter_type + if self.filter_value is not None: body['filter_value'] = self.filter_value + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeFilter: """Deserializes the ExchangeFilter from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - exchange_id=d.get("exchange_id", None), - filter_type=_enum(d, "filter_type", ExchangeFilterType), - filter_value=d.get("filter_value", None), - id=d.get("id", None), - name=d.get("name", None), - updated_at=d.get("updated_at", None), 
- updated_by=d.get("updated_by", None), - ) + return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), exchange_id=d.get('exchange_id', None), filter_type=_enum(d, 'filter_type', ExchangeFilterType), filter_value=d.get('filter_value', None), id=d.get('id', None), name=d.get('name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + -class ExchangeFilterType(Enum): - - GLOBAL_METASTORE_ID = "GLOBAL_METASTORE_ID" +class ExchangeFilterType(Enum): + + + GLOBAL_METASTORE_ID = 'GLOBAL_METASTORE_ID' @dataclass class ExchangeListing: created_at: Optional[int] = None - + created_by: Optional[str] = None - + exchange_id: Optional[str] = None - + exchange_name: Optional[str] = None - + id: Optional[str] = None - + listing_id: Optional[str] = None - + listing_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ExchangeListing into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.exchange_name is not None: - body["exchange_name"] = self.exchange_name - if self.id is not None: - body["id"] = self.id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.listing_name is not None: - body["listing_name"] = self.listing_name + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.exchange_name is not None: body['exchange_name'] = self.exchange_name + if self.id is not None: body['id'] = self.id + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_name is not None: body['listing_name'] = self.listing_name return body def as_shallow_dict(self) -> dict: """Serializes the ExchangeListing into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.exchange_name is not None: - body["exchange_name"] = self.exchange_name - if self.id is not None: - body["id"] = self.id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.listing_name is not None: - body["listing_name"] = self.listing_name + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.exchange_name is not None: body['exchange_name'] = self.exchange_name + if self.id is not None: body['id'] = self.id + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_name is not None: body['listing_name'] = self.listing_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeListing: """Deserializes the ExchangeListing from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - exchange_id=d.get("exchange_id", None), - exchange_name=d.get("exchange_name", None), - id=d.get("id", None), - listing_id=d.get("listing_id", None), - listing_name=d.get("listing_name", 
None), - ) + return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), exchange_id=d.get('exchange_id', None), exchange_name=d.get('exchange_name', None), id=d.get('id', None), listing_id=d.get('listing_id', None), listing_name=d.get('listing_name', None)) + + @dataclass class FileInfo: created_at: Optional[int] = None - + display_name: Optional[str] = None """Name displayed to users for applicable files, e.g. embedded notebooks""" - + download_link: Optional[str] = None - + file_parent: Optional[FileParent] = None - + id: Optional[str] = None - + marketplace_file_type: Optional[MarketplaceFileType] = None - + mime_type: Optional[str] = None - + status: Optional[FileStatus] = None - + status_message: Optional[str] = None """Populated if status is in a failed state with more information on reason for the failure.""" - + updated_at: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the FileInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.display_name is not None: - body["display_name"] = self.display_name - if self.download_link is not None: - body["download_link"] = self.download_link - if self.file_parent: - body["file_parent"] = self.file_parent.as_dict() - if self.id is not None: - body["id"] = self.id - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type.value - if self.mime_type is not None: - body["mime_type"] = self.mime_type - if self.status is not None: - body["status"] = self.status.value - if self.status_message is not None: - body["status_message"] = self.status_message - if self.updated_at is not None: - body["updated_at"] = self.updated_at + if self.created_at is not None: body['created_at'] = self.created_at + if self.display_name is not None: body['display_name'] = self.display_name + if self.download_link is not None: body['download_link'] = self.download_link + if self.file_parent: body['file_parent'] = self.file_parent.as_dict() + if self.id is not None: body['id'] = self.id + if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type.value + if self.mime_type is not None: body['mime_type'] = self.mime_type + if self.status is not None: body['status'] = self.status.value + if self.status_message is not None: body['status_message'] = self.status_message + if self.updated_at is not None: body['updated_at'] = self.updated_at return body def as_shallow_dict(self) -> dict: """Serializes the FileInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.display_name is not None: - body["display_name"] = self.display_name - if self.download_link is not None: - body["download_link"] = self.download_link - if self.file_parent: - body["file_parent"] = self.file_parent - if self.id is not None: - body["id"] = self.id - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type - if self.mime_type is not None: - body["mime_type"] = self.mime_type - if self.status is not None: - body["status"] = self.status - if self.status_message is not None: - body["status_message"] = self.status_message - if self.updated_at is not None: - body["updated_at"] = self.updated_at + if self.created_at is not None: body['created_at'] = self.created_at + if self.display_name is not None: body['display_name'] = 
self.display_name + if self.download_link is not None: body['download_link'] = self.download_link + if self.file_parent: body['file_parent'] = self.file_parent + if self.id is not None: body['id'] = self.id + if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type + if self.mime_type is not None: body['mime_type'] = self.mime_type + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.updated_at is not None: body['updated_at'] = self.updated_at return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileInfo: """Deserializes the FileInfo from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - display_name=d.get("display_name", None), - download_link=d.get("download_link", None), - file_parent=_from_dict(d, "file_parent", FileParent), - id=d.get("id", None), - marketplace_file_type=_enum(d, "marketplace_file_type", MarketplaceFileType), - mime_type=d.get("mime_type", None), - status=_enum(d, "status", FileStatus), - status_message=d.get("status_message", None), - updated_at=d.get("updated_at", None), - ) + return cls(created_at=d.get('created_at', None), display_name=d.get('display_name', None), download_link=d.get('download_link', None), file_parent=_from_dict(d, 'file_parent', FileParent), id=d.get('id', None), marketplace_file_type=_enum(d, 'marketplace_file_type', MarketplaceFileType), mime_type=d.get('mime_type', None), status=_enum(d, 'status', FileStatus), status_message=d.get('status_message', None), updated_at=d.get('updated_at', None)) + + @dataclass class FileParent: file_parent_type: Optional[FileParentType] = None - + parent_id: Optional[str] = None """TODO make the following fields required""" - + def as_dict(self) -> dict: """Serializes the FileParent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_parent_type is not None: - body["file_parent_type"] = self.file_parent_type.value - if self.parent_id is not None: - body["parent_id"] = self.parent_id + if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type.value + if self.parent_id is not None: body['parent_id'] = self.parent_id return body def as_shallow_dict(self) -> dict: """Serializes the FileParent into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_parent_type is not None: - body["file_parent_type"] = self.file_parent_type - if self.parent_id is not None: - body["parent_id"] = self.parent_id + if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type + if self.parent_id is not None: body['parent_id'] = self.parent_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileParent: """Deserializes the FileParent from a dictionary.""" - return cls(file_parent_type=_enum(d, "file_parent_type", FileParentType), parent_id=d.get("parent_id", None)) + return cls(file_parent_type=_enum(d, 'file_parent_type', FileParentType), parent_id=d.get('parent_id', None)) + -class FileParentType(Enum): - - LISTING = "LISTING" - LISTING_RESOURCE = "LISTING_RESOURCE" - PROVIDER = "PROVIDER" +class FileParentType(Enum): + + + LISTING = 'LISTING' + LISTING_RESOURCE = 'LISTING_RESOURCE' + PROVIDER = 'PROVIDER' class FileStatus(Enum): - - FILE_STATUS_PUBLISHED = "FILE_STATUS_PUBLISHED" - FILE_STATUS_SANITIZATION_FAILED = "FILE_STATUS_SANITIZATION_FAILED" - FILE_STATUS_SANITIZING = "FILE_STATUS_SANITIZING" - FILE_STATUS_STAGING = 
"FILE_STATUS_STAGING" - + + + FILE_STATUS_PUBLISHED = 'FILE_STATUS_PUBLISHED' + FILE_STATUS_SANITIZATION_FAILED = 'FILE_STATUS_SANITIZATION_FAILED' + FILE_STATUS_SANITIZING = 'FILE_STATUS_SANITIZING' + FILE_STATUS_STAGING = 'FILE_STATUS_STAGING' class FulfillmentType(Enum): + + + INSTALL = 'INSTALL' + REQUEST_ACCESS = 'REQUEST_ACCESS' + - INSTALL = "INSTALL" - REQUEST_ACCESS = "REQUEST_ACCESS" @dataclass class GetExchangeResponse: exchange: Optional[Exchange] = None - + def as_dict(self) -> dict: """Serializes the GetExchangeResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange: - body["exchange"] = self.exchange.as_dict() + if self.exchange: body['exchange'] = self.exchange.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetExchangeResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange: - body["exchange"] = self.exchange + if self.exchange: body['exchange'] = self.exchange return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetExchangeResponse: """Deserializes the GetExchangeResponse from a dictionary.""" - return cls(exchange=_from_dict(d, "exchange", Exchange)) + return cls(exchange=_from_dict(d, 'exchange', Exchange)) + + + + + @dataclass class GetFileResponse: file_info: Optional[FileInfo] = None - + def as_dict(self) -> dict: """Serializes the GetFileResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_info: - body["file_info"] = self.file_info.as_dict() + if self.file_info: body['file_info'] = self.file_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetFileResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_info: - body["file_info"] = self.file_info + if self.file_info: body['file_info'] = self.file_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetFileResponse: """Deserializes the GetFileResponse from a dictionary.""" - return cls(file_info=_from_dict(d, "file_info", FileInfo)) + return cls(file_info=_from_dict(d, 'file_info', FileInfo)) + + @dataclass class GetLatestVersionProviderAnalyticsDashboardResponse: version: Optional[int] = None """version here is latest logical version of the dashboard template""" - + def as_dict(self) -> dict: """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.version is not None: - body["version"] = self.version + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.version is not None: - body["version"] = self.version + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionProviderAnalyticsDashboardResponse: """Deserializes the GetLatestVersionProviderAnalyticsDashboardResponse from a dictionary.""" - return cls(version=d.get("version", None)) + return cls(version=d.get('version', None)) + + + + + @dataclass class GetListingContentMetadataResponse: next_page_token: Optional[str] = None - + shared_data_objects: Optional[List[SharedDataObject]] = None - + def as_dict(self) -> dict: """Serializes the GetListingContentMetadataResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if 
self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.shared_data_objects: - body["shared_data_objects"] = [v.as_dict() for v in self.shared_data_objects] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.shared_data_objects: body['shared_data_objects'] = [v.as_dict() for v in self.shared_data_objects] return body def as_shallow_dict(self) -> dict: """Serializes the GetListingContentMetadataResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.shared_data_objects: - body["shared_data_objects"] = self.shared_data_objects + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.shared_data_objects: body['shared_data_objects'] = self.shared_data_objects return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetListingContentMetadataResponse: """Deserializes the GetListingContentMetadataResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - shared_data_objects=_repeated_dict(d, "shared_data_objects", SharedDataObject), - ) + return cls(next_page_token=d.get('next_page_token', None), shared_data_objects=_repeated_dict(d, 'shared_data_objects', SharedDataObject)) + + + + + @dataclass class GetListingResponse: listing: Optional[Listing] = None - + def as_dict(self) -> dict: """Serializes the GetListingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listing: - body["listing"] = self.listing.as_dict() + if self.listing: body['listing'] = self.listing.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetListingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.listing: - body["listing"] = self.listing + if self.listing: body['listing'] = self.listing return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetListingResponse: """Deserializes the GetListingResponse from a dictionary.""" - return cls(listing=_from_dict(d, "listing", Listing)) + return cls(listing=_from_dict(d, 'listing', Listing)) + + + + + @dataclass class GetListingsResponse: listings: Optional[List[Listing]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the GetListingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listings: - body["listings"] = [v.as_dict() for v in self.listings] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.listings: body['listings'] = [v.as_dict() for v in self.listings] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetListingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.listings: - body["listings"] = self.listings - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.listings: body['listings'] = self.listings + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetListingsResponse: """Deserializes the GetListingsResponse from a dictionary.""" - return cls(listings=_repeated_dict(d, "listings", Listing), next_page_token=d.get("next_page_token", None)) + return 
cls(listings=_repeated_dict(d, 'listings', Listing), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class GetPersonalizationRequestResponse: personalization_requests: Optional[List[PersonalizationRequest]] = None - + def as_dict(self) -> dict: """Serializes the GetPersonalizationRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.personalization_requests: - body["personalization_requests"] = [v.as_dict() for v in self.personalization_requests] + if self.personalization_requests: body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests] return body def as_shallow_dict(self) -> dict: """Serializes the GetPersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.personalization_requests: - body["personalization_requests"] = self.personalization_requests + if self.personalization_requests: body['personalization_requests'] = self.personalization_requests return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPersonalizationRequestResponse: """Deserializes the GetPersonalizationRequestResponse from a dictionary.""" - return cls(personalization_requests=_repeated_dict(d, "personalization_requests", PersonalizationRequest)) + return cls(personalization_requests=_repeated_dict(d, 'personalization_requests', PersonalizationRequest)) + + + + + @dataclass class GetProviderResponse: provider: Optional[ProviderInfo] = None - + def as_dict(self) -> dict: """Serializes the GetProviderResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.provider: - body["provider"] = self.provider.as_dict() + if self.provider: body['provider'] = self.provider.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetProviderResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.provider: - body["provider"] = self.provider + if self.provider: body['provider'] = self.provider return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetProviderResponse: """Deserializes the GetProviderResponse from a dictionary.""" - return cls(provider=_from_dict(d, "provider", ProviderInfo)) + return cls(provider=_from_dict(d, 'provider', ProviderInfo)) + + @dataclass class Installation: installation: Optional[InstallationDetail] = None - + def as_dict(self) -> dict: """Serializes the Installation into a dictionary suitable for use as a JSON request body.""" body = {} - if self.installation: - body["installation"] = self.installation.as_dict() + if self.installation: body['installation'] = self.installation.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Installation into a shallow dictionary of its immediate attributes.""" body = {} - if self.installation: - body["installation"] = self.installation + if self.installation: body['installation'] = self.installation return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Installation: """Deserializes the Installation from a dictionary.""" - return cls(installation=_from_dict(d, "installation", InstallationDetail)) + return cls(installation=_from_dict(d, 'installation', InstallationDetail)) + + @dataclass class InstallationDetail: catalog_name: Optional[str] = None - + error_message: Optional[str] = None - + id: Optional[str] = None - + installed_on: Optional[int] = None - + listing_id: Optional[str] = None - + listing_name: Optional[str] = None - + recipient_type: Optional[DeltaSharingRecipientType] = 
None - + repo_name: Optional[str] = None - + repo_path: Optional[str] = None - + share_name: Optional[str] = None - + status: Optional[InstallationStatus] = None - + token_detail: Optional[TokenDetail] = None - + tokens: Optional[List[TokenInfo]] = None - + def as_dict(self) -> dict: """Serializes the InstallationDetail into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.error_message is not None: - body["error_message"] = self.error_message - if self.id is not None: - body["id"] = self.id - if self.installed_on is not None: - body["installed_on"] = self.installed_on - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.listing_name is not None: - body["listing_name"] = self.listing_name - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type.value - if self.repo_name is not None: - body["repo_name"] = self.repo_name - if self.repo_path is not None: - body["repo_path"] = self.repo_path - if self.share_name is not None: - body["share_name"] = self.share_name - if self.status is not None: - body["status"] = self.status.value - if self.token_detail: - body["token_detail"] = self.token_detail.as_dict() - if self.tokens: - body["tokens"] = [v.as_dict() for v in self.tokens] + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.error_message is not None: body['error_message'] = self.error_message + if self.id is not None: body['id'] = self.id + if self.installed_on is not None: body['installed_on'] = self.installed_on + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_name is not None: body['listing_name'] = self.listing_name + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value + if self.repo_name is not None: body['repo_name'] = self.repo_name + if self.repo_path is not None: body['repo_path'] = self.repo_path + if self.share_name is not None: body['share_name'] = self.share_name + if self.status is not None: body['status'] = self.status.value + if self.token_detail: body['token_detail'] = self.token_detail.as_dict() + if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens] return body def as_shallow_dict(self) -> dict: """Serializes the InstallationDetail into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.error_message is not None: - body["error_message"] = self.error_message - if self.id is not None: - body["id"] = self.id - if self.installed_on is not None: - body["installed_on"] = self.installed_on - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.listing_name is not None: - body["listing_name"] = self.listing_name - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type - if self.repo_name is not None: - body["repo_name"] = self.repo_name - if self.repo_path is not None: - body["repo_path"] = self.repo_path - if self.share_name is not None: - body["share_name"] = self.share_name - if self.status is not None: - body["status"] = self.status - if self.token_detail: - body["token_detail"] = self.token_detail - if self.tokens: - body["tokens"] = self.tokens + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.error_message is not None: body['error_message'] = self.error_message + if self.id is not None: body['id'] = 
self.id + if self.installed_on is not None: body['installed_on'] = self.installed_on + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_name is not None: body['listing_name'] = self.listing_name + if self.recipient_type is not None: body['recipient_type'] = self.recipient_type + if self.repo_name is not None: body['repo_name'] = self.repo_name + if self.repo_path is not None: body['repo_path'] = self.repo_path + if self.share_name is not None: body['share_name'] = self.share_name + if self.status is not None: body['status'] = self.status + if self.token_detail: body['token_detail'] = self.token_detail + if self.tokens: body['tokens'] = self.tokens return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstallationDetail: """Deserializes the InstallationDetail from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - error_message=d.get("error_message", None), - id=d.get("id", None), - installed_on=d.get("installed_on", None), - listing_id=d.get("listing_id", None), - listing_name=d.get("listing_name", None), - recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), - repo_name=d.get("repo_name", None), - repo_path=d.get("repo_path", None), - share_name=d.get("share_name", None), - status=_enum(d, "status", InstallationStatus), - token_detail=_from_dict(d, "token_detail", TokenDetail), - tokens=_repeated_dict(d, "tokens", TokenInfo), - ) + return cls(catalog_name=d.get('catalog_name', None), error_message=d.get('error_message', None), id=d.get('id', None), installed_on=d.get('installed_on', None), listing_id=d.get('listing_id', None), listing_name=d.get('listing_name', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), repo_name=d.get('repo_name', None), repo_path=d.get('repo_path', None), share_name=d.get('share_name', None), status=_enum(d, 'status', InstallationStatus), token_detail=_from_dict(d, 'token_detail', TokenDetail), tokens=_repeated_dict(d, 'tokens', TokenInfo)) + + class InstallationStatus(Enum): + + + FAILED = 'FAILED' + INSTALLED = 'INSTALLED' + - FAILED = "FAILED" - INSTALLED = "INSTALLED" @dataclass class ListAllInstallationsResponse: installations: Optional[List[InstallationDetail]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ListAllInstallationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.installations: - body["installations"] = [v.as_dict() for v in self.installations] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.installations: body['installations'] = [v.as_dict() for v in self.installations] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListAllInstallationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.installations: - body["installations"] = self.installations - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.installations: body['installations'] = self.installations + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAllInstallationsResponse: """Deserializes the ListAllInstallationsResponse from a dictionary.""" - return cls( - installations=_repeated_dict(d, "installations", InstallationDetail), - 
next_page_token=d.get("next_page_token", None), - ) + return cls(installations=_repeated_dict(d, 'installations', InstallationDetail), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListAllPersonalizationRequestsResponse: next_page_token: Optional[str] = None - + personalization_requests: Optional[List[PersonalizationRequest]] = None - + def as_dict(self) -> dict: """Serializes the ListAllPersonalizationRequestsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.personalization_requests: - body["personalization_requests"] = [v.as_dict() for v in self.personalization_requests] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.personalization_requests: body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests] return body def as_shallow_dict(self) -> dict: """Serializes the ListAllPersonalizationRequestsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.personalization_requests: - body["personalization_requests"] = self.personalization_requests + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.personalization_requests: body['personalization_requests'] = self.personalization_requests return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAllPersonalizationRequestsResponse: """Deserializes the ListAllPersonalizationRequestsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - personalization_requests=_repeated_dict(d, "personalization_requests", PersonalizationRequest), - ) + return cls(next_page_token=d.get('next_page_token', None), personalization_requests=_repeated_dict(d, 'personalization_requests', PersonalizationRequest)) + + + + + @dataclass class ListExchangeFiltersResponse: filters: Optional[List[ExchangeFilter]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ListExchangeFiltersResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filters: - body["filters"] = [v.as_dict() for v in self.filters] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.filters: body['filters'] = [v.as_dict() for v in self.filters] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListExchangeFiltersResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.filters: - body["filters"] = self.filters - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.filters: body['filters'] = self.filters + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListExchangeFiltersResponse: """Deserializes the ListExchangeFiltersResponse from a dictionary.""" - return cls(filters=_repeated_dict(d, "filters", ExchangeFilter), next_page_token=d.get("next_page_token", None)) + return cls(filters=_repeated_dict(d, 'filters', ExchangeFilter), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListExchangesForListingResponse: exchange_listing: 
Optional[List[ExchangeListing]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ListExchangesForListingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange_listing: - body["exchange_listing"] = [v.as_dict() for v in self.exchange_listing] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.exchange_listing: body['exchange_listing'] = [v.as_dict() for v in self.exchange_listing] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListExchangesForListingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange_listing: - body["exchange_listing"] = self.exchange_listing - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.exchange_listing: body['exchange_listing'] = self.exchange_listing + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListExchangesForListingResponse: """Deserializes the ListExchangesForListingResponse from a dictionary.""" - return cls( - exchange_listing=_repeated_dict(d, "exchange_listing", ExchangeListing), - next_page_token=d.get("next_page_token", None), - ) + return cls(exchange_listing=_repeated_dict(d, 'exchange_listing', ExchangeListing), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListExchangesResponse: exchanges: Optional[List[Exchange]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ListExchangesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchanges: - body["exchanges"] = [v.as_dict() for v in self.exchanges] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.exchanges: body['exchanges'] = [v.as_dict() for v in self.exchanges] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListExchangesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchanges: - body["exchanges"] = self.exchanges - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.exchanges: body['exchanges'] = self.exchanges + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListExchangesResponse: """Deserializes the ListExchangesResponse from a dictionary.""" - return cls(exchanges=_repeated_dict(d, "exchanges", Exchange), next_page_token=d.get("next_page_token", None)) + return cls(exchanges=_repeated_dict(d, 'exchanges', Exchange), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListFilesResponse: file_infos: Optional[List[FileInfo]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ListFilesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_infos: - body["file_infos"] = [v.as_dict() for v in self.file_infos] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.file_infos: body['file_infos'] = [v.as_dict() for v in self.file_infos] + if self.next_page_token is not None: 
body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListFilesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_infos: - body["file_infos"] = self.file_infos - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.file_infos: body['file_infos'] = self.file_infos + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListFilesResponse: """Deserializes the ListFilesResponse from a dictionary.""" - return cls(file_infos=_repeated_dict(d, "file_infos", FileInfo), next_page_token=d.get("next_page_token", None)) + return cls(file_infos=_repeated_dict(d, 'file_infos', FileInfo), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListFulfillmentsResponse: fulfillments: Optional[List[ListingFulfillment]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ListFulfillmentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fulfillments: - body["fulfillments"] = [v.as_dict() for v in self.fulfillments] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.fulfillments: body['fulfillments'] = [v.as_dict() for v in self.fulfillments] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListFulfillmentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.fulfillments: - body["fulfillments"] = self.fulfillments - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.fulfillments: body['fulfillments'] = self.fulfillments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListFulfillmentsResponse: """Deserializes the ListFulfillmentsResponse from a dictionary.""" - return cls( - fulfillments=_repeated_dict(d, "fulfillments", ListingFulfillment), - next_page_token=d.get("next_page_token", None), - ) + return cls(fulfillments=_repeated_dict(d, 'fulfillments', ListingFulfillment), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListInstallationsResponse: installations: Optional[List[InstallationDetail]] = None - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ListInstallationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.installations: - body["installations"] = [v.as_dict() for v in self.installations] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.installations: body['installations'] = [v.as_dict() for v in self.installations] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListInstallationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.installations: - body["installations"] = self.installations - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.installations: body['installations'] = self.installations + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListInstallationsResponse:
         """Deserializes the ListInstallationsResponse from a dictionary."""
-        return cls(
-            installations=_repeated_dict(d, "installations", InstallationDetail),
-            next_page_token=d.get("next_page_token", None),
-        )
+        return cls(installations=_repeated_dict(d, 'installations', InstallationDetail), next_page_token=d.get('next_page_token', None))
+
+
+
+
+
 
 @dataclass
 class ListListingsForExchangeResponse:
     exchange_listings: Optional[List[ExchangeListing]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListListingsForExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_listings:
-            body["exchange_listings"] = [v.as_dict() for v in self.exchange_listings]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.exchange_listings: body['exchange_listings'] = [v.as_dict() for v in self.exchange_listings]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListListingsForExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_listings:
-            body["exchange_listings"] = self.exchange_listings
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.exchange_listings: body['exchange_listings'] = self.exchange_listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListListingsForExchangeResponse:
         """Deserializes the ListListingsForExchangeResponse from a dictionary."""
-        return cls(
-            exchange_listings=_repeated_dict(d, "exchange_listings", ExchangeListing),
-            next_page_token=d.get("next_page_token", None),
-        )
+        return cls(exchange_listings=_repeated_dict(d, 'exchange_listings', ExchangeListing), next_page_token=d.get('next_page_token', None))
+
+
+
+
+
 
 @dataclass
 class ListListingsResponse:
     listings: Optional[List[Listing]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings:
-            body["listings"] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings:
-            body["listings"] = self.listings
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListListingsResponse:
         """Deserializes the ListListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, "listings", Listing), next_page_token=d.get("next_page_token", None))
+        return cls(listings=_repeated_dict(d, 'listings', Listing), next_page_token=d.get('next_page_token', None))
+
+
 
 @dataclass
 class ListProviderAnalyticsDashboardResponse:
     id: str
-
+
     dashboard_id: str
     """dashboard_id will be used to open Lakeview dashboard."""
-
+
     version: Optional[int] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.id is not None:
-            body["id"] = self.id
-        if self.version is not None:
-            body["version"] = self.version
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None:
-            body["dashboard_id"] = self.dashboard_id
-        if self.id is not None:
-            body["id"] = self.id
-        if self.version is not None:
-            body["version"] = self.version
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProviderAnalyticsDashboardResponse:
         """Deserializes the ListProviderAnalyticsDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get("dashboard_id", None), id=d.get("id", None), version=d.get("version", None))
+        return cls(dashboard_id=d.get('dashboard_id', None), id=d.get('id', None), version=d.get('version', None))
+
+
+
+
+
 
 @dataclass
 class ListProvidersResponse:
     next_page_token: Optional[str] = None
-
+
     providers: Optional[List[ProviderInfo]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.providers:
-            body["providers"] = [v.as_dict() for v in self.providers]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.providers:
-            body["providers"] = self.providers
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = self.providers
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None), providers=_repeated_dict(d, "providers", ProviderInfo)
-        )
+        return cls(next_page_token=d.get('next_page_token', None), providers=_repeated_dict(d, 'providers', ProviderInfo))
+
+
 
 @dataclass
 class Listing:
     summary: ListingSummary
-
+
     detail: Optional[ListingDetail] = None
-
+
     id: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the Listing into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.detail:
-            body["detail"] = self.detail.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
-        if self.summary:
-            body["summary"] = self.summary.as_dict()
+        if self.detail: body['detail'] = self.detail.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.summary: body['summary'] = self.summary.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the Listing into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.detail:
-            body["detail"] = self.detail
-        if self.id is not None:
-            body["id"] = self.id
-        if self.summary:
-            body["summary"] = self.summary
+        if self.detail: body['detail'] = self.detail
+        if self.id is not None: body['id'] = self.id
+        if self.summary: body['summary'] = self.summary
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Listing:
         """Deserializes the Listing from a dictionary."""
-        return cls(
-            detail=_from_dict(d, "detail", ListingDetail),
-            id=d.get("id", None),
-            summary=_from_dict(d, "summary", ListingSummary),
-        )
+        return cls(detail=_from_dict(d, 'detail', ListingDetail), id=d.get('id', None), summary=_from_dict(d, 'summary', ListingSummary))
+
+
 
 @dataclass
 class ListingDetail:
     assets: Optional[List[AssetType]] = None
     """Type of assets included in the listing. eg. GIT_REPO, DATA_TABLE, MODEL, NOTEBOOK"""
-
+
     collection_date_end: Optional[int] = None
     """The ending date timestamp for when the data spans"""
-
+
     collection_date_start: Optional[int] = None
     """The starting date timestamp for when the data spans"""
-
+
     collection_granularity: Optional[DataRefreshInfo] = None
     """Smallest unit of time in the dataset"""
-
+
     cost: Optional[Cost] = None
     """Whether the dataset is free or paid"""
-
+
     data_source: Optional[str] = None
     """Where/how the data is sourced"""
-
+
     description: Optional[str] = None
-
+
     documentation_link: Optional[str] = None
-
+
     embedded_notebook_file_infos: Optional[List[FileInfo]] = None
-
+
     file_ids: Optional[List[str]] = None
-
+
     geographical_coverage: Optional[str] = None
     """Which geo region the listing data is collected from"""
-
+
     license: Optional[str] = None
     """ID 20, 21 removed don't use License of the data asset - Required for listings with model based assets"""
-
+
     pricing_model: Optional[str] = None
     """What the pricing model is (e.g. paid, subscription, paid upfront); should only be present if cost is paid TODO: Not used yet, should deprecate if we will never use it"""
-
+
     privacy_policy_link: Optional[str] = None
-
+
     size: Optional[float] = None
     """size of the dataset in GB"""
-
+
     support_link: Optional[str] = None
-
+
     tags: Optional[List[ListingTag]] = None
     """Listing tags - Simple key value pair to annotate listings. When should I use tags vs dedicated
     fields? Using tags avoids the need to add new columns in the database for new annotations.
@@ -2038,734 +1861,518 @@ class ListingDetail:
     1. If the field is optional and won't need to have NOT NULL integrity check 2. The value is
     fairly fixed, static and low cardinality (eg. enums). 3. The value won't be used in filters or
    joins with other tables."""
-
+
     terms_of_service: Optional[str] = None
-
+
     update_frequency: Optional[DataRefreshInfo] = None
     """How often data is updated"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets:
-            body["assets"] = [v.value for v in self.assets]
-        if self.collection_date_end is not None:
-            body["collection_date_end"] = self.collection_date_end
-        if self.collection_date_start is not None:
-            body["collection_date_start"] = self.collection_date_start
-        if self.collection_granularity:
-            body["collection_granularity"] = self.collection_granularity.as_dict()
-        if self.cost is not None:
-            body["cost"] = self.cost.value
-        if self.data_source is not None:
-            body["data_source"] = self.data_source
-        if self.description is not None:
-            body["description"] = self.description
-        if self.documentation_link is not None:
-            body["documentation_link"] = self.documentation_link
-        if self.embedded_notebook_file_infos:
-            body["embedded_notebook_file_infos"] = [v.as_dict() for v in self.embedded_notebook_file_infos]
-        if self.file_ids:
-            body["file_ids"] = [v for v in self.file_ids]
-        if self.geographical_coverage is not None:
-            body["geographical_coverage"] = self.geographical_coverage
-        if self.license is not None:
-            body["license"] = self.license
-        if self.pricing_model is not None:
-            body["pricing_model"] = self.pricing_model
-        if self.privacy_policy_link is not None:
-            body["privacy_policy_link"] = self.privacy_policy_link
-        if self.size is not None:
-            body["size"] = self.size
-        if self.support_link is not None:
-            body["support_link"] = self.support_link
-        if self.tags:
-            body["tags"] = [v.as_dict() for v in self.tags]
-        if self.terms_of_service is not None:
-            body["terms_of_service"] = self.terms_of_service
-        if self.update_frequency:
-            body["update_frequency"] = self.update_frequency.as_dict()
+        if self.assets: body['assets'] = [v.value for v in self.assets]
+        if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end
+        if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start
+        if self.collection_granularity: body['collection_granularity'] = self.collection_granularity.as_dict()
+        if self.cost is not None: body['cost'] = self.cost.value
+        if self.data_source is not None: body['data_source'] = self.data_source
+        if self.description is not None: body['description'] = self.description
+        if self.documentation_link is not None: body['documentation_link'] = self.documentation_link
+        if self.embedded_notebook_file_infos: body['embedded_notebook_file_infos'] = [v.as_dict() for v in self.embedded_notebook_file_infos]
+        if self.file_ids: body['file_ids'] = [v for v in self.file_ids]
+        if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage
+        if self.license is not None: body['license'] = self.license
+        if self.pricing_model is not None: body['pricing_model'] = self.pricing_model
+        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
+        if self.size is not None: body['size'] = self.size
+        if self.support_link is not None: body['support_link'] = self.support_link
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service
+        if self.update_frequency: body['update_frequency'] = self.update_frequency.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets:
-            body["assets"] = self.assets
-        if self.collection_date_end is not None:
-            body["collection_date_end"] = self.collection_date_end
-        if self.collection_date_start is not None:
-            body["collection_date_start"] = self.collection_date_start
-        if self.collection_granularity:
-            body["collection_granularity"] = self.collection_granularity
-        if self.cost is not None:
-            body["cost"] = self.cost
-        if self.data_source is not None:
-            body["data_source"] = self.data_source
-        if self.description is not None:
-            body["description"] = self.description
-        if self.documentation_link is not None:
-            body["documentation_link"] = self.documentation_link
-        if self.embedded_notebook_file_infos:
-            body["embedded_notebook_file_infos"] = self.embedded_notebook_file_infos
-        if self.file_ids:
-            body["file_ids"] = self.file_ids
-        if self.geographical_coverage is not None:
-            body["geographical_coverage"] = self.geographical_coverage
-        if self.license is not None:
-            body["license"] = self.license
-        if self.pricing_model is not None:
-            body["pricing_model"] = self.pricing_model
-        if self.privacy_policy_link is not None:
-            body["privacy_policy_link"] = self.privacy_policy_link
-        if self.size is not None:
-            body["size"] = self.size
-        if self.support_link is not None:
-            body["support_link"] = self.support_link
-        if self.tags:
-            body["tags"] = self.tags
-        if self.terms_of_service is not None:
-            body["terms_of_service"] = self.terms_of_service
-        if self.update_frequency:
-            body["update_frequency"] = self.update_frequency
+        if self.assets: body['assets'] = self.assets
+        if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end
+        if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start
+        if self.collection_granularity: body['collection_granularity'] = self.collection_granularity
+        if self.cost is not None: body['cost'] = self.cost
+        if self.data_source is not None: body['data_source'] = self.data_source
+        if self.description is not None: body['description'] = self.description
+        if self.documentation_link is not None: body['documentation_link'] = self.documentation_link
+        if self.embedded_notebook_file_infos: body['embedded_notebook_file_infos'] = self.embedded_notebook_file_infos
+        if self.file_ids: body['file_ids'] = self.file_ids
+        if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage
+        if self.license is not None: body['license'] = self.license
+        if self.pricing_model is not None: body['pricing_model'] = self.pricing_model
+        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
+        if self.size is not None: body['size'] = self.size
+        if self.support_link is not None: body['support_link'] = self.support_link
+        if self.tags: body['tags'] = self.tags
+        if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service
+        if self.update_frequency: body['update_frequency'] = self.update_frequency
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListingDetail:
         """Deserializes the ListingDetail from a dictionary."""
-        return cls(
-            assets=_repeated_enum(d, "assets", AssetType),
-            collection_date_end=d.get("collection_date_end", None),
-            collection_date_start=d.get("collection_date_start", None),
-            collection_granularity=_from_dict(d, "collection_granularity", DataRefreshInfo),
-            cost=_enum(d, "cost", Cost),
-            data_source=d.get("data_source", None),
-            description=d.get("description", None),
-            documentation_link=d.get("documentation_link", None),
-            embedded_notebook_file_infos=_repeated_dict(d, "embedded_notebook_file_infos", FileInfo),
-            file_ids=d.get("file_ids", None),
-            geographical_coverage=d.get("geographical_coverage", None),
-            license=d.get("license", None),
-            pricing_model=d.get("pricing_model", None),
-            privacy_policy_link=d.get("privacy_policy_link", None),
-            size=d.get("size", None),
-            support_link=d.get("support_link", None),
-            tags=_repeated_dict(d, "tags", ListingTag),
-            terms_of_service=d.get("terms_of_service", None),
-            update_frequency=_from_dict(d, "update_frequency", DataRefreshInfo),
-        )
+        return cls(assets=_repeated_enum(d, 'assets', AssetType), collection_date_end=d.get('collection_date_end', None), collection_date_start=d.get('collection_date_start', None), collection_granularity=_from_dict(d, 'collection_granularity', DataRefreshInfo), cost=_enum(d, 'cost', Cost), data_source=d.get('data_source', None), description=d.get('description', None), documentation_link=d.get('documentation_link', None), embedded_notebook_file_infos=_repeated_dict(d, 'embedded_notebook_file_infos', FileInfo), file_ids=d.get('file_ids', None), geographical_coverage=d.get('geographical_coverage', None), license=d.get('license', None), pricing_model=d.get('pricing_model', None), privacy_policy_link=d.get('privacy_policy_link', None), size=d.get('size', None), support_link=d.get('support_link', None), tags=_repeated_dict(d, 'tags', ListingTag), terms_of_service=d.get('terms_of_service', None), update_frequency=_from_dict(d, 'update_frequency', DataRefreshInfo))
+
+
 
 @dataclass
 class ListingFulfillment:
     listing_id: str
-
+
     fulfillment_type: Optional[FulfillmentType] = None
-
+
     recipient_type: Optional[DeltaSharingRecipientType] = None
-
+
     repo_info: Optional[RepoInfo] = None
-
+
     share_info: Optional[ShareInfo] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingFulfillment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.fulfillment_type is not None:
-            body["fulfillment_type"] = self.fulfillment_type.value
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
-        if self.recipient_type is not None:
-            body["recipient_type"] = self.recipient_type.value
-        if self.repo_info:
-            body["repo_info"] = self.repo_info.as_dict()
-        if self.share_info:
-            body["share_info"] = self.share_info.as_dict()
+        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type.value
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
+        if self.repo_info: body['repo_info'] = self.repo_info.as_dict()
+        if self.share_info: body['share_info'] = self.share_info.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingFulfillment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.fulfillment_type is not None:
-            body["fulfillment_type"] = self.fulfillment_type
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
-        if self.recipient_type is not None:
-            body["recipient_type"] = self.recipient_type
-        if self.repo_info:
-            body["repo_info"] = self.repo_info
-        if self.share_info:
-            body["share_info"] = self.share_info
+        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_info: body['repo_info'] = self.repo_info
+        if self.share_info: body['share_info'] = self.share_info
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListingFulfillment:
         """Deserializes the ListingFulfillment from a dictionary."""
-        return cls(
-            fulfillment_type=_enum(d, "fulfillment_type", FulfillmentType),
-            listing_id=d.get("listing_id", None),
-            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
-            repo_info=_from_dict(d, "repo_info", RepoInfo),
-            share_info=_from_dict(d, "share_info", ShareInfo),
-        )
+        return cls(fulfillment_type=_enum(d, 'fulfillment_type', FulfillmentType), listing_id=d.get('listing_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), repo_info=_from_dict(d, 'repo_info', RepoInfo), share_info=_from_dict(d, 'share_info', ShareInfo))
+
+
 
 @dataclass
 class ListingSetting:
     visibility: Optional[Visibility] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.visibility is not None:
-            body["visibility"] = self.visibility.value
+        if self.visibility is not None: body['visibility'] = self.visibility.value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.visibility is not None:
-            body["visibility"] = self.visibility
+        if self.visibility is not None: body['visibility'] = self.visibility
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListingSetting:
         """Deserializes the ListingSetting from a dictionary."""
-        return cls(visibility=_enum(d, "visibility", Visibility))
-
+        return cls(visibility=_enum(d, 'visibility', Visibility))
+
-class ListingShareType(Enum):
-    FULL = "FULL"
-    SAMPLE = "SAMPLE"
+class ListingShareType(Enum):
+
+
+    FULL = 'FULL'
+    SAMPLE = 'SAMPLE'
 
 class ListingStatus(Enum):
     """Enums"""
-
-    DRAFT = "DRAFT"
-    PENDING = "PENDING"
-    PUBLISHED = "PUBLISHED"
-    SUSPENDED = "SUSPENDED"
-
+
+    DRAFT = 'DRAFT'
+    PENDING = 'PENDING'
+    PUBLISHED = 'PUBLISHED'
+    SUSPENDED = 'SUSPENDED'
 
 @dataclass
 class ListingSummary:
     name: str
-
+
     listing_type: ListingType
-
+
     categories: Optional[List[Category]] = None
-
+
     created_at: Optional[int] = None
-
+
     created_by: Optional[str] = None
-
+
     created_by_id: Optional[int] = None
-
+
     exchange_ids: Optional[List[str]] = None
-
+
     git_repo: Optional[RepoInfo] = None
     """if a git repo is being created, a listing will be initialized with this field as opposed to a share"""
-
+
     provider_id: Optional[str] = None
-
+
     provider_region: Optional[RegionInfo] = None
-
+
     published_at: Optional[int] = None
-
+
     published_by: Optional[str] = None
-
+
     setting: Optional[ListingSetting] = None
-
+
     share: Optional[ShareInfo] = None
-
+
     status: Optional[ListingStatus] = None
     """Enums"""
-
+
     subtitle: Optional[str] = None
-
+
     updated_at: Optional[int] = None
-
+
     updated_by: Optional[str] = None
-
+
     updated_by_id: Optional[int] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingSummary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.categories:
-            body["categories"] = [v.value for v in self.categories]
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.created_by_id is not None:
-            body["created_by_id"] = self.created_by_id
-        if self.exchange_ids:
-            body["exchange_ids"] = [v for v in self.exchange_ids]
-        if self.git_repo:
-            body["git_repo"] = self.git_repo.as_dict()
-        if self.listing_type is not None:
-            body["listingType"] = self.listing_type.value
-        if self.name is not None:
-            body["name"] = self.name
-        if self.provider_id is not None:
-            body["provider_id"] = self.provider_id
-        if self.provider_region:
-            body["provider_region"] = self.provider_region.as_dict()
-        if self.published_at is not None:
-            body["published_at"] = self.published_at
-        if self.published_by is not None:
-            body["published_by"] = self.published_by
-        if self.setting:
-            body["setting"] = self.setting.as_dict()
-        if self.share:
-            body["share"] = self.share.as_dict()
-        if self.status is not None:
-            body["status"] = self.status.value
-        if self.subtitle is not None:
-            body["subtitle"] = self.subtitle
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.updated_by_id is not None:
-            body["updated_by_id"] = self.updated_by_id
+        if self.categories: body['categories'] = [v.value for v in self.categories]
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
+        if self.exchange_ids: body['exchange_ids'] = [v for v in self.exchange_ids]
+        if self.git_repo: body['git_repo'] = self.git_repo.as_dict()
+        if self.listing_type is not None: body['listingType'] = self.listing_type.value
+        if self.name is not None: body['name'] = self.name
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.provider_region: body['provider_region'] = self.provider_region.as_dict()
+        if self.published_at is not None: body['published_at'] = self.published_at
+        if self.published_by is not None: body['published_by'] = self.published_by
+        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.share: body['share'] = self.share.as_dict()
+        if self.status is not None: body['status'] = self.status.value
+        if self.subtitle is not None: body['subtitle'] = self.subtitle
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingSummary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.categories:
-            body["categories"] = self.categories
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.created_by_id is not None:
-            body["created_by_id"] = self.created_by_id
-        if self.exchange_ids:
-            body["exchange_ids"] = self.exchange_ids
-        if self.git_repo:
-            body["git_repo"] = self.git_repo
-        if self.listing_type is not None:
-            body["listingType"] = self.listing_type
-        if self.name is not None:
-            body["name"] = self.name
-        if self.provider_id is not None:
-            body["provider_id"] = self.provider_id
-        if self.provider_region:
-            body["provider_region"] = self.provider_region
-        if self.published_at is not None:
-            body["published_at"] = self.published_at
-        if self.published_by is not None:
-            body["published_by"] = self.published_by
-        if self.setting:
-            body["setting"] = self.setting
-        if self.share:
-            body["share"] = self.share
-        if self.status is not None:
-            body["status"] = self.status
-        if self.subtitle is not None:
-            body["subtitle"] = self.subtitle
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
-        if self.updated_by_id is not None:
-            body["updated_by_id"] = self.updated_by_id
+        if self.categories: body['categories'] = self.categories
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
+        if self.exchange_ids: body['exchange_ids'] = self.exchange_ids
+        if self.git_repo: body['git_repo'] = self.git_repo
+        if self.listing_type is not None: body['listingType'] = self.listing_type
+        if self.name is not None: body['name'] = self.name
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.provider_region: body['provider_region'] = self.provider_region
+        if self.published_at is not None: body['published_at'] = self.published_at
+        if self.published_by is not None: body['published_by'] = self.published_by
+        if self.setting: body['setting'] = self.setting
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        if self.subtitle is not None: body['subtitle'] = self.subtitle
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListingSummary:
         """Deserializes the ListingSummary from a dictionary."""
-        return cls(
-            categories=_repeated_enum(d, "categories", Category),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            created_by_id=d.get("created_by_id", None),
-            exchange_ids=d.get("exchange_ids", None),
-            git_repo=_from_dict(d, "git_repo", RepoInfo),
-            listing_type=_enum(d, "listingType", ListingType),
-            name=d.get("name", None),
-            provider_id=d.get("provider_id", None),
-            provider_region=_from_dict(d, "provider_region", RegionInfo),
-            published_at=d.get("published_at", None),
-            published_by=d.get("published_by", None),
-            setting=_from_dict(d, "setting", ListingSetting),
-            share=_from_dict(d, "share", ShareInfo),
-            status=_enum(d, "status", ListingStatus),
-            subtitle=d.get("subtitle", None),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-            updated_by_id=d.get("updated_by_id", None),
-        )
+        return cls(categories=_repeated_enum(d, 'categories', Category), created_at=d.get('created_at', None), created_by=d.get('created_by', None), created_by_id=d.get('created_by_id', None), exchange_ids=d.get('exchange_ids', None), git_repo=_from_dict(d, 'git_repo', RepoInfo), listing_type=_enum(d, 'listingType', ListingType), name=d.get('name', None), provider_id=d.get('provider_id', None), provider_region=_from_dict(d, 'provider_region', RegionInfo), published_at=d.get('published_at', None), published_by=d.get('published_by', None), setting=_from_dict(d, 'setting', ListingSetting), share=_from_dict(d, 'share', ShareInfo), status=_enum(d, 'status', ListingStatus), subtitle=d.get('subtitle', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), updated_by_id=d.get('updated_by_id', None))
+
+
 
 @dataclass
 class ListingTag:
     tag_name: Optional[ListingTagType] = None
     """Tag name (enum)"""
-
+
     tag_values: Optional[List[str]] = None
     """String representation of the tag value. Values should be string literals (no complex types)"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.tag_name is not None:
-            body["tag_name"] = self.tag_name.value
-        if self.tag_values:
-            body["tag_values"] = [v for v in self.tag_values]
+        if self.tag_name is not None: body['tag_name'] = self.tag_name.value
+        if self.tag_values: body['tag_values'] = [v for v in self.tag_values]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ListingTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.tag_name is not None:
-            body["tag_name"] = self.tag_name
-        if self.tag_values:
-            body["tag_values"] = self.tag_values
+        if self.tag_name is not None: body['tag_name'] = self.tag_name
+        if self.tag_values: body['tag_values'] = self.tag_values
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListingTag:
         """Deserializes the ListingTag from a dictionary."""
-        return cls(tag_name=_enum(d, "tag_name", ListingTagType), tag_values=d.get("tag_values", None))
+        return cls(tag_name=_enum(d, 'tag_name', ListingTagType), tag_values=d.get('tag_values', None))
+
-class ListingTagType(Enum):
-
-    LISTING_TAG_TYPE_LANGUAGE = "LISTING_TAG_TYPE_LANGUAGE"
-    LISTING_TAG_TYPE_TASK = "LISTING_TAG_TYPE_TASK"
+class ListingTagType(Enum):
+
+
+    LISTING_TAG_TYPE_LANGUAGE = 'LISTING_TAG_TYPE_LANGUAGE'
+    LISTING_TAG_TYPE_TASK = 'LISTING_TAG_TYPE_TASK'
 
 class ListingType(Enum):
-
-    PERSONALIZED = "PERSONALIZED"
-    STANDARD = "STANDARD"
-
+
+
+    PERSONALIZED = 'PERSONALIZED'
+    STANDARD = 'STANDARD'
 
 class MarketplaceFileType(Enum):
-
-    APP = "APP"
-    EMBEDDED_NOTEBOOK = "EMBEDDED_NOTEBOOK"
-    PROVIDER_ICON = "PROVIDER_ICON"
-
+
+
+    APP = 'APP'
+    EMBEDDED_NOTEBOOK = 'EMBEDDED_NOTEBOOK'
+    PROVIDER_ICON = 'PROVIDER_ICON'
 
 @dataclass
 class PersonalizationRequest:
     consumer_region: RegionInfo
-
+
     comment: Optional[str] = None
-
+
     contact_info: Optional[ContactInfo] = None
     """contact info for the consumer requesting data or performing a listing installation"""
-
+
     created_at: Optional[int] = None
-
+
     id: Optional[str] = None
-
+
     intended_use: Optional[str] = None
-
+
     is_from_lighthouse: Optional[bool] = None
-
+
     listing_id: Optional[str] = None
-
+
     listing_name: Optional[str] = None
-
+
     metastore_id: Optional[str] = None
-
+
     provider_id: Optional[str] = None
-
+
     recipient_type: Optional[DeltaSharingRecipientType] = None
-
+
     share: Optional[ShareInfo] = None
-
+
     status: Optional[PersonalizationRequestStatus] = None
-
+
     status_message: Optional[str] = None
-
+
     updated_at: Optional[int] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the PersonalizationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.consumer_region:
-            body["consumer_region"] = self.consumer_region.as_dict()
-        if self.contact_info:
-            body["contact_info"] = self.contact_info.as_dict()
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.id is not None:
-            body["id"] = self.id
-        if self.intended_use is not None:
-            body["intended_use"] = self.intended_use
-        if self.is_from_lighthouse is not None:
-            body["is_from_lighthouse"] = self.is_from_lighthouse
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
-        if self.listing_name is not None:
-            body["listing_name"] = self.listing_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.provider_id is not None:
-            body["provider_id"] = self.provider_id
-        if self.recipient_type is not None:
-            body["recipient_type"] = self.recipient_type.value
-        if self.share:
-            body["share"] = self.share.as_dict()
-        if self.status is not None:
-            body["status"] = self.status.value
-        if self.status_message is not None:
-            body["status_message"] = self.status_message
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
+        if self.comment is not None: body['comment'] = self.comment
+        if self.consumer_region: body['consumer_region'] = self.consumer_region.as_dict()
+        if self.contact_info: body['contact_info'] = self.contact_info.as_dict()
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.intended_use is not None: body['intended_use'] = self.intended_use
+        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
+        if self.share: body['share'] = self.share.as_dict()
+        if self.status is not None: body['status'] = self.status.value
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
        return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the PersonalizationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.consumer_region:
-            body["consumer_region"] = self.consumer_region
-        if self.contact_info:
-            body["contact_info"] = self.contact_info
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.id is not None:
-            body["id"] = self.id
-        if self.intended_use is not None:
-            body["intended_use"] = self.intended_use
-        if self.is_from_lighthouse is not None:
-            body["is_from_lighthouse"] = self.is_from_lighthouse
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
-        if self.listing_name is not None:
-            body["listing_name"] = self.listing_name
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.provider_id is not None:
-            body["provider_id"] = self.provider_id
-        if self.recipient_type is not None:
-            body["recipient_type"] = self.recipient_type
-        if self.share:
-            body["share"] = self.share
-        if self.status is not None:
-            body["status"] = self.status
-        if self.status_message is not None:
-            body["status_message"] = self.status_message
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
+        if self.comment is not None: body['comment'] = self.comment
+        if self.consumer_region: body['consumer_region'] = self.consumer_region
+        if self.contact_info: body['contact_info'] = self.contact_info
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.intended_use is not None: body['intended_use'] = self.intended_use
+        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PersonalizationRequest:
         """Deserializes the PersonalizationRequest from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            consumer_region=_from_dict(d, "consumer_region", RegionInfo),
-            contact_info=_from_dict(d, "contact_info", ContactInfo),
-            created_at=d.get("created_at", None),
-            id=d.get("id", None),
-            intended_use=d.get("intended_use", None),
-            is_from_lighthouse=d.get("is_from_lighthouse", None),
-            listing_id=d.get("listing_id", None),
-            listing_name=d.get("listing_name", None),
-            metastore_id=d.get("metastore_id", None),
-            provider_id=d.get("provider_id", None),
-            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
-            share=_from_dict(d, "share", ShareInfo),
-            status=_enum(d, "status", PersonalizationRequestStatus),
-            status_message=d.get("status_message", None),
-            updated_at=d.get("updated_at", None),
-        )
-
+        return cls(comment=d.get('comment', None), consumer_region=_from_dict(d, 'consumer_region', RegionInfo), contact_info=_from_dict(d, 'contact_info', ContactInfo), created_at=d.get('created_at', None), id=d.get('id', None), intended_use=d.get('intended_use', None), is_from_lighthouse=d.get('is_from_lighthouse', None), listing_id=d.get('listing_id', None), listing_name=d.get('listing_name', None), metastore_id=d.get('metastore_id', None), provider_id=d.get('provider_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), share=_from_dict(d, 'share', ShareInfo), status=_enum(d, 'status', PersonalizationRequestStatus), status_message=d.get('status_message', None), updated_at=d.get('updated_at', None))
+
-class PersonalizationRequestStatus(Enum):
-    DENIED = "DENIED"
-    FULFILLED = "FULFILLED"
-    NEW = "NEW"
-    REQUEST_PENDING = "REQUEST_PENDING"
+class PersonalizationRequestStatus(Enum):
+
+
+    DENIED = 'DENIED'
+    FULFILLED = 'FULFILLED'
+    NEW = 'NEW'
+    REQUEST_PENDING = 'REQUEST_PENDING'
 
 @dataclass
 class ProviderAnalyticsDashboard:
     id: str
-
+
     def as_dict(self) -> dict:
         """Serializes the ProviderAnalyticsDashboard into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
+        if self.id is not None: body['id'] = self.id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderAnalyticsDashboard into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
+        if self.id is not None: body['id'] = self.id
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ProviderAnalyticsDashboard:
         """Deserializes the ProviderAnalyticsDashboard from a dictionary."""
-        return cls(id=d.get("id", None))
+        return cls(id=d.get('id', None))
+
+
 
 @dataclass
 class ProviderInfo:
     name: str
-
+
     business_contact_email: str
-
+
     term_of_service_link: str
-
+
     privacy_policy_link: str
-
+
     company_website_link: Optional[str] = None
-
+
dark_mode_icon_file_id: Optional[str] = None - + dark_mode_icon_file_path: Optional[str] = None - + description: Optional[str] = None - + icon_file_id: Optional[str] = None - + icon_file_path: Optional[str] = None - + id: Optional[str] = None - + is_featured: Optional[bool] = None """is_featured is accessible by consumers only""" - + published_by: Optional[str] = None """published_by is only applicable to data aggregators (e.g. Crux)""" - + support_contact_email: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.business_contact_email is not None: - body["business_contact_email"] = self.business_contact_email - if self.company_website_link is not None: - body["company_website_link"] = self.company_website_link - if self.dark_mode_icon_file_id is not None: - body["dark_mode_icon_file_id"] = self.dark_mode_icon_file_id - if self.dark_mode_icon_file_path is not None: - body["dark_mode_icon_file_path"] = self.dark_mode_icon_file_path - if self.description is not None: - body["description"] = self.description - if self.icon_file_id is not None: - body["icon_file_id"] = self.icon_file_id - if self.icon_file_path is not None: - body["icon_file_path"] = self.icon_file_path - if self.id is not None: - body["id"] = self.id - if self.is_featured is not None: - body["is_featured"] = self.is_featured - if self.name is not None: - body["name"] = self.name - if self.privacy_policy_link is not None: - body["privacy_policy_link"] = self.privacy_policy_link - if self.published_by is not None: - body["published_by"] = self.published_by - if self.support_contact_email is not None: - body["support_contact_email"] = self.support_contact_email - if self.term_of_service_link is not None: - body["term_of_service_link"] = self.term_of_service_link + if self.business_contact_email is not None: body['business_contact_email'] = self.business_contact_email + if self.company_website_link is not None: body['company_website_link'] = self.company_website_link + if self.dark_mode_icon_file_id is not None: body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id + if self.dark_mode_icon_file_path is not None: body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path + if self.description is not None: body['description'] = self.description + if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id + if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path + if self.id is not None: body['id'] = self.id + if self.is_featured is not None: body['is_featured'] = self.is_featured + if self.name is not None: body['name'] = self.name + if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link + if self.published_by is not None: body['published_by'] = self.published_by + if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email + if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link return body def as_shallow_dict(self) -> dict: """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.business_contact_email is not None: - body["business_contact_email"] = self.business_contact_email - if self.company_website_link is not None: - body["company_website_link"] = self.company_website_link - if self.dark_mode_icon_file_id is not None: - body["dark_mode_icon_file_id"] = self.dark_mode_icon_file_id - if 
self.dark_mode_icon_file_path is not None: - body["dark_mode_icon_file_path"] = self.dark_mode_icon_file_path - if self.description is not None: - body["description"] = self.description - if self.icon_file_id is not None: - body["icon_file_id"] = self.icon_file_id - if self.icon_file_path is not None: - body["icon_file_path"] = self.icon_file_path - if self.id is not None: - body["id"] = self.id - if self.is_featured is not None: - body["is_featured"] = self.is_featured - if self.name is not None: - body["name"] = self.name - if self.privacy_policy_link is not None: - body["privacy_policy_link"] = self.privacy_policy_link - if self.published_by is not None: - body["published_by"] = self.published_by - if self.support_contact_email is not None: - body["support_contact_email"] = self.support_contact_email - if self.term_of_service_link is not None: - body["term_of_service_link"] = self.term_of_service_link + if self.business_contact_email is not None: body['business_contact_email'] = self.business_contact_email + if self.company_website_link is not None: body['company_website_link'] = self.company_website_link + if self.dark_mode_icon_file_id is not None: body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id + if self.dark_mode_icon_file_path is not None: body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path + if self.description is not None: body['description'] = self.description + if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id + if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path + if self.id is not None: body['id'] = self.id + if self.is_featured is not None: body['is_featured'] = self.is_featured + if self.name is not None: body['name'] = self.name + if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link + if self.published_by is not None: body['published_by'] = self.published_by + if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email + if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ProviderInfo: """Deserializes the ProviderInfo from a dictionary.""" - return cls( - business_contact_email=d.get("business_contact_email", None), - company_website_link=d.get("company_website_link", None), - dark_mode_icon_file_id=d.get("dark_mode_icon_file_id", None), - dark_mode_icon_file_path=d.get("dark_mode_icon_file_path", None), - description=d.get("description", None), - icon_file_id=d.get("icon_file_id", None), - icon_file_path=d.get("icon_file_path", None), - id=d.get("id", None), - is_featured=d.get("is_featured", None), - name=d.get("name", None), - privacy_policy_link=d.get("privacy_policy_link", None), - published_by=d.get("published_by", None), - support_contact_email=d.get("support_contact_email", None), - term_of_service_link=d.get("term_of_service_link", None), - ) + return cls(business_contact_email=d.get('business_contact_email', None), company_website_link=d.get('company_website_link', None), dark_mode_icon_file_id=d.get('dark_mode_icon_file_id', None), dark_mode_icon_file_path=d.get('dark_mode_icon_file_path', None), description=d.get('description', None), icon_file_id=d.get('icon_file_id', None), icon_file_path=d.get('icon_file_path', None), id=d.get('id', None), is_featured=d.get('is_featured', None), name=d.get('name', None), privacy_policy_link=d.get('privacy_policy_link', None), 
published_by=d.get('published_by', None), support_contact_email=d.get('support_contact_email', None), term_of_service_link=d.get('term_of_service_link', None)) + + @dataclass class RegionInfo: cloud: Optional[str] = None - + region: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the RegionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.region is not None: - body["region"] = self.region + if self.cloud is not None: body['cloud'] = self.cloud + if self.region is not None: body['region'] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the RegionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cloud is not None: - body["cloud"] = self.cloud - if self.region is not None: - body["region"] = self.region + if self.cloud is not None: body['cloud'] = self.cloud + if self.region is not None: body['region'] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegionInfo: """Deserializes the RegionInfo from a dictionary.""" - return cls(cloud=d.get("cloud", None), region=d.get("region", None)) + return cls(cloud=d.get('cloud', None), region=d.get('region', None)) + + + + + @dataclass @@ -2784,205 +2391,191 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RemoveExchangeForListingResponse: """Deserializes the RemoveExchangeForListingResponse from a dictionary.""" return cls() + + @dataclass class RepoInfo: git_repo_url: str """the git repo url e.g. https://github.com/databrickslabs/dolly.git""" - + def as_dict(self) -> dict: """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.git_repo_url is not None: - body["git_repo_url"] = self.git_repo_url + if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url return body def as_shallow_dict(self) -> dict: """Serializes the RepoInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.git_repo_url is not None: - body["git_repo_url"] = self.git_repo_url + if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoInfo: """Deserializes the RepoInfo from a dictionary.""" - return cls(git_repo_url=d.get("git_repo_url", None)) + return cls(git_repo_url=d.get('git_repo_url', None)) + + @dataclass class RepoInstallation: repo_name: str """the user-specified repo name for their installed git repo listing""" - + repo_path: str """refers to the full url file path that navigates the user to the repo's entrypoint (e.g. 
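As a quick illustration of the serialization contract these generated dataclasses share (a minimal sketch using only the RegionInfo class shown above; nothing outside this file is assumed):

    from databricks.sdk.service.marketplace import RegionInfo

    region = RegionInfo(cloud="aws", region="us-west-2")
    # as_dict() builds the JSON request body, skipping unset fields.
    assert region.as_dict() == {"cloud": "aws", "region": "us-west-2"}
    # as_shallow_dict() is identical here because RegionInfo has no nested
    # dataclass fields; for nested types it keeps the child objects instead
    # of recursing through their as_dict().
    restored = RegionInfo.from_dict(region.as_dict())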
 @dataclass
@@ -2784,205 +2391,191 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> RemoveExchangeForListingResponse:
         """Deserializes the RemoveExchangeForListingResponse from a dictionary."""
         return cls()
+
+

 @dataclass
 class RepoInfo:
     git_repo_url: str
     """the git repo url e.g. https://github.com/databrickslabs/dolly.git"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.git_repo_url is not None:
-            body["git_repo_url"] = self.git_repo_url
+        if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.git_repo_url is not None:
-            body["git_repo_url"] = self.git_repo_url
+        if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
-        return cls(git_repo_url=d.get("git_repo_url", None))
+        return cls(git_repo_url=d.get('git_repo_url', None))
+
+

 @dataclass
 class RepoInstallation:
     repo_name: str
     """the user-specified repo name for their installed git repo listing"""
-
+    
     repo_path: str
     """refers to the full url file path that navigates the user to the repo's entrypoint (e.g. a README.md
     file, or the repo file view in the unified UI) should just be a relative path"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the RepoInstallation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.repo_name is not None:
-            body["repo_name"] = self.repo_name
-        if self.repo_path is not None:
-            body["repo_path"] = self.repo_path
+        if self.repo_name is not None: body['repo_name'] = self.repo_name
+        if self.repo_path is not None: body['repo_path'] = self.repo_path
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoInstallation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.repo_name is not None:
-            body["repo_name"] = self.repo_name
-        if self.repo_path is not None:
-            body["repo_path"] = self.repo_path
+        if self.repo_name is not None: body['repo_name'] = self.repo_name
+        if self.repo_path is not None: body['repo_path'] = self.repo_path
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoInstallation:
         """Deserializes the RepoInstallation from a dictionary."""
-        return cls(repo_name=d.get("repo_name", None), repo_path=d.get("repo_path", None))
+        return cls(repo_name=d.get('repo_name', None), repo_path=d.get('repo_path', None))
+
+
+
+
+

 @dataclass
 class SearchListingsResponse:
     listings: Optional[List[Listing]] = None
-
+    
     next_page_token: Optional[str] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the SearchListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings:
-            body["listings"] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SearchListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings:
-            body["listings"] = self.listings
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SearchListingsResponse:
         """Deserializes the SearchListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, "listings", Listing), next_page_token=d.get("next_page_token", None))
+        return cls(listings=_repeated_dict(d, 'listings', Listing), next_page_token=d.get('next_page_token', None))
+
+

 @dataclass
 class ShareInfo:
     name: str
-
+    
     type: ListingShareType
-
+    
     def as_dict(self) -> dict:
         """Serializes the ShareInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.type is not None:
-            body["type"] = self.type.value
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.type is not None:
-            body["type"] = self.type
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
-        return cls(name=d.get("name", None), type=_enum(d, "type", ListingShareType))
+        return cls(name=d.get('name', None), type=_enum(d, 'type', ListingShareType))
+
+

 @dataclass
 class SharedDataObject:
     data_object_type: Optional[str] = None
     """The type of the data object. Could be one of: TABLE, SCHEMA, NOTEBOOK_FILE, MODEL, VOLUME"""
-
+    
     name: Optional[str] = None
     """Name of the shared object"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the SharedDataObject into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_object_type is not None:
-            body["data_object_type"] = self.data_object_type
-        if self.name is not None:
-            body["name"] = self.name
+        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
+        if self.name is not None: body['name'] = self.name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_object_type is not None:
-            body["data_object_type"] = self.data_object_type
-        if self.name is not None:
-            body["name"] = self.name
+        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
+        if self.name is not None: body['name'] = self.name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
-        return cls(data_object_type=d.get("data_object_type", None), name=d.get("name", None))
+        return cls(data_object_type=d.get('data_object_type', None), name=d.get('name', None))
+
+

 @dataclass
 class TokenDetail:
     bearer_token: Optional[str] = None
-
+    
     endpoint: Optional[str] = None
-
+    
     expiration_time: Optional[str] = None
-
+    
     share_credentials_version: Optional[int] = None
     """These field names must follow the delta sharing protocol. Original message:
     RetrieveToken.Response in managed-catalog/api/messages/recipient.proto"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the TokenDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bearer_token is not None:
-            body["bearerToken"] = self.bearer_token
-        if self.endpoint is not None:
-            body["endpoint"] = self.endpoint
-        if self.expiration_time is not None:
-            body["expirationTime"] = self.expiration_time
-        if self.share_credentials_version is not None:
-            body["shareCredentialsVersion"] = self.share_credentials_version
+        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bearer_token is not None:
-            body["bearerToken"] = self.bearer_token
-        if self.endpoint is not None:
-            body["endpoint"] = self.endpoint
-        if self.expiration_time is not None:
-            body["expirationTime"] = self.expiration_time
-        if self.share_credentials_version is not None:
-            body["shareCredentialsVersion"] = self.share_credentials_version
+        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenDetail:
         """Deserializes the TokenDetail from a dictionary."""
-        return cls(
-            bearer_token=d.get("bearerToken", None),
-            endpoint=d.get("endpoint", None),
-            expiration_time=d.get("expirationTime", None),
-            share_credentials_version=d.get("shareCredentialsVersion", None),
-        )
+        return cls(bearer_token=d.get('bearerToken', None), endpoint=d.get('endpoint', None), expiration_time=d.get('expirationTime', None), share_credentials_version=d.get('shareCredentialsVersion', None))
+
+
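Worth noting: TokenDetail is the one type in this span whose wire keys are camelCase (mandated by the Delta Sharing protocol, per the docstring above), while its constructor arguments stay snake_case. A minimal sketch using only this file's TokenDetail:

    from databricks.sdk.service.marketplace import TokenDetail

    token = TokenDetail(bearer_token="abc", share_credentials_version=1)
    # The wire format uses protocol-mandated camelCase keys...
    assert token.as_dict() == {"bearerToken": "abc", "shareCredentialsVersion": 1}
    # ...and from_dict() maps them back to snake_case attributes.
    assert TokenDetail.from_dict(token.as_dict()).bearer_token == "abc"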
 @dataclass
@@ -2990,617 +2583,577 @@ class TokenInfo:
     activation_url: Optional[str] = None
     """Full activation url to retrieve the access token. It will be empty if the token is already
     retrieved."""
-
+    
     created_at: Optional[int] = None
     """Time at which this Recipient Token was created, in epoch milliseconds."""
-
+    
     created_by: Optional[str] = None
     """Username of Recipient Token creator."""
-
+    
     expiration_time: Optional[int] = None
     """Expiration timestamp of the token in epoch milliseconds."""
-
+    
     id: Optional[str] = None
     """Unique id of the Recipient Token."""
-
+    
     updated_at: Optional[int] = None
     """Time at which this Recipient Token was updated, in epoch milliseconds."""
-
+    
     updated_by: Optional[str] = None
     """Username of Recipient Token updater."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activation_url is not None:
-            body["activation_url"] = self.activation_url
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.id is not None:
-            body["id"] = self.id
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.id is not None: body['id'] = self.id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activation_url is not None:
-            body["activation_url"] = self.activation_url
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.id is not None:
-            body["id"] = self.id
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.id is not None: body['id'] = self.id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
-        return cls(
-            activation_url=d.get("activation_url", None),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            expiration_time=d.get("expiration_time", None),
-            id=d.get("id", None),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-        )
+        return cls(activation_url=d.get('activation_url', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), expiration_time=d.get('expiration_time', None), id=d.get('id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
+
+

 @dataclass
 class UpdateExchangeFilterRequest:
     filter: ExchangeFilter
-
+    
     id: Optional[str] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filter:
-            body["filter"] = self.filter.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
+        if self.filter: body['filter'] = self.filter.as_dict()
+        if self.id is not None: body['id'] = self.id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filter:
-            body["filter"] = self.filter
-        if self.id is not None:
-            body["id"] = self.id
+        if self.filter: body['filter'] = self.filter
+        if self.id is not None: body['id'] = self.id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterRequest:
         """Deserializes the UpdateExchangeFilterRequest from a dictionary."""
-        return cls(filter=_from_dict(d, "filter", ExchangeFilter), id=d.get("id", None))
+        return cls(filter=_from_dict(d, 'filter', ExchangeFilter), id=d.get('id', None))
+
+

 @dataclass
 class UpdateExchangeFilterResponse:
     filter: Optional[ExchangeFilter] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filter:
-            body["filter"] = self.filter.as_dict()
+        if self.filter: body['filter'] = self.filter.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filter:
-            body["filter"] = self.filter
+        if self.filter: body['filter'] = self.filter
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterResponse:
         """Deserializes the UpdateExchangeFilterResponse from a dictionary."""
-        return cls(filter=_from_dict(d, "filter", ExchangeFilter))
+        return cls(filter=_from_dict(d, 'filter', ExchangeFilter))
+
+

 @dataclass
 class UpdateExchangeRequest:
     exchange: Exchange
-
+    
     id: Optional[str] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange:
-            body["exchange"] = self.exchange.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
+        if self.exchange: body['exchange'] = self.exchange.as_dict()
+        if self.id is not None: body['id'] = self.id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange:
-            body["exchange"] = self.exchange
-        if self.id is not None:
-            body["id"] = self.id
+        if self.exchange: body['exchange'] = self.exchange
+        if self.id is not None: body['id'] = self.id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeRequest:
         """Deserializes the UpdateExchangeRequest from a dictionary."""
-        return cls(exchange=_from_dict(d, "exchange", Exchange), id=d.get("id", None))
+        return cls(exchange=_from_dict(d, 'exchange', Exchange), id=d.get('id', None))
+
+

 @dataclass
 class UpdateExchangeResponse:
     exchange: Optional[Exchange] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange:
-            body["exchange"] = self.exchange.as_dict()
+        if self.exchange: body['exchange'] = self.exchange.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange:
-            body["exchange"] = self.exchange
+        if self.exchange: body['exchange'] = self.exchange
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeResponse:
         """Deserializes the UpdateExchangeResponse from a dictionary."""
-        return cls(exchange=_from_dict(d, "exchange", Exchange))
+        return cls(exchange=_from_dict(d, 'exchange', Exchange))
+
+

 @dataclass
 class UpdateInstallationRequest:
     installation: InstallationDetail
-
+    
     installation_id: Optional[str] = None
-
+    
     listing_id: Optional[str] = None
-
+    
     rotate_token: Optional[bool] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateInstallationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installation:
-            body["installation"] = self.installation.as_dict()
-        if self.installation_id is not None:
-            body["installation_id"] = self.installation_id
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
-        if self.rotate_token is not None:
-            body["rotate_token"] = self.rotate_token
+        if self.installation: body['installation'] = self.installation.as_dict()
+        if self.installation_id is not None: body['installation_id'] = self.installation_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installation:
-            body["installation"] = self.installation
-        if self.installation_id is not None:
-            body["installation_id"] = self.installation_id
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
-        if self.rotate_token is not None:
-            body["rotate_token"] = self.rotate_token
+        if self.installation: body['installation'] = self.installation
+        if self.installation_id is not None: body['installation_id'] = self.installation_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateInstallationRequest:
         """Deserializes the UpdateInstallationRequest from a dictionary."""
-        return cls(
-            installation=_from_dict(d, "installation", InstallationDetail),
-            installation_id=d.get("installation_id", None),
-            listing_id=d.get("listing_id", None),
-            rotate_token=d.get("rotate_token", None),
-        )
+        return cls(installation=_from_dict(d, 'installation', InstallationDetail), installation_id=d.get('installation_id', None), listing_id=d.get('listing_id', None), rotate_token=d.get('rotate_token', None))
+
+

 @dataclass
 class UpdateInstallationResponse:
     installation: Optional[InstallationDetail] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateInstallationResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installation:
-            body["installation"] = self.installation.as_dict()
+        if self.installation: body['installation'] = self.installation.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateInstallationResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installation:
-            body["installation"] = self.installation
+        if self.installation: body['installation'] = self.installation
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateInstallationResponse:
         """Deserializes the UpdateInstallationResponse from a dictionary."""
-        return cls(installation=_from_dict(d, "installation", InstallationDetail))
+        return cls(installation=_from_dict(d, 'installation', InstallationDetail))
+
+

 @dataclass
 class UpdateListingRequest:
     listing: Listing
-
+    
     id: Optional[str] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateListingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
-        if self.listing:
-            body["listing"] = self.listing.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.listing: body['listing'] = self.listing.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
-        if self.listing:
-            body["listing"] = self.listing
+        if self.id is not None: body['id'] = self.id
+        if self.listing: body['listing'] = self.listing
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateListingRequest:
         """Deserializes the UpdateListingRequest from a dictionary."""
-        return cls(id=d.get("id", None), listing=_from_dict(d, "listing", Listing))
+        return cls(id=d.get('id', None), listing=_from_dict(d, 'listing', Listing))
+
+

 @dataclass
 class UpdateListingResponse:
     listing: Optional[Listing] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdateListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing:
-            body["listing"] = self.listing.as_dict()
+        if self.listing: body['listing'] = self.listing.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listing:
-            body["listing"] = self.listing
+        if self.listing: body['listing'] = self.listing
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateListingResponse:
         """Deserializes the UpdateListingResponse from a dictionary."""
-        return cls(listing=_from_dict(d, "listing", Listing))
+        return cls(listing=_from_dict(d, 'listing', Listing))
+
+

 @dataclass
 class UpdatePersonalizationRequestRequest:
     status: PersonalizationRequestStatus
-
+    
     listing_id: Optional[str] = None
-
+    
     reason: Optional[str] = None
-
+    
     request_id: Optional[str] = None
-
+    
     share: Optional[ShareInfo] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the UpdatePersonalizationRequestRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing_id is not None:
-            body["listing_id"] = self.listing_id
-        if self.reason is not None:
-            body["reason"] = self.reason
-        if self.request_id is not None:
-            body["request_id"] = self.request_id
-        if self.share:
-            body["share"] = self.share.as_dict()
-        if self.status is not None:
-            body["status"] = self.status.value
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.reason is not None: body['reason'] = self.reason
+        if self.request_id is not None: body['request_id'] = self.request_id
+        if self.share: body['share'] = self.share.as_dict()
+        if self.status is not None: body['status'] = self.status.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate
attributes.""" body = {} - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.reason is not None: - body["reason"] = self.reason - if self.request_id is not None: - body["request_id"] = self.request_id - if self.share: - body["share"] = self.share - if self.status is not None: - body["status"] = self.status + if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.reason is not None: body['reason'] = self.reason + if self.request_id is not None: body['request_id'] = self.request_id + if self.share: body['share'] = self.share + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestRequest: """Deserializes the UpdatePersonalizationRequestRequest from a dictionary.""" - return cls( - listing_id=d.get("listing_id", None), - reason=d.get("reason", None), - request_id=d.get("request_id", None), - share=_from_dict(d, "share", ShareInfo), - status=_enum(d, "status", PersonalizationRequestStatus), - ) + return cls(listing_id=d.get('listing_id', None), reason=d.get('reason', None), request_id=d.get('request_id', None), share=_from_dict(d, 'share', ShareInfo), status=_enum(d, 'status', PersonalizationRequestStatus)) + + @dataclass class UpdatePersonalizationRequestResponse: request: Optional[PersonalizationRequest] = None - + def as_dict(self) -> dict: """Serializes the UpdatePersonalizationRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.request: - body["request"] = self.request.as_dict() + if self.request: body['request'] = self.request.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.request: - body["request"] = self.request + if self.request: body['request'] = self.request return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestResponse: """Deserializes the UpdatePersonalizationRequestResponse from a dictionary.""" - return cls(request=_from_dict(d, "request", PersonalizationRequest)) + return cls(request=_from_dict(d, 'request', PersonalizationRequest)) + + @dataclass class UpdateProviderAnalyticsDashboardRequest: id: Optional[str] = None """id is immutable property and can't be updated.""" - + version: Optional[int] = None """this is the version of the dashboard template we want to update our user to current expectation is that it should be equal to latest version of the dashboard template""" - + def as_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.version is not None: - body["version"] = self.version + if self.id is not None: body['id'] = self.id + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.version is not None: - body["version"] = self.version + if self.id is not None: body['id'] = self.id + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardRequest: """Deserializes the UpdateProviderAnalyticsDashboardRequest from 
a dictionary.""" - return cls(id=d.get("id", None), version=d.get("version", None)) + return cls(id=d.get('id', None), version=d.get('version', None)) + + @dataclass class UpdateProviderAnalyticsDashboardResponse: id: str """id & version should be the same as the request""" - + dashboard_id: str """this is newly created Lakeview dashboard for the user""" - + version: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.version is not None: - body["version"] = self.version + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.id is not None: body['id'] = self.id + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.version is not None: - body["version"] = self.version + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.id is not None: body['id'] = self.id + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardResponse: """Deserializes the UpdateProviderAnalyticsDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get("dashboard_id", None), id=d.get("id", None), version=d.get("version", None)) + return cls(dashboard_id=d.get('dashboard_id', None), id=d.get('id', None), version=d.get('version', None)) + + @dataclass class UpdateProviderRequest: provider: ProviderInfo - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateProviderRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.provider: - body["provider"] = self.provider.as_dict() + if self.id is not None: body['id'] = self.id + if self.provider: body['provider'] = self.provider.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.provider: - body["provider"] = self.provider + if self.id is not None: body['id'] = self.id + if self.provider: body['provider'] = self.provider return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderRequest: """Deserializes the UpdateProviderRequest from a dictionary.""" - return cls(id=d.get("id", None), provider=_from_dict(d, "provider", ProviderInfo)) + return cls(id=d.get('id', None), provider=_from_dict(d, 'provider', ProviderInfo)) + + @dataclass class UpdateProviderResponse: provider: Optional[ProviderInfo] = None - + def as_dict(self) -> dict: """Serializes the UpdateProviderResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.provider: - body["provider"] = self.provider.as_dict() + if self.provider: body['provider'] = self.provider.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderResponse into a shallow dictionary of its immediate attributes.""" body = {} - if 
-        if self.provider:
-            body["provider"] = self.provider
+        if self.provider: body['provider'] = self.provider
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderResponse:
         """Deserializes the UpdateProviderResponse from a dictionary."""
-        return cls(provider=_from_dict(d, "provider", ProviderInfo))
+        return cls(provider=_from_dict(d, 'provider', ProviderInfo))
+
+

 class Visibility(Enum):
+
+
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
-    PRIVATE = "PRIVATE"
-    PUBLIC = "PUBLIC"

 class ConsumerFulfillmentsAPI:
     """Fulfillments are entities that allow consumers to preview installations."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def get(
-        self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[SharedDataObject]:
-        """Get listing content metadata.
+
-        Get a high level preview of the metadata of listing installable content.
+
+
+
+
+    def get(self
+            , listing_id: str
+            , *
+            , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[SharedDataObject]:
+        """Get listing content metadata.
+        
+        Get a high level preview of the metadata of listing installable content.
+        
         :param listing_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`SharedDataObject`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+        
+        
         while True:
-            json = self._api.do(
-                "GET", f"/api/2.1/marketplace-consumer/listings/{listing_id}/content", query=query, headers=headers
-            )
-            if "shared_data_objects" in json:
-                for v in json["shared_data_objects"]:
-                    yield SharedDataObject.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def list(
-        self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[ListingFulfillment]:
+            json = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/content', query=query
+                
+                , headers=headers
+                )
+            if 'shared_data_objects' in json:
+                for v in json['shared_data_objects']:
+                    yield SharedDataObject.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+
+
+
+
+    def list(self
+             , listing_id: str
+             , *
+             , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListingFulfillment]:
         """List all listing fulfillments.
-
+        
         Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential
         installation. Standard installations contain metadata about the attached share or git repo. Only
         one of these fields will be present. Personalized installations contain metadata about the
         attached share or git repo, as well as the Delta Sharing recipient type.
-
+        
         :param listing_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`ListingFulfillment`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+        
+        
         while True:
-            json = self._api.do(
-                "GET", f"/api/2.1/marketplace-consumer/listings/{listing_id}/fulfillments", query=query, headers=headers
-            )
-            if "fulfillments" in json:
-                for v in json["fulfillments"]:
-                    yield ListingFulfillment.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-
+            json = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/fulfillments', query=query
+                
+                , headers=headers
+                )
+            if 'fulfillments' in json:
+                for v in json['fulfillments']:
+                    yield ListingFulfillment.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+
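Both generated methods above return iterators that follow next_page_token transparently, so callers never page by hand. A usage sketch, assuming the SDK's usual WorkspaceClient entry point exposes this service as w.consumer_fulfillments (the property name and the listing id below are illustrative, not taken from this patch):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # The generator issues one GET per page, lazily, until next_page_token runs out.
    for fulfillment in w.consumer_fulfillments.list("example-listing-id"):
        print(fulfillment.as_dict())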
 class ConsumerInstallationsAPI:
     """Installations are entities that allow consumers to interact with Databricks Marketplace listings."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def create(
-        self,
-        listing_id: str,
-        *,
-        accepted_consumer_terms: Optional[ConsumerTerms] = None,
-        catalog_name: Optional[str] = None,
-        recipient_type: Optional[DeltaSharingRecipientType] = None,
-        repo_detail: Optional[RepoInstallation] = None,
-        share_name: Optional[str] = None,
-    ) -> Installation:
-        """Install from a listing.
+
-        Install payload associated with a Databricks Marketplace listing.
+
+
+
+
+    def create(self
+               , listing_id: str
+               , *
+               , accepted_consumer_terms: Optional[ConsumerTerms] = None, catalog_name: Optional[str] = None, recipient_type: Optional[DeltaSharingRecipientType] = None, repo_detail: Optional[RepoInstallation] = None, share_name: Optional[str] = None) -> Installation:
+        """Install from a listing.
+        
+        Install payload associated with a Databricks Marketplace listing.
+        
         :param listing_id: str
         :param accepted_consumer_terms: :class:`ConsumerTerms` (optional)
         :param catalog_name: str (optional)
@@ -3608,221 +3161,234 @@ def create(
         :param repo_detail: :class:`RepoInstallation` (optional)
           for git repo installations
         :param share_name: str (optional)
-
+        
         :returns: :class:`Installation`
         """
         body = {}
-        if accepted_consumer_terms is not None:
-            body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict()
-        if catalog_name is not None:
-            body["catalog_name"] = catalog_name
-        if recipient_type is not None:
-            body["recipient_type"] = recipient_type.value
-        if repo_detail is not None:
-            body["repo_detail"] = repo_detail.as_dict()
-        if share_name is not None:
-            body["share_name"] = share_name
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "POST", f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations", body=body, headers=headers
-        )
+        if accepted_consumer_terms is not None: body['accepted_consumer_terms'] = accepted_consumer_terms.as_dict()
+        if catalog_name is not None: body['catalog_name'] = catalog_name
+        if recipient_type is not None: body['recipient_type'] = recipient_type.value
+        if repo_detail is not None: body['repo_detail'] = repo_detail.as_dict()
+        if share_name is not None: body['share_name'] = share_name
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('POST',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations', body=body
+            
+            , headers=headers
+            )
         return Installation.from_dict(res)

-    def delete(self, listing_id: str, installation_id: str):
-        """Uninstall from a listing.
+
+
+
+    def delete(self
+               , listing_id: str, installation_id: str
+               ):
+        """Uninstall from a listing.
+        
         Uninstall an installation associated with a Databricks Marketplace listing.
-
+        
         :param listing_id: str
         :param installation_id: str
-
-
+        
+        
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do(
-            "DELETE",
-            f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}",
-            headers=headers,
-        )
-
-    def list(
-        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[InstallationDetail]:
+        
+        headers = {'Accept': 'application/json',}
+        
+        self._api.do('DELETE',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}'
+            
+            , headers=headers
+            )
+
+
+
+
+
+
+    def list(self
+             
+             , *
+             , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[InstallationDetail]:
         """List all installations.
-
+        
         List all installations across all listings.
-
+        
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`InstallationDetail`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+        
+        
         while True:
-            json = self._api.do("GET", "/api/2.1/marketplace-consumer/installations", query=query, headers=headers)
-            if "installations" in json:
-                for v in json["installations"]:
-                    yield InstallationDetail.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def list_listing_installations(
-        self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[InstallationDetail]:
+            json = self._api.do('GET','/api/2.1/marketplace-consumer/installations', query=query
+                
+                , headers=headers
+                )
+            if 'installations' in json:
+                for v in json['installations']:
+                    yield InstallationDetail.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+
+
+
+
+    def list_listing_installations(self
+                                   , listing_id: str
+                                   , *
+                                   , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[InstallationDetail]:
         """List installations for a listing.
-
+        
         List all installations for a particular listing.
-
+        
         :param listing_id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`InstallationDetail`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+        
+        
         while True:
-            json = self._api.do(
-                "GET",
-                f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations",
-                query=query,
-                headers=headers,
-            )
-            if "installations" in json:
-                for v in json["installations"]:
-                    yield InstallationDetail.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def update(
-        self,
-        listing_id: str,
-        installation_id: str,
-        installation: InstallationDetail,
-        *,
-        rotate_token: Optional[bool] = None,
-    ) -> UpdateInstallationResponse:
+            json = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations', query=query
+                
+                , headers=headers
+                )
+            if 'installations' in json:
+                for v in json['installations']:
+                    yield InstallationDetail.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+
+
+
+
+    def update(self
+               , listing_id: str, installation_id: str, installation: InstallationDetail
+               , *
+               , rotate_token: Optional[bool] = None) -> UpdateInstallationResponse:
         """Update an installation.
-
+        
         This is a update API that will update the part of the fields defined in the installation table as
         well as interact with external services according to the fields not included in the installation
         table 1. the token will be rotate if the rotateToken flag is true 2.
        the token will be forcibly
         rotate if the rotateToken flag is true and the tokenInfo field is empty
-
+        
         :param listing_id: str
         :param installation_id: str
         :param installation: :class:`InstallationDetail`
         :param rotate_token: bool (optional)
-
+        
         :returns: :class:`UpdateInstallationResponse`
         """
         body = {}
-        if installation is not None:
-            body["installation"] = installation.as_dict()
-        if rotate_token is not None:
-            body["rotate_token"] = rotate_token
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "PUT",
-            f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}",
-            body=body,
-            headers=headers,
-        )
+        if installation is not None: body['installation'] = installation.as_dict()
+        if rotate_token is not None: body['rotate_token'] = rotate_token
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+        
+        res = self._api.do('PUT',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}', body=body
+            
+            , headers=headers
+            )
         return UpdateInstallationResponse.from_dict(res)
-
+
+
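A usage sketch for the installation lifecycle above, assuming the WorkspaceClient property for this service is w.consumer_installations (the property name, ids, and catalog name below are illustrative):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # create() installs the listing's payload; catalog_name targets share-backed installs.
    installation = w.consumer_installations.create(
        "example-listing-id", catalog_name="example_catalog")
    # delete(listing_id, installation_id) uninstalls; update(..., rotate_token=True)
    # forces token rotation, per the update() docstring above.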
 class ConsumerListingsAPI:
     """Listings are the core entities in the Marketplace. They represent the products that are available
     for consumption."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetListingsResponse:
-        """Get one batch of listings. One may specify up to 50 IDs per request.
+
-        Batch get a published listing in the Databricks Marketplace that the consumer has access to.
+
-        :param ids: List[str] (optional)
+
+
+    def batch_get(self
+                  
+                  , *
+                  , ids: Optional[List[str]] = None) -> BatchGetListingsResponse:
+        """Get one batch of listings. One may specify up to 50 IDs per request.
+        
+        Batch get a published listing in the Databricks Marketplace that the consumer has access to.
+        
+        :param ids: List[str] (optional)
+        
         :returns: :class:`BatchGetListingsResponse`
         """
-
+        
         query = {}
-        if ids is not None:
-            query["ids"] = [v for v in ids]
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", "/api/2.1/marketplace-consumer/listings:batchGet", query=query, headers=headers)
+        if ids is not None: query['ids'] = [v for v in ids]
+        headers = {'Accept': 'application/json',}
+        
+        res = self._api.do('GET','/api/2.1/marketplace-consumer/listings:batchGet', query=query
+            
+            , headers=headers
+            )
         return BatchGetListingsResponse.from_dict(res)

-    def get(self, id: str) -> GetListingResponse:
-        """Get listing.
+
+
+
+    def get(self
+            , id: str
+            ) -> GetListingResponse:
+        """Get listing.
+        
         Get a published listing in the Databricks Marketplace that the consumer has access to.
-
+        
         :param id: str
-
+        
         :returns: :class:`GetListingResponse`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.1/marketplace-consumer/listings/{id}", headers=headers)
+        
+        headers = {'Accept': 'application/json',}
+        
+        res = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{id}'
+            
+            , headers=headers
+            )
         return GetListingResponse.from_dict(res)

-    def list(
-        self,
-        *,
-        assets: Optional[List[AssetType]] = None,
-        categories: Optional[List[Category]] = None,
-        is_free: Optional[bool] = None,
-        is_private_exchange: Optional[bool] = None,
-        is_staff_pick: Optional[bool] = None,
-        page_size: Optional[int] = None,
-        page_token: Optional[str] = None,
-        provider_ids: Optional[List[str]] = None,
-        tags: Optional[List[ListingTag]] = None,
-    ) -> Iterator[Listing]:
+
+
+
+    def list(self
+             
+             , *
+             , assets: Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, is_staff_pick: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, provider_ids: Optional[List[str]] = None, tags: Optional[List[ListingTag]] = None) -> Iterator[Listing]:
         """List listings.
+        
         List all published listings in the Databricks Marketplace that the consumer has access to.
-
+        
         :param assets: List[:class:`AssetType`] (optional)
           Matches any of the following asset types
         :param categories: List[:class:`Category`] (optional)
@@ -3839,59 +3405,50 @@ def list(
           Matches any of the following provider ids
         :param tags: List[:class:`ListingTag`] (optional)
           Matches any of the following tags
-
+        
         :returns: Iterator over :class:`Listing`
         """
-
+        
         query = {}
-        if assets is not None:
-            query["assets"] = [v.value for v in assets]
-        if categories is not None:
-            query["categories"] = [v.value for v in categories]
-        if is_free is not None:
-            query["is_free"] = is_free
-        if is_private_exchange is not None:
-            query["is_private_exchange"] = is_private_exchange
-        if is_staff_pick is not None:
-            query["is_staff_pick"] = is_staff_pick
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        if provider_ids is not None:
-            query["provider_ids"] = [v for v in provider_ids]
-        if tags is not None:
-            query["tags"] = [v.as_dict() for v in tags]
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if assets is not None: query['assets'] = [v.value for v in assets]
+        if categories is not None: query['categories'] = [v.value for v in categories]
+        if is_free is not None: query['is_free'] = is_free
+        if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
+        if is_staff_pick is not None: query['is_staff_pick'] = is_staff_pick
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids]
+        if tags is not None: query['tags'] = [v.as_dict() for v in tags]
+        headers = {'Accept': 'application/json',}
+        
+        
         while True:
-            json = self._api.do("GET", "/api/2.1/marketplace-consumer/listings", query=query, headers=headers)
-            if "listings" in json:
-                for v in json["listings"]:
-                    yield Listing.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def search(
-        self,
-        query: str,
-        *,
-        assets: Optional[List[AssetType]] = None,
-        categories: Optional[List[Category]] = None,
-        is_free: Optional[bool] = None,
-        is_private_exchange: Optional[bool] = None,
-        page_size: Optional[int] = None,
-        page_token: Optional[str] = None,
-        provider_ids: Optional[List[str]] = None,
-    ) -> Iterator[Listing]:
+            json = self._api.do('GET','/api/2.1/marketplace-consumer/listings', query=query
+                
+                , headers=headers
+                )
+            if 'listings' in json:
+                for v in json['listings']:
+                    yield Listing.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+
+
+
+
+    def search(self
+               , query: str
+               , *
+               , assets: Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, provider_ids: Optional[List[str]] = None) -> Iterator[Listing]:
         """Search listings.
-
+        
         Search published listings in the Databricks Marketplace that the consumer has access to. This query
         supports a variety of different search parameters and performs fuzzy matching.
-
+        
         :param query: str
           Fuzzy matches query
         :param assets: List[:class:`AssetType`] (optional)
@@ -3904,64 +3461,60 @@ def search(
         :param page_token: str (optional)
         :param provider_ids: List[str] (optional)
           Matches any of the following provider ids
-
+        
         :returns: Iterator over :class:`Listing`
         """
-
+        
         query = {}
-        if assets is not None:
-            query["assets"] = [v.value for v in assets]
-        if categories is not None:
-            query["categories"] = [v.value for v in categories]
-        if is_free is not None:
-            query["is_free"] = is_free
-        if is_private_exchange is not None:
-            query["is_private_exchange"] = is_private_exchange
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        if provider_ids is not None:
-            query["provider_ids"] = [v for v in provider_ids]
-        if query is not None:
-            query["query"] = query
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if assets is not None: query['assets'] = [v.value for v in assets]
+        if categories is not None: query['categories'] = [v.value for v in categories]
+        if is_free is not None: query['is_free'] = is_free
+        if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids]
+        if query is not None: query['query'] = query
+        headers = {'Accept': 'application/json',}
+        
+        
         while True:
-            json = self._api.do("GET", "/api/2.1/marketplace-consumer/search-listings", query=query, headers=headers)
-            if "listings" in json:
-                for v in json["listings"]:
-                    yield Listing.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-
+            json = self._api.do('GET','/api/2.1/marketplace-consumer/search-listings', query=query
+                
+                , headers=headers
+                )
+            if 'listings' in json:
+                for v in json['listings']:
+                    yield Listing.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+
intended_use: str, - accepted_consumer_terms: ConsumerTerms, - *, - comment: Optional[str] = None, - company: Optional[str] = None, - first_name: Optional[str] = None, - is_from_lighthouse: Optional[bool] = None, - last_name: Optional[str] = None, - recipient_type: Optional[DeltaSharingRecipientType] = None, - ) -> CreatePersonalizationRequestResponse: - """Create a personalization request. + - Create a personalization request for a listing. + + + + + def create(self + , listing_id: str, intended_use: str, accepted_consumer_terms: ConsumerTerms + , * + , comment: Optional[str] = None, company: Optional[str] = None, first_name: Optional[str] = None, is_from_lighthouse: Optional[bool] = None, last_name: Optional[str] = None, recipient_type: Optional[DeltaSharingRecipientType] = None) -> CreatePersonalizationRequestResponse: + """Create a personalization request. + + Create a personalization request for a listing. + :param listing_id: str :param intended_use: str :param accepted_consumer_terms: :class:`ConsumerTerms` @@ -3971,976 +3524,1213 @@ def create( :param is_from_lighthouse: bool (optional) :param last_name: str (optional) :param recipient_type: :class:`DeltaSharingRecipientType` (optional) - + :returns: :class:`CreatePersonalizationRequestResponse` """ body = {} - if accepted_consumer_terms is not None: - body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict() - if comment is not None: - body["comment"] = comment - if company is not None: - body["company"] = company - if first_name is not None: - body["first_name"] = first_name - if intended_use is not None: - body["intended_use"] = intended_use - if is_from_lighthouse is not None: - body["is_from_lighthouse"] = is_from_lighthouse - if last_name is not None: - body["last_name"] = last_name - if recipient_type is not None: - body["recipient_type"] = recipient_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests", - body=body, - headers=headers, - ) + if accepted_consumer_terms is not None: body['accepted_consumer_terms'] = accepted_consumer_terms.as_dict() + if comment is not None: body['comment'] = comment + if company is not None: body['company'] = company + if first_name is not None: body['first_name'] = first_name + if intended_use is not None: body['intended_use'] = intended_use + if is_from_lighthouse is not None: body['is_from_lighthouse'] = is_from_lighthouse + if last_name is not None: body['last_name'] = last_name + if recipient_type is not None: body['recipient_type'] = recipient_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests', body=body + + , headers=headers + ) return CreatePersonalizationRequestResponse.from_dict(res) - def get(self, listing_id: str) -> GetPersonalizationRequestResponse: - """Get the personalization request for a listing. + + + + def get(self + , listing_id: str + ) -> GetPersonalizationRequestResponse: + """Get the personalization request for a listing. + Get the personalization request for a listing. Each consumer can make at *most* one personalization request for a listing. 
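# --- Editor's sketch (not part of the generated patch) ---
# Requesting personalized access to a listing. The docstrings here note that a
# consumer can hold at most one personalization request per listing, which is
# why `get` takes only the listing ID. Assumes the accessor name
# `consumer_personalization_requests`; the ConsumerTerms field name below is an
# assumption for illustration.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import ConsumerTerms

w = WorkspaceClient()

created = w.consumer_personalization_requests.create(
    listing_id="<listing-id>",
    intended_use="Evaluate the dataset for an internal analytics workload",
    accepted_consumer_terms=ConsumerTerms(version="1.0"),  # assumed field name
    comment="Requesting a trial share",
)

# At most one request exists per listing, so the listing ID alone is enough.
current = w.consumer_personalization_requests.get(listing_id="<listing-id>")
# --- end sketch ---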
- + :param listing_id: str - + :returns: :class:`GetPersonalizationRequestResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests' + + , headers=headers + ) return GetPersonalizationRequestResponse.from_dict(res) - def list( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[PersonalizationRequest]: - """List all personalization requests. + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PersonalizationRequest]: + """List all personalization requests. + List personalization requests for a consumer across all listings. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", "/api/2.1/marketplace-consumer/personalization-requests", query=query, headers=headers - ) - if "personalization_requests" in json: - for v in json["personalization_requests"]: - yield PersonalizationRequest.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - + json = self._api.do('GET','/api/2.1/marketplace-consumer/personalization-requests', query=query + + , headers=headers + ) + if 'personalization_requests' in json: + for v in json['personalization_requests']: + yield PersonalizationRequest.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + class ConsumerProvidersAPI: """Providers are the entities that publish listings to the Marketplace.""" - + def __init__(self, api_client): self._api = api_client + - def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetProvidersResponse: - """Get one batch of providers. One may specify up to 50 IDs per request. + - Batch get a provider in the Databricks Marketplace with at least one visible listing. + - :param ids: List[str] (optional) + + + def batch_get(self + + , * + , ids: Optional[List[str]] = None) -> BatchGetProvidersResponse: + """Get one batch of providers. One may specify up to 50 IDs per request. + + Batch get a provider in the Databricks Marketplace with at least one visible listing. 
+ + :param ids: List[str] (optional) + :returns: :class:`BatchGetProvidersResponse` """ - + query = {} - if ids is not None: - query["ids"] = [v for v in ids] - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.1/marketplace-consumer/providers:batchGet", query=query, headers=headers) + if ids is not None: query['ids'] = [v for v in ids] + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.1/marketplace-consumer/providers:batchGet', query=query + + , headers=headers + ) return BatchGetProvidersResponse.from_dict(res) - def get(self, id: str) -> GetProviderResponse: - """Get a provider. + + + + def get(self + , id: str + ) -> GetProviderResponse: + """Get a provider. + Get a provider in the Databricks Marketplace with at least one visible listing. - + :param id: str - + :returns: :class:`GetProviderResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/marketplace-consumer/providers/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/marketplace-consumer/providers/{id}' + + , headers=headers + ) return GetProviderResponse.from_dict(res) - def list( - self, *, is_featured: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ProviderInfo]: - """List providers. + + + + def list(self + + , * + , is_featured: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: + """List providers. + List all providers in the Databricks Marketplace with at least one visible listing. - + :param is_featured: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` """ - + query = {} - if is_featured is not None: - query["is_featured"] = is_featured - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if is_featured is not None: query['is_featured'] = is_featured + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.1/marketplace-consumer/providers", query=query, headers=headers) - if "providers" in json: - for v in json["providers"]: - yield ProviderInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - + json = self._api.do('GET','/api/2.1/marketplace-consumer/providers', query=query + + , headers=headers + ) + if 'providers' in json: + for v in json['providers']: + yield ProviderInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + class ProviderExchangeFiltersAPI: """Marketplace exchanges filters curate which groups can access an exchange.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, filter: ExchangeFilter) -> CreateExchangeFilterResponse: - """Create a new exchange filter. + - Add an exchange filter. + - :param filter: :class:`ExchangeFilter` + + + def create(self + , filter: ExchangeFilter + ) -> CreateExchangeFilterResponse: + """Create a new exchange filter. + + Add an exchange filter. 
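# --- Editor's sketch (not part of the generated patch) ---
# Browsing providers mirrors the listings APIs: `list` pages transparently via
# `next_page_token`, and `batch_get` takes up to 50 IDs per request. Assumes
# the accessor name `consumer_providers`; the ID is a placeholder.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

featured = [p for p in w.consumer_providers.list(is_featured=True, page_size=25)]
one = w.consumer_providers.get(id="<provider-id>")
# --- end sketch ---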
+ + :param filter: :class:`ExchangeFilter` + :returns: :class:`CreateExchangeFilterResponse` """ body = {} - if filter is not None: - body["filter"] = filter.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/marketplace-exchange/filters", body=body, headers=headers) + if filter is not None: body['filter'] = filter.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/marketplace-exchange/filters', body=body + + , headers=headers + ) return CreateExchangeFilterResponse.from_dict(res) - def delete(self, id: str): - """Delete an exchange filter. + + + + def delete(self + , id: str + ): + """Delete an exchange filter. + Delete an exchange filter - + :param id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/marketplace-exchange/filters/{id}", headers=headers) - - def list( - self, exchange_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ExchangeFilter]: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/marketplace-exchange/filters/{id}' + + , headers=headers + ) + + + + + + + def list(self + , exchange_id: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExchangeFilter]: """List exchange filters. - + List exchange filter - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeFilter` """ - + query = {} - if exchange_id is not None: - query["exchange_id"] = exchange_id - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if exchange_id is not None: query['exchange_id'] = exchange_id + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/marketplace-exchange/filters", query=query, headers=headers) - if "filters" in json: - for v in json["filters"]: - yield ExchangeFilter.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse: + json = self._api.do('GET','/api/2.0/marketplace-exchange/filters', query=query + + , headers=headers + ) + if 'filters' in json: + for v in json['filters']: + yield ExchangeFilter.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , id: str, filter: ExchangeFilter + ) -> UpdateExchangeFilterResponse: """Update exchange filter. - + Update an exchange filter. 
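# --- Editor's sketch (not part of the generated patch) ---
# Exchange filters are listed per exchange, and `update` (completed just
# below) takes the full ExchangeFilter back. A minimal read-modify-write
# sketch, assuming the accessor name `provider_exchange_filters` and that each
# filter object carries its own `id` (assumed field).
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for f in w.provider_exchange_filters.list(exchange_id="<exchange-id>"):
    # Re-submit the filter unchanged; a real caller would modify it first.
    w.provider_exchange_filters.update(id=f.id, filter=f)  # assumed `id` field
# --- end sketch ---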
- + :param id: str :param filter: :class:`ExchangeFilter` - + :returns: :class:`UpdateExchangeFilterResponse` """ body = {} - if filter is not None: - body["filter"] = filter.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/marketplace-exchange/filters/{id}", body=body, headers=headers) + if filter is not None: body['filter'] = filter.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/marketplace-exchange/filters/{id}', body=body + + , headers=headers + ) return UpdateExchangeFilterResponse.from_dict(res) - + + class ProviderExchangesAPI: """Marketplace exchanges allow providers to share their listings with a curated set of customers.""" - + def __init__(self, api_client): self._api = api_client + - def add_listing_to_exchange(self, listing_id: str, exchange_id: str) -> AddExchangeForListingResponse: - """Add an exchange for listing. + - Associate an exchange with a listing + + + + + def add_listing_to_exchange(self + , listing_id: str, exchange_id: str + ) -> AddExchangeForListingResponse: + """Add an exchange for listing. + + Associate an exchange with a listing + :param listing_id: str :param exchange_id: str - + :returns: :class:`AddExchangeForListingResponse` """ body = {} - if exchange_id is not None: - body["exchange_id"] = exchange_id - if listing_id is not None: - body["listing_id"] = listing_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/marketplace-exchange/exchanges-for-listing", body=body, headers=headers) + if exchange_id is not None: body['exchange_id'] = exchange_id + if listing_id is not None: body['listing_id'] = listing_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/marketplace-exchange/exchanges-for-listing', body=body + + , headers=headers + ) return AddExchangeForListingResponse.from_dict(res) - def create(self, exchange: Exchange) -> CreateExchangeResponse: - """Create an exchange. + + + + def create(self + , exchange: Exchange + ) -> CreateExchangeResponse: + """Create an exchange. + Create an exchange - + :param exchange: :class:`Exchange` - + :returns: :class:`CreateExchangeResponse` """ body = {} - if exchange is not None: - body["exchange"] = exchange.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/marketplace-exchange/exchanges", body=body, headers=headers) + if exchange is not None: body['exchange'] = exchange.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/marketplace-exchange/exchanges', body=body + + , headers=headers + ) return CreateExchangeResponse.from_dict(res) - def delete(self, id: str): - """Delete an exchange. + + + + def delete(self + , id: str + ): + """Delete an exchange. + This removes a listing from marketplace. 
- + :param id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/marketplace-exchange/exchanges/{id}", headers=headers) - - def delete_listing_from_exchange(self, id: str): + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/marketplace-exchange/exchanges/{id}' + + , headers=headers + ) + + + + + + + def delete_listing_from_exchange(self + , id: str + ): """Remove an exchange for listing. - + Disassociate an exchange with a listing - + :param id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/marketplace-exchange/exchanges-for-listing/{id}", headers=headers) - - def get(self, id: str) -> GetExchangeResponse: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/marketplace-exchange/exchanges-for-listing/{id}' + + , headers=headers + ) + + + + + + + def get(self + , id: str + ) -> GetExchangeResponse: """Get an exchange. - + Get an exchange. - + :param id: str - + :returns: :class:`GetExchangeResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/marketplace-exchange/exchanges/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/marketplace-exchange/exchanges/{id}' + + , headers=headers + ) return GetExchangeResponse.from_dict(res) - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Exchange]: - """List exchanges. + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Exchange]: + """List exchanges. + List exchanges visible to provider - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Exchange` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/marketplace-exchange/exchanges", query=query, headers=headers) - if "exchanges" in json: - for v in json["exchanges"]: - yield Exchange.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_exchanges_for_listing( - self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ExchangeListing]: + json = self._api.do('GET','/api/2.0/marketplace-exchange/exchanges', query=query + + , headers=headers + ) + if 'exchanges' in json: + for v in json['exchanges']: + yield Exchange.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def list_exchanges_for_listing(self + , listing_id: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExchangeListing]: """List exchanges for listing. 
- + List exchanges associated with a listing - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` """ - + query = {} - if listing_id is not None: - query["listing_id"] = listing_id - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if listing_id is not None: query['listing_id'] = listing_id + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", "/api/2.0/marketplace-exchange/exchanges-for-listing", query=query, headers=headers - ) - if "exchange_listing" in json: - for v in json["exchange_listing"]: - yield ExchangeListing.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_listings_for_exchange( - self, exchange_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ExchangeListing]: + json = self._api.do('GET','/api/2.0/marketplace-exchange/exchanges-for-listing', query=query + + , headers=headers + ) + if 'exchange_listing' in json: + for v in json['exchange_listing']: + yield ExchangeListing.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def list_listings_for_exchange(self + , exchange_id: str + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExchangeListing]: """List listings for exchange. - + List listings associated with an exchange - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` """ - + query = {} - if exchange_id is not None: - query["exchange_id"] = exchange_id - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if exchange_id is not None: query['exchange_id'] = exchange_id + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", "/api/2.0/marketplace-exchange/listings-for-exchange", query=query, headers=headers - ) - if "exchange_listings" in json: - for v in json["exchange_listings"]: - yield ExchangeListing.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, id: str, exchange: Exchange) -> UpdateExchangeResponse: + json = self._api.do('GET','/api/2.0/marketplace-exchange/listings-for-exchange', query=query + + , headers=headers + ) + if 'exchange_listings' in json: + for v in json['exchange_listings']: + yield ExchangeListing.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , id: str, exchange: Exchange + ) -> UpdateExchangeResponse: """Update exchange. 
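# --- Editor's sketch (not part of the generated patch) ---
# Wiring a listing into an exchange and then inspecting the association from
# both sides with the list methods defined above. Both directions page via
# `next_page_token`, so plain iteration is enough. Assumes the accessor name
# `provider_exchanges`; the IDs are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

w.provider_exchanges.add_listing_to_exchange(
    listing_id="<listing-id>",
    exchange_id="<exchange-id>",
)

for el in w.provider_exchanges.list_listings_for_exchange(exchange_id="<exchange-id>"):
    print(el)
for el in w.provider_exchanges.list_exchanges_for_listing(listing_id="<listing-id>"):
    print(el)
# --- end sketch ---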
- + Update an exchange - + :param id: str :param exchange: :class:`Exchange` - + :returns: :class:`UpdateExchangeResponse` """ body = {} - if exchange is not None: - body["exchange"] = exchange.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/marketplace-exchange/exchanges/{id}", body=body, headers=headers) + if exchange is not None: body['exchange'] = exchange.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/marketplace-exchange/exchanges/{id}', body=body + + , headers=headers + ) return UpdateExchangeResponse.from_dict(res) - + + class ProviderFilesAPI: """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - file_parent: FileParent, - marketplace_file_type: MarketplaceFileType, - mime_type: str, - *, - display_name: Optional[str] = None, - ) -> CreateFileResponse: - """Create a file. + - Create a file. Currently, only provider icons and attached notebooks are supported. + + + + + def create(self + , file_parent: FileParent, marketplace_file_type: MarketplaceFileType, mime_type: str + , * + , display_name: Optional[str] = None) -> CreateFileResponse: + """Create a file. + + Create a file. Currently, only provider icons and attached notebooks are supported. + :param file_parent: :class:`FileParent` :param marketplace_file_type: :class:`MarketplaceFileType` :param mime_type: str :param display_name: str (optional) - + :returns: :class:`CreateFileResponse` """ body = {} - if display_name is not None: - body["display_name"] = display_name - if file_parent is not None: - body["file_parent"] = file_parent.as_dict() - if marketplace_file_type is not None: - body["marketplace_file_type"] = marketplace_file_type.value - if mime_type is not None: - body["mime_type"] = mime_type - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/marketplace-provider/files", body=body, headers=headers) + if display_name is not None: body['display_name'] = display_name + if file_parent is not None: body['file_parent'] = file_parent.as_dict() + if marketplace_file_type is not None: body['marketplace_file_type'] = marketplace_file_type.value + if mime_type is not None: body['mime_type'] = mime_type + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/marketplace-provider/files', body=body + + , headers=headers + ) return CreateFileResponse.from_dict(res) - def delete(self, file_id: str): - """Delete a file. + + + + def delete(self + , file_id: str + ): + """Delete a file. + Delete a file - + :param file_id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/marketplace-provider/files/{file_id}", headers=headers) - - def get(self, file_id: str) -> GetFileResponse: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/marketplace-provider/files/{file_id}' + + , headers=headers + ) + + + + + + + def get(self + , file_id: str + ) -> GetFileResponse: """Get a file. 
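# --- Editor's sketch (not part of the generated patch) ---
# Registering a provider icon file. Per the create docstring above, only
# provider icons and attached notebooks are currently supported. The enum
# member and FileParent field names below are assumptions for illustration;
# they are not taken from this patch.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import FileParent, FileParentType, MarketplaceFileType

w = WorkspaceClient()

created = w.provider_files.create(
    file_parent=FileParent(
        file_parent_type=FileParentType.PROVIDER,  # assumed member name
        parent_id="<provider-id>",                 # assumed field name
    ),
    marketplace_file_type=MarketplaceFileType.PROVIDER_ICON,  # assumed member name
    mime_type="image/png",
)
# --- end sketch ---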
- + Get a file - + :param file_id: str - + :returns: :class:`GetFileResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/marketplace-provider/files/{file_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/marketplace-provider/files/{file_id}' + + , headers=headers + ) return GetFileResponse.from_dict(res) - def list( - self, file_parent: FileParent, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[FileInfo]: - """List files. + + + + def list(self + , file_parent: FileParent + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FileInfo]: + """List files. + List files attached to a parent entity. - + :param file_parent: :class:`FileParent` :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FileInfo` """ - + query = {} - if file_parent is not None: - query["file_parent"] = file_parent.as_dict() - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if file_parent is not None: query['file_parent'] = file_parent.as_dict() + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/marketplace-provider/files", query=query, headers=headers) - if "file_infos" in json: - for v in json["file_infos"]: - yield FileInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - + json = self._api.do('GET','/api/2.0/marketplace-provider/files', query=query + + , headers=headers + ) + if 'file_infos' in json: + for v in json['file_infos']: + yield FileInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + class ProviderListingsAPI: """Listings are the core entities in the Marketplace. They represent the products that are available for consumption.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, listing: Listing) -> CreateListingResponse: - """Create a listing. + - Create a new listing + - :param listing: :class:`Listing` + + + def create(self + , listing: Listing + ) -> CreateListingResponse: + """Create a listing. + + Create a new listing + + :param listing: :class:`Listing` + :returns: :class:`CreateListingResponse` """ body = {} - if listing is not None: - body["listing"] = listing.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/marketplace-provider/listing", body=body, headers=headers) + if listing is not None: body['listing'] = listing.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/marketplace-provider/listing', body=body + + , headers=headers + ) return CreateListingResponse.from_dict(res) - def delete(self, id: str): - """Delete a listing. + + + + def delete(self + , id: str + ): + """Delete a listing. 
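# --- Editor's sketch (not part of the generated patch) ---
# Provider-side listing maintenance using `create` above and the get/list/
# update methods that follow just below: enumerate owned listings, fetch one,
# and push the full Listing object back through `update`. Assumes the accessor
# name `provider_listings` and that GetListingResponse exposes the listing as
# `.listing` (assumed field).
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for listing in w.provider_listings.list(page_size=50):
    print(listing)

got = w.provider_listings.get(id="<listing-id>")
w.provider_listings.update(id="<listing-id>", listing=got.listing)  # assumed field
# --- end sketch ---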
+ Delete a listing - + :param id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/marketplace-provider/listings/{id}", headers=headers) - - def get(self, id: str) -> GetListingResponse: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/marketplace-provider/listings/{id}' + + , headers=headers + ) + + + + + + + def get(self + , id: str + ) -> GetListingResponse: """Get a listing. - + Get a listing - + :param id: str - + :returns: :class:`GetListingResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/marketplace-provider/listings/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/marketplace-provider/listings/{id}' + + , headers=headers + ) return GetListingResponse.from_dict(res) - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Listing]: - """List listings. + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Listing]: + """List listings. + List listings owned by this provider - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Listing` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/marketplace-provider/listings", query=query, headers=headers) - if "listings" in json: - for v in json["listings"]: - yield Listing.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, id: str, listing: Listing) -> UpdateListingResponse: + json = self._api.do('GET','/api/2.0/marketplace-provider/listings', query=query + + , headers=headers + ) + if 'listings' in json: + for v in json['listings']: + yield Listing.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , id: str, listing: Listing + ) -> UpdateListingResponse: """Update listing. - + Update a listing - + :param id: str :param listing: :class:`Listing` - + :returns: :class:`UpdateListingResponse` """ body = {} - if listing is not None: - body["listing"] = listing.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/marketplace-provider/listings/{id}", body=body, headers=headers) + if listing is not None: body['listing'] = listing.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/marketplace-provider/listings/{id}', body=body + + , headers=headers + ) return UpdateListingResponse.from_dict(res) - + + class ProviderPersonalizationRequestsAPI: """Personalization requests are an alternate to instantly available listings. 
Control the lifecycle of personalized solutions.""" - + def __init__(self, api_client): self._api = api_client + - def list( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[PersonalizationRequest]: - """All personalization requests across all listings. + + + + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PersonalizationRequest]: + """All personalization requests across all listings. + List personalization requests to this provider. This will return all personalization requests, regardless of which listing they are for. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", "/api/2.0/marketplace-provider/personalization-requests", query=query, headers=headers - ) - if "personalization_requests" in json: - for v in json["personalization_requests"]: - yield PersonalizationRequest.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - listing_id: str, - request_id: str, - status: PersonalizationRequestStatus, - *, - reason: Optional[str] = None, - share: Optional[ShareInfo] = None, - ) -> UpdatePersonalizationRequestResponse: + json = self._api.do('GET','/api/2.0/marketplace-provider/personalization-requests', query=query + + , headers=headers + ) + if 'personalization_requests' in json: + for v in json['personalization_requests']: + yield PersonalizationRequest.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , listing_id: str, request_id: str, status: PersonalizationRequestStatus + , * + , reason: Optional[str] = None, share: Optional[ShareInfo] = None) -> UpdatePersonalizationRequestResponse: """Update personalization request status. - + Update personalization request. This method only permits updating the status of the request. 
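# --- Editor's sketch (not part of the generated patch) ---
# Triaging incoming personalization requests. `list` above returns requests
# across all of a provider's listings, and `update` (completed just below)
# only changes a request's status, as its docstring states. The status enum
# member name is an assumption for illustration.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import PersonalizationRequestStatus

w = WorkspaceClient()

for req in w.provider_personalization_requests.list(page_size=100):
    print(req)

w.provider_personalization_requests.update(
    listing_id="<listing-id>",
    request_id="<request-id>",
    status=PersonalizationRequestStatus.FULFILLED,  # assumed member name
    reason="Share granted",
)
# --- end sketch ---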
- + :param listing_id: str :param request_id: str :param status: :class:`PersonalizationRequestStatus` :param reason: str (optional) :param share: :class:`ShareInfo` (optional) - + :returns: :class:`UpdatePersonalizationRequestResponse` """ body = {} - if reason is not None: - body["reason"] = reason - if share is not None: - body["share"] = share.as_dict() - if status is not None: - body["status"] = status.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", - f"/api/2.0/marketplace-provider/listings/{listing_id}/personalization-requests/{request_id}/request-status", - body=body, - headers=headers, - ) + if reason is not None: body['reason'] = reason + if share is not None: body['share'] = share.as_dict() + if status is not None: body['status'] = status.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/marketplace-provider/listings/{listing_id}/personalization-requests/{request_id}/request-status', body=body + + , headers=headers + ) return UpdatePersonalizationRequestResponse.from_dict(res) - + + class ProviderProviderAnalyticsDashboardsAPI: """Manage templated analytics solution for providers.""" - + def __init__(self, api_client): self._api = api_client + + + + + + + + def create(self) -> ProviderAnalyticsDashboard: """Create provider analytics dashboard. - + Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the Lakeview dashboard id. - + :returns: :class:`ProviderAnalyticsDashboard` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("POST", "/api/2.0/marketplace-provider/analytics_dashboard", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('POST','/api/2.0/marketplace-provider/analytics_dashboard' + , headers=headers + ) return ProviderAnalyticsDashboard.from_dict(res) + + + + def get(self) -> ListProviderAnalyticsDashboardResponse: """Get provider analytics dashboard. - + Get provider analytics dashboard. - + :returns: :class:`ListProviderAnalyticsDashboardResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/marketplace-provider/analytics_dashboard", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/marketplace-provider/analytics_dashboard' + , headers=headers + ) return ListProviderAnalyticsDashboardResponse.from_dict(res) + + + + def get_latest_version(self) -> GetLatestVersionProviderAnalyticsDashboardResponse: """Get latest version of provider analytics dashboard. - + Get latest version of provider analytics dashboard. - + :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/marketplace-provider/analytics_dashboard/latest", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/marketplace-provider/analytics_dashboard/latest' + , headers=headers + ) return GetLatestVersionProviderAnalyticsDashboardResponse.from_dict(res) - def update(self, id: str, *, version: Optional[int] = None) -> UpdateProviderAnalyticsDashboardResponse: - """Update provider analytics dashboard. + + + + def update(self + , id: str + , * + , version: Optional[int] = None) -> UpdateProviderAnalyticsDashboardResponse: + """Update provider analytics dashboard. 
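# --- Editor's sketch (not part of the generated patch) ---
# Keeping a provider analytics dashboard on the latest template version. Per
# the docstrings above, `create` returns a Marketplace-specific `id` (not the
# Lakeview dashboard id), and `update` expects `version` to equal the latest
# template version. Assumes the accessor name
# `provider_provider_analytics_dashboards`; the response field name is assumed.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

latest = w.provider_provider_analytics_dashboards.get_latest_version()
w.provider_provider_analytics_dashboards.update(
    id="<marketplace-dashboard-id>",  # the Marketplace id, not the Lakeview id
    version=latest.version,           # assumed response field
)
# --- end sketch ---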
+ Update provider analytics dashboard. - + :param id: str id is immutable property and can't be updated. :param version: int (optional) this is the version of the dashboard template we want to update our user to current expectation is that it should be equal to latest version of the dashboard template - + :returns: :class:`UpdateProviderAnalyticsDashboardResponse` """ body = {} - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/marketplace-provider/analytics_dashboard/{id}", body=body, headers=headers) + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/marketplace-provider/analytics_dashboard/{id}', body=body + + , headers=headers + ) return UpdateProviderAnalyticsDashboardResponse.from_dict(res) - + + class ProviderProvidersAPI: """Providers are entities that manage assets in Marketplace.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, provider: ProviderInfo) -> CreateProviderResponse: - """Create a provider. + - Create a provider + - :param provider: :class:`ProviderInfo` + + + def create(self + , provider: ProviderInfo + ) -> CreateProviderResponse: + """Create a provider. + + Create a provider + + :param provider: :class:`ProviderInfo` + :returns: :class:`CreateProviderResponse` """ body = {} - if provider is not None: - body["provider"] = provider.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/marketplace-provider/provider", body=body, headers=headers) + if provider is not None: body['provider'] = provider.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/marketplace-provider/provider', body=body + + , headers=headers + ) return CreateProviderResponse.from_dict(res) - def delete(self, id: str): - """Delete provider. + + + + def delete(self + , id: str + ): + """Delete provider. + Delete provider - + :param id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/marketplace-provider/providers/{id}", headers=headers) - - def get(self, id: str) -> GetProviderResponse: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/marketplace-provider/providers/{id}' + + , headers=headers + ) + + + + + + + def get(self + , id: str + ) -> GetProviderResponse: """Get provider. - + Get provider profile - + :param id: str - + :returns: :class:`GetProviderResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/marketplace-provider/providers/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/marketplace-provider/providers/{id}' + + , headers=headers + ) return GetProviderResponse.from_dict(res) - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: - """List providers. + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: + """List providers. + List provider profiles for account. 
- + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/marketplace-provider/providers", query=query, headers=headers) - if "providers" in json: - for v in json["providers"]: - yield ProviderInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, id: str, provider: ProviderInfo) -> UpdateProviderResponse: + json = self._api.do('GET','/api/2.0/marketplace-provider/providers', query=query + + , headers=headers + ) + if 'providers' in json: + for v in json['providers']: + yield ProviderInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , id: str, provider: ProviderInfo + ) -> UpdateProviderResponse: """Update provider. - + Update provider profile - + :param id: str :param provider: :class:`ProviderInfo` - + :returns: :class:`UpdateProviderResponse` """ body = {} - if provider is not None: - body["provider"] = provider.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/marketplace-provider/providers/{id}", body=body, headers=headers) + if provider is not None: body['provider'] = provider.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/marketplace-provider/providers/{id}', body=body + + , headers=headers + ) return UpdateProviderResponse.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 1e500f10d..483e59300 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -1,28 +1,30 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class Activity: """Activity recorded for the action.""" - + activity_type: Optional[ActivityType] = None """Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage transition. 
@@ -37,13 +39,13 @@ class Activity: * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model versions in a stage.""" - + comment: Optional[str] = None """User-provided comment associated with the activity.""" - + creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + from_stage: Optional[Stage] = None """Source stage of the transition (if the activity is stage transition related). Valid values are: @@ -54,18 +56,18 @@ class Activity: * `Production`: Production stage. * `Archived`: Archived stage.""" - + id: Optional[str] = None """Unique identifier for the object.""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + system_comment: Optional[str] = None """Comment made by system, for example explaining an activity of type `SYSTEM_TRANSITION`. It usually describes a side effect, such as a version being archived as part of another version's stage transition, and may not be returned for some activity types.""" - + to_stage: Optional[Stage] = None """Target stage of the transition (if the activity is stage transition related). Valid values are: @@ -76,117 +78,89 @@ class Activity: * `Production`: Production stage. * `Archived`: Archived stage.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the Activity into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activity_type is not None: - body["activity_type"] = self.activity_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.from_stage is not None: - body["from_stage"] = self.from_stage.value - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.system_comment is not None: - body["system_comment"] = self.system_comment - if self.to_stage is not None: - body["to_stage"] = self.to_stage.value - if self.user_id is not None: - body["user_id"] = self.user_id + if self.activity_type is not None: body['activity_type'] = self.activity_type.value + if self.comment is not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.from_stage is not None: body['from_stage'] = self.from_stage.value + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.system_comment is not None: body['system_comment'] = self.system_comment + if self.to_stage is not None: body['to_stage'] = self.to_stage.value + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the Activity into a shallow dictionary of its immediate attributes.""" body = {} - if self.activity_type is not None: - body["activity_type"] = self.activity_type - if self.comment is not None: - body["comment"] = self.comment - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.from_stage is not None: - body["from_stage"] = self.from_stage - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = 
self.last_updated_timestamp - if self.system_comment is not None: - body["system_comment"] = self.system_comment - if self.to_stage is not None: - body["to_stage"] = self.to_stage - if self.user_id is not None: - body["user_id"] = self.user_id + if self.activity_type is not None: body['activity_type'] = self.activity_type + if self.comment is not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.from_stage is not None: body['from_stage'] = self.from_stage + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.system_comment is not None: body['system_comment'] = self.system_comment + if self.to_stage is not None: body['to_stage'] = self.to_stage + if self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Activity: """Deserializes the Activity from a dictionary.""" - return cls( - activity_type=_enum(d, "activity_type", ActivityType), - comment=d.get("comment", None), - creation_timestamp=d.get("creation_timestamp", None), - from_stage=_enum(d, "from_stage", Stage), - id=d.get("id", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - system_comment=d.get("system_comment", None), - to_stage=_enum(d, "to_stage", Stage), - user_id=d.get("user_id", None), - ) + return cls(activity_type=_enum(d, 'activity_type', ActivityType), comment=d.get('comment', None), creation_timestamp=d.get('creation_timestamp', None), from_stage=_enum(d, 'from_stage', Stage), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), system_comment=d.get('system_comment', None), to_stage=_enum(d, 'to_stage', Stage), user_id=d.get('user_id', None)) + + class ActivityAction(Enum): """An action that a user (with sufficient permissions) could take on an activity. Valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request - + * `REJECT_TRANSITION_REQUEST`: Reject a transition request - + * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request""" - - APPROVE_TRANSITION_REQUEST = "APPROVE_TRANSITION_REQUEST" - CANCEL_TRANSITION_REQUEST = "CANCEL_TRANSITION_REQUEST" - REJECT_TRANSITION_REQUEST = "REJECT_TRANSITION_REQUEST" - + + APPROVE_TRANSITION_REQUEST = 'APPROVE_TRANSITION_REQUEST' + CANCEL_TRANSITION_REQUEST = 'CANCEL_TRANSITION_REQUEST' + REJECT_TRANSITION_REQUEST = 'REJECT_TRANSITION_REQUEST' class ActivityType(Enum): """Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage transition. - + * `REQUESTED_TRANSITION`: User requested the corresponding stage transition. - + * `CANCELLED_REQUEST`: User cancelled an existing transition request. - + * `APPROVED_REQUEST`: User approved the corresponding stage transition. - + * `REJECTED_REQUEST`: User rejected the coressponding stage transition. 
- + * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model versions in a stage.""" - - APPLIED_TRANSITION = "APPLIED_TRANSITION" - APPROVED_REQUEST = "APPROVED_REQUEST" - CANCELLED_REQUEST = "CANCELLED_REQUEST" - NEW_COMMENT = "NEW_COMMENT" - REJECTED_REQUEST = "REJECTED_REQUEST" - REQUESTED_TRANSITION = "REQUESTED_TRANSITION" - SYSTEM_TRANSITION = "SYSTEM_TRANSITION" - + + APPLIED_TRANSITION = 'APPLIED_TRANSITION' + APPROVED_REQUEST = 'APPROVED_REQUEST' + CANCELLED_REQUEST = 'CANCELLED_REQUEST' + NEW_COMMENT = 'NEW_COMMENT' + REJECTED_REQUEST = 'REJECTED_REQUEST' + REQUESTED_TRANSITION = 'REQUESTED_TRANSITION' + SYSTEM_TRANSITION = 'SYSTEM_TRANSITION' @dataclass class ApproveTransitionRequest: name: str """Name of the model.""" - + version: str """Version of the model.""" - + stage: Stage """Target stage of the transition. Valid values are: @@ -197,396 +171,249 @@ class ApproveTransitionRequest: * `Production`: Production stage. * `Archived`: Archived stage.""" - + archive_existing_versions: bool """Specifies whether to archive all current model versions in the target stage.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the ApproveTransitionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage.value - if self.version is not None: - body["version"] = self.version + if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage.value + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version + if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequest: """Deserializes the ApproveTransitionRequest from a dictionary.""" - return cls( - archive_existing_versions=d.get("archive_existing_versions", None), - comment=d.get("comment", None), - name=d.get("name", None), - stage=_enum(d, "stage", Stage), - version=d.get("version", None), - ) + return cls(archive_existing_versions=d.get('archive_existing_versions', None), comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None)) + + @dataclass class 
@dataclass
class ApproveTransitionRequestResponse:
    activity: Optional[Activity] = None
    """Activity recorded for the action."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the ApproveTransitionRequestResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.activity:
-            body["activity"] = self.activity.as_dict()
+        if self.activity: body['activity'] = self.activity.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ApproveTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.activity:
-            body["activity"] = self.activity
+        if self.activity: body['activity'] = self.activity
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequestResponse:
        """Deserializes the ApproveTransitionRequestResponse from a dictionary."""
-        return cls(activity=_from_dict(d, "activity", Activity))
-
-
-@dataclass
-class ArtifactCredentialInfo:
-    headers: Optional[List[ArtifactCredentialInfoHttpHeader]] = None
-    """A collection of HTTP headers that should be specified when uploading to or downloading from the
-    specified `signed_uri`."""
-
-    path: Optional[str] = None
-    """The path, relative to the Run's artifact root location, of the artifact that can be accessed
-    with the credential."""
-
-    run_id: Optional[str] = None
-    """The ID of the MLflow Run containing the artifact that can be accessed with the credential."""
-
-    signed_uri: Optional[str] = None
-    """The signed URI credential that provides access to the artifact."""
-
-    type: Optional[ArtifactCredentialType] = None
-    """The type of the signed credential URI (e.g., an AWS presigned URL or an Azure Shared Access
-    Signature URI)."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ArtifactCredentialInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.headers:
-            body["headers"] = [v.as_dict() for v in self.headers]
-        if self.path is not None:
-            body["path"] = self.path
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.signed_uri is not None:
-            body["signed_uri"] = self.signed_uri
-        if self.type is not None:
-            body["type"] = self.type.value
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the ArtifactCredentialInfo into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.headers:
-            body["headers"] = self.headers
-        if self.path is not None:
-            body["path"] = self.path
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.signed_uri is not None:
-            body["signed_uri"] = self.signed_uri
-        if self.type is not None:
-            body["type"] = self.type
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> ArtifactCredentialInfo:
-        """Deserializes the ArtifactCredentialInfo from a dictionary."""
-        return cls(
-            headers=_repeated_dict(d, "headers", ArtifactCredentialInfoHttpHeader),
-            path=d.get("path", None),
-            run_id=d.get("run_id", None),
-            signed_uri=d.get("signed_uri", None),
-            type=_enum(d, "type", ArtifactCredentialType),
-        )
-
-
-@dataclass
-class ArtifactCredentialInfoHttpHeader:
-    name: Optional[str] = None
-    """The HTTP header name."""
-
-    value: Optional[str] = None
-    """The HTTP header value."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ArtifactCredentialInfoHttpHeader into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.value is not None:
-            body["value"] = self.value
-        return body
-
-    def as_shallow_dict(self) -> dict:
"""Serializes the ArtifactCredentialInfoHttpHeader into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ArtifactCredentialInfoHttpHeader: - """Deserializes the ArtifactCredentialInfoHttpHeader from a dictionary.""" - return cls(name=d.get("name", None), value=d.get("value", None)) - - -class ArtifactCredentialType(Enum): - """The type of a given artifact access credential""" + return cls(activity=_from_dict(d, 'activity', Activity)) + - AWS_PRESIGNED_URL = "AWS_PRESIGNED_URL" - AZURE_ADLS_GEN2_SAS_URI = "AZURE_ADLS_GEN2_SAS_URI" - AZURE_SAS_URI = "AZURE_SAS_URI" - GCP_SIGNED_URL = "GCP_SIGNED_URL" class CommentActivityAction(Enum): """An action that a user (with sufficient permissions) could take on a comment. Valid values are: * `EDIT_COMMENT`: Edit the comment - + * `DELETE_COMMENT`: Delete the comment""" - - DELETE_COMMENT = "DELETE_COMMENT" - EDIT_COMMENT = "EDIT_COMMENT" - + + DELETE_COMMENT = 'DELETE_COMMENT' + EDIT_COMMENT = 'EDIT_COMMENT' @dataclass class CommentObject: """Comment details.""" - + available_actions: Optional[List[CommentActivityAction]] = None """Array of actions on the activity allowed for the current viewer.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + id: Optional[str] = None """Comment ID""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the CommentObject into a dictionary suitable for use as a JSON request body.""" body = {} - if self.available_actions: - body["available_actions"] = [v.value for v in self.available_actions] - if self.comment is not None: - body["comment"] = self.comment - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.user_id is not None: - body["user_id"] = self.user_id + if self.available_actions: body['available_actions'] = [v.value for v in self.available_actions] + if self.comment is not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the CommentObject into a shallow dictionary of its immediate attributes.""" body = {} - if self.available_actions: - body["available_actions"] = self.available_actions - if self.comment is not None: - body["comment"] = self.comment - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.user_id is not None: - body["user_id"] = self.user_id + if self.available_actions: 
+        if self.available_actions: body['available_actions'] = self.available_actions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.user_id is not None: body['user_id'] = self.user_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CommentObject:
        """Deserializes the CommentObject from a dictionary."""
-        return cls(
-            available_actions=_repeated_enum(d, "available_actions", CommentActivityAction),
-            comment=d.get("comment", None),
-            creation_timestamp=d.get("creation_timestamp", None),
-            id=d.get("id", None),
-            last_updated_timestamp=d.get("last_updated_timestamp", None),
-            user_id=d.get("user_id", None),
-        )
+        return cls(available_actions=_repeated_enum(d, 'available_actions', CommentActivityAction), comment=d.get('comment', None), creation_timestamp=d.get('creation_timestamp', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), user_id=d.get('user_id', None))
+
+


@dataclass
class CreateComment:
    name: str
    """Name of the model."""
-
+    
    version: str
    """Version of the model."""
-
+    
    comment: str
    """User-provided comment on the action."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateComment into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.version is not None:
-            body["version"] = self.version
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.version is not None: body['version'] = self.version
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateComment into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.version is not None:
-            body["version"] = self.version
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.version is not None: body['version'] = self.version
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateComment:
        """Deserializes the CreateComment from a dictionary."""
-        return cls(comment=d.get("comment", None), name=d.get("name", None), version=d.get("version", None))
+        return cls(comment=d.get('comment', None), name=d.get('name', None), version=d.get('version', None))
+
+


@dataclass
class CreateCommentResponse:
    comment: Optional[CommentObject] = None
    """Comment details."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateCommentResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.comment:
-            body["comment"] = self.comment.as_dict()
+        if self.comment: body['comment'] = self.comment.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateCommentResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.comment:
-            body["comment"] = self.comment
+        if self.comment: body['comment'] = self.comment
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateCommentResponse:
        """Deserializes the CreateCommentResponse from a dictionary."""
-        return cls(comment=_from_dict(d, "comment", CommentObject))
+        return cls(comment=_from_dict(d, 'comment', CommentObject))
+
+
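# --- Editor's example (not part of the patch): CreateComment is the request body for
# posting a comment on a model version; as_dict/from_dict mirror the JSON wire format.
from databricks.sdk.service.ml import CreateComment

payload = CreateComment(name="my_model", version="3", comment="Ready for review").as_dict()
# payload == {'comment': 'Ready for review', 'name': 'my_model', 'version': '3'}
assert CreateComment.from_dict(payload).comment == "Ready for review"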
@dataclass
class CreateExperiment:
    name: str
    """Experiment name."""
-
+    
    artifact_location: Optional[str] = None
    """Location where all artifacts for the experiment are stored. If not provided, the remote server
    will select an appropriate default."""
-
+    
    tags: Optional[List[ExperimentTag]] = None
    """A collection of tags to set on the experiment. Maximum tag size and number of tags per request
    depends on the storage backend. All storage backends are guaranteed to support tag keys up to
    250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also
    guaranteed to support up to 20 tags per request."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateExperiment into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.artifact_location is not None:
-            body["artifact_location"] = self.artifact_location
-        if self.name is not None:
-            body["name"] = self.name
-        if self.tags:
-            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.artifact_location is not None:
-            body["artifact_location"] = self.artifact_location
-        if self.name is not None:
-            body["name"] = self.name
-        if self.tags:
-            body["tags"] = self.tags
+        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateExperiment:
        """Deserializes the CreateExperiment from a dictionary."""
-        return cls(
-            artifact_location=d.get("artifact_location", None),
-            name=d.get("name", None),
-            tags=_repeated_dict(d, "tags", ExperimentTag),
-        )
+        return cls(artifact_location=d.get('artifact_location', None), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ExperimentTag))
+
+


@dataclass
class CreateExperimentResponse:
    experiment_id: Optional[str] = None
    """Unique identifier for the experiment."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateExperimentResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateExperimentResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateExperimentResponse:
        """Deserializes the CreateExperimentResponse from a dictionary."""
-        return cls(experiment_id=d.get("experiment_id", None))
+        return cls(experiment_id=d.get('experiment_id', None))
+
+
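# --- Editor's example (not part of the patch): creating an experiment request with a
# tag. ExperimentTag is the key/value tag type referenced above; the workspace path is
# hypothetical, and per the docstring all backends accept keys up to 250 bytes and
# values up to 5000 bytes.
from databricks.sdk.service.ml import CreateExperiment, ExperimentTag

req = CreateExperiment(
    name="/Users/someone@example.com/demo-experiment",
    tags=[ExperimentTag(key="team", value="forecasting")],
)
body = req.as_dict()  # tags serialize recursively: [{'key': 'team', 'value': 'forecasting'}]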
@dataclass
@@ -594,446 +421,356 @@ class CreateForecastingExperimentRequest:
    train_data_path: str
    """The fully qualified path of a Unity Catalog table, formatted as
    catalog_name.schema_name.table_name, used as training data for the forecasting model."""
-
+    
    target_column: str
    """The column in the input training table used as the prediction target for model training. The
    values in this column are used as the ground truth for model training."""
-
+    
    time_column: str
    """The column in the input training table that represents each row's timestamp."""
-
+    
    forecast_granularity: str
    """The time interval between consecutive rows in the time series data. Possible values include:
    '1 second', '1 minute', '5 minutes', '10 minutes', '15 minutes', '30 minutes', 'Hourly',
    'Daily', 'Weekly', 'Monthly', 'Quarterly', 'Yearly'."""
-
+    
    forecast_horizon: int
    """The number of time steps into the future to make predictions, calculated as a multiple of
    forecast_granularity. This value represents how far ahead the model should forecast."""
-
+    
    custom_weights_column: Optional[str] = None
    """The column in the training table used to customize weights for each time series."""
-
+    
    experiment_path: Optional[str] = None
    """The path in the workspace to store the created experiment."""
-
+    
    future_feature_data_path: Optional[str] = None
    """The fully qualified path of a Unity Catalog table, formatted as
    catalog_name.schema_name.table_name, used to store future feature data for predictions."""
-
+    
    holiday_regions: Optional[List[str]] = None
    """The region code(s) to automatically add holiday features. Currently supports only one region."""
-
+    
    include_features: Optional[List[str]] = None
    """Specifies the list of feature columns to include in model training. These columns must exist
    in the training data and be of type string, numerical, or boolean. If not specified, no
    additional features will be included. Note: Certain columns are automatically handled: -
    Automatically excluded: split_column, target_column, custom_weights_column. - Automatically
    included: time_column."""
-
+    
    max_runtime: Optional[int] = None
    """The maximum duration for the experiment in minutes. The experiment stops automatically if it
    exceeds this limit."""
-
+    
    prediction_data_path: Optional[str] = None
    """The fully qualified path of a Unity Catalog table, formatted as
    catalog_name.schema_name.table_name, used to store predictions."""
-
+    
    primary_metric: Optional[str] = None
    """The evaluation metric used to optimize the forecasting model."""
-
+    
    register_to: Optional[str] = None
    """The fully qualified path of a Unity Catalog model, formatted as
    catalog_name.schema_name.model_name, used to store the best model."""
-
+    
    split_column: Optional[str] = None
    """The column in the training table used for custom data splits. Values must be 'train',
    'validate', or 'test'."""
-
+    
    timeseries_identifier_columns: Optional[List[str]] = None
    """The column in the training table used to group the dataset for predicting individual time
    series."""
-
+    
    training_frameworks: Optional[List[str]] = None
    """List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA',
    'DeepAR'.
    An empty list includes all supported frameworks."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateForecastingExperimentRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.custom_weights_column is not None:
-            body["custom_weights_column"] = self.custom_weights_column
-        if self.experiment_path is not None:
-            body["experiment_path"] = self.experiment_path
-        if self.forecast_granularity is not None:
-            body["forecast_granularity"] = self.forecast_granularity
-        if self.forecast_horizon is not None:
-            body["forecast_horizon"] = self.forecast_horizon
-        if self.future_feature_data_path is not None:
-            body["future_feature_data_path"] = self.future_feature_data_path
-        if self.holiday_regions:
-            body["holiday_regions"] = [v for v in self.holiday_regions]
-        if self.include_features:
-            body["include_features"] = [v for v in self.include_features]
-        if self.max_runtime is not None:
-            body["max_runtime"] = self.max_runtime
-        if self.prediction_data_path is not None:
-            body["prediction_data_path"] = self.prediction_data_path
-        if self.primary_metric is not None:
-            body["primary_metric"] = self.primary_metric
-        if self.register_to is not None:
-            body["register_to"] = self.register_to
-        if self.split_column is not None:
-            body["split_column"] = self.split_column
-        if self.target_column is not None:
-            body["target_column"] = self.target_column
-        if self.time_column is not None:
-            body["time_column"] = self.time_column
-        if self.timeseries_identifier_columns:
-            body["timeseries_identifier_columns"] = [v for v in self.timeseries_identifier_columns]
-        if self.train_data_path is not None:
-            body["train_data_path"] = self.train_data_path
-        if self.training_frameworks:
-            body["training_frameworks"] = [v for v in self.training_frameworks]
+        if self.custom_weights_column is not None: body['custom_weights_column'] = self.custom_weights_column
+        if self.experiment_path is not None: body['experiment_path'] = self.experiment_path
+        if self.forecast_granularity is not None: body['forecast_granularity'] = self.forecast_granularity
+        if self.forecast_horizon is not None: body['forecast_horizon'] = self.forecast_horizon
+        if self.future_feature_data_path is not None: body['future_feature_data_path'] = self.future_feature_data_path
+        if self.holiday_regions: body['holiday_regions'] = [v for v in self.holiday_regions]
+        if self.include_features: body['include_features'] = [v for v in self.include_features]
+        if self.max_runtime is not None: body['max_runtime'] = self.max_runtime
+        if self.prediction_data_path is not None: body['prediction_data_path'] = self.prediction_data_path
+        if self.primary_metric is not None: body['primary_metric'] = self.primary_metric
+        if self.register_to is not None: body['register_to'] = self.register_to
+        if self.split_column is not None: body['split_column'] = self.split_column
+        if self.target_column is not None: body['target_column'] = self.target_column
+        if self.time_column is not None: body['time_column'] = self.time_column
+        if self.timeseries_identifier_columns: body['timeseries_identifier_columns'] = [v for v in self.timeseries_identifier_columns]
+        if self.train_data_path is not None: body['train_data_path'] = self.train_data_path
+        if self.training_frameworks: body['training_frameworks'] = [v for v in self.training_frameworks]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateForecastingExperimentRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.custom_weights_column is not None:
body["custom_weights_column"] = self.custom_weights_column - if self.experiment_path is not None: - body["experiment_path"] = self.experiment_path - if self.forecast_granularity is not None: - body["forecast_granularity"] = self.forecast_granularity - if self.forecast_horizon is not None: - body["forecast_horizon"] = self.forecast_horizon - if self.future_feature_data_path is not None: - body["future_feature_data_path"] = self.future_feature_data_path - if self.holiday_regions: - body["holiday_regions"] = self.holiday_regions - if self.include_features: - body["include_features"] = self.include_features - if self.max_runtime is not None: - body["max_runtime"] = self.max_runtime - if self.prediction_data_path is not None: - body["prediction_data_path"] = self.prediction_data_path - if self.primary_metric is not None: - body["primary_metric"] = self.primary_metric - if self.register_to is not None: - body["register_to"] = self.register_to - if self.split_column is not None: - body["split_column"] = self.split_column - if self.target_column is not None: - body["target_column"] = self.target_column - if self.time_column is not None: - body["time_column"] = self.time_column - if self.timeseries_identifier_columns: - body["timeseries_identifier_columns"] = self.timeseries_identifier_columns - if self.train_data_path is not None: - body["train_data_path"] = self.train_data_path - if self.training_frameworks: - body["training_frameworks"] = self.training_frameworks + if self.custom_weights_column is not None: body['custom_weights_column'] = self.custom_weights_column + if self.experiment_path is not None: body['experiment_path'] = self.experiment_path + if self.forecast_granularity is not None: body['forecast_granularity'] = self.forecast_granularity + if self.forecast_horizon is not None: body['forecast_horizon'] = self.forecast_horizon + if self.future_feature_data_path is not None: body['future_feature_data_path'] = self.future_feature_data_path + if self.holiday_regions: body['holiday_regions'] = self.holiday_regions + if self.include_features: body['include_features'] = self.include_features + if self.max_runtime is not None: body['max_runtime'] = self.max_runtime + if self.prediction_data_path is not None: body['prediction_data_path'] = self.prediction_data_path + if self.primary_metric is not None: body['primary_metric'] = self.primary_metric + if self.register_to is not None: body['register_to'] = self.register_to + if self.split_column is not None: body['split_column'] = self.split_column + if self.target_column is not None: body['target_column'] = self.target_column + if self.time_column is not None: body['time_column'] = self.time_column + if self.timeseries_identifier_columns: body['timeseries_identifier_columns'] = self.timeseries_identifier_columns + if self.train_data_path is not None: body['train_data_path'] = self.train_data_path + if self.training_frameworks: body['training_frameworks'] = self.training_frameworks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentRequest: """Deserializes the CreateForecastingExperimentRequest from a dictionary.""" - return cls( - custom_weights_column=d.get("custom_weights_column", None), - experiment_path=d.get("experiment_path", None), - forecast_granularity=d.get("forecast_granularity", None), - forecast_horizon=d.get("forecast_horizon", None), - future_feature_data_path=d.get("future_feature_data_path", None), - holiday_regions=d.get("holiday_regions", None), - 
include_features=d.get("include_features", None), - max_runtime=d.get("max_runtime", None), - prediction_data_path=d.get("prediction_data_path", None), - primary_metric=d.get("primary_metric", None), - register_to=d.get("register_to", None), - split_column=d.get("split_column", None), - target_column=d.get("target_column", None), - time_column=d.get("time_column", None), - timeseries_identifier_columns=d.get("timeseries_identifier_columns", None), - train_data_path=d.get("train_data_path", None), - training_frameworks=d.get("training_frameworks", None), - ) + return cls(custom_weights_column=d.get('custom_weights_column', None), experiment_path=d.get('experiment_path', None), forecast_granularity=d.get('forecast_granularity', None), forecast_horizon=d.get('forecast_horizon', None), future_feature_data_path=d.get('future_feature_data_path', None), holiday_regions=d.get('holiday_regions', None), include_features=d.get('include_features', None), max_runtime=d.get('max_runtime', None), prediction_data_path=d.get('prediction_data_path', None), primary_metric=d.get('primary_metric', None), register_to=d.get('register_to', None), split_column=d.get('split_column', None), target_column=d.get('target_column', None), time_column=d.get('time_column', None), timeseries_identifier_columns=d.get('timeseries_identifier_columns', None), train_data_path=d.get('train_data_path', None), training_frameworks=d.get('training_frameworks', None)) + + @dataclass class CreateForecastingExperimentResponse: experiment_id: Optional[str] = None """The unique ID of the created forecasting experiment""" - + def as_dict(self) -> dict: """Serializes the CreateForecastingExperimentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateForecastingExperimentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentResponse: """Deserializes the CreateForecastingExperimentResponse from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None)) + return cls(experiment_id=d.get('experiment_id', None)) + + @dataclass class CreateLoggedModelRequest: experiment_id: str """The ID of the experiment that owns the model.""" - + model_type: Optional[str] = None """The type of the model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``.""" - + name: Optional[str] = None """The name of the model (optional). 
@dataclass
class CreateLoggedModelRequest:
    experiment_id: str
    """The ID of the experiment that owns the model."""
-
+    
    model_type: Optional[str] = None
    """The type of the model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``."""
-
+    
    name: Optional[str] = None
    """The name of the model (optional). If not specified one will be generated."""
-
+    
    params: Optional[List[LoggedModelParameter]] = None
    """Parameters attached to the model."""
-
+    
    source_run_id: Optional[str] = None
    """The ID of the run that created the model."""
-
+    
    tags: Optional[List[LoggedModelTag]] = None
    """Tags attached to the model."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateLoggedModelRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
-        if self.model_type is not None:
-            body["model_type"] = self.model_type
-        if self.name is not None:
-            body["name"] = self.name
-        if self.params:
-            body["params"] = [v.as_dict() for v in self.params]
-        if self.source_run_id is not None:
-            body["source_run_id"] = self.source_run_id
-        if self.tags:
-            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.model_type is not None: body['model_type'] = self.model_type
+        if self.name is not None: body['name'] = self.name
+        if self.params: body['params'] = [v.as_dict() for v in self.params]
+        if self.source_run_id is not None: body['source_run_id'] = self.source_run_id
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateLoggedModelRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
-        if self.model_type is not None:
-            body["model_type"] = self.model_type
-        if self.name is not None:
-            body["name"] = self.name
-        if self.params:
-            body["params"] = self.params
-        if self.source_run_id is not None:
-            body["source_run_id"] = self.source_run_id
-        if self.tags:
-            body["tags"] = self.tags
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.model_type is not None: body['model_type'] = self.model_type
+        if self.name is not None: body['name'] = self.name
+        if self.params: body['params'] = self.params
+        if self.source_run_id is not None: body['source_run_id'] = self.source_run_id
+        if self.tags: body['tags'] = self.tags
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelRequest:
        """Deserializes the CreateLoggedModelRequest from a dictionary."""
-        return cls(
-            experiment_id=d.get("experiment_id", None),
-            model_type=d.get("model_type", None),
-            name=d.get("name", None),
-            params=_repeated_dict(d, "params", LoggedModelParameter),
-            source_run_id=d.get("source_run_id", None),
-            tags=_repeated_dict(d, "tags", LoggedModelTag),
-        )
+        return cls(experiment_id=d.get('experiment_id', None), model_type=d.get('model_type', None), name=d.get('name', None), params=_repeated_dict(d, 'params', LoggedModelParameter), source_run_id=d.get('source_run_id', None), tags=_repeated_dict(d, 'tags', LoggedModelTag))
+
+
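# --- Editor's example (not part of the patch): logging a model under an experiment.
# The experiment ID is hypothetical and LoggedModelTag is assumed to be the key/value
# tag type referenced in the fields above. Note the difference between the two
# serializers: as_dict() recurses into nested dataclasses, as_shallow_dict() does not.
from databricks.sdk.service.ml import CreateLoggedModelRequest, LoggedModelTag

req = CreateLoggedModelRequest(
    experiment_id="1234567890",
    model_type="Agent",
    tags=[LoggedModelTag(key="framework", value="demo")],
)
assert isinstance(req.as_dict()["tags"][0], dict)                    # nested -> plain dicts
assert isinstance(req.as_shallow_dict()["tags"][0], LoggedModelTag)  # kept as objects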
@dataclass
class CreateLoggedModelResponse:
    model: Optional[LoggedModel] = None
    """The newly created logged model."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateLoggedModelResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.model:
-            body["model"] = self.model.as_dict()
+        if self.model: body['model'] = self.model.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateLoggedModelResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.model:
-            body["model"] = self.model
+        if self.model: body['model'] = self.model
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelResponse:
        """Deserializes the CreateLoggedModelResponse from a dictionary."""
-        return cls(model=_from_dict(d, "model", LoggedModel))
+        return cls(model=_from_dict(d, 'model', LoggedModel))
+
+


@dataclass
class CreateModelRequest:
    name: str
    """Register models under this name"""
-
+    
    description: Optional[str] = None
    """Optional description for registered model."""
-
+    
    tags: Optional[List[ModelTag]] = None
    """Additional metadata for registered model."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateModelRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.name is not None:
-            body["name"] = self.name
-        if self.tags:
-            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.name is not None:
-            body["name"] = self.name
-        if self.tags:
-            body["tags"] = self.tags
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateModelRequest:
        """Deserializes the CreateModelRequest from a dictionary."""
-        return cls(
-            description=d.get("description", None), name=d.get("name", None), tags=_repeated_dict(d, "tags", ModelTag)
-        )
+        return cls(description=d.get('description', None), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ModelTag))
+
+


@dataclass
class CreateModelResponse:
    registered_model: Optional[Model] = None
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateModelResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.registered_model:
-            body["registered_model"] = self.registered_model.as_dict()
+        if self.registered_model: body['registered_model'] = self.registered_model.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateModelResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.registered_model:
-            body["registered_model"] = self.registered_model
+        if self.registered_model: body['registered_model'] = self.registered_model
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateModelResponse:
        """Deserializes the CreateModelResponse from a dictionary."""
-        return cls(registered_model=_from_dict(d, "registered_model", Model))
+        return cls(registered_model=_from_dict(d, 'registered_model', Model))
+
+
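# --- Editor's example (not part of the patch): registering a model, with ModelTag
# assumed to carry key/value fields like the other *Tag types in this module.
from databricks.sdk.service.ml import CreateModelRequest, ModelTag

req = CreateModelRequest(
    name="churn_classifier",
    description="Weekly-retrained churn model",
    tags=[ModelTag(key="owner", value="growth-team")],
)
body = req.as_dict()  # {'description': ..., 'name': ..., 'tags': [{'key': ..., 'value': ...}]}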
@dataclass
class CreateModelVersionRequest:
    name: str
    """Register model under this name"""
-
+    
    source: str
    """URI indicating the location of the model artifacts."""
-
+    
    description: Optional[str] = None
    """Optional description for model version."""
-
+    
    run_id: Optional[str] = None
    """MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow
    tracking server"""
-
+    
    run_link: Optional[str] = None
    """MLflow run link - this is the exact link of the run that generated this model version,
    potentially hosted at another instance of MLflow."""
-
+    
    tags: Optional[List[ModelVersionTag]] = None
    """Additional metadata for model version."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateModelVersionRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.name is not None:
-            body["name"] = self.name
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.run_link is not None:
-            body["run_link"] = self.run_link
-        if self.source is not None:
-            body["source"] = self.source
-        if self.tags:
-            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.name is not None:
-            body["name"] = self.name
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.run_link is not None:
-            body["run_link"] = self.run_link
-        if self.source is not None:
-            body["source"] = self.source
-        if self.tags:
-            body["tags"] = self.tags
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.tags: body['tags'] = self.tags
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionRequest:
        """Deserializes the CreateModelVersionRequest from a dictionary."""
-        return cls(
-            description=d.get("description", None),
-            name=d.get("name", None),
-            run_id=d.get("run_id", None),
-            run_link=d.get("run_link", None),
-            source=d.get("source", None),
-            tags=_repeated_dict(d, "tags", ModelVersionTag),
-        )
+        return cls(description=d.get('description', None), name=d.get('name', None), run_id=d.get('run_id', None), run_link=d.get('run_link', None), source=d.get('source', None), tags=_repeated_dict(d, 'tags', ModelVersionTag))
+
+


@dataclass
class CreateModelVersionResponse:
    model_version: Optional[ModelVersion] = None
    """Return new version number generated for this model in registry."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateModelVersionResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.model_version:
-            body["model_version"] = self.model_version.as_dict()
+        if self.model_version: body['model_version'] = self.model_version.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.model_version:
-            body["model_version"] = self.model_version
+        if self.model_version: body['model_version'] = self.model_version
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionResponse:
        """Deserializes the CreateModelVersionResponse from a dictionary."""
-        return cls(model_version=_from_dict(d, "model_version", ModelVersion))
+        return cls(model_version=_from_dict(d, 'model_version', ModelVersion))
+
+
@dataclass
@@ -1067,17 +804,18 @@ class CreateRegistryWebhook:
    to production.

    * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived."""
-
+    
    description: Optional[str] = None
    """User-specified description for the webhook."""
-
+    
    http_url_spec: Optional[HttpUrlSpec] = None
-
+    
    job_spec: Optional[JobSpec] = None
-
+    
    model_name: Optional[str] = None
-    """Name of the model whose events would trigger this webhook."""
-
+    """If model name is not specified, a registry-wide webhook is created that listens for the
+    specified events across all versions of all registered models."""
+    
    status: Optional[RegistryWebhookStatus] = None
    """Enable or disable triggering the webhook, or put the webhook into test mode. The default is
    `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.

@@ -1086,147 +824,116 @@ class CreateRegistryWebhook:
    * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a
    real event."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateRegistryWebhook into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.events:
-            body["events"] = [v.value for v in self.events]
-        if self.http_url_spec:
-            body["http_url_spec"] = self.http_url_spec.as_dict()
-        if self.job_spec:
-            body["job_spec"] = self.job_spec.as_dict()
-        if self.model_name is not None:
-            body["model_name"] = self.model_name
-        if self.status is not None:
-            body["status"] = self.status.value
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = [v.value for v in self.events]
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict()
+        if self.job_spec: body['job_spec'] = self.job_spec.as_dict()
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.status is not None: body['status'] = self.status.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.description is not None:
-            body["description"] = self.description
-        if self.events:
-            body["events"] = self.events
-        if self.http_url_spec:
-            body["http_url_spec"] = self.http_url_spec
-        if self.job_spec:
-            body["job_spec"] = self.job_spec
-        if self.model_name is not None:
-            body["model_name"] = self.model_name
-        if self.status is not None:
-            body["status"] = self.status
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = self.events
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
+        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.status is not None: body['status'] = self.status
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateRegistryWebhook:
        """Deserializes the CreateRegistryWebhook from a dictionary."""
-        return cls(
-            description=d.get("description", None),
-            events=_repeated_enum(d, "events", RegistryWebhookEvent),
-            http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec),
-            job_spec=_from_dict(d, "job_spec", JobSpec),
-            model_name=d.get("model_name", None),
-            status=_enum(d, "status", RegistryWebhookStatus),
-        )
+        return cls(description=d.get('description', None), events=_repeated_enum(d, 'events', RegistryWebhookEvent), http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpec), job_spec=_from_dict(d, 'job_spec', JobSpec), model_name=d.get('model_name', None), status=_enum(d, 'status', RegistryWebhookStatus))
+
+
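# --- Editor's example (not part of the patch), tied to the docstring change above:
# omitting model_name yields a registry-wide webhook. The event member and HttpUrlSpec's
# url field are assumed from this module's definitions; the URL is hypothetical.
from databricks.sdk.service.ml import CreateRegistryWebhook, HttpUrlSpec, RegistryWebhookEvent

hook = CreateRegistryWebhook(
    events=[RegistryWebhookEvent.MODEL_VERSION_CREATED],
    http_url_spec=HttpUrlSpec(url="https://example.com/registry-hook"),
    description="Fires for every new model version, across all registered models",
)
assert "model_name" not in hook.as_dict()  # no model_name -> registry-wide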
@dataclass
class CreateRun:
    experiment_id: Optional[str] = None
    """ID of the associated experiment."""
-
+    
    run_name: Optional[str] = None
    """The name of the run."""
-
+    
    start_time: Optional[int] = None
    """Unix timestamp in milliseconds of when the run started."""
-
+    
    tags: Optional[List[RunTag]] = None
    """Additional metadata for run."""
-
+    
    user_id: Optional[str] = None
    """ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be
    removed in a future MLflow release. Use 'mlflow.user' tag instead."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateRun into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
-        if self.run_name is not None:
-            body["run_name"] = self.run_name
-        if self.start_time is not None:
-            body["start_time"] = self.start_time
-        if self.tags:
-            body["tags"] = [v.as_dict() for v in self.tags]
-        if self.user_id is not None:
-            body["user_id"] = self.user_id
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.user_id is not None: body['user_id'] = self.user_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateRun into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
-        if self.run_name is not None:
-            body["run_name"] = self.run_name
-        if self.start_time is not None:
-            body["start_time"] = self.start_time
-        if self.tags:
-            body["tags"] = self.tags
-        if self.user_id is not None:
-            body["user_id"] = self.user_id
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateRun:
        """Deserializes the CreateRun from a dictionary."""
-        return cls(
-            experiment_id=d.get("experiment_id", None),
-            run_name=d.get("run_name", None),
-            start_time=d.get("start_time", None),
-            tags=_repeated_dict(d, "tags", RunTag),
-            user_id=d.get("user_id", None),
-        )
+        return cls(experiment_id=d.get('experiment_id', None), run_name=d.get('run_name', None), start_time=d.get('start_time', None), tags=_repeated_dict(d, 'tags', RunTag), user_id=d.get('user_id', None))
+
+


@dataclass
class CreateRunResponse:
    run: Optional[Run] = None
    """The newly created run."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateRunResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.run:
-            body["run"] = self.run.as_dict()
+        if self.run: body['run'] = self.run.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateRunResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.run:
-            body["run"] = self.run
+        if self.run: body['run'] = self.run
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateRunResponse:
        """Deserializes the CreateRunResponse from a dictionary."""
-        return cls(run=_from_dict(d, "run", Run))
+        return cls(run=_from_dict(d, 'run', Run))
+
+
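# --- Editor's example (not part of the patch): creating a run. The experiment ID is
# hypothetical; per the deprecation note above, the user is conveyed via the
# 'mlflow.user' tag rather than user_id.
import time

from databricks.sdk.service.ml import CreateRun, RunTag

req = CreateRun(
    experiment_id="1234567890",
    run_name="nightly-eval",
    start_time=int(time.time() * 1000),  # Unix timestamp in milliseconds
    tags=[RunTag(key="mlflow.user", value="someone@example.com")],
)
body = req.as_dict()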
@dataclass
class CreateTransitionRequest:
    name: str
    """Name of the model."""
-
+    
    version: str
    """Version of the model."""
-
+    
    stage: Stage
    """Target stage of the transition. Valid values are:

@@ -1237,202 +944,175 @@ class CreateTransitionRequest:
    * `Production`: Production stage.

    * `Archived`: Archived stage."""
-
+    
    comment: Optional[str] = None
    """User-provided comment on the action."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateTransitionRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.stage is not None:
-            body["stage"] = self.stage.value
-        if self.version is not None:
-            body["version"] = self.version
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage.value
+        if self.version is not None: body['version'] = self.version
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.name is not None:
-            body["name"] = self.name
-        if self.stage is not None:
-            body["stage"] = self.stage
-        if self.version is not None:
-            body["version"] = self.version
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequest:
        """Deserializes the CreateTransitionRequest from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            name=d.get("name", None),
-            stage=_enum(d, "stage", Stage),
-            version=d.get("version", None),
-        )
+        return cls(comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None))
+
+


@dataclass
class CreateTransitionRequestResponse:
    request: Optional[TransitionRequest] = None
    """Transition request details."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateTransitionRequestResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.request:
-            body["request"] = self.request.as_dict()
+        if self.request: body['request'] = self.request.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CreateTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.request:
-            body["request"] = self.request
+        if self.request: body['request'] = self.request
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequestResponse:
        """Deserializes the CreateTransitionRequestResponse from a dictionary."""
-        return cls(request=_from_dict(d, "request", TransitionRequest))
+        return cls(request=_from_dict(d, 'request', TransitionRequest))
+
+


@dataclass
class CreateWebhookResponse:
    webhook: Optional[RegistryWebhook] = None
-
+    
    def as_dict(self) -> dict:
        """Serializes the CreateWebhookResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.webhook:
-            body["webhook"] = self.webhook.as_dict()
+        if self.webhook: body['webhook'] = self.webhook.as_dict()
        return body
    def as_shallow_dict(self) -> dict:
        """Serializes the CreateWebhookResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.webhook:
-            body["webhook"] = self.webhook
+        if self.webhook: body['webhook'] = self.webhook
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CreateWebhookResponse:
        """Deserializes the CreateWebhookResponse from a dictionary."""
-        return cls(webhook=_from_dict(d, "webhook", RegistryWebhook))
+        return cls(webhook=_from_dict(d, 'webhook', RegistryWebhook))
+
+


@dataclass
class Dataset:
    """Dataset. Represents a reference to data used for training, testing, or evaluation during the
    model development process."""
-
+    
    name: str
    """The name of the dataset. E.g. “my.uc.table@2” “nyc-taxi-dataset”, “fantastic-elk-3”"""
-
+    
    digest: str
    """Dataset digest, e.g. an md5 hash of the dataset that uniquely identifies it within datasets of
    the same name."""
-
+    
    source_type: str
    """The type of the dataset source, e.g. ‘databricks-uc-table’, ‘DBFS’, ‘S3’, ..."""
-
+    
    source: str
    """Source information for the dataset. Note that the source may not exactly reproduce the dataset
    if it was transformed / modified before use with MLflow."""
-
+    
    profile: Optional[str] = None
    """The profile of the dataset. Summary statistics for the dataset, such as the number of rows in a
    table, the mean / std / mode of each column in a table, or the number of elements in an array."""
-
+    
    schema: Optional[str] = None
    """The schema of the dataset. E.g., MLflow ColSpec JSON for a dataframe, MLflow TensorSpec JSON
    for an ndarray, or another schema format."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the Dataset into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.digest is not None:
-            body["digest"] = self.digest
-        if self.name is not None:
-            body["name"] = self.name
-        if self.profile is not None:
-            body["profile"] = self.profile
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.source is not None:
-            body["source"] = self.source
-        if self.source_type is not None:
-            body["source_type"] = self.source_type
+        if self.digest is not None: body['digest'] = self.digest
+        if self.name is not None: body['name'] = self.name
+        if self.profile is not None: body['profile'] = self.profile
+        if self.schema is not None: body['schema'] = self.schema
+        if self.source is not None: body['source'] = self.source
+        if self.source_type is not None: body['source_type'] = self.source_type
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the Dataset into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.digest is not None:
-            body["digest"] = self.digest
-        if self.name is not None:
-            body["name"] = self.name
-        if self.profile is not None:
-            body["profile"] = self.profile
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.source is not None:
-            body["source"] = self.source
-        if self.source_type is not None:
-            body["source_type"] = self.source_type
+        if self.digest is not None: body['digest'] = self.digest
+        if self.name is not None: body['name'] = self.name
+        if self.profile is not None: body['profile'] = self.profile
+        if self.schema is not None: body['schema'] = self.schema
+        if self.source is not None: body['source'] = self.source
+        if self.source_type is not None: body['source_type'] = self.source_type
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> Dataset:
        """Deserializes the Dataset from a dictionary."""
-        return cls(
-            digest=d.get("digest", None),
-            name=d.get("name", None),
profile=d.get("profile", None), - schema=d.get("schema", None), - source=d.get("source", None), - source_type=d.get("source_type", None), - ) + return cls(digest=d.get('digest', None), name=d.get('name', None), profile=d.get('profile', None), schema=d.get('schema', None), source=d.get('source', None), source_type=d.get('source_type', None)) + + @dataclass class DatasetInput: """DatasetInput. Represents a dataset and input tags.""" - + dataset: Dataset """The dataset being used as a Run input.""" - + tags: Optional[List[InputTag]] = None """A list of tags for the dataset input, e.g. a “context” tag with value “training”""" - + def as_dict(self) -> dict: """Serializes the DatasetInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset: - body["dataset"] = self.dataset.as_dict() - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.dataset: body['dataset'] = self.dataset.as_dict() + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the DatasetInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset: - body["dataset"] = self.dataset - if self.tags: - body["tags"] = self.tags + if self.dataset: body['dataset'] = self.dataset + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatasetInput: """Deserializes the DatasetInput from a dictionary.""" - return cls(dataset=_from_dict(d, "dataset", Dataset), tags=_repeated_dict(d, "tags", InputTag)) + return cls(dataset=_from_dict(d, 'dataset', Dataset), tags=_repeated_dict(d, 'tags', InputTag)) + + + + + @dataclass @@ -1451,31 +1131,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCommentResponse: """Deserializes the DeleteCommentResponse from a dictionary.""" return cls() + + @dataclass class DeleteExperiment: experiment_id: str """ID of the associated experiment.""" - + def as_dict(self) -> dict: """Serializes the DeleteExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteExperiment: """Deserializes the DeleteExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None)) + return cls(experiment_id=d.get('experiment_id', None)) + + @dataclass @@ -1494,6 +1176,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteExperimentResponse: """Deserializes the DeleteExperimentResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -1512,6 +1199,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteLoggedModelResponse: """Deserializes the DeleteLoggedModelResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -1530,6 +1222,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteLoggedModelTagResponse: """Deserializes the DeleteLoggedModelTagResponse from a dictionary.""" return cls() + + + 
@dataclass
@@ -1451,31 +1131,33 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteCommentResponse:
        """Deserializes the DeleteCommentResponse from a dictionary."""
        return cls()
+
+


@dataclass
class DeleteExperiment:
    experiment_id: str
    """ID of the associated experiment."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the DeleteExperiment into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.experiment_id is not None:
-            body["experiment_id"] = self.experiment_id
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteExperiment:
        """Deserializes the DeleteExperiment from a dictionary."""
-        return cls(experiment_id=d.get("experiment_id", None))
+        return cls(experiment_id=d.get('experiment_id', None))
+
+


@dataclass
@@ -1494,6 +1176,11 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteExperimentResponse:
        """Deserializes the DeleteExperimentResponse from a dictionary."""
        return cls()
+
+
+
+
+


@dataclass
@@ -1512,6 +1199,11 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteLoggedModelResponse:
        """Deserializes the DeleteLoggedModelResponse from a dictionary."""
        return cls()
+
+
+
+
+


@dataclass
@@ -1530,6 +1222,11 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteLoggedModelTagResponse:
        """Deserializes the DeleteLoggedModelTagResponse from a dictionary."""
        return cls()
+
+
+
+
+


@dataclass
@@ -1548,6 +1245,11 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteModelResponse:
        """Deserializes the DeleteModelResponse from a dictionary."""
        return cls()
+
+
+
+
+


@dataclass
@@ -1566,6 +1268,11 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteModelTagResponse:
        """Deserializes the DeleteModelTagResponse from a dictionary."""
        return cls()
+
+
+
+
+


@dataclass
@@ -1584,6 +1291,11 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteModelVersionResponse:
        """Deserializes the DeleteModelVersionResponse from a dictionary."""
        return cls()
+
+
+
+
+


@dataclass
@@ -1602,31 +1314,33 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteModelVersionTagResponse:
        """Deserializes the DeleteModelVersionTagResponse from a dictionary."""
        return cls()
+
+


@dataclass
class DeleteRun:
    run_id: str
    """ID of the run to delete."""
-
+    
    def as_dict(self) -> dict:
        """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
+        if self.run_id is not None: body['run_id'] = self.run_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
+        if self.run_id is not None: body['run_id'] = self.run_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DeleteRun:
        """Deserializes the DeleteRun from a dictionary."""
-        return cls(run_id=d.get("run_id", None))
+        return cls(run_id=d.get('run_id', None))
+
+


@dataclass
@@ -1645,108 +1359,100 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse:
        """Deserializes the DeleteRunResponse from a dictionary."""
        return cls()
+
+


@dataclass
class DeleteRuns:
    experiment_id: str
    """The ID of the experiment containing the runs to delete."""
-
+    
    max_timestamp_millis: int
    """The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only
    runs created prior to or at this timestamp are deleted."""
-
+    
    max_runs: Optional[int] = None
    """An optional positive integer indicating the maximum number of runs to delete.
The maximum allowed value for max_runs is 10000.""" - + def as_dict(self) -> dict: """Serializes the DeleteRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.max_timestamp_millis is not None: - body["max_timestamp_millis"] = self.max_timestamp_millis + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.max_runs is not None: body['max_runs'] = self.max_runs + if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.max_timestamp_millis is not None: - body["max_timestamp_millis"] = self.max_timestamp_millis + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.max_runs is not None: body['max_runs'] = self.max_runs + if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRuns: """Deserializes the DeleteRuns from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - max_runs=d.get("max_runs", None), - max_timestamp_millis=d.get("max_timestamp_millis", None), - ) + return cls(experiment_id=d.get('experiment_id', None), max_runs=d.get('max_runs', None), max_timestamp_millis=d.get('max_timestamp_millis', None)) + + @dataclass class DeleteRunsResponse: runs_deleted: Optional[int] = None """The number of runs deleted.""" - + def as_dict(self) -> dict: """Serializes the DeleteRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.runs_deleted is not None: - body["runs_deleted"] = self.runs_deleted + if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.runs_deleted is not None: - body["runs_deleted"] = self.runs_deleted + if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRunsResponse: """Deserializes the DeleteRunsResponse from a dictionary.""" - return cls(runs_deleted=d.get("runs_deleted", None)) + return cls(runs_deleted=d.get('runs_deleted', None)) + + @dataclass class DeleteTag: run_id: str """ID of the run that the tag was logged under. Must be provided.""" - + key: str """Name of the tag. Maximum size is 255 bytes. 
Must be provided.""" - + def as_dict(self) -> dict: """Serializes the DeleteTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteTag: """Deserializes the DeleteTag from a dictionary.""" - return cls(key=d.get("key", None), run_id=d.get("run_id", None)) + return cls(key=d.get('key', None), run_id=d.get('run_id', None)) + + @dataclass @@ -1765,6 +1471,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteTagResponse: """Deserializes the DeleteTagResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -1783,14 +1494,19 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteTransitionRequestResponse: """Deserializes the DeleteTransitionRequestResponse from a dictionary.""" return cls() + + class DeleteTransitionRequestStage(Enum): + + + ARCHIVED = 'Archived' + NONE = 'None' + PRODUCTION = 'Production' + STAGING = 'Staging' + - ARCHIVED = "Archived" - NONE = "None" - PRODUCTION = "Production" - STAGING = "Staging" @dataclass @@ -1809,695 +1525,575 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteWebhookResponse: """Deserializes the DeleteWebhookResponse from a dictionary.""" return cls() + + @dataclass class Experiment: """An experiment and its metadata.""" - + artifact_location: Optional[str] = None """Location where artifacts for the experiment are stored.""" - + creation_time: Optional[int] = None """Creation time""" - + experiment_id: Optional[str] = None """Unique identifier for the experiment.""" - + last_update_time: Optional[int] = None """Last update time""" - + lifecycle_stage: Optional[str] = None """Current life cycle stage of the experiment: "active" or "deleted". 
Deleted experiments are not returned by APIs.""" - + name: Optional[str] = None """Human readable name that identifies the experiment.""" - + tags: Optional[List[ExperimentTag]] = None """Tags: Additional metadata key-value pairs.""" - + def as_dict(self) -> dict: """Serializes the Experiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_location is not None: - body["artifact_location"] = self.artifact_location - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.last_update_time is not None: - body["last_update_time"] = self.last_update_time - if self.lifecycle_stage is not None: - body["lifecycle_stage"] = self.lifecycle_stage - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.artifact_location is not None: body['artifact_location'] = self.artifact_location + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.last_update_time is not None: body['last_update_time'] = self.last_update_time + if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Experiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_location is not None: - body["artifact_location"] = self.artifact_location - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.last_update_time is not None: - body["last_update_time"] = self.last_update_time - if self.lifecycle_stage is not None: - body["lifecycle_stage"] = self.lifecycle_stage - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags + if self.artifact_location is not None: body['artifact_location'] = self.artifact_location + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.last_update_time is not None: body['last_update_time'] = self.last_update_time + if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Experiment: """Deserializes the Experiment from a dictionary.""" - return cls( - artifact_location=d.get("artifact_location", None), - creation_time=d.get("creation_time", None), - experiment_id=d.get("experiment_id", None), - last_update_time=d.get("last_update_time", None), - lifecycle_stage=d.get("lifecycle_stage", None), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", ExperimentTag), - ) + return cls(artifact_location=d.get('artifact_location', None), creation_time=d.get('creation_time', None), experiment_id=d.get('experiment_id', None), last_update_time=d.get('last_update_time', None), lifecycle_stage=d.get('lifecycle_stage', None), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ExperimentTag)) + + @dataclass class ExperimentAccessControlRequest: group_name: Optional[str] = None 
"""name of the group""" - + permission_level: Optional[ExperimentPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ExperimentAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentAccessControlRequest: """Deserializes the ExperimentAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", ExperimentPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class ExperimentAccessControlResponse: all_permissions: Optional[List[ExperimentPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ExperimentAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not 
None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentAccessControlResponse: """Deserializes the ExperimentAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", ExperimentPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', ExperimentPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class ExperimentPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[ExperimentPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ExperimentPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if 
self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermission: """Deserializes the ExperimentPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", ExperimentPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel)) + + class ExperimentPermissionLevel(Enum): """Permission level""" - - CAN_EDIT = "CAN_EDIT" - CAN_MANAGE = "CAN_MANAGE" - CAN_READ = "CAN_READ" - + + CAN_EDIT = 'CAN_EDIT' + CAN_MANAGE = 'CAN_MANAGE' + CAN_READ = 'CAN_READ' @dataclass class ExperimentPermissions: access_control_list: Optional[List[ExperimentAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ExperimentPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissions: """Deserializes the ExperimentPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', ExperimentAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class ExperimentPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[ExperimentPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ExperimentPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: 
body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsDescription: """Deserializes the ExperimentPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", ExperimentPermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel)) + + @dataclass class ExperimentPermissionsRequest: access_control_list: Optional[List[ExperimentAccessControlRequest]] = None - + experiment_id: Optional[str] = None """The experiment for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the ExperimentPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsRequest: """Deserializes the ExperimentPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlRequest), - experiment_id=d.get("experiment_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', ExperimentAccessControlRequest), experiment_id=d.get('experiment_id', None)) + + @dataclass class ExperimentTag: """A tag for an experiment.""" - + key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the ExperimentTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + 
if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentTag: """Deserializes the ExperimentTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class FileInfo: """Metadata of a single artifact file or directory.""" - + file_size: Optional[int] = None """The size in bytes of the file. Unset for directories.""" - + is_dir: Optional[bool] = None """Whether the path is a directory.""" - + path: Optional[str] = None """The path relative to the root artifact directory of the run.""" - + def as_dict(self) -> dict: """Serializes the FileInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_size is not None: - body["file_size"] = self.file_size - if self.is_dir is not None: - body["is_dir"] = self.is_dir - if self.path is not None: - body["path"] = self.path + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_dir is not None: body['is_dir'] = self.is_dir + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the FileInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_size is not None: - body["file_size"] = self.file_size - if self.is_dir is not None: - body["is_dir"] = self.is_dir - if self.path is not None: - body["path"] = self.path + if self.file_size is not None: body['file_size'] = self.file_size + if self.is_dir is not None: body['is_dir'] = self.is_dir + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileInfo: """Deserializes the FileInfo from a dictionary.""" - return cls(file_size=d.get("file_size", None), is_dir=d.get("is_dir", None), path=d.get("path", None)) + return cls(file_size=d.get('file_size', None), is_dir=d.get('is_dir', None), path=d.get('path', None)) + + @dataclass class FinalizeLoggedModelRequest: status: LoggedModelStatus """Whether or not the model is ready for use.
``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that - something went wrong when logging the model weights / agent code).""" - + something went wrong when logging the model weights / agent code.""" + model_id: Optional[str] = None """The ID of the logged model to finalize.""" - + def as_dict(self) -> dict: """Serializes the FinalizeLoggedModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.status is not None: - body["status"] = self.status.value + if self.model_id is not None: body['model_id'] = self.model_id + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the FinalizeLoggedModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.status is not None: - body["status"] = self.status + if self.model_id is not None: body['model_id'] = self.model_id + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FinalizeLoggedModelRequest: """Deserializes the FinalizeLoggedModelRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), status=_enum(d, "status", LoggedModelStatus)) + return cls(model_id=d.get('model_id', None), status=_enum(d, 'status', LoggedModelStatus)) + + @dataclass class FinalizeLoggedModelResponse: model: Optional[LoggedModel] = None """The updated logged model.""" - + def as_dict(self) -> dict: """Serializes the FinalizeLoggedModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model: - body["model"] = self.model.as_dict() + if self.model: body['model'] = self.model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the FinalizeLoggedModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model: - body["model"] = self.model + if self.model: body['model'] = self.model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FinalizeLoggedModelResponse: """Deserializes the FinalizeLoggedModelResponse from a dictionary.""" - return cls(model=_from_dict(d, "model", LoggedModel)) + return cls(model=_from_dict(d, 'model', LoggedModel)) + + @dataclass class ForecastingExperiment: """Represents a forecasting experiment with its unique identifier, URL, and state.""" - + experiment_id: Optional[str] = None """The unique ID for the forecasting experiment.""" - + experiment_page_url: Optional[str] = None """The URL to the forecasting experiment page.""" - + state: Optional[ForecastingExperimentState] = None """The current state of the forecasting experiment.""" - + def as_dict(self) -> dict: """Serializes the ForecastingExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.experiment_page_url is not None: - body["experiment_page_url"] = self.experiment_page_url - if self.state is not None: - body["state"] = self.state.value + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_page_url is not None: body['experiment_page_url'] = self.experiment_page_url + if self.state is not None: body['state'] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the ForecastingExperiment into a shallow dictionary of its immediate attributes.""" 
body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.experiment_page_url is not None: - body["experiment_page_url"] = self.experiment_page_url - if self.state is not None: - body["state"] = self.state + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_page_url is not None: body['experiment_page_url'] = self.experiment_page_url + if self.state is not None: body['state'] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForecastingExperiment: """Deserializes the ForecastingExperiment from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - experiment_page_url=d.get("experiment_page_url", None), - state=_enum(d, "state", ForecastingExperimentState), - ) + return cls(experiment_id=d.get('experiment_id', None), experiment_page_url=d.get('experiment_page_url', None), state=_enum(d, 'state', ForecastingExperimentState)) + + class ForecastingExperimentState(Enum): + + + CANCELLED = 'CANCELLED' + FAILED = 'FAILED' + PENDING = 'PENDING' + RUNNING = 'RUNNING' + SUCCEEDED = 'SUCCEEDED' - CANCELLED = "CANCELLED" - FAILED = "FAILED" - PENDING = "PENDING" - RUNNING = "RUNNING" - SUCCEEDED = "SUCCEEDED" -@dataclass -class GetCredentialsForTraceDataDownloadResponse: - credential_info: Optional[ArtifactCredentialInfo] = None - """The artifact download credentials for the specified trace data.""" +@dataclass +class GetExperimentByNameResponse: + experiment: Optional[Experiment] = None + """Experiment details.""" + def as_dict(self) -> dict: - """Serializes the GetCredentialsForTraceDataDownloadResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetExperimentByNameResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() + if self.experiment: body['experiment'] = self.experiment.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the GetCredentialsForTraceDataDownloadResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the GetExperimentByNameResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_info: - body["credential_info"] = self.credential_info + if self.experiment: body['experiment'] = self.experiment return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetCredentialsForTraceDataDownloadResponse: - """Deserializes the GetCredentialsForTraceDataDownloadResponse from a dictionary.""" - return cls(credential_info=_from_dict(d, "credential_info", ArtifactCredentialInfo)) + def from_dict(cls, d: Dict[str, Any]) -> GetExperimentByNameResponse: + """Deserializes the GetExperimentByNameResponse from a dictionary.""" + return cls(experiment=_from_dict(d, 'experiment', Experiment)) + -@dataclass -class GetCredentialsForTraceDataUploadResponse: - credential_info: Optional[ArtifactCredentialInfo] = None - """The artifact upload credentials for the specified trace data.""" - def as_dict(self) -> dict: - """Serializes the GetCredentialsForTraceDataUploadResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() - return body - def as_shallow_dict(self) -> dict: - """Serializes the GetCredentialsForTraceDataUploadResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if 
self.credential_info: - body["credential_info"] = self.credential_info - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetCredentialsForTraceDataUploadResponse: - """Deserializes the GetCredentialsForTraceDataUploadResponse from a dictionary.""" - return cls(credential_info=_from_dict(d, "credential_info", ArtifactCredentialInfo)) - - -@dataclass -class GetExperimentByNameResponse: - experiment: Optional[Experiment] = None - """Experiment details.""" - - def as_dict(self) -> dict: - """Serializes the GetExperimentByNameResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment: - body["experiment"] = self.experiment.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetExperimentByNameResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment: - body["experiment"] = self.experiment - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetExperimentByNameResponse: - """Deserializes the GetExperimentByNameResponse from a dictionary.""" - return cls(experiment=_from_dict(d, "experiment", Experiment)) @dataclass class GetExperimentPermissionLevelsResponse: permission_levels: Optional[List[ExperimentPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetExperimentPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetExperimentPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetExperimentPermissionLevelsResponse: """Deserializes the GetExperimentPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", ExperimentPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', ExperimentPermissionsDescription)) + + + + + + + + @dataclass class GetExperimentResponse: experiment: Optional[Experiment] = None """Experiment details.""" - + def as_dict(self) -> dict: """Serializes the GetExperimentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment: - body["experiment"] = self.experiment.as_dict() + if self.experiment: body['experiment'] = self.experiment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetExperimentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment: - body["experiment"] = self.experiment + if self.experiment: body['experiment'] = self.experiment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetExperimentResponse: """Deserializes the GetExperimentResponse from a dictionary.""" - return cls(experiment=_from_dict(d, "experiment", Experiment)) + return cls(experiment=_from_dict(d, 'experiment', Experiment)) + + + + + + + + @dataclass class GetLatestVersionsRequest: name: str """Registered model unique name identifier.""" - + stages: Optional[List[str]] = None 
"""List of stages.""" - + def as_dict(self) -> dict: """Serializes the GetLatestVersionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.stages: - body["stages"] = [v for v in self.stages] + if self.name is not None: body['name'] = self.name + if self.stages: body['stages'] = [v for v in self.stages] return body def as_shallow_dict(self) -> dict: """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.stages: - body["stages"] = self.stages + if self.name is not None: body['name'] = self.name + if self.stages: body['stages'] = self.stages return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionsRequest: """Deserializes the GetLatestVersionsRequest from a dictionary.""" - return cls(name=d.get("name", None), stages=d.get("stages", None)) + return cls(name=d.get('name', None), stages=d.get('stages', None)) + + @dataclass @@ -2505,50 +2101,53 @@ class GetLatestVersionsResponse: model_versions: Optional[List[ModelVersion]] = None """Latest version models for each requests stage. Only return models with current `READY` status. If no `stages` provided, returns the latest version for each stage, including `"None"`.""" - + def as_dict(self) -> dict: """Serializes the GetLatestVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_versions: - body["model_versions"] = [v.as_dict() for v in self.model_versions] + if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions] return body def as_shallow_dict(self) -> dict: """Serializes the GetLatestVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_versions: - body["model_versions"] = self.model_versions + if self.model_versions: body['model_versions'] = self.model_versions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionsResponse: """Deserializes the GetLatestVersionsResponse from a dictionary.""" - return cls(model_versions=_repeated_dict(d, "model_versions", ModelVersion)) + return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersion)) + + + + + @dataclass class GetLoggedModelResponse: model: Optional[LoggedModel] = None """The retrieved logged model.""" - + def as_dict(self) -> dict: """Serializes the GetLoggedModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model: - body["model"] = self.model.as_dict() + if self.model: body['model'] = self.model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetLoggedModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model: - body["model"] = self.model + if self.model: body['model'] = self.model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelResponse: """Deserializes the GetLoggedModelResponse from a dictionary.""" - return cls(model=_from_dict(d, "model", LoggedModel)) + return cls(model=_from_dict(d, 'model', LoggedModel)) + + @dataclass @@ -2557,214 +2156,219 @@ class GetMetricHistoryResponse: """All logged values for this metric if `max_results` is not specified in the request or if the total count of metrics returned is less than the service level pagination threshold. 
Otherwise, this is one page of results.""" - + next_page_token: Optional[str] = None """A token that can be used to issue a query for the next page of metric history values. A missing token indicates that no additional metrics are available to fetch.""" - + def as_dict(self) -> dict: """Serializes the GetMetricHistoryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: - body["metrics"] = [v.as_dict() for v in self.metrics] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetMetricHistoryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: - body["metrics"] = self.metrics - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.metrics: body['metrics'] = self.metrics + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetMetricHistoryResponse: """Deserializes the GetMetricHistoryResponse from a dictionary.""" - return cls(metrics=_repeated_dict(d, "metrics", Metric), next_page_token=d.get("next_page_token", None)) + return cls(metrics=_repeated_dict(d, 'metrics', Metric), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class GetModelResponse: registered_model_databricks: Optional[ModelDatabricks] = None - + def as_dict(self) -> dict: """Serializes the GetModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.registered_model_databricks: - body["registered_model_databricks"] = self.registered_model_databricks.as_dict() + if self.registered_model_databricks: body['registered_model_databricks'] = self.registered_model_databricks.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.registered_model_databricks: - body["registered_model_databricks"] = self.registered_model_databricks + if self.registered_model_databricks: body['registered_model_databricks'] = self.registered_model_databricks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetModelResponse: """Deserializes the GetModelResponse from a dictionary.""" - return cls(registered_model_databricks=_from_dict(d, "registered_model_databricks", ModelDatabricks)) + return cls(registered_model_databricks=_from_dict(d, 'registered_model_databricks', ModelDatabricks)) + + + + + @dataclass class GetModelVersionDownloadUriResponse: artifact_uri: Optional[str] = None """URI corresponding to where artifacts for this model version are stored.""" - + def as_dict(self) -> dict: """Serializes the GetModelVersionDownloadUriResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_uri is not None: - body["artifact_uri"] = self.artifact_uri + if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri return body def as_shallow_dict(self) -> dict: """Serializes the GetModelVersionDownloadUriResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_uri is not None: - body["artifact_uri"] = self.artifact_uri + if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri 
return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetModelVersionDownloadUriResponse: """Deserializes the GetModelVersionDownloadUriResponse from a dictionary.""" - return cls(artifact_uri=d.get("artifact_uri", None)) + return cls(artifact_uri=d.get('artifact_uri', None)) + + + + + @dataclass class GetModelVersionResponse: model_version: Optional[ModelVersion] = None - + def as_dict(self) -> dict: """Serializes the GetModelVersionResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_version: - body["model_version"] = self.model_version.as_dict() + if self.model_version: body['model_version'] = self.model_version.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetModelVersionResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_version: - body["model_version"] = self.model_version + if self.model_version: body['model_version'] = self.model_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetModelVersionResponse: """Deserializes the GetModelVersionResponse from a dictionary.""" - return cls(model_version=_from_dict(d, "model_version", ModelVersion)) + return cls(model_version=_from_dict(d, 'model_version', ModelVersion)) + + + + + @dataclass class GetRegisteredModelPermissionLevelsResponse: permission_levels: Optional[List[RegisteredModelPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetRegisteredModelPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetRegisteredModelPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRegisteredModelPermissionLevelsResponse: """Deserializes the GetRegisteredModelPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", RegisteredModelPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', RegisteredModelPermissionsDescription)) + + + + + + + + @dataclass class GetRunResponse: run: Optional[Run] = None """Run metadata (name, start time, etc) and data (metrics, params, and tags).""" - + def as_dict(self) -> dict: """Serializes the GetRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run: - body["run"] = self.run.as_dict() + if self.run: body['run'] = self.run.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.run: - body["run"] = self.run + if self.run: body['run'] = self.run return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRunResponse: """Deserializes the GetRunResponse from a dictionary.""" - return cls(run=_from_dict(d, "run", Run)) + return cls(run=_from_dict(d, 'run', Run)) + + @dataclass class HttpUrlSpec: url: str """External HTTPS URL called on event trigger (by using a POST 
request).""" - + authorization: Optional[str] = None """Value of the authorization header that should be sent in the request sent by the wehbook. It should be of the form `" "`. If set to an empty string, no authorization header will be included in the request.""" - + enable_ssl_verification: Optional[bool] = None """Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. For security purposes, it is encouraged to perform secret validation with the HMAC-encoded portion of the payload and acknowledge the risk associated with disabling hostname validation whereby it becomes more likely that requests can be maliciously routed to an unintended host.""" - + secret: Optional[str] = None """Shared secret required for HMAC encoding payload. The HMAC-encoded payload will be sent in the header as: { "X-Databricks-Signature": $encoded_payload }.""" - + def as_dict(self) -> dict: """Serializes the HttpUrlSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authorization is not None: - body["authorization"] = self.authorization - if self.enable_ssl_verification is not None: - body["enable_ssl_verification"] = self.enable_ssl_verification - if self.secret is not None: - body["secret"] = self.secret - if self.url is not None: - body["url"] = self.url + if self.authorization is not None: body['authorization'] = self.authorization + if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification + if self.secret is not None: body['secret'] = self.secret + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the HttpUrlSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.authorization is not None: - body["authorization"] = self.authorization - if self.enable_ssl_verification is not None: - body["enable_ssl_verification"] = self.enable_ssl_verification - if self.secret is not None: - body["secret"] = self.secret - if self.url is not None: - body["url"] = self.url + if self.authorization is not None: body['authorization'] = self.authorization + if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification + if self.secret is not None: body['secret'] = self.secret + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> HttpUrlSpec: """Deserializes the HttpUrlSpec from a dictionary.""" - return cls( - authorization=d.get("authorization", None), - enable_ssl_verification=d.get("enable_ssl_verification", None), - secret=d.get("secret", None), - url=d.get("url", None), - ) + return cls(authorization=d.get('authorization', None), enable_ssl_verification=d.get('enable_ssl_verification', None), secret=d.get('secret', None), url=d.get('url', None)) + + @dataclass @@ -2775,358 +2379,295 @@ class HttpUrlSpecWithoutSecret: security purposes, it is encouraged to perform secret validation with the HMAC-encoded portion of the payload and acknowledge the risk associated with disabling hostname validation whereby it becomes more likely that requests can be maliciously routed to an unintended host.""" - + url: Optional[str] = None """External HTTPS URL called on event trigger (by using a POST request).""" - + def as_dict(self) -> dict: """Serializes the HttpUrlSpecWithoutSecret into a dictionary suitable for use as a JSON request 
body.""" body = {} - if self.enable_ssl_verification is not None: - body["enable_ssl_verification"] = self.enable_ssl_verification - if self.url is not None: - body["url"] = self.url + if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the HttpUrlSpecWithoutSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.enable_ssl_verification is not None: - body["enable_ssl_verification"] = self.enable_ssl_verification - if self.url is not None: - body["url"] = self.url + if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> HttpUrlSpecWithoutSecret: """Deserializes the HttpUrlSpecWithoutSecret from a dictionary.""" - return cls(enable_ssl_verification=d.get("enable_ssl_verification", None), url=d.get("url", None)) + return cls(enable_ssl_verification=d.get('enable_ssl_verification', None), url=d.get('url', None)) + + @dataclass class InputTag: """Tag for a dataset input.""" - + key: str """The tag key.""" - + value: str """The tag value.""" - + def as_dict(self) -> dict: """Serializes the InputTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the InputTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InputTag: """Deserializes the InputTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class JobSpec: job_id: str """ID of the job that the webhook runs.""" - + access_token: str """The personal access token used to authorize webhook's job runs.""" - + workspace_url: Optional[str] = None """URL of the workspace containing the job that this webhook runs. 
If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.""" - + def as_dict(self) -> dict: """Serializes the JobSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_token is not None: - body["access_token"] = self.access_token - if self.job_id is not None: - body["job_id"] = self.job_id - if self.workspace_url is not None: - body["workspace_url"] = self.workspace_url + if self.access_token is not None: body['access_token'] = self.access_token + if self.job_id is not None: body['job_id'] = self.job_id + if self.workspace_url is not None: body['workspace_url'] = self.workspace_url return body def as_shallow_dict(self) -> dict: """Serializes the JobSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_token is not None: - body["access_token"] = self.access_token - if self.job_id is not None: - body["job_id"] = self.job_id - if self.workspace_url is not None: - body["workspace_url"] = self.workspace_url + if self.access_token is not None: body['access_token'] = self.access_token + if self.job_id is not None: body['job_id'] = self.job_id + if self.workspace_url is not None: body['workspace_url'] = self.workspace_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSpec: """Deserializes the JobSpec from a dictionary.""" - return cls( - access_token=d.get("access_token", None), - job_id=d.get("job_id", None), - workspace_url=d.get("workspace_url", None), - ) + return cls(access_token=d.get('access_token', None), job_id=d.get('job_id', None), workspace_url=d.get('workspace_url', None)) + + @dataclass class JobSpecWithoutSecret: job_id: Optional[str] = None """ID of the job that the webhook runs.""" - + workspace_url: Optional[str] = None """URL of the workspace containing the job that this webhook runs. Defaults to the workspace URL in which the webhook is created. 
If not specified, the job’s workspace is assumed to be the same as the webhook’s.""" - + def as_dict(self) -> dict: """Serializes the JobSpecWithoutSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.workspace_url is not None: - body["workspace_url"] = self.workspace_url + if self.job_id is not None: body['job_id'] = self.job_id + if self.workspace_url is not None: body['workspace_url'] = self.workspace_url return body def as_shallow_dict(self) -> dict: """Serializes the JobSpecWithoutSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.workspace_url is not None: - body["workspace_url"] = self.workspace_url + if self.job_id is not None: body['job_id'] = self.job_id + if self.workspace_url is not None: body['workspace_url'] = self.workspace_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSpecWithoutSecret: """Deserializes the JobSpecWithoutSecret from a dictionary.""" - return cls(job_id=d.get("job_id", None), workspace_url=d.get("workspace_url", None)) + return cls(job_id=d.get('job_id', None), workspace_url=d.get('workspace_url', None)) + + + + + @dataclass class ListArtifactsResponse: files: Optional[List[FileInfo]] = None """The file location and metadata for artifacts.""" - + next_page_token: Optional[str] = None """The token that can be used to retrieve the next page of artifact results.""" - + root_uri: Optional[str] = None """The root artifact directory for the run.""" - + def as_dict(self) -> dict: """Serializes the ListArtifactsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.files: - body["files"] = [v.as_dict() for v in self.files] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.root_uri is not None: - body["root_uri"] = self.root_uri + if self.files: body['files'] = [v.as_dict() for v in self.files] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.root_uri is not None: body['root_uri'] = self.root_uri return body def as_shallow_dict(self) -> dict: """Serializes the ListArtifactsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.files: - body["files"] = self.files - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.root_uri is not None: - body["root_uri"] = self.root_uri + if self.files: body['files'] = self.files + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.root_uri is not None: body['root_uri'] = self.root_uri return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListArtifactsResponse: """Deserializes the ListArtifactsResponse from a dictionary.""" - return cls( - files=_repeated_dict(d, "files", FileInfo), - next_page_token=d.get("next_page_token", None), - root_uri=d.get("root_uri", None), - ) + return cls(files=_repeated_dict(d, 'files', FileInfo), next_page_token=d.get('next_page_token', None), root_uri=d.get('root_uri', None)) + + + + + @dataclass class ListExperimentsResponse: experiments: Optional[List[Experiment]] = None """Paginated Experiments beginning with the first item on the requested page.""" - + next_page_token: Optional[str] = None """Token that can be used to retrieve the next page of experiments. 
Empty token means no more experiment is available for retrieval.""" - + def as_dict(self) -> dict: """Serializes the ListExperimentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiments: - body["experiments"] = [v.as_dict() for v in self.experiments] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.experiments: body['experiments'] = [v.as_dict() for v in self.experiments] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListExperimentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiments: - body["experiments"] = self.experiments - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.experiments: body['experiments'] = self.experiments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListExperimentsResponse: """Deserializes the ListExperimentsResponse from a dictionary.""" - return cls( - experiments=_repeated_dict(d, "experiments", Experiment), next_page_token=d.get("next_page_token", None) - ) - - -@dataclass -class ListLoggedModelArtifactsResponse: - files: Optional[List[FileInfo]] = None - """File location and metadata for artifacts.""" - - next_page_token: Optional[str] = None - """Token that can be used to retrieve the next page of artifact results""" + return cls(experiments=_repeated_dict(d, 'experiments', Experiment), next_page_token=d.get('next_page_token', None)) + - root_uri: Optional[str] = None - """Root artifact directory for the logged model.""" - def as_dict(self) -> dict: - """Serializes the ListLoggedModelArtifactsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.files: - body["files"] = [v.as_dict() for v in self.files] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.root_uri is not None: - body["root_uri"] = self.root_uri - return body - def as_shallow_dict(self) -> dict: - """Serializes the ListLoggedModelArtifactsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.files: - body["files"] = self.files - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.root_uri is not None: - body["root_uri"] = self.root_uri - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListLoggedModelArtifactsResponse: - """Deserializes the ListLoggedModelArtifactsResponse from a dictionary.""" - return cls( - files=_repeated_dict(d, "files", FileInfo), - next_page_token=d.get("next_page_token", None), - root_uri=d.get("root_uri", None), - ) @dataclass class ListModelsResponse: next_page_token: Optional[str] = None """Pagination token to request next page of models for the same query.""" - + registered_models: Optional[List[Model]] = None - + def as_dict(self) -> dict: """Serializes the ListModelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = [v.as_dict() for v in self.registered_models] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = [v.as_dict() for 
v in self.registered_models] return body def as_shallow_dict(self) -> dict: """Serializes the ListModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = self.registered_models + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = self.registered_models return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListModelsResponse: """Deserializes the ListModelsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - registered_models=_repeated_dict(d, "registered_models", Model), - ) + return cls(next_page_token=d.get('next_page_token', None), registered_models=_repeated_dict(d, 'registered_models', Model)) + + @dataclass class ListRegistryWebhooks: next_page_token: Optional[str] = None """Token that can be used to retrieve the next page of artifact results""" - + webhooks: Optional[List[RegistryWebhook]] = None """Array of registry webhooks.""" - + def as_dict(self) -> dict: """Serializes the ListRegistryWebhooks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.webhooks: - body["webhooks"] = [v.as_dict() for v in self.webhooks] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.webhooks: body['webhooks'] = [v.as_dict() for v in self.webhooks] return body def as_shallow_dict(self) -> dict: """Serializes the ListRegistryWebhooks into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.webhooks: - body["webhooks"] = self.webhooks + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.webhooks: body['webhooks'] = self.webhooks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListRegistryWebhooks: """Deserializes the ListRegistryWebhooks from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), webhooks=_repeated_dict(d, "webhooks", RegistryWebhook) - ) + return cls(next_page_token=d.get('next_page_token', None), webhooks=_repeated_dict(d, 'webhooks', RegistryWebhook)) + + + + + @dataclass class ListTransitionRequestsResponse: requests: Optional[List[Activity]] = None """Array of open transition requests.""" - + def as_dict(self) -> dict: """Serializes the ListTransitionRequestsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.requests: - body["requests"] = [v.as_dict() for v in self.requests] + if self.requests: body['requests'] = [v.as_dict() for v in self.requests] return body def as_shallow_dict(self) -> dict: """Serializes the ListTransitionRequestsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.requests: - body["requests"] = self.requests + if self.requests: body['requests'] = self.requests return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListTransitionRequestsResponse: """Deserializes the ListTransitionRequestsResponse from a dictionary.""" - return cls(requests=_repeated_dict(d, "requests", Activity)) + return cls(requests=_repeated_dict(d, 'requests', Activity)) + + + + + @dataclass @@ -3134,53 +2675,42 @@ class LogBatch: metrics: 
Optional[List[Metric]] = None """Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and tags in total.""" - + params: Optional[List[Param]] = None """Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, and tags in total.""" - + run_id: Optional[str] = None """ID of the run to log under""" - + tags: Optional[List[RunTag]] = None """Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags in total.""" - + def as_dict(self) -> dict: """Serializes the LogBatch into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: - body["metrics"] = [v.as_dict() for v in self.metrics] - if self.params: - body["params"] = [v.as_dict() for v in self.params] - if self.run_id is not None: - body["run_id"] = self.run_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] + if self.params: body['params'] = [v.as_dict() for v in self.params] + if self.run_id is not None: body['run_id'] = self.run_id + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the LogBatch into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: - body["metrics"] = self.metrics - if self.params: - body["params"] = self.params - if self.run_id is not None: - body["run_id"] = self.run_id - if self.tags: - body["tags"] = self.tags + if self.metrics: body['metrics'] = self.metrics + if self.params: body['params'] = self.params + if self.run_id is not None: body['run_id'] = self.run_id + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogBatch: """Deserializes the LogBatch from a dictionary.""" - return cls( - metrics=_repeated_dict(d, "metrics", Metric), - params=_repeated_dict(d, "params", Param), - run_id=d.get("run_id", None), - tags=_repeated_dict(d, "tags", RunTag), - ) + return cls(metrics=_repeated_dict(d, 'metrics', Metric), params=_repeated_dict(d, 'params', Param), run_id=d.get('run_id', None), tags=_repeated_dict(d, 'tags', RunTag)) + + @dataclass @@ -3199,49 +2729,43 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogBatchResponse: """Deserializes the LogBatchResponse from a dictionary.""" return cls() + + @dataclass class LogInputs: run_id: str """ID of the run to log under""" - + datasets: Optional[List[DatasetInput]] = None """Dataset inputs""" - + models: Optional[List[ModelInput]] = None """Model inputs""" - + def as_dict(self) -> dict: """Serializes the LogInputs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.models: - body["models"] = [v.as_dict() for v in self.models] - if self.run_id is not None: - body["run_id"] = self.run_id + if self.datasets: body['datasets'] = [v.as_dict() for v in self.datasets] + if self.models: body['models'] = [v.as_dict() for v in self.models] + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the LogInputs into a shallow dictionary of its immediate attributes.""" body = {} - if self.datasets: - body["datasets"] = self.datasets - if self.models: - body["models"] = self.models - if self.run_id is not None: - body["run_id"] = self.run_id + if self.datasets: body['datasets'] = 
self.datasets + if self.models: body['models'] = self.models + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogInputs: """Deserializes the LogInputs from a dictionary.""" - return cls( - datasets=_repeated_dict(d, "datasets", DatasetInput), - models=_repeated_dict(d, "models", ModelInput), - run_id=d.get("run_id", None), - ) + return cls(datasets=_repeated_dict(d, 'datasets', DatasetInput), models=_repeated_dict(d, 'models', ModelInput), run_id=d.get('run_id', None)) + + @dataclass @@ -3260,38 +2784,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogInputsResponse: """Deserializes the LogInputsResponse from a dictionary.""" return cls() + + @dataclass class LogLoggedModelParamsRequest: model_id: Optional[str] = None """The ID of the logged model to log params for.""" - + params: Optional[List[LoggedModelParameter]] = None """Parameters to attach to the model.""" - + def as_dict(self) -> dict: """Serializes the LogLoggedModelParamsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.params: - body["params"] = [v.as_dict() for v in self.params] + if self.model_id is not None: body['model_id'] = self.model_id + if self.params: body['params'] = [v.as_dict() for v in self.params] return body def as_shallow_dict(self) -> dict: """Serializes the LogLoggedModelParamsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.params: - body["params"] = self.params + if self.model_id is not None: body['model_id'] = self.model_id + if self.params: body['params'] = self.params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequest: """Deserializes the LogLoggedModelParamsRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), params=_repeated_dict(d, "params", LoggedModelParameter)) + return cls(model_id=d.get('model_id', None), params=_repeated_dict(d, 'params', LoggedModelParameter)) + + @dataclass @@ -3310,100 +2834,76 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequestResponse: """Deserializes the LogLoggedModelParamsRequestResponse from a dictionary.""" return cls() + + @dataclass class LogMetric: key: str """Name of the metric.""" - + value: float """Double value of the metric being logged.""" - + timestamp: int """Unix timestamp in milliseconds at the time metric was logged.""" - + dataset_digest: Optional[str] = None """Dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that uniquely identifies it within datasets of the same name.""" - + dataset_name: Optional[str] = None """The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, “fantastic-elk-3”""" - + model_id: Optional[str] = None """ID of the logged model associated with the metric, if applicable""" - + run_id: Optional[str] = None """ID of the run under which to log the metric. Must be provided.""" - + run_uuid: Optional[str] = None """[Deprecated, use `run_id` instead] ID of the run under which to log the metric. 
     This field will be removed in a future MLflow version."""
-
+    
     step: Optional[int] = None
     """Step at which to log the metric"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the LogMetric into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dataset_digest is not None:
-            body["dataset_digest"] = self.dataset_digest
-        if self.dataset_name is not None:
-            body["dataset_name"] = self.dataset_name
-        if self.key is not None:
-            body["key"] = self.key
-        if self.model_id is not None:
-            body["model_id"] = self.model_id
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.run_uuid is not None:
-            body["run_uuid"] = self.run_uuid
-        if self.step is not None:
-            body["step"] = self.step
-        if self.timestamp is not None:
-            body["timestamp"] = self.timestamp
-        if self.value is not None:
-            body["value"] = self.value
+        if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest
+        if self.dataset_name is not None: body['dataset_name'] = self.dataset_name
+        if self.key is not None: body['key'] = self.key
+        if self.model_id is not None: body['model_id'] = self.model_id
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.step is not None: body['step'] = self.step
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.value is not None: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LogMetric into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dataset_digest is not None:
-            body["dataset_digest"] = self.dataset_digest
-        if self.dataset_name is not None:
-            body["dataset_name"] = self.dataset_name
-        if self.key is not None:
-            body["key"] = self.key
-        if self.model_id is not None:
-            body["model_id"] = self.model_id
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.run_uuid is not None:
-            body["run_uuid"] = self.run_uuid
-        if self.step is not None:
-            body["step"] = self.step
-        if self.timestamp is not None:
-            body["timestamp"] = self.timestamp
-        if self.value is not None:
-            body["value"] = self.value
+        if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest
+        if self.dataset_name is not None: body['dataset_name'] = self.dataset_name
+        if self.key is not None: body['key'] = self.key
+        if self.model_id is not None: body['model_id'] = self.model_id
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.step is not None: body['step'] = self.step
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.value is not None: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LogMetric:
         """Deserializes the LogMetric from a dictionary."""
-        return cls(
-            dataset_digest=d.get("dataset_digest", None),
-            dataset_name=d.get("dataset_name", None),
-            key=d.get("key", None),
-            model_id=d.get("model_id", None),
-            run_id=d.get("run_id", None),
-            run_uuid=d.get("run_uuid", None),
-            step=d.get("step", None),
-            timestamp=d.get("timestamp", None),
-            value=d.get("value", None),
-        )
+        return cls(dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None), key=d.get('key', None), model_id=d.get('model_id', None), run_id=d.get('run_id', None), run_uuid=d.get('run_uuid', None), step=d.get('step', None), timestamp=d.get('timestamp', None), value=d.get('value', None))
+
+

 @dataclass
@@ -3422,38 +2922,38 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> LogMetricResponse:
         """Deserializes the LogMetricResponse from a dictionary."""
         return cls()
+
+

 @dataclass
 class LogModel:
     model_json: Optional[str] = None
     """MLmodel file in json format."""
-
+    
     run_id: Optional[str] = None
     """ID of the run to log under"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the LogModel into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_json is not None:
-            body["model_json"] = self.model_json
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
+        if self.model_json is not None: body['model_json'] = self.model_json
+        if self.run_id is not None: body['run_id'] = self.run_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LogModel into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_json is not None:
-            body["model_json"] = self.model_json
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
+        if self.model_json is not None: body['model_json'] = self.model_json
+        if self.run_id is not None: body['run_id'] = self.run_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LogModel:
         """Deserializes the LogModel from a dictionary."""
-        return cls(model_json=d.get("model_json", None), run_id=d.get("run_id", None))
+        return cls(model_json=d.get('model_json', None), run_id=d.get('run_id', None))
+
+

 @dataclass
@@ -3472,38 +2972,38 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> LogModelResponse:
         """Deserializes the LogModelResponse from a dictionary."""
         return cls()
+
+

 @dataclass
 class LogOutputsRequest:
     run_id: str
     """The ID of the Run from which to log outputs."""
-
+    
     models: Optional[List[ModelOutput]] = None
     """The model outputs from the Run."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the LogOutputsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.models:
-            body["models"] = [v.as_dict() for v in self.models]
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
+        if self.models: body['models'] = [v.as_dict() for v in self.models]
+        if self.run_id is not None: body['run_id'] = self.run_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LogOutputsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.models:
-            body["models"] = self.models
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
+        if self.models: body['models'] = self.models
+        if self.run_id is not None: body['run_id'] = self.run_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LogOutputsRequest:
         """Deserializes the LogOutputsRequest from a dictionary."""
-        return cls(models=_repeated_dict(d, "models", ModelOutput), run_id=d.get("run_id", None))
+        return cls(models=_repeated_dict(d, 'models', ModelOutput), run_id=d.get('run_id', None))
+
+

 @dataclass
@@ -3522,58 +3022,49 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> LogOutputsResponse:
         """Deserializes the LogOutputsResponse from a dictionary."""
         return cls()
+
+

 @dataclass
 class LogParam:
     key: str
     """Name of the param. Maximum size is 255 bytes."""
-
+    
     value: str
     """String value of the param being logged. Maximum size is 500 bytes."""
-
+    
     run_id: Optional[str] = None
     """ID of the run under which to log the param. Must be provided."""
-
+    
     run_uuid: Optional[str] = None
     """[Deprecated, use `run_id` instead] ID of the run under which to log the param.
     
     This field will be removed in a future MLflow version."""
-
+    
    def as_dict(self) -> dict:
         """Serializes the LogParam into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None:
-            body["key"] = self.key
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.run_uuid is not None:
-            body["run_uuid"] = self.run_uuid
-        if self.value is not None:
-            body["value"] = self.value
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.value is not None: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LogParam into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None:
-            body["key"] = self.key
-        if self.run_id is not None:
-            body["run_id"] = self.run_id
-        if self.run_uuid is not None:
-            body["run_uuid"] = self.run_uuid
-        if self.value is not None:
-            body["value"] = self.value
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.value is not None: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LogParam:
         """Deserializes the LogParam from a dictionary."""
-        return cls(
-            key=d.get("key", None),
-            run_id=d.get("run_id", None),
-            run_uuid=d.get("run_uuid", None),
-            value=d.get("value", None),
-        )
+        return cls(key=d.get('key', None), run_id=d.get('run_id', None), run_uuid=d.get('run_uuid', None), value=d.get('value', None))
+
+

 @dataclass
@@ -3592,750 +3083,595 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> LogParamResponse:
         """Deserializes the LogParamResponse from a dictionary."""
         return cls()
+
+

 @dataclass
 class LoggedModel:
     """A logged model message includes logged model attributes, tags, registration info, params, and
     linked run metrics."""
-
+    
     data: Optional[LoggedModelData] = None
     """The params and metrics attached to the logged model."""
-
+    
     info: Optional[LoggedModelInfo] = None
     """The logged model attributes such as model ID, status, tags, etc."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the LoggedModel into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data:
-            body["data"] = self.data.as_dict()
-        if self.info:
-            body["info"] = self.info.as_dict()
+        if self.data: body['data'] = self.data.as_dict()
+        if self.info: body['info'] = self.info.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LoggedModel into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data:
-            body["data"] = self.data
-        if self.info:
-            body["info"] = self.info
+        if self.data: body['data'] = self.data
+        if self.info: body['info'] = self.info
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LoggedModel:
         """Deserializes the LoggedModel from a dictionary."""
-        return cls(data=_from_dict(d, "data", LoggedModelData), info=_from_dict(d, "info", LoggedModelInfo))
+        return cls(data=_from_dict(d, 'data', LoggedModelData), info=_from_dict(d, 'info', LoggedModelInfo))
+
+

 @dataclass
 class LoggedModelData:
     """A LoggedModelData message includes logged model params and linked metrics."""
-
+    
     metrics: Optional[List[Metric]] = None
     """Performance metrics linked to the model."""
-
+    
     params: Optional[List[LoggedModelParameter]] = None
     """Immutable string key-value pairs of the model."""
-
+ def as_dict(self) -> dict: """Serializes the LoggedModelData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: - body["metrics"] = [v.as_dict() for v in self.metrics] - if self.params: - body["params"] = [v.as_dict() for v in self.params] + if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] + if self.params: body['params'] = [v.as_dict() for v in self.params] return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelData into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: - body["metrics"] = self.metrics - if self.params: - body["params"] = self.params + if self.metrics: body['metrics'] = self.metrics + if self.params: body['params'] = self.params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelData: """Deserializes the LoggedModelData from a dictionary.""" - return cls( - metrics=_repeated_dict(d, "metrics", Metric), params=_repeated_dict(d, "params", LoggedModelParameter) - ) + return cls(metrics=_repeated_dict(d, 'metrics', Metric), params=_repeated_dict(d, 'params', LoggedModelParameter)) + + @dataclass class LoggedModelInfo: """A LoggedModelInfo includes logged model attributes, tags, and registration info.""" - + artifact_uri: Optional[str] = None """The URI of the directory where model artifacts are stored.""" - + creation_timestamp_ms: Optional[int] = None """The timestamp when the model was created in milliseconds since the UNIX epoch.""" - + creator_id: Optional[int] = None """The ID of the user or principal that created the model.""" - + experiment_id: Optional[str] = None """The ID of the experiment that owns the model.""" - + last_updated_timestamp_ms: Optional[int] = None """The timestamp when the model was last updated in milliseconds since the UNIX epoch.""" - + model_id: Optional[str] = None """The unique identifier for the logged model.""" - + model_type: Optional[str] = None """The type of model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``.""" - + name: Optional[str] = None """The name of the model.""" - + source_run_id: Optional[str] = None """The ID of the run that created the model.""" - + status: Optional[LoggedModelStatus] = None """The status of whether or not the model is ready for use.""" - + status_message: Optional[str] = None """Details on the current model status.""" - + tags: Optional[List[LoggedModelTag]] = None """Mutable string key-value pairs set on the model.""" - + def as_dict(self) -> dict: """Serializes the LoggedModelInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_uri is not None: - body["artifact_uri"] = self.artifact_uri - if self.creation_timestamp_ms is not None: - body["creation_timestamp_ms"] = self.creation_timestamp_ms - if self.creator_id is not None: - body["creator_id"] = self.creator_id - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.last_updated_timestamp_ms is not None: - body["last_updated_timestamp_ms"] = self.last_updated_timestamp_ms - if self.model_id is not None: - body["model_id"] = self.model_id - if self.model_type is not None: - body["model_type"] = self.model_type - if self.name is not None: - body["name"] = self.name - if self.source_run_id is not None: - body["source_run_id"] = self.source_run_id - if self.status is not None: - body["status"] = self.status.value - if self.status_message is not None: - body["status_message"] = self.status_message - if self.tags: - body["tags"] = 
[v.as_dict() for v in self.tags] + if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri + if self.creation_timestamp_ms is not None: body['creation_timestamp_ms'] = self.creation_timestamp_ms + if self.creator_id is not None: body['creator_id'] = self.creator_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.last_updated_timestamp_ms is not None: body['last_updated_timestamp_ms'] = self.last_updated_timestamp_ms + if self.model_id is not None: body['model_id'] = self.model_id + if self.model_type is not None: body['model_type'] = self.model_type + if self.name is not None: body['name'] = self.name + if self.source_run_id is not None: body['source_run_id'] = self.source_run_id + if self.status is not None: body['status'] = self.status.value + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_uri is not None: - body["artifact_uri"] = self.artifact_uri - if self.creation_timestamp_ms is not None: - body["creation_timestamp_ms"] = self.creation_timestamp_ms - if self.creator_id is not None: - body["creator_id"] = self.creator_id - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.last_updated_timestamp_ms is not None: - body["last_updated_timestamp_ms"] = self.last_updated_timestamp_ms - if self.model_id is not None: - body["model_id"] = self.model_id - if self.model_type is not None: - body["model_type"] = self.model_type - if self.name is not None: - body["name"] = self.name - if self.source_run_id is not None: - body["source_run_id"] = self.source_run_id - if self.status is not None: - body["status"] = self.status - if self.status_message is not None: - body["status_message"] = self.status_message - if self.tags: - body["tags"] = self.tags + if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri + if self.creation_timestamp_ms is not None: body['creation_timestamp_ms'] = self.creation_timestamp_ms + if self.creator_id is not None: body['creator_id'] = self.creator_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.last_updated_timestamp_ms is not None: body['last_updated_timestamp_ms'] = self.last_updated_timestamp_ms + if self.model_id is not None: body['model_id'] = self.model_id + if self.model_type is not None: body['model_type'] = self.model_type + if self.name is not None: body['name'] = self.name + if self.source_run_id is not None: body['source_run_id'] = self.source_run_id + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelInfo: """Deserializes the LoggedModelInfo from a dictionary.""" - return cls( - artifact_uri=d.get("artifact_uri", None), - creation_timestamp_ms=d.get("creation_timestamp_ms", None), - creator_id=d.get("creator_id", None), - experiment_id=d.get("experiment_id", None), - last_updated_timestamp_ms=d.get("last_updated_timestamp_ms", None), - model_id=d.get("model_id", None), - model_type=d.get("model_type", None), - name=d.get("name", None), - source_run_id=d.get("source_run_id", None), - status=_enum(d, "status", LoggedModelStatus), - 
status_message=d.get("status_message", None), - tags=_repeated_dict(d, "tags", LoggedModelTag), - ) + return cls(artifact_uri=d.get('artifact_uri', None), creation_timestamp_ms=d.get('creation_timestamp_ms', None), creator_id=d.get('creator_id', None), experiment_id=d.get('experiment_id', None), last_updated_timestamp_ms=d.get('last_updated_timestamp_ms', None), model_id=d.get('model_id', None), model_type=d.get('model_type', None), name=d.get('name', None), source_run_id=d.get('source_run_id', None), status=_enum(d, 'status', LoggedModelStatus), status_message=d.get('status_message', None), tags=_repeated_dict(d, 'tags', LoggedModelTag)) + + @dataclass class LoggedModelParameter: """Parameter associated with a LoggedModel.""" - + key: Optional[str] = None """The key identifying this param.""" - + value: Optional[str] = None """The value of this param.""" - + def as_dict(self) -> dict: """Serializes the LoggedModelParameter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelParameter into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelParameter: """Deserializes the LoggedModelParameter from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + class LoggedModelStatus(Enum): """A LoggedModelStatus enum value represents the status of a logged model.""" - - LOGGED_MODEL_PENDING = "LOGGED_MODEL_PENDING" - LOGGED_MODEL_READY = "LOGGED_MODEL_READY" - LOGGED_MODEL_UPLOAD_FAILED = "LOGGED_MODEL_UPLOAD_FAILED" - + + LOGGED_MODEL_PENDING = 'LOGGED_MODEL_PENDING' + LOGGED_MODEL_READY = 'LOGGED_MODEL_READY' + LOGGED_MODEL_UPLOAD_FAILED = 'LOGGED_MODEL_UPLOAD_FAILED' @dataclass class LoggedModelTag: """Tag for a LoggedModel.""" - + key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the LoggedModelTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelTag: """Deserializes the LoggedModelTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class Metric: """Metric associated with a run, represented as a key-value pair.""" - + 
dataset_digest: Optional[str] = None """The dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that uniquely identifies it within datasets of the same name.""" - + dataset_name: Optional[str] = None """The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, “fantastic-elk-3”""" - + key: Optional[str] = None """The key identifying the metric.""" - + model_id: Optional[str] = None """The ID of the logged model or registered model version associated with the metric, if applicable.""" - + run_id: Optional[str] = None """The ID of the run containing the metric.""" - + step: Optional[int] = None """The step at which the metric was logged.""" - + timestamp: Optional[int] = None """The timestamp at which the metric was recorded.""" - + value: Optional[float] = None """The value of the metric.""" - + def as_dict(self) -> dict: """Serializes the Metric into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.key is not None: - body["key"] = self.key - if self.model_id is not None: - body["model_id"] = self.model_id - if self.run_id is not None: - body["run_id"] = self.run_id - if self.step is not None: - body["step"] = self.step - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.value is not None: - body["value"] = self.value + if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.key is not None: body['key'] = self.key + if self.model_id is not None: body['model_id'] = self.model_id + if self.run_id is not None: body['run_id'] = self.run_id + if self.step is not None: body['step'] = self.step + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the Metric into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.key is not None: - body["key"] = self.key - if self.model_id is not None: - body["model_id"] = self.model_id - if self.run_id is not None: - body["run_id"] = self.run_id - if self.step is not None: - body["step"] = self.step - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.value is not None: - body["value"] = self.value + if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.key is not None: body['key'] = self.key + if self.model_id is not None: body['model_id'] = self.model_id + if self.run_id is not None: body['run_id'] = self.run_id + if self.step is not None: body['step'] = self.step + if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Metric: """Deserializes the Metric from a dictionary.""" - return cls( - dataset_digest=d.get("dataset_digest", None), - dataset_name=d.get("dataset_name", None), - key=d.get("key", None), - model_id=d.get("model_id", None), - run_id=d.get("run_id", None), - 
step=d.get("step", None), - timestamp=d.get("timestamp", None), - value=d.get("value", None), - ) + return cls(dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None), key=d.get('key', None), model_id=d.get('model_id', None), run_id=d.get('run_id', None), step=d.get('step', None), timestamp=d.get('timestamp', None), value=d.get('value', None)) + + @dataclass class Model: creation_timestamp: Optional[int] = None """Timestamp recorded when this `registered_model` was created.""" - + description: Optional[str] = None """Description of this `registered_model`.""" - + last_updated_timestamp: Optional[int] = None """Timestamp recorded when metadata for this `registered_model` was last updated.""" - + latest_versions: Optional[List[ModelVersion]] = None """Collection of latest model versions for each stage. Only contains models with current `READY` status.""" - + name: Optional[str] = None """Unique name for the model.""" - + tags: Optional[List[ModelTag]] = None """Tags: Additional metadata key-value pairs for this `registered_model`.""" - + user_id: Optional[str] = None """User that created this `registered_model`""" - + def as_dict(self) -> dict: """Serializes the Model into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.description is not None: - body["description"] = self.description - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.latest_versions: - body["latest_versions"] = [v.as_dict() for v in self.latest_versions] - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.user_id is not None: - body["user_id"] = self.user_id + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the Model into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.description is not None: - body["description"] = self.description - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.latest_versions: - body["latest_versions"] = self.latest_versions - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags - if self.user_id is not None: - body["user_id"] = self.user_id + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = self.latest_versions + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = self.tags + if self.user_id is not None: 
body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Model: """Deserializes the Model from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - description=d.get("description", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - latest_versions=_repeated_dict(d, "latest_versions", ModelVersion), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", ModelTag), - user_id=d.get("user_id", None), - ) + return cls(creation_timestamp=d.get('creation_timestamp', None), description=d.get('description', None), last_updated_timestamp=d.get('last_updated_timestamp', None), latest_versions=_repeated_dict(d, 'latest_versions', ModelVersion), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ModelTag), user_id=d.get('user_id', None)) + + @dataclass class ModelDatabricks: creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + description: Optional[str] = None """User-specified description for the object.""" - + id: Optional[str] = None """Unique identifier for the object.""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + latest_versions: Optional[List[ModelVersion]] = None """Array of model versions, each the latest version for its stage.""" - + name: Optional[str] = None """Name of the model.""" - + permission_level: Optional[PermissionLevel] = None """Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..).""" - + tags: Optional[List[ModelTag]] = None """Array of tags associated with the model.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the ModelDatabricks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.latest_versions: - body["latest_versions"] = [v.as_dict() for v in self.latest_versions] - if self.name is not None: - body["name"] = self.name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.user_id is not None: - body["user_id"] = self.user_id + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] + if self.name is not None: body['name'] = self.name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the ModelDatabricks into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is 
not None: - body["creation_timestamp"] = self.creation_timestamp - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.latest_versions: - body["latest_versions"] = self.latest_versions - if self.name is not None: - body["name"] = self.name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.tags: - body["tags"] = self.tags - if self.user_id is not None: - body["user_id"] = self.user_id + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.latest_versions: body['latest_versions'] = self.latest_versions + if self.name is not None: body['name'] = self.name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelDatabricks: """Deserializes the ModelDatabricks from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - description=d.get("description", None), - id=d.get("id", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - latest_versions=_repeated_dict(d, "latest_versions", ModelVersion), - name=d.get("name", None), - permission_level=_enum(d, "permission_level", PermissionLevel), - tags=_repeated_dict(d, "tags", ModelTag), - user_id=d.get("user_id", None), - ) + return cls(creation_timestamp=d.get('creation_timestamp', None), description=d.get('description', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), latest_versions=_repeated_dict(d, 'latest_versions', ModelVersion), name=d.get('name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), tags=_repeated_dict(d, 'tags', ModelTag), user_id=d.get('user_id', None)) + + @dataclass class ModelInput: """Represents a LoggedModel or Registered Model Version input to a Run.""" - + model_id: str """The unique identifier of the model.""" - + def as_dict(self) -> dict: """Serializes the ModelInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id + if self.model_id is not None: body['model_id'] = self.model_id return body def as_shallow_dict(self) -> dict: """Serializes the ModelInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id + if self.model_id is not None: body['model_id'] = self.model_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelInput: """Deserializes the ModelInput from a dictionary.""" - return cls(model_id=d.get("model_id", None)) + return cls(model_id=d.get('model_id', None)) + + @dataclass class ModelOutput: """Represents a LoggedModel output of a Run.""" - + model_id: str """The unique identifier of the model.""" - + step: int """The step at which the model was produced.""" - + def as_dict(self) -> dict: """Serializes the ModelOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if 
self.model_id is not None: - body["model_id"] = self.model_id - if self.step is not None: - body["step"] = self.step + if self.model_id is not None: body['model_id'] = self.model_id + if self.step is not None: body['step'] = self.step return body def as_shallow_dict(self) -> dict: """Serializes the ModelOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.step is not None: - body["step"] = self.step + if self.model_id is not None: body['model_id'] = self.model_id + if self.step is not None: body['step'] = self.step return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelOutput: """Deserializes the ModelOutput from a dictionary.""" - return cls(model_id=d.get("model_id", None), step=d.get("step", None)) + return cls(model_id=d.get('model_id', None), step=d.get('step', None)) + + @dataclass class ModelTag: key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the ModelTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ModelTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelTag: """Deserializes the ModelTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class ModelVersion: creation_timestamp: Optional[int] = None """Timestamp recorded when this `model_version` was created.""" - + current_stage: Optional[str] = None """Current stage for this `model_version`.""" - + description: Optional[str] = None """Description of this `model_version`.""" - + last_updated_timestamp: Optional[int] = None """Timestamp recorded when metadata for this `model_version` was last updated.""" - + name: Optional[str] = None """Unique name of the model""" - + run_id: Optional[str] = None """MLflow run ID used when creating `model_version`, if `source` was generated by an experiment run stored in MLflow tracking server.""" - + run_link: Optional[str] = None """Run Link: Direct link to the run that generated this version""" - + source: Optional[str] = None """URI indicating the location of the source model artifacts, used when creating `model_version`""" - + status: Optional[ModelVersionStatus] = None """Current status of `model_version`""" - + status_message: Optional[str] = None """Details on current `status`, if it is pending or failed.""" - + tags: Optional[List[ModelVersionTag]] = None """Tags: Additional metadata key-value pairs for this `model_version`.""" - + user_id: Optional[str] = None """User that created this `model_version`.""" - + version: Optional[str] = None """Model's version number.""" - + def as_dict(self) -> dict: """Serializes the ModelVersion into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - 
body["creation_timestamp"] = self.creation_timestamp - if self.current_stage is not None: - body["current_stage"] = self.current_stage - if self.description is not None: - body["description"] = self.description - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.status is not None: - body["status"] = self.status.value - if self.status_message is not None: - body["status_message"] = self.status_message - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.user_id is not None: - body["user_id"] = self.user_id - if self.version is not None: - body["version"] = self.version + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.current_stage is not None: body['current_stage'] = self.current_stage + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_link is not None: body['run_link'] = self.run_link + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status.value + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.user_id is not None: body['user_id'] = self.user_id + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersion into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.current_stage is not None: - body["current_stage"] = self.current_stage - if self.description is not None: - body["description"] = self.description - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.status is not None: - body["status"] = self.status - if self.status_message is not None: - body["status_message"] = self.status_message - if self.tags: - body["tags"] = self.tags - if self.user_id is not None: - body["user_id"] = self.user_id - if self.version is not None: - body["version"] = self.version + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.current_stage is not None: body['current_stage'] = self.current_stage + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_link is not None: body['run_link'] = self.run_link + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = 
self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersion: """Deserializes the ModelVersion from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - current_stage=d.get("current_stage", None), - description=d.get("description", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - name=d.get("name", None), - run_id=d.get("run_id", None), - run_link=d.get("run_link", None), - source=d.get("source", None), - status=_enum(d, "status", ModelVersionStatus), - status_message=d.get("status_message", None), - tags=_repeated_dict(d, "tags", ModelVersionTag), - user_id=d.get("user_id", None), - version=d.get("version", None), - ) + return cls(creation_timestamp=d.get('creation_timestamp', None), current_stage=d.get('current_stage', None), description=d.get('description', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), run_id=d.get('run_id', None), run_link=d.get('run_link', None), source=d.get('source', None), status=_enum(d, 'status', ModelVersionStatus), status_message=d.get('status_message', None), tags=_repeated_dict(d, 'tags', ModelVersionTag), user_id=d.get('user_id', None), version=d.get('version', None)) + + @dataclass class ModelVersionDatabricks: creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + current_stage: Optional[Stage] = None """Stage of the model version. Valid values are: @@ -4346,32 +3682,32 @@ class ModelVersionDatabricks: * `Production`: Production stage. * `Archived`: Archived stage.""" - + description: Optional[str] = None """User-specified description for the object.""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + name: Optional[str] = None """Name of the model.""" - + permission_level: Optional[PermissionLevel] = None """Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..).""" - + run_id: Optional[str] = None """Unique identifier for the MLflow tracking run associated with the source model artifacts.""" - + run_link: Optional[str] = None """URL of the run associated with the model artifacts. This field is set at model version creation time only for model versions whose source run is from a tracking server that is different from the registry server.""" - + source: Optional[str] = None """URI that indicates the location of the source model artifacts. This is used when creating the model version.""" - + status: Optional[Status] = None """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. @@ -4379,468 +3715,369 @@ class ModelVersionDatabricks: * `FAILED_REGISTRATION`: Request to register a new model version has failed. 
* `READY`: Model version is ready for use.""" - + status_message: Optional[str] = None """Details on the current status, for example why registration failed.""" - + tags: Optional[List[ModelVersionTag]] = None """Array of tags that are associated with the model version.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + version: Optional[str] = None """Version of the model.""" - + def as_dict(self) -> dict: """Serializes the ModelVersionDatabricks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.current_stage is not None: - body["current_stage"] = self.current_stage.value - if self.description is not None: - body["description"] = self.description - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.status is not None: - body["status"] = self.status.value - if self.status_message is not None: - body["status_message"] = self.status_message - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.user_id is not None: - body["user_id"] = self.user_id - if self.version is not None: - body["version"] = self.version + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.current_stage is not None: body['current_stage'] = self.current_stage.value + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_link is not None: body['run_link'] = self.run_link + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status.value + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.user_id is not None: body['user_id'] = self.user_id + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersionDatabricks into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.current_stage is not None: - body["current_stage"] = self.current_stage - if self.description is not None: - body["description"] = self.description - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.status is not None: - 
body["status"] = self.status - if self.status_message is not None: - body["status_message"] = self.status_message - if self.tags: - body["tags"] = self.tags - if self.user_id is not None: - body["user_id"] = self.user_id - if self.version is not None: - body["version"] = self.version + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.current_stage is not None: body['current_stage'] = self.current_stage + if self.description is not None: body['description'] = self.description + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_link is not None: body['run_link'] = self.run_link + if self.source is not None: body['source'] = self.source + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message + if self.tags: body['tags'] = self.tags + if self.user_id is not None: body['user_id'] = self.user_id + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersionDatabricks: """Deserializes the ModelVersionDatabricks from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - current_stage=_enum(d, "current_stage", Stage), - description=d.get("description", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - name=d.get("name", None), - permission_level=_enum(d, "permission_level", PermissionLevel), - run_id=d.get("run_id", None), - run_link=d.get("run_link", None), - source=d.get("source", None), - status=_enum(d, "status", Status), - status_message=d.get("status_message", None), - tags=_repeated_dict(d, "tags", ModelVersionTag), - user_id=d.get("user_id", None), - version=d.get("version", None), - ) + return cls(creation_timestamp=d.get('creation_timestamp', None), current_stage=_enum(d, 'current_stage', Stage), description=d.get('description', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), run_id=d.get('run_id', None), run_link=d.get('run_link', None), source=d.get('source', None), status=_enum(d, 'status', Status), status_message=d.get('status_message', None), tags=_repeated_dict(d, 'tags', ModelVersionTag), user_id=d.get('user_id', None), version=d.get('version', None)) + + class ModelVersionStatus(Enum): """Current status of `model_version`""" - - FAILED_REGISTRATION = "FAILED_REGISTRATION" - PENDING_REGISTRATION = "PENDING_REGISTRATION" - READY = "READY" - + + FAILED_REGISTRATION = 'FAILED_REGISTRATION' + PENDING_REGISTRATION = 'PENDING_REGISTRATION' + READY = 'READY' @dataclass class ModelVersionTag: key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the ModelVersionTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersionTag into a shallow dictionary of 
its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersionTag: """Deserializes the ModelVersionTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class Param: """Param associated with a run.""" - + key: Optional[str] = None """Key identifying this param.""" - + value: Optional[str] = None """Value associated with this param.""" - + def as_dict(self) -> dict: """Serializes the Param into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the Param into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Param: """Deserializes the Param from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + class PermissionLevel(Enum): """Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..).""" - - CAN_EDIT = "CAN_EDIT" - CAN_MANAGE = "CAN_MANAGE" - CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS" - CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS" - CAN_READ = "CAN_READ" - + + CAN_EDIT = 'CAN_EDIT' + CAN_MANAGE = 'CAN_MANAGE' + CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' + CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' + CAN_READ = 'CAN_READ' @dataclass class RegisteredModelAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[RegisteredModelPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the 
RegisteredModelAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAccessControlRequest: """Deserializes the RegisteredModelAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class RegisteredModelAccessControlResponse: all_permissions: Optional[List[RegisteredModelPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if 
self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAccessControlResponse: """Deserializes the RegisteredModelAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", RegisteredModelPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', RegisteredModelPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class RegisteredModelPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[RegisteredModelPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermission: """Deserializes the RegisteredModelPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel)) + + class RegisteredModelPermissionLevel(Enum): """Permission level""" - - CAN_EDIT = "CAN_EDIT" - CAN_MANAGE = "CAN_MANAGE" - CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS" - CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS" - CAN_READ = "CAN_READ" - + + CAN_EDIT = 'CAN_EDIT' + CAN_MANAGE = 'CAN_MANAGE' + 
CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' + CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' + CAN_READ = 'CAN_READ' @dataclass class RegisteredModelPermissions: access_control_list: Optional[List[RegisteredModelAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissions: """Deserializes the RegisteredModelPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', RegisteredModelAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class RegisteredModelPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[RegisteredModelPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsDescription: """Deserializes the RegisteredModelPermissionsDescription from a 
dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel)) + + @dataclass class RegisteredModelPermissionsRequest: access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None - + registered_model_id: Optional[str] = None """The registered model for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.registered_model_id is not None: - body["registered_model_id"] = self.registered_model_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.registered_model_id is not None: - body["registered_model_id"] = self.registered_model_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsRequest: """Deserializes the RegisteredModelPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlRequest), - registered_model_id=d.get("registered_model_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', RegisteredModelAccessControlRequest), registered_model_id=d.get('registered_model_id', None)) + + @dataclass class RegistryWebhook: creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + description: Optional[str] = None """User-specified description for the webhook.""" - + events: Optional[List[RegistryWebhookEvent]] = None """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. @@ -4870,20 +4107,20 @@ class RegistryWebhook: to production. * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - + http_url_spec: Optional[HttpUrlSpecWithoutSecret] = None - + id: Optional[str] = None """Webhook ID""" - + job_spec: Optional[JobSpecWithoutSecret] = None - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + model_name: Optional[str] = None """Name of the model whose events would trigger this webhook.""" - + status: Optional[RegistryWebhookStatus] = None """Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. 
@@ -4892,107 +4129,80 @@ class RegistryWebhook: * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event.""" - + def as_dict(self) -> dict: """Serializes the RegistryWebhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = [v.value for v in self.events] - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec.as_dict() - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec.as_dict() - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.model_name is not None: - body["model_name"] = self.model_name - if self.status is not None: - body["status"] = self.status.value + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.events: body['events'] = [v.value for v in self.events] + if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict() + if self.id is not None: body['id'] = self.id + if self.job_spec: body['job_spec'] = self.job_spec.as_dict() + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.model_name is not None: body['model_name'] = self.model_name + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the RegistryWebhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = self.events - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.model_name is not None: - body["model_name"] = self.model_name - if self.status is not None: - body["status"] = self.status + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.description is not None: body['description'] = self.description + if self.events: body['events'] = self.events + if self.http_url_spec: body['http_url_spec'] = self.http_url_spec + if self.id is not None: body['id'] = self.id + if self.job_spec: body['job_spec'] = self.job_spec + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.model_name is not None: body['model_name'] = self.model_name + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegistryWebhook: """Deserializes the RegistryWebhook from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - description=d.get("description", None), - events=_repeated_enum(d, "events", RegistryWebhookEvent), - http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpecWithoutSecret), - id=d.get("id", None), - job_spec=_from_dict(d, "job_spec", JobSpecWithoutSecret), - 
last_updated_timestamp=d.get("last_updated_timestamp", None), - model_name=d.get("model_name", None), - status=_enum(d, "status", RegistryWebhookStatus), - ) - + return cls(creation_timestamp=d.get('creation_timestamp', None), description=d.get('description', None), events=_repeated_enum(d, 'events', RegistryWebhookEvent), http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpecWithoutSecret), id=d.get('id', None), job_spec=_from_dict(d, 'job_spec', JobSpecWithoutSecret), last_updated_timestamp=d.get('last_updated_timestamp', None), model_name=d.get('model_name', None), status=_enum(d, 'status', RegistryWebhookStatus)) + -class RegistryWebhookEvent(Enum): - COMMENT_CREATED = "COMMENT_CREATED" - MODEL_VERSION_CREATED = "MODEL_VERSION_CREATED" - MODEL_VERSION_TAG_SET = "MODEL_VERSION_TAG_SET" - MODEL_VERSION_TRANSITIONED_STAGE = "MODEL_VERSION_TRANSITIONED_STAGE" - MODEL_VERSION_TRANSITIONED_TO_ARCHIVED = "MODEL_VERSION_TRANSITIONED_TO_ARCHIVED" - MODEL_VERSION_TRANSITIONED_TO_PRODUCTION = "MODEL_VERSION_TRANSITIONED_TO_PRODUCTION" - MODEL_VERSION_TRANSITIONED_TO_STAGING = "MODEL_VERSION_TRANSITIONED_TO_STAGING" - REGISTERED_MODEL_CREATED = "REGISTERED_MODEL_CREATED" - TRANSITION_REQUEST_CREATED = "TRANSITION_REQUEST_CREATED" - TRANSITION_REQUEST_TO_ARCHIVED_CREATED = "TRANSITION_REQUEST_TO_ARCHIVED_CREATED" - TRANSITION_REQUEST_TO_PRODUCTION_CREATED = "TRANSITION_REQUEST_TO_PRODUCTION_CREATED" - TRANSITION_REQUEST_TO_STAGING_CREATED = "TRANSITION_REQUEST_TO_STAGING_CREATED" +class RegistryWebhookEvent(Enum): + + + COMMENT_CREATED = 'COMMENT_CREATED' + MODEL_VERSION_CREATED = 'MODEL_VERSION_CREATED' + MODEL_VERSION_TAG_SET = 'MODEL_VERSION_TAG_SET' + MODEL_VERSION_TRANSITIONED_STAGE = 'MODEL_VERSION_TRANSITIONED_STAGE' + MODEL_VERSION_TRANSITIONED_TO_ARCHIVED = 'MODEL_VERSION_TRANSITIONED_TO_ARCHIVED' + MODEL_VERSION_TRANSITIONED_TO_PRODUCTION = 'MODEL_VERSION_TRANSITIONED_TO_PRODUCTION' + MODEL_VERSION_TRANSITIONED_TO_STAGING = 'MODEL_VERSION_TRANSITIONED_TO_STAGING' + REGISTERED_MODEL_CREATED = 'REGISTERED_MODEL_CREATED' + TRANSITION_REQUEST_CREATED = 'TRANSITION_REQUEST_CREATED' + TRANSITION_REQUEST_TO_ARCHIVED_CREATED = 'TRANSITION_REQUEST_TO_ARCHIVED_CREATED' + TRANSITION_REQUEST_TO_PRODUCTION_CREATED = 'TRANSITION_REQUEST_TO_PRODUCTION_CREATED' + TRANSITION_REQUEST_TO_STAGING_CREATED = 'TRANSITION_REQUEST_TO_STAGING_CREATED' class RegistryWebhookStatus(Enum): """Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event.""" - - ACTIVE = "ACTIVE" - DISABLED = "DISABLED" - TEST_MODE = "TEST_MODE" - + + ACTIVE = 'ACTIVE' + DISABLED = 'DISABLED' + TEST_MODE = 'TEST_MODE' @dataclass class RejectTransitionRequest: name: str """Name of the model.""" - + version: str """Version of the model.""" - + stage: Stage """Target stage of the transition. Valid values are: @@ -5003,151 +4213,138 @@ class RejectTransitionRequest: * `Production`: Production stage. 
* `Archived`: Archived stage.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the RejectTransitionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage.value - if self.version is not None: - body["version"] = self.version + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage.value + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the RejectTransitionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequest: """Deserializes the RejectTransitionRequest from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - stage=_enum(d, "stage", Stage), - version=d.get("version", None), - ) + return cls(comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None)) + + @dataclass class RejectTransitionRequestResponse: activity: Optional[Activity] = None """Activity recorded for the action.""" - + def as_dict(self) -> dict: """Serializes the RejectTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activity: - body["activity"] = self.activity.as_dict() + if self.activity: body['activity'] = self.activity.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RejectTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.activity: - body["activity"] = self.activity + if self.activity: body['activity'] = self.activity return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequestResponse: """Deserializes the RejectTransitionRequestResponse from a dictionary.""" - return cls(activity=_from_dict(d, "activity", Activity)) + return cls(activity=_from_dict(d, 'activity', Activity)) + + @dataclass class RenameModelRequest: name: str """Registered model unique name identifier.""" - + new_name: Optional[str] = None """If provided, updates the name for this `registered_model`.""" - + def as_dict(self) -> dict: """Serializes the RenameModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name return body def as_shallow_dict(self) -> dict: """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - 
body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RenameModelRequest: """Deserializes the RenameModelRequest from a dictionary.""" - return cls(name=d.get("name", None), new_name=d.get("new_name", None)) + return cls(name=d.get('name', None), new_name=d.get('new_name', None)) + + @dataclass class RenameModelResponse: registered_model: Optional[Model] = None - + def as_dict(self) -> dict: """Serializes the RenameModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.registered_model: - body["registered_model"] = self.registered_model.as_dict() + if self.registered_model: body['registered_model'] = self.registered_model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RenameModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.registered_model: - body["registered_model"] = self.registered_model + if self.registered_model: body['registered_model'] = self.registered_model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RenameModelResponse: """Deserializes the RenameModelResponse from a dictionary.""" - return cls(registered_model=_from_dict(d, "registered_model", Model)) + return cls(registered_model=_from_dict(d, 'registered_model', Model)) + + @dataclass class RestoreExperiment: experiment_id: str """ID of the associated experiment.""" - + def as_dict(self) -> dict: """Serializes the RestoreExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreExperiment: """Deserializes the RestoreExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None)) + return cls(experiment_id=d.get('experiment_id', None)) + + @dataclass @@ -5166,31 +4363,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestoreExperimentResponse: """Deserializes the RestoreExperimentResponse from a dictionary.""" return cls() + + @dataclass class RestoreRun: run_id: str """ID of the run to restore.""" - + def as_dict(self) -> dict: """Serializes the RestoreRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the RestoreRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: - body["run_id"] = self.run_id + if self.run_id is not None: body['run_id'] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreRun: """Deserializes the RestoreRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) + return cls(run_id=d.get('run_id', None)) + + @dataclass @@ -5209,539 +4408,446 @@ def 
as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestoreRunResponse: """Deserializes the RestoreRunResponse from a dictionary.""" return cls() + + @dataclass class RestoreRuns: experiment_id: str """The ID of the experiment containing the runs to restore.""" - + min_timestamp_millis: int """The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only runs deleted no earlier than this timestamp are restored.""" - + max_runs: Optional[int] = None """An optional positive integer indicating the maximum number of runs to restore. The maximum allowed value for max_runs is 10000.""" - + def as_dict(self) -> dict: """Serializes the RestoreRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.min_timestamp_millis is not None: - body["min_timestamp_millis"] = self.min_timestamp_millis + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.max_runs is not None: body['max_runs'] = self.max_runs + if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis return body def as_shallow_dict(self) -> dict: """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.min_timestamp_millis is not None: - body["min_timestamp_millis"] = self.min_timestamp_millis + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.max_runs is not None: body['max_runs'] = self.max_runs + if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreRuns: """Deserializes the RestoreRuns from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - max_runs=d.get("max_runs", None), - min_timestamp_millis=d.get("min_timestamp_millis", None), - ) + return cls(experiment_id=d.get('experiment_id', None), max_runs=d.get('max_runs', None), min_timestamp_millis=d.get('min_timestamp_millis', None)) + + @dataclass class RestoreRunsResponse: runs_restored: Optional[int] = None """The number of runs restored.""" - + def as_dict(self) -> dict: """Serializes the RestoreRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.runs_restored is not None: - body["runs_restored"] = self.runs_restored + if self.runs_restored is not None: body['runs_restored'] = self.runs_restored return body def as_shallow_dict(self) -> dict: """Serializes the RestoreRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.runs_restored is not None: - body["runs_restored"] = self.runs_restored + if self.runs_restored is not None: body['runs_restored'] = self.runs_restored return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreRunsResponse: """Deserializes the RestoreRunsResponse from a dictionary.""" - return cls(runs_restored=d.get("runs_restored", None)) + return cls(runs_restored=d.get('runs_restored', None)) + + @dataclass class Run: """A single run.""" - + data: Optional[RunData] = None """Run data.""" - + info: Optional[RunInfo] = None """Run metadata.""" - + inputs: Optional[RunInputs] = None """Run 
inputs.""" - + def as_dict(self) -> dict: """Serializes the Run into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data: - body["data"] = self.data.as_dict() - if self.info: - body["info"] = self.info.as_dict() - if self.inputs: - body["inputs"] = self.inputs.as_dict() + if self.data: body['data'] = self.data.as_dict() + if self.info: body['info'] = self.info.as_dict() + if self.inputs: body['inputs'] = self.inputs.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Run into a shallow dictionary of its immediate attributes.""" body = {} - if self.data: - body["data"] = self.data - if self.info: - body["info"] = self.info - if self.inputs: - body["inputs"] = self.inputs + if self.data: body['data'] = self.data + if self.info: body['info'] = self.info + if self.inputs: body['inputs'] = self.inputs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Run: """Deserializes the Run from a dictionary.""" - return cls( - data=_from_dict(d, "data", RunData), - info=_from_dict(d, "info", RunInfo), - inputs=_from_dict(d, "inputs", RunInputs), - ) + return cls(data=_from_dict(d, 'data', RunData), info=_from_dict(d, 'info', RunInfo), inputs=_from_dict(d, 'inputs', RunInputs)) + + @dataclass class RunData: """Run data (metrics, params, and tags).""" - + metrics: Optional[List[Metric]] = None """Run metrics.""" - + params: Optional[List[Param]] = None """Run parameters.""" - + tags: Optional[List[RunTag]] = None """Additional metadata key-value pairs.""" - + def as_dict(self) -> dict: """Serializes the RunData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: - body["metrics"] = [v.as_dict() for v in self.metrics] - if self.params: - body["params"] = [v.as_dict() for v in self.params] - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] + if self.params: body['params'] = [v.as_dict() for v in self.params] + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the RunData into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: - body["metrics"] = self.metrics - if self.params: - body["params"] = self.params - if self.tags: - body["tags"] = self.tags + if self.metrics: body['metrics'] = self.metrics + if self.params: body['params'] = self.params + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunData: """Deserializes the RunData from a dictionary.""" - return cls( - metrics=_repeated_dict(d, "metrics", Metric), - params=_repeated_dict(d, "params", Param), - tags=_repeated_dict(d, "tags", RunTag), - ) + return cls(metrics=_repeated_dict(d, 'metrics', Metric), params=_repeated_dict(d, 'params', Param), tags=_repeated_dict(d, 'tags', RunTag)) + + @dataclass class RunInfo: """Metadata of a single run.""" - + artifact_uri: Optional[str] = None """URI of the directory where artifacts should be uploaded. This can be a local path (starting with "/"), or a distributed file system (DFS) path, like ``s3://bucket/directory`` or ``dbfs:/my/directory``. 
If not set, the local ``./mlruns`` directory is chosen.""" - + end_time: Optional[int] = None """Unix timestamp of when the run ended in milliseconds.""" - + experiment_id: Optional[str] = None """The experiment ID.""" - + lifecycle_stage: Optional[str] = None """Current life cycle stage of the experiment: OneOf("active", "deleted")""" - + run_id: Optional[str] = None """Unique identifier for the run.""" - + run_name: Optional[str] = None """The name of the run.""" - + run_uuid: Optional[str] = None """[Deprecated, use run_id instead] Unique identifier for the run. This field will be removed in a future MLflow version.""" - + start_time: Optional[int] = None """Unix timestamp of when the run started in milliseconds.""" - + status: Optional[RunInfoStatus] = None """Current status of the run.""" - + user_id: Optional[str] = None """User who initiated the run. This field is deprecated as of MLflow 1.0, and will be removed in a future MLflow release. Use 'mlflow.user' tag instead.""" - + def as_dict(self) -> dict: """Serializes the RunInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_uri is not None: - body["artifact_uri"] = self.artifact_uri - if self.end_time is not None: - body["end_time"] = self.end_time - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.lifecycle_stage is not None: - body["lifecycle_stage"] = self.lifecycle_stage - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.start_time is not None: - body["start_time"] = self.start_time - if self.status is not None: - body["status"] = self.status.value - if self.user_id is not None: - body["user_id"] = self.user_id + if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri + if self.end_time is not None: body['end_time'] = self.end_time + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.start_time is not None: body['start_time'] = self.start_time + if self.status is not None: body['status'] = self.status.value + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the RunInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_uri is not None: - body["artifact_uri"] = self.artifact_uri - if self.end_time is not None: - body["end_time"] = self.end_time - if self.experiment_id is
not None: body['experiment_id'] = self.experiment_id + if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.start_time is not None: body['start_time'] = self.start_time + if self.status is not None: body['status'] = self.status + if self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunInfo: """Deserializes the RunInfo from a dictionary.""" - return cls( - artifact_uri=d.get("artifact_uri", None), - end_time=d.get("end_time", None), - experiment_id=d.get("experiment_id", None), - lifecycle_stage=d.get("lifecycle_stage", None), - run_id=d.get("run_id", None), - run_name=d.get("run_name", None), - run_uuid=d.get("run_uuid", None), - start_time=d.get("start_time", None), - status=_enum(d, "status", RunInfoStatus), - user_id=d.get("user_id", None), - ) + return cls(artifact_uri=d.get('artifact_uri', None), end_time=d.get('end_time', None), experiment_id=d.get('experiment_id', None), lifecycle_stage=d.get('lifecycle_stage', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_uuid=d.get('run_uuid', None), start_time=d.get('start_time', None), status=_enum(d, 'status', RunInfoStatus), user_id=d.get('user_id', None)) + + class RunInfoStatus(Enum): """Status of a run.""" - - FAILED = "FAILED" - FINISHED = "FINISHED" - KILLED = "KILLED" - RUNNING = "RUNNING" - SCHEDULED = "SCHEDULED" - + + FAILED = 'FAILED' + FINISHED = 'FINISHED' + KILLED = 'KILLED' + RUNNING = 'RUNNING' + SCHEDULED = 'SCHEDULED' @dataclass class RunInputs: """Run inputs.""" - + dataset_inputs: Optional[List[DatasetInput]] = None """Dataset inputs to the Run.""" - + model_inputs: Optional[List[ModelInput]] = None - """**NOTE**: Experimental: This API field may change or be removed in a future release without - warning.
+ """Model inputs to the Run.""" - Model inputs to the Run.""" - def as_dict(self) -> dict: """Serializes the RunInputs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset_inputs: - body["dataset_inputs"] = [v.as_dict() for v in self.dataset_inputs] - if self.model_inputs: - body["model_inputs"] = [v.as_dict() for v in self.model_inputs] + if self.dataset_inputs: body['dataset_inputs'] = [v.as_dict() for v in self.dataset_inputs] + if self.model_inputs: body['model_inputs'] = [v.as_dict() for v in self.model_inputs] return body def as_shallow_dict(self) -> dict: """Serializes the RunInputs into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset_inputs: - body["dataset_inputs"] = self.dataset_inputs - if self.model_inputs: - body["model_inputs"] = self.model_inputs + if self.dataset_inputs: body['dataset_inputs'] = self.dataset_inputs + if self.model_inputs: body['model_inputs'] = self.model_inputs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunInputs: """Deserializes the RunInputs from a dictionary.""" - return cls( - dataset_inputs=_repeated_dict(d, "dataset_inputs", DatasetInput), - model_inputs=_repeated_dict(d, "model_inputs", ModelInput), - ) + return cls(dataset_inputs=_repeated_dict(d, 'dataset_inputs', DatasetInput), model_inputs=_repeated_dict(d, 'model_inputs', ModelInput)) + + @dataclass class RunTag: """Tag for a run.""" - + key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the RunTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the RunTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunTag: """Deserializes the RunTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class SearchExperiments: filter: Optional[str] = None """String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")""" - + max_results: Optional[int] = None """Maximum number of experiments desired. Max threshold is 3000.""" - + order_by: Optional[List[str]] = None """List of columns for ordering search results, which can include experiment name and last updated timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are done by experiment id DESC.""" - + page_token: Optional[str] = None """Token indicating the page of experiments to fetch""" - + view_type: Optional[ViewType] = None """Qualifier for type of experiments to be returned. 
If unspecified, return only active experiments.""" - + def as_dict(self) -> dict: """Serializes the SearchExperiments into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - if self.view_type is not None: - body["view_type"] = self.view_type.value + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = [v for v in self.order_by] + if self.page_token is not None: body['page_token'] = self.page_token + if self.view_type is not None: body['view_type'] = self.view_type.value return body def as_shallow_dict(self) -> dict: """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - if self.view_type is not None: - body["view_type"] = self.view_type + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = self.order_by + if self.page_token is not None: body['page_token'] = self.page_token + if self.view_type is not None: body['view_type'] = self.view_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchExperiments: """Deserializes the SearchExperiments from a dictionary.""" - return cls( - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=d.get("order_by", None), - page_token=d.get("page_token", None), - view_type=_enum(d, "view_type", ViewType), - ) + return cls(filter=d.get('filter', None), max_results=d.get('max_results', None), order_by=d.get('order_by', None), page_token=d.get('page_token', None), view_type=_enum(d, 'view_type', ViewType)) + + @dataclass class SearchExperimentsResponse: experiments: Optional[List[Experiment]] = None """Experiments that match the search criteria""" - + next_page_token: Optional[str] = None """Token that can be used to retrieve the next page of experiments. 
An empty token means that no more experiments are available for retrieval.""" - + def as_dict(self) -> dict: """Serializes the SearchExperimentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiments: - body["experiments"] = [v.as_dict() for v in self.experiments] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.experiments: body['experiments'] = [v.as_dict() for v in self.experiments] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchExperimentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiments: - body["experiments"] = self.experiments - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.experiments: body['experiments'] = self.experiments + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchExperimentsResponse: """Deserializes the SearchExperimentsResponse from a dictionary.""" - return cls( - experiments=_repeated_dict(d, "experiments", Experiment), next_page_token=d.get("next_page_token", None) - ) + return cls(experiments=_repeated_dict(d, 'experiments', Experiment), next_page_token=d.get('next_page_token', None)) + + @dataclass class SearchLoggedModelsDataset: dataset_name: str """The name of the dataset.""" - + dataset_digest: Optional[str] = None """The digest of the dataset.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsDataset into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name + if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsDataset into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name + if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsDataset: """Deserializes the SearchLoggedModelsDataset from a dictionary.""" - return cls(dataset_digest=d.get("dataset_digest", None), dataset_name=d.get("dataset_name", None)) + return cls(dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None)) + + @dataclass class SearchLoggedModelsOrderBy: field_name: str """The name of the field to order by, e.g. "metrics.accuracy".""" - + ascending: Optional[bool] = None """Whether the search results order is ascending or not.""" - + dataset_digest: Optional[str] = None """If ``field_name`` refers to a metric, this field specifies the digest of the dataset associated with the metric. Only metrics associated with the specified dataset name and digest will be considered for ordering. 
This field may only be set if ``dataset_name`` is also set.""" - + dataset_name: Optional[str] = None """If ``field_name`` refers to a metric, this field specifies the name of the dataset associated with the metric. Only metrics associated with the specified dataset name will be considered for ordering. This field may only be set if ``field_name`` refers to a metric.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsOrderBy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ascending is not None: - body["ascending"] = self.ascending - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.field_name is not None: - body["field_name"] = self.field_name + if self.ascending is not None: body['ascending'] = self.ascending + if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.field_name is not None: body['field_name'] = self.field_name return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsOrderBy into a shallow dictionary of its immediate attributes.""" body = {} - if self.ascending is not None: - body["ascending"] = self.ascending - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.field_name is not None: - body["field_name"] = self.field_name + if self.ascending is not None: body['ascending'] = self.ascending + if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.field_name is not None: body['field_name'] = self.field_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsOrderBy: """Deserializes the SearchLoggedModelsOrderBy from a dictionary.""" - return cls( - ascending=d.get("ascending", None), - dataset_digest=d.get("dataset_digest", None), - dataset_name=d.get("dataset_name", None), - field_name=d.get("field_name", None), - ) + return cls(ascending=d.get('ascending', None), dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None), field_name=d.get('field_name', None)) + + @dataclass @@ -5752,179 +4858,156 @@ class SearchLoggedModelsRequest: logged models with accuracy > 0.9 on the test_dataset. Metric values from ANY dataset matching the criteria are considered. If no datasets are specified, then metrics across all datasets are considered in the filter.""" - + experiment_ids: Optional[List[str]] = None """The IDs of the experiments in which to search for logged models.""" - + filter: Optional[str] = None """A filter expression over logged model info and data that allows returning a subset of logged models. The syntax is a subset of SQL that supports AND'ing together binary operations. Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``.""" - + max_results: Optional[int] = None """The maximum number of Logged Models to return. 
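Tying the pieces together, a minimal hedged request matching the docstring's own scenario (experiment ID hypothetical), i.e. accuracy > 0.9 on test_dataset:

req = SearchLoggedModelsRequest(
    experiment_ids=["1234"],
    datasets=[SearchLoggedModelsDataset(dataset_name="test_dataset")],
    filter="metrics.accuracy > 0.9",
    max_results=50,  # the docstring caps this at 50
)
body = req.as_dict()  # nested dataclasses serialize through their own as_dict()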
The maximum limit is 50.""" - + order_by: Optional[List[SearchLoggedModelsOrderBy]] = None """The list of columns for ordering the results, with additional fields for sorting criteria.""" - + page_token: Optional[str] = None """The token indicating the page of logged models to fetch.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.experiment_ids: - body["experiment_ids"] = [v for v in self.experiment_ids] - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v.as_dict() for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token + if self.datasets: body['datasets'] = [v.as_dict() for v in self.datasets] + if self.experiment_ids: body['experiment_ids'] = [v for v in self.experiment_ids] + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = [v.as_dict() for v in self.order_by] + if self.page_token is not None: body['page_token'] = self.page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.datasets: - body["datasets"] = self.datasets - if self.experiment_ids: - body["experiment_ids"] = self.experiment_ids - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token + if self.datasets: body['datasets'] = self.datasets + if self.experiment_ids: body['experiment_ids'] = self.experiment_ids + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = self.order_by + if self.page_token is not None: body['page_token'] = self.page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsRequest: """Deserializes the SearchLoggedModelsRequest from a dictionary.""" - return cls( - datasets=_repeated_dict(d, "datasets", SearchLoggedModelsDataset), - experiment_ids=d.get("experiment_ids", None), - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=_repeated_dict(d, "order_by", SearchLoggedModelsOrderBy), - page_token=d.get("page_token", None), - ) + return cls(datasets=_repeated_dict(d, 'datasets', SearchLoggedModelsDataset), experiment_ids=d.get('experiment_ids', None), filter=d.get('filter', None), max_results=d.get('max_results', None), order_by=_repeated_dict(d, 'order_by', SearchLoggedModelsOrderBy), page_token=d.get('page_token', None)) + + @dataclass class SearchLoggedModelsResponse: models: Optional[List[LoggedModel]] = None """Logged models that match the search criteria.""" - + next_page_token: Optional[str] = None """The token that can be used to retrieve the next page of logged models.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.models: - body["models"] = [v.as_dict() for v in self.models] - if self.next_page_token is 
not None: - body["next_page_token"] = self.next_page_token + if self.models: body['models'] = [v.as_dict() for v in self.models] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.models: - body["models"] = self.models - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.models: body['models'] = self.models + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsResponse: """Deserializes the SearchLoggedModelsResponse from a dictionary.""" - return cls(models=_repeated_dict(d, "models", LoggedModel), next_page_token=d.get("next_page_token", None)) + return cls(models=_repeated_dict(d, 'models', LoggedModel), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class SearchModelVersionsResponse: model_versions: Optional[List[ModelVersion]] = None """Models that match the search criteria""" - + next_page_token: Optional[str] = None """Pagination token to request next page of models for the same search query.""" - + def as_dict(self) -> dict: """Serializes the SearchModelVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_versions: - body["model_versions"] = [v.as_dict() for v in self.model_versions] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchModelVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_versions: - body["model_versions"] = self.model_versions - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.model_versions: body['model_versions'] = self.model_versions + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchModelVersionsResponse: """Deserializes the SearchModelVersionsResponse from a dictionary.""" - return cls( - model_versions=_repeated_dict(d, "model_versions", ModelVersion), - next_page_token=d.get("next_page_token", None), - ) + return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersion), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class SearchModelsResponse: next_page_token: Optional[str] = None """Pagination token to request the next page of models.""" - + registered_models: Optional[List[Model]] = None """Registered Models that match the search criteria.""" - + def as_dict(self) -> dict: """Serializes the SearchModelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = [v.as_dict() for v in self.registered_models] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models] return body def as_shallow_dict(self) -> dict: 
"""Serializes the SearchModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = self.registered_models + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.registered_models: body['registered_models'] = self.registered_models return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchModelsResponse: """Deserializes the SearchModelsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - registered_models=_repeated_dict(d, "registered_models", Model), - ) + return cls(next_page_token=d.get('next_page_token', None), registered_models=_repeated_dict(d, 'registered_models', Model)) + + @dataclass class SearchRuns: experiment_ids: Optional[List[str]] = None """List of experiment IDs to search over.""" - + filter: Optional[str] = None """A filter expression over params, metrics, and tags, that allows returning a subset of runs. The syntax is a subset of SQL that supports ANDing together binary operations between a param, @@ -5936,139 +5019,116 @@ class SearchRuns: quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.""" - + max_results: Optional[int] = None """Maximum number of runs desired. Max threshold is 50000""" - + order_by: Optional[List[str]] = None """List of columns to be ordered by, including attributes, params, metrics, and tags with an optional `"DESC"` or `"ASC"` annotation, where `"ASC"` is the default. Example: `["params.input DESC", "metrics.alpha ASC", "metrics.rmse"]`. Tiebreaks are done by start_time `DESC` followed by `run_id` for runs with the same start time (and this is the default ordering criterion if order_by is not provided).""" - + page_token: Optional[str] = None """Token for the current page of runs.""" - + run_view_type: Optional[ViewType] = None """Whether to display only active, only deleted, or all runs. 
Defaults to only active runs.""" - + def as_dict(self) -> dict: """Serializes the SearchRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_ids: - body["experiment_ids"] = [v for v in self.experiment_ids] - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - if self.run_view_type is not None: - body["run_view_type"] = self.run_view_type.value + if self.experiment_ids: body['experiment_ids'] = [v for v in self.experiment_ids] + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = [v for v in self.order_by] + if self.page_token is not None: body['page_token'] = self.page_token + if self.run_view_type is not None: body['run_view_type'] = self.run_view_type.value return body def as_shallow_dict(self) -> dict: """Serializes the SearchRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_ids: - body["experiment_ids"] = self.experiment_ids - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - if self.run_view_type is not None: - body["run_view_type"] = self.run_view_type + if self.experiment_ids: body['experiment_ids'] = self.experiment_ids + if self.filter is not None: body['filter'] = self.filter + if self.max_results is not None: body['max_results'] = self.max_results + if self.order_by: body['order_by'] = self.order_by + if self.page_token is not None: body['page_token'] = self.page_token + if self.run_view_type is not None: body['run_view_type'] = self.run_view_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchRuns: """Deserializes the SearchRuns from a dictionary.""" - return cls( - experiment_ids=d.get("experiment_ids", None), - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=d.get("order_by", None), - page_token=d.get("page_token", None), - run_view_type=_enum(d, "run_view_type", ViewType), - ) + return cls(experiment_ids=d.get('experiment_ids', None), filter=d.get('filter', None), max_results=d.get('max_results', None), order_by=d.get('order_by', None), page_token=d.get('page_token', None), run_view_type=_enum(d, 'run_view_type', ViewType)) + + @dataclass class SearchRunsResponse: next_page_token: Optional[str] = None """Token for the next page of runs.""" - + runs: Optional[List[Run]] = None """Runs that match the search criteria.""" - + def as_dict(self) -> dict: """Serializes the SearchRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.runs: - body["runs"] = [v.as_dict() for v in self.runs] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.runs: body['runs'] = [v.as_dict() for v in self.runs] return body def as_shallow_dict(self) -> dict: """Serializes the SearchRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = 
self.next_page_token - if self.runs: - body["runs"] = self.runs + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.runs: body['runs'] = self.runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchRunsResponse: """Deserializes the SearchRunsResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), runs=_repeated_dict(d, "runs", Run)) + return cls(next_page_token=d.get('next_page_token', None), runs=_repeated_dict(d, 'runs', Run)) + + @dataclass class SetExperimentTag: experiment_id: str """ID of the experiment under which to log the tag. Must be provided.""" - + key: str """Name of the tag. Keys up to 250 bytes in size are supported.""" - + value: str """String value of the tag being logged. Values up to 64KB in size are supported.""" - + def as_dict(self) -> dict: """Serializes the SetExperimentTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTag: """Deserializes the SetExperimentTag from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None), key=d.get("key", None), value=d.get("value", None)) + return cls(experiment_id=d.get('experiment_id', None), key=d.get('key', None), value=d.get('value', None)) + + @dataclass @@ -6087,38 +5147,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTagResponse: """Deserializes the SetExperimentTagResponse from a dictionary.""" return cls() + + @dataclass class SetLoggedModelTagsRequest: model_id: Optional[str] = None """The ID of the logged model to set the tags on.""" - + tags: Optional[List[LoggedModelTag]] = None """The tags to set on the logged model.""" - + def as_dict(self) -> dict: """Serializes the SetLoggedModelTagsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.model_id is not None: body['model_id'] = self.model_id + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the SetLoggedModelTagsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.tags: - body["tags"] = self.tags + if self.model_id is not None: body['model_id'] = self.model_id + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
SetLoggedModelTagsRequest: """Deserializes the SetLoggedModelTagsRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), tags=_repeated_dict(d, "tags", LoggedModelTag)) + return cls(model_id=d.get('model_id', None), tags=_repeated_dict(d, 'tags', LoggedModelTag)) + + @dataclass @@ -6137,48 +5197,46 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetLoggedModelTagsResponse: """Deserializes the SetLoggedModelTagsResponse from a dictionary.""" return cls() + + @dataclass class SetModelTagRequest: name: str """Unique name of the model.""" - + key: str """Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists, its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed to support key values up to 250 bytes in size.""" - + value: str """String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size.""" - + def as_dict(self) -> dict: """Serializes the SetModelTagRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetModelTagRequest: """Deserializes the SetModelTagRequest from a dictionary.""" - return cls(key=d.get("key", None), name=d.get("name", None), value=d.get("value", None)) + return cls(key=d.get('key', None), name=d.get('name', None), value=d.get('value', None)) + + @dataclass @@ -6197,57 +5255,51 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetModelTagResponse: """Deserializes the SetModelTagResponse from a dictionary.""" return cls() + + @dataclass class SetModelVersionTagRequest: name: str """Unique name of the model.""" - + version: str """Model version number.""" - + key: str """Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists, its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed to support key values up to 250 bytes in size.""" - + value: str """String value of the tag being logged. Maximum size depends on storage backend. 
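As a hedged illustration (model and tag names hypothetical); per the docstring, re-setting an existing key replaces its value rather than raising an error:

tag_req = SetModelTagRequest(name="my_model", key="owner", value="ml-platform")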
All storage backends are guaranteed to support key values up to 5000 bytes in size.""" - + def as_dict(self) -> dict: """Serializes the SetModelVersionTagRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - if self.version is not None: - body["version"] = self.version + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - if self.version is not None: - body["version"] = self.version + if self.key is not None: body['key'] = self.key + if self.name is not None: body['name'] = self.name + if self.value is not None: body['value'] = self.value + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagRequest: """Deserializes the SetModelVersionTagRequest from a dictionary.""" - return cls( - key=d.get("key", None), name=d.get("name", None), value=d.get("value", None), version=d.get("version", None) - ) + return cls(key=d.get('key', None), name=d.get('name', None), value=d.get('value', None), version=d.get('version', None)) + + @dataclass @@ -6266,58 +5318,49 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagResponse: """Deserializes the SetModelVersionTagResponse from a dictionary.""" return cls() + + @dataclass class SetTag: key: str """Name of the tag. Keys up to 250 bytes in size are supported.""" - + value: str """String value of the tag being logged. Values up to 64KB in size are supported.""" - + run_id: Optional[str] = None """ID of the run under which to log the tag. Must be provided.""" - + run_uuid: Optional[str] = None """[Deprecated, use `run_id` instead] ID of the run under which to log the tag. 
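A hedged run-tag sketch (run ID hypothetical); `run_uuid` is the deprecated spelling, so only `run_id` is set here:

tag = SetTag(key="stage", value="smoke-test", run_id="0a1b2c")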
This field will be removed in a future MLflow version.""" - + def as_dict(self) -> dict: """Serializes the SetTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the SetTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetTag: """Deserializes the SetTag from a dictionary.""" - return cls( - key=d.get("key", None), - run_id=d.get("run_id", None), - run_uuid=d.get("run_uuid", None), - value=d.get("value", None), - ) + return cls(key=d.get('key', None), run_id=d.get('run_id', None), run_uuid=d.get('run_uuid', None), value=d.get('value', None)) + + @dataclass @@ -6336,138 +5379,134 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetTagResponse: """Deserializes the SetTagResponse from a dictionary.""" return cls() + + class Stage(Enum): """Stage of the model version. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage.""" - - ARCHIVED = "Archived" - NONE = "None" - PRODUCTION = "Production" - STAGING = "Staging" - + + ARCHIVED = 'Archived' + NONE = 'None' + PRODUCTION = 'Production' + STAGING = 'Staging' class Status(Enum): """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. - + * `FAILED_REGISTRATION`: Request to register a new model version has failed. 
- + * `READY`: Model version is ready for use.""" - - FAILED_REGISTRATION = "FAILED_REGISTRATION" - PENDING_REGISTRATION = "PENDING_REGISTRATION" - READY = "READY" - + + FAILED_REGISTRATION = 'FAILED_REGISTRATION' + PENDING_REGISTRATION = 'PENDING_REGISTRATION' + READY = 'READY' @dataclass class TestRegistryWebhook: """Test webhook response object.""" - + body: Optional[str] = None """Body of the response from the webhook URL""" - + status_code: Optional[int] = None """Status code returned by the webhook URL""" - + def as_dict(self) -> dict: """Serializes the TestRegistryWebhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.body is not None: - body["body"] = self.body - if self.status_code is not None: - body["status_code"] = self.status_code + if self.body is not None: body['body'] = self.body + if self.status_code is not None: body['status_code'] = self.status_code return body def as_shallow_dict(self) -> dict: """Serializes the TestRegistryWebhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.body is not None: - body["body"] = self.body - if self.status_code is not None: - body["status_code"] = self.status_code + if self.body is not None: body['body'] = self.body + if self.status_code is not None: body['status_code'] = self.status_code return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhook: """Deserializes the TestRegistryWebhook from a dictionary.""" - return cls(body=d.get("body", None), status_code=d.get("status_code", None)) + return cls(body=d.get('body', None), status_code=d.get('status_code', None)) + + @dataclass class TestRegistryWebhookRequest: id: str """Webhook ID""" - + event: Optional[RegistryWebhookEvent] = None """If `event` is specified, the test trigger uses the specified event. 
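A hedged example (webhook ID hypothetical); omitting `event` falls back to a randomly chosen event associated with the webhook, as the docstring notes:

test_req = TestRegistryWebhookRequest(id="wh-123", event=RegistryWebhookEvent.MODEL_VERSION_CREATED)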
If `event` is not specified, the test trigger uses a randomly chosen event associated with the webhook.""" - + def as_dict(self) -> dict: """Serializes the TestRegistryWebhookRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.event is not None: - body["event"] = self.event.value - if self.id is not None: - body["id"] = self.id + if self.event is not None: body['event'] = self.event.value + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.event is not None: - body["event"] = self.event - if self.id is not None: - body["id"] = self.id + if self.event is not None: body['event'] = self.event + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookRequest: """Deserializes the TestRegistryWebhookRequest from a dictionary.""" - return cls(event=_enum(d, "event", RegistryWebhookEvent), id=d.get("id", None)) + return cls(event=_enum(d, 'event', RegistryWebhookEvent), id=d.get('id', None)) + + @dataclass class TestRegistryWebhookResponse: webhook: Optional[TestRegistryWebhook] = None """Test webhook response object.""" - + def as_dict(self) -> dict: """Serializes the TestRegistryWebhookResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.webhook: - body["webhook"] = self.webhook.as_dict() + if self.webhook: body['webhook'] = self.webhook.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.webhook: - body["webhook"] = self.webhook + if self.webhook: body['webhook'] = self.webhook return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: """Deserializes the TestRegistryWebhookResponse from a dictionary.""" - return cls(webhook=_from_dict(d, "webhook", TestRegistryWebhook)) + return cls(webhook=_from_dict(d, 'webhook', TestRegistryWebhook)) + + @dataclass class TransitionModelVersionStageDatabricks: name: str """Name of the model.""" - + version: str """Version of the model.""" - + stage: Stage """Target stage of the transition. Valid values are: @@ -6478,68 +5517,54 @@ class TransitionModelVersionStageDatabricks: * `Production`: Production stage. 
* `Archived`: Archived stage.""" - + archive_existing_versions: bool """Specifies whether to archive all current model versions in the target stage.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the TransitionModelVersionStageDatabricks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage.value - if self.version is not None: - body["version"] = self.version + if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage.value + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes.""" body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version + if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.stage is not None: body['stage'] = self.stage + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransitionModelVersionStageDatabricks: """Deserializes the TransitionModelVersionStageDatabricks from a dictionary.""" - return cls( - archive_existing_versions=d.get("archive_existing_versions", None), - comment=d.get("comment", None), - name=d.get("name", None), - stage=_enum(d, "stage", Stage), - version=d.get("version", None), - ) + return cls(archive_existing_versions=d.get('archive_existing_versions', None), comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None)) + + @dataclass class TransitionRequest: """Transition request details.""" - + available_actions: Optional[List[ActivityAction]] = None """Array of actions on the activity allowed for the current viewer.""" - + comment: Optional[str] = None """User-provided comment associated with the transition request.""" - + creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + to_stage: Optional[Stage] = None """Target stage of the transition (if the activity is stage transition related). Valid values are: @@ -6550,163 +5575,145 @@ class TransitionRequest: * `Production`: Production stage. 
* `Archived`: Archived stage.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the TransitionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.available_actions: - body["available_actions"] = [v.value for v in self.available_actions] - if self.comment is not None: - body["comment"] = self.comment - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.to_stage is not None: - body["to_stage"] = self.to_stage.value - if self.user_id is not None: - body["user_id"] = self.user_id + if self.available_actions: body['available_actions'] = [v.value for v in self.available_actions] + if self.comment is not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.to_stage is not None: body['to_stage'] = self.to_stage.value + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the TransitionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.available_actions: - body["available_actions"] = self.available_actions - if self.comment is not None: - body["comment"] = self.comment - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.to_stage is not None: - body["to_stage"] = self.to_stage - if self.user_id is not None: - body["user_id"] = self.user_id + if self.available_actions: body['available_actions'] = self.available_actions + if self.comment is not None: body['comment'] = self.comment + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.to_stage is not None: body['to_stage'] = self.to_stage + if self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransitionRequest: """Deserializes the TransitionRequest from a dictionary.""" - return cls( - available_actions=_repeated_enum(d, "available_actions", ActivityAction), - comment=d.get("comment", None), - creation_timestamp=d.get("creation_timestamp", None), - to_stage=_enum(d, "to_stage", Stage), - user_id=d.get("user_id", None), - ) + return cls(available_actions=_repeated_enum(d, 'available_actions', ActivityAction), comment=d.get('comment', None), creation_timestamp=d.get('creation_timestamp', None), to_stage=_enum(d, 'to_stage', Stage), user_id=d.get('user_id', None)) + + @dataclass class TransitionStageResponse: model_version: Optional[ModelVersionDatabricks] = None - + def as_dict(self) -> dict: """Serializes the TransitionStageResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_version: - body["model_version"] = self.model_version.as_dict() + if self.model_version: body['model_version'] = self.model_version.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TransitionStageResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_version: - body["model_version"] = self.model_version + if self.model_version: body['model_version'] = self.model_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransitionStageResponse: """Deserializes the TransitionStageResponse from a dictionary.""" - return cls(model_version=_from_dict(d, "model_version", ModelVersionDatabricks)) + return 
cls(model_version=_from_dict(d, 'model_version', ModelVersionDatabricks)) + + @dataclass class UpdateComment: id: str """Unique identifier of an activity""" - + comment: str """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the UpdateComment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id + if self.comment is not None: body['comment'] = self.comment + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateComment into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id + if self.comment is not None: body['comment'] = self.comment + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateComment: """Deserializes the UpdateComment from a dictionary.""" - return cls(comment=d.get("comment", None), id=d.get("id", None)) + return cls(comment=d.get('comment', None), id=d.get('id', None)) + + @dataclass class UpdateCommentResponse: comment: Optional[CommentObject] = None """Comment details.""" - + def as_dict(self) -> dict: """Serializes the UpdateCommentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment: - body["comment"] = self.comment.as_dict() + if self.comment: body['comment'] = self.comment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCommentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment: - body["comment"] = self.comment + if self.comment: body['comment'] = self.comment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCommentResponse: """Deserializes the UpdateCommentResponse from a dictionary.""" - return cls(comment=_from_dict(d, "comment", CommentObject)) + return cls(comment=_from_dict(d, 'comment', CommentObject)) + + @dataclass class UpdateExperiment: experiment_id: str """ID of the associated experiment.""" - + new_name: Optional[str] = None """If provided, the experiment's name is changed to the new name. 
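A hedged one-liner (experiment ID hypothetical); per the docstring, the rename fails server-side if the new name is already taken:

rename = UpdateExperiment(experiment_id="1234", new_name="demo-renamed")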
The new name must be unique.""" - + def as_dict(self) -> dict: """Serializes the UpdateExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.new_name is not None: - body["new_name"] = self.new_name + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.new_name is not None: body['new_name'] = self.new_name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.new_name is not None: - body["new_name"] = self.new_name + if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.new_name is not None: body['new_name'] = self.new_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExperiment: """Deserializes the UpdateExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None), new_name=d.get("new_name", None)) + return cls(experiment_id=d.get('experiment_id', None), new_name=d.get('new_name', None)) + + @dataclass @@ -6725,38 +5732,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateExperimentResponse: """Deserializes the UpdateExperimentResponse from a dictionary.""" return cls() + + @dataclass class UpdateModelRequest: name: str """Registered model unique name identifier.""" - + description: Optional[str] = None """If provided, updates the description for this `registered_model`.""" - + def as_dict(self) -> dict: """Serializes the UpdateModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateModelRequest: """Deserializes the UpdateModelRequest from a dictionary.""" - return cls(description=d.get("description", None), name=d.get("name", None)) + return cls(description=d.get('description', None), name=d.get('name', None)) + + @dataclass @@ -6775,45 +5782,43 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateModelResponse: """Deserializes the UpdateModelResponse from a dictionary.""" return cls() + + @dataclass class UpdateModelVersionRequest: name: str """Name of the registered model""" - + version: str """Model version number""" - + description: Optional[str] = None """If provided, updates the description for this `registered_model`.""" - + def as_dict(self) -> dict: """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.version is not None: - 
body["version"] = self.version + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.version is not None: body['version'] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.version is not None: body['version'] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: """Deserializes the UpdateModelVersionRequest from a dictionary.""" - return cls(description=d.get("description", None), name=d.get("name", None), version=d.get("version", None)) + return cls(description=d.get('description', None), name=d.get('name', None), version=d.get('version', None)) + + @dataclass @@ -6832,16 +5837,18 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionResponse: """Deserializes the UpdateModelVersionResponse from a dictionary.""" return cls() + + @dataclass class UpdateRegistryWebhook: id: str """Webhook ID""" - + description: Optional[str] = None """User-specified description for the webhook.""" - + events: Optional[List[RegistryWebhookEvent]] = None """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. @@ -6871,11 +5878,11 @@ class UpdateRegistryWebhook: to production. * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - + http_url_spec: Optional[HttpUrlSpec] = None - + job_spec: Optional[JobSpec] = None - + status: Optional[RegistryWebhookStatus] = None """Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. 
@@ -6884,148 +5891,116 @@ class UpdateRegistryWebhook: * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event.""" - + def as_dict(self) -> dict: """Serializes the UpdateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = [v.value for v in self.events] - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec.as_dict() - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec.as_dict() - if self.status is not None: - body["status"] = self.status.value + if self.description is not None: body['description'] = self.description + if self.events: body['events'] = [v.value for v in self.events] + if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict() + if self.id is not None: body['id'] = self.id + if self.job_spec: body['job_spec'] = self.job_spec.as_dict() + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = self.events - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec - if self.status is not None: - body["status"] = self.status + if self.description is not None: body['description'] = self.description + if self.events: body['events'] = self.events + if self.http_url_spec: body['http_url_spec'] = self.http_url_spec + if self.id is not None: body['id'] = self.id + if self.job_spec: body['job_spec'] = self.job_spec + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRegistryWebhook: """Deserializes the UpdateRegistryWebhook from a dictionary.""" - return cls( - description=d.get("description", None), - events=_repeated_enum(d, "events", RegistryWebhookEvent), - http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec), - id=d.get("id", None), - job_spec=_from_dict(d, "job_spec", JobSpec), - status=_enum(d, "status", RegistryWebhookStatus), - ) + return cls(description=d.get('description', None), events=_repeated_enum(d, 'events', RegistryWebhookEvent), http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpec), id=d.get('id', None), job_spec=_from_dict(d, 'job_spec', JobSpec), status=_enum(d, 'status', RegistryWebhookStatus)) + + @dataclass class UpdateRun: end_time: Optional[int] = None """Unix timestamp in milliseconds of when the run ended.""" - + run_id: Optional[str] = None """ID of the run to update. Must be provided.""" - + run_name: Optional[str] = None """Updated name of the run.""" - + run_uuid: Optional[str] = None """[Deprecated, use `run_id` instead] ID of the run to update. 
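A hedged sketch of closing out a run (run ID hypothetical); `end_time` is a Unix timestamp in milliseconds, and `status` uses the `UpdateRunStatus` enum defined below:

import time

finish = UpdateRun(run_id="0a1b2c", status=UpdateRunStatus.FINISHED, end_time=int(time.time() * 1000))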
This field will be removed in a future MLflow version.""" - + status: Optional[UpdateRunStatus] = None """Updated status of the run.""" - + def as_dict(self) -> dict: """Serializes the UpdateRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.status is not None: - body["status"] = self.status.value + if self.end_time is not None: body['end_time'] = self.end_time + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.status is not None: - body["status"] = self.status + if self.end_time is not None: body['end_time'] = self.end_time + if self.run_id is not None: body['run_id'] = self.run_id + if self.run_name is not None: body['run_name'] = self.run_name + if self.run_uuid is not None: body['run_uuid'] = self.run_uuid + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRun: """Deserializes the UpdateRun from a dictionary.""" - return cls( - end_time=d.get("end_time", None), - run_id=d.get("run_id", None), - run_name=d.get("run_name", None), - run_uuid=d.get("run_uuid", None), - status=_enum(d, "status", UpdateRunStatus), - ) + return cls(end_time=d.get('end_time', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_uuid=d.get('run_uuid', None), status=_enum(d, 'status', UpdateRunStatus)) + + @dataclass class UpdateRunResponse: run_info: Optional[RunInfo] = None """Updated metadata of the run.""" - + def as_dict(self) -> dict: """Serializes the UpdateRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_info: - body["run_info"] = self.run_info.as_dict() + if self.run_info: body['run_info'] = self.run_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_info: - body["run_info"] = self.run_info + if self.run_info: body['run_info'] = self.run_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRunResponse: """Deserializes the UpdateRunResponse from a dictionary.""" - return cls(run_info=_from_dict(d, "run_info", RunInfo)) + return cls(run_info=_from_dict(d, 'run_info', RunInfo)) + + class UpdateRunStatus(Enum): """Status of a run.""" - - FAILED = "FAILED" - FINISHED = "FINISHED" - KILLED = "KILLED" - RUNNING = "RUNNING" - SCHEDULED = "SCHEDULED" - + + FAILED = 'FAILED' + FINISHED = 'FINISHED' + KILLED = 'KILLED' + RUNNING = 'RUNNING' + SCHEDULED = 'SCHEDULED' @dataclass class UpdateWebhookResponse: @@ -7043,38 +6018,50 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> 
UpdateWebhookResponse: """Deserializes the UpdateWebhookResponse from a dictionary.""" return cls() + + class ViewType(Enum): """Qualifier for the view type.""" + + ACTIVE_ONLY = 'ACTIVE_ONLY' + ALL = 'ALL' + DELETED_ONLY = 'DELETED_ONLY' - ACTIVE_ONLY = "ACTIVE_ONLY" - ALL = "ALL" - DELETED_ONLY = "DELETED_ONLY" class ExperimentsAPI: """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server. - + Experiments are located in the workspace file tree. You manage experiments using the same tools you use to manage other workspace objects such as folders, notebooks, and libraries.""" - + def __init__(self, api_client): self._api = api_client + - def create_experiment( - self, name: str, *, artifact_location: Optional[str] = None, tags: Optional[List[ExperimentTag]] = None - ) -> CreateExperimentResponse: - """Create experiment. + + + + + + + def create_experiment(self + , name: str + , * + , artifact_location: Optional[str] = None, tags: Optional[List[ExperimentTag]] = None) -> CreateExperimentResponse: + """Create experiment. + Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that another experiment with the same name does not already exist and fails if another experiment with the same name already exists. - + Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists. - + :param name: str Experiment name. :param artifact_location: str (optional) @@ -7085,36 +6072,31 @@ def create_experiment( depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to support up to 20 tags per request. - + :returns: :class:`CreateExperimentResponse` """ body = {} - if artifact_location is not None: - body["artifact_location"] = artifact_location - if name is not None: - body["name"] = name - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/experiments/create", body=body, headers=headers) + if artifact_location is not None: body['artifact_location'] = artifact_location + if name is not None: body['name'] = name + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/experiments/create', body=body + + , headers=headers + ) return CreateExperimentResponse.from_dict(res) - def create_logged_model( - self, - experiment_id: str, - *, - model_type: Optional[str] = None, - name: Optional[str] = None, - params: Optional[List[LoggedModelParameter]] = None, - source_run_id: Optional[str] = None, - tags: Optional[List[LoggedModelTag]] = None, - ) -> CreateLoggedModelResponse: - """Create a logged model. + + + + def create_logged_model(self + , experiment_id: str + , * + , model_type: Optional[str] = None, name: Optional[str] = None, params: Optional[List[LoggedModelParameter]] = None, source_run_id: Optional[str] = None, tags: Optional[List[LoggedModelTag]] = None) -> CreateLoggedModelResponse: + """Create a logged model. + :param experiment_id: str The ID of the experiment that owns the model. 
:param model_type: str (optional) @@ -7127,45 +6109,38 @@ def create_logged_model( The ID of the run that created the model. :param tags: List[:class:`LoggedModelTag`] (optional) Tags attached to the model. - + :returns: :class:`CreateLoggedModelResponse` """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - if model_type is not None: - body["model_type"] = model_type - if name is not None: - body["name"] = name - if params is not None: - body["params"] = [v.as_dict() for v in params] - if source_run_id is not None: - body["source_run_id"] = source_run_id - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/logged-models", body=body, headers=headers) + if experiment_id is not None: body['experiment_id'] = experiment_id + if model_type is not None: body['model_type'] = model_type + if name is not None: body['name'] = name + if params is not None: body['params'] = [v.as_dict() for v in params] + if source_run_id is not None: body['source_run_id'] = source_run_id + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/logged-models', body=body + + , headers=headers + ) return CreateLoggedModelResponse.from_dict(res) - def create_run( - self, - *, - experiment_id: Optional[str] = None, - run_name: Optional[str] = None, - start_time: Optional[int] = None, - tags: Optional[List[RunTag]] = None, - user_id: Optional[str] = None, - ) -> CreateRunResponse: - """Create a run. + + + + def create_run(self + + , * + , experiment_id: Optional[str] = None, run_name: Optional[str] = None, start_time: Optional[int] = None, tags: Optional[List[RunTag]] = None, user_id: Optional[str] = None) -> CreateRunResponse: + """Create a run. + Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and `mlflowRunTag` associated with a single execution. - + :param experiment_id: str (optional) ID of the associated experiment. :param run_name: str (optional) @@ -7177,110 +6152,138 @@ def create_run( :param user_id: str (optional) ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in a future MLflow release. Use 'mlflow.user' tag instead. 
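Putting the client methods above to use, a hedged sketch (experiment ID and user are hypothetical); per the deprecation note, the 'mlflow.user' tag is set instead of `user_id`, using the `RunTag` dataclass from this module:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
run = w.experiments.create_run(
    experiment_id="1234",
    run_name="nightly-train",
    tags=[RunTag(key="mlflow.user", value="someone@example.com")],
)
print(run.run.info.run_id)  # CreateRunResponse wraps the newly created Run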
- + :returns: :class:`CreateRunResponse` """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - if run_name is not None: - body["run_name"] = run_name - if start_time is not None: - body["start_time"] = start_time - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - if user_id is not None: - body["user_id"] = user_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/runs/create", body=body, headers=headers) + if experiment_id is not None: body['experiment_id'] = experiment_id + if run_name is not None: body['run_name'] = run_name + if start_time is not None: body['start_time'] = start_time + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + if user_id is not None: body['user_id'] = user_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/runs/create', body=body + + , headers=headers + ) return CreateRunResponse.from_dict(res) - def delete_experiment(self, experiment_id: str): - """Delete an experiment. + + + + def delete_experiment(self + , experiment_id: str + ): + """Delete an experiment. + Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with the experiment are also deleted. - + :param experiment_id: str ID of the associated experiment. - - + + """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if experiment_id is not None: body['experiment_id'] = experiment_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/experiments/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/experiments/delete", body=body, headers=headers) + + + - def delete_logged_model(self, model_id: str): + def delete_logged_model(self + , model_id: str + ): """Delete a logged model. - + :param model_id: str The ID of the logged model to delete. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/mlflow/logged-models/{model_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers) + + + - def delete_logged_model_tag(self, model_id: str, tag_key: str): + def delete_logged_model_tag(self + , model_id: str, tag_key: str + ): """Delete a tag on a logged model. - + :param model_id: str The ID of the logged model to delete the tag from. :param tag_key: str The tag key. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/mlflow/logged-models/{model_id}/tags/{tag_key}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/mlflow/logged-models/{model_id}/tags/{tag_key}", headers=headers) + + + - def delete_run(self, run_id: str): + def delete_run(self + , run_id: str + ): """Delete a run. - + Marks a run for deletion. - + :param run_id: str ID of the run to delete. 
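A usage sketch for create_run above; start_time is a Unix timestamp in milliseconds, and the experiment ID is a placeholder:

import time
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import RunTag

w = WorkspaceClient()
resp = w.experiments.create_run(
    experiment_id="1234567890123456",    # hypothetical experiment ID
    run_name="nightly-training",
    start_time=int(time.time() * 1000),  # milliseconds since epoch
    tags=[RunTag(key="trigger", value="cron")],
)
print(resp.run.info.run_id)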
- - + + """ body = {} - if run_id is not None: - body["run_id"] = run_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if run_id is not None: body['run_id'] = run_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/runs/delete", body=body, headers=headers) + + + - def delete_runs( - self, experiment_id: str, max_timestamp_millis: int, *, max_runs: Optional[int] = None - ) -> DeleteRunsResponse: + def delete_runs(self + , experiment_id: str, max_timestamp_millis: int + , * + , max_runs: Optional[int] = None) -> DeleteRunsResponse: """Delete runs by creation time. - + Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to delete. :param max_timestamp_millis: int @@ -7289,164 +6292,150 @@ def delete_runs( :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to delete. The maximum allowed value for max_runs is 10000. - + :returns: :class:`DeleteRunsResponse` """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - if max_runs is not None: - body["max_runs"] = max_runs - if max_timestamp_millis is not None: - body["max_timestamp_millis"] = max_timestamp_millis - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/databricks/runs/delete-runs", body=body, headers=headers) + if experiment_id is not None: body['experiment_id'] = experiment_id + if max_runs is not None: body['max_runs'] = max_runs + if max_timestamp_millis is not None: body['max_timestamp_millis'] = max_timestamp_millis + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/databricks/runs/delete-runs', body=body + + , headers=headers + ) return DeleteRunsResponse.from_dict(res) - def delete_tag(self, run_id: str, key: str): - """Delete a tag on a run. + + + + def delete_tag(self + , run_id: str, key: str + ): + """Delete a tag on a run. + Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. - + :param run_id: str ID of the run that the tag was logged under. Must be provided. :param key: str Name of the tag. Maximum size is 255 bytes. Must be provided. - - + + """ body = {} - if key is not None: - body["key"] = key - if run_id is not None: - body["run_id"] = run_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if key is not None: body['key'] = key + if run_id is not None: body['run_id'] = run_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/delete-tag', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/runs/delete-tag", body=body, headers=headers) + + + - def finalize_logged_model(self, model_id: str, status: LoggedModelStatus) -> FinalizeLoggedModelResponse: + def finalize_logged_model(self + , model_id: str, status: LoggedModelStatus + ) -> FinalizeLoggedModelResponse: """Finalize a logged model. - + :param model_id: str The ID of the logged model to finalize. 
:param status: :class:`LoggedModelStatus` Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that something - went wrong when logging the model weights / agent code). - + went wrong when logging the model weights / agent code. + :returns: :class:`FinalizeLoggedModelResponse` """ body = {} - if status is not None: - body["status"] = status.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/mlflow/logged-models/{model_id}", body=body, headers=headers) + if status is not None: body['status'] = status.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/mlflow/logged-models/{model_id}', body=body + + , headers=headers + ) return FinalizeLoggedModelResponse.from_dict(res) - def get_by_name(self, experiment_name: str) -> GetExperimentByNameResponse: - """Get an experiment by name. + + + + def get_by_name(self + , experiment_name: str + ) -> GetExperimentByNameResponse: + """Get an experiment by name. + Gets metadata for an experiment. - + This endpoint will return deleted experiments, but prefers the active experiment if an active and deleted experiment share the same name. If multiple deleted experiments share the same name, the API will return one of them. - + Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists. - + :param experiment_name: str Name of the associated experiment. - + :returns: :class:`GetExperimentByNameResponse` """ - + query = {} - if experiment_name is not None: - query["experiment_name"] = experiment_name - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/mlflow/experiments/get-by-name", query=query, headers=headers) + if experiment_name is not None: query['experiment_name'] = experiment_name + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/mlflow/experiments/get-by-name', query=query + + , headers=headers + ) return GetExperimentByNameResponse.from_dict(res) - def get_credentials_for_trace_data_download(self, request_id: str) -> GetCredentialsForTraceDataDownloadResponse: - """Get credentials to download trace data. - - :param request_id: str - The ID of the trace to fetch artifact download credentials for. - - :returns: :class:`GetCredentialsForTraceDataDownloadResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/mlflow/traces/{request_id}/credentials-for-data-download", headers=headers) - return GetCredentialsForTraceDataDownloadResponse.from_dict(res) - - def get_credentials_for_trace_data_upload(self, request_id: str) -> GetCredentialsForTraceDataUploadResponse: - """Get credentials to upload trace data. - - :param request_id: str - The ID of the trace to fetch artifact upload credentials for. - - :returns: :class:`GetCredentialsForTraceDataUploadResponse` - """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/mlflow/traces/{request_id}/credentials-for-data-upload", headers=headers) - return GetCredentialsForTraceDataUploadResponse.from_dict(res) + + + - def get_experiment(self, experiment_id: str) -> GetExperimentResponse: + def get_experiment(self + , experiment_id: str + ) -> GetExperimentResponse: """Get an experiment. - + Gets metadata for an experiment. This method works on deleted experiments. - + :param experiment_id: str ID of the associated experiment. 
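get_by_name resolves an experiment name to its metadata, preferring the active experiment when names collide; a sketch with a hypothetical path:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.experiments.get_by_name(
    experiment_name="/Users/someone@example.com/demo-experiment"  # hypothetical
)
print(resp.experiment.experiment_id, resp.experiment.lifecycle_stage)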
- + :returns: :class:`GetExperimentResponse` """ - + query = {} - if experiment_id is not None: - query["experiment_id"] = experiment_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/mlflow/experiments/get", query=query, headers=headers) + if experiment_id is not None: query['experiment_id'] = experiment_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/mlflow/experiments/get', query=query + + , headers=headers + ) return GetExperimentResponse.from_dict(res) - def get_history( - self, - metric_key: str, - *, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - run_id: Optional[str] = None, - run_uuid: Optional[str] = None, - ) -> Iterator[Metric]: - """Get metric history for a run. + + + + def get_history(self + , metric_key: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None, run_id: Optional[str] = None, run_uuid: Optional[str] = None) -> Iterator[Metric]: + """Get metric history for a run. + Gets a list of all values for the specified metric for a given run. - + :param metric_key: str Name of the metric. :param max_results: int (optional) @@ -7459,131 +6448,157 @@ def get_history( :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run from which to fetch metric values. This field will be removed in a future MLflow version. - + :returns: Iterator over :class:`Metric` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if metric_key is not None: - query["metric_key"] = metric_key - if page_token is not None: - query["page_token"] = page_token - if run_id is not None: - query["run_id"] = run_id - if run_uuid is not None: - query["run_uuid"] = run_uuid - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if metric_key is not None: query['metric_key'] = metric_key + if page_token is not None: query['page_token'] = page_token + if run_id is not None: query['run_id'] = run_id + if run_uuid is not None: query['run_uuid'] = run_uuid + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/mlflow/metrics/get-history", query=query, headers=headers) - if "metrics" in json: - for v in json["metrics"]: - yield Metric.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def get_logged_model(self, model_id: str) -> GetLoggedModelResponse: - """Get a logged model. + json = self._api.do('GET','/api/2.0/mlflow/metrics/get-history', query=query + + , headers=headers + ) + if 'metrics' in json: + for v in json['metrics']: + yield Metric.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def get_logged_model(self + , model_id: str + ) -> GetLoggedModelResponse: + """Get a logged model. + :param model_id: str The ID of the logged model to retrieve. 
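The get_history paginator above follows next_page_token internally, so callers simply iterate; a sketch with a hypothetical run ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for metric in w.experiments.get_history(
    metric_key="rmse",
    run_id="0123456789abcdef",  # hypothetical run ID
    max_results=100,            # page size, not a total cap
):
    print(metric.step, metric.timestamp, metric.value)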
- + :returns: :class:`GetLoggedModelResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/mlflow/logged-models/{model_id}' + + , headers=headers + ) return GetLoggedModelResponse.from_dict(res) - def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse: - """Get experiment permission levels. + + + + def get_permission_levels(self + , experiment_id: str + ) -> GetExperimentPermissionLevelsResponse: + """Get experiment permission levels. + Gets the permission levels that a user can have on an object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`GetExperimentPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/experiments/{experiment_id}/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/experiments/{experiment_id}/permissionLevels' + + , headers=headers + ) return GetExperimentPermissionLevelsResponse.from_dict(res) - def get_permissions(self, experiment_id: str) -> ExperimentPermissions: - """Get experiment permissions. + + + + def get_permissions(self + , experiment_id: str + ) -> ExperimentPermissions: + """Get experiment permissions. + Gets the permissions of an experiment. Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`ExperimentPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/experiments/{experiment_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/experiments/{experiment_id}' + + , headers=headers + ) return ExperimentPermissions.from_dict(res) - def get_run(self, run_id: str, *, run_uuid: Optional[str] = None) -> GetRunResponse: - """Get a run. + + + + def get_run(self + , run_id: str + , * + , run_uuid: Optional[str] = None) -> GetRunResponse: + """Get a run. + Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. - + If there are multiple values with the latest timestamp, return the maximum of these values. - + :param run_id: str ID of the run to fetch. Must be provided. :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run to fetch. This field will be removed in a future MLflow version. 
- + :returns: :class:`GetRunResponse` """ - + query = {} - if run_id is not None: - query["run_id"] = run_id - if run_uuid is not None: - query["run_uuid"] = run_uuid - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/mlflow/runs/get", query=query, headers=headers) + if run_id is not None: query['run_id'] = run_id + if run_uuid is not None: query['run_uuid'] = run_uuid + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/mlflow/runs/get', query=query + + , headers=headers + ) return GetRunResponse.from_dict(res) - def list_artifacts( - self, - *, - page_token: Optional[str] = None, - path: Optional[str] = None, - run_id: Optional[str] = None, - run_uuid: Optional[str] = None, - ) -> Iterator[FileInfo]: - """List artifacts. + + + + def list_artifacts(self + + , * + , page_token: Optional[str] = None, path: Optional[str] = None, run_id: Optional[str] = None, run_uuid: Optional[str] = None) -> Iterator[FileInfo]: + """List artifacts. + List artifacts for a run. Takes an optional `artifact_path` prefix which if specified, the response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). - + :param page_token: str (optional) The token indicating the page of artifact results to fetch. `page_token` is not supported when listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. @@ -7597,43 +6612,44 @@ def list_artifacts( :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run whose artifacts to list. This field will be removed in a future MLflow version. - + :returns: Iterator over :class:`FileInfo` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - if path is not None: - query["path"] = path - if run_id is not None: - query["run_id"] = run_id - if run_uuid is not None: - query["run_uuid"] = run_uuid - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + if path is not None: query['path'] = path + if run_id is not None: query['run_id'] = run_id + if run_uuid is not None: query['run_uuid'] = run_uuid + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/mlflow/artifacts/list", query=query, headers=headers) - if "files" in json: - for v in json["files"]: - yield FileInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_experiments( - self, - *, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - view_type: Optional[ViewType] = None, - ) -> Iterator[Experiment]: - """List experiments. + json = self._api.do('GET','/api/2.0/mlflow/artifacts/list', query=query + + , headers=headers + ) + if 'files' in json: + for v in json['files']: + yield FileInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Gets a list of all experiments. + + + + def list_experiments(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None, view_type: Optional[ViewType] = None) -> Iterator[Experiment]: + """List experiments. + + Gets a list of all experiments. 
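list_artifacts is exposed the same way, as an iterator over FileInfo entries; a sketch listing a run's root artifact directory (the run ID is a placeholder):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for f in w.experiments.list_artifacts(run_id="0123456789abcdef"):  # hypothetical run ID
    print(f.path, f.is_dir, f.file_size)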
+ :param max_results: int (optional) Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are @@ -7642,116 +6658,82 @@ def list_experiments( Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. - + :returns: Iterator over :class:`Experiment` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - if view_type is not None: - query["view_type"] = view_type.value - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if view_type is not None: query['view_type'] = view_type.value + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/mlflow/experiments/list", query=query, headers=headers) - if "experiments" in json: - for v in json["experiments"]: - yield Experiment.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_logged_model_artifacts( - self, model_id: str, *, artifact_directory_path: Optional[str] = None, page_token: Optional[str] = None - ) -> ListLoggedModelArtifactsResponse: - """List artifacts for a logged model. - - List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix which if - specified, the response contains only artifacts with the specified prefix. - - :param model_id: str - The ID of the logged model for which to list the artifacts. - :param artifact_directory_path: str (optional) - Filter artifacts matching this path (a relative path from the root artifact directory). - :param page_token: str (optional) - Token indicating the page of artifact results to fetch. `page_token` is not supported when listing - artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call - `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports - pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). 
+ json = self._api.do('GET','/api/2.0/mlflow/experiments/list', query=query + + , headers=headers + ) + if 'experiments' in json: + for v in json['experiments']: + yield Experiment.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - :returns: :class:`ListLoggedModelArtifactsResponse` - """ + + + - query = {} - if artifact_directory_path is not None: - query["artifact_directory_path"] = artifact_directory_path - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/mlflow/logged-models/{model_id}/artifacts/directories", query=query, headers=headers - ) - return ListLoggedModelArtifactsResponse.from_dict(res) - - def log_batch( - self, - *, - metrics: Optional[List[Metric]] = None, - params: Optional[List[Param]] = None, - run_id: Optional[str] = None, - tags: Optional[List[RunTag]] = None, - ): + def log_batch(self + + , * + , metrics: Optional[List[Metric]] = None, params: Optional[List[Param]] = None, run_id: Optional[str] = None, tags: Optional[List[RunTag]] = None): """Log a batch of metrics/params/tags for a run. - + Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server will respond with an error (non-200 status code). - + In case of error (due to internal server error or an invalid request), partial data may be written. - + You can write metrics, params, and tags in interleaving fashion, but within a given entity type are guaranteed to follow the order specified in the request body. - + The overwrite behavior for metrics, params, and tags is as follows: - + * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to the set of values for the metric with the provided key. - + * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple tag values with the same key are provided in the same API request, the last-provided tag value is written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent. - + * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will result in an error). However, logging the same param (key, value) is permitted. Specifically, logging a param is idempotent. - + Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB in size and contain: - + * No more than 1000 metrics, params, and tags in total - + * Up to 1000 metrics - + * Up to 100 params - + * Up to 100 tags - + For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900 metrics, 50 params, and 51 tags is invalid. - + The following limits also apply to metric, param, and tag keys and values: - + * Metric keys, param keys, and tag keys can be up to 250 characters in length - + * Parameter and tag values can be up to 250 characters in length - + :param metrics: List[:class:`Metric`] (optional) Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and tags in total. @@ -7763,100 +6745,102 @@ def log_batch( :param tags: List[:class:`RunTag`] (optional) Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags in total. 
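For the list_experiments paginator shown above, a sketch that also returns deleted experiments via the ViewType qualifier defined earlier in this module:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import ViewType

w = WorkspaceClient()
for exp in w.experiments.list_experiments(view_type=ViewType.ALL, max_results=1000):
    print(exp.experiment_id, exp.name, exp.lifecycle_stage)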
- - + + """ body = {} - if metrics is not None: - body["metrics"] = [v.as_dict() for v in metrics] - if params is not None: - body["params"] = [v.as_dict() for v in params] - if run_id is not None: - body["run_id"] = run_id - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/mlflow/runs/log-batch", body=body, headers=headers) - - def log_inputs( - self, run_id: str, *, datasets: Optional[List[DatasetInput]] = None, models: Optional[List[ModelInput]] = None - ): - """Log inputs to a run. + if metrics is not None: body['metrics'] = [v.as_dict() for v in metrics] + if params is not None: body['params'] = [v.as_dict() for v in params] + if run_id is not None: body['run_id'] = run_id + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/log-batch', body=body + + , headers=headers + ) + - **NOTE:** Experimental: This API may change or be removed in a future release without warning. + + + + def log_inputs(self + , run_id: str + , * + , datasets: Optional[List[DatasetInput]] = None, models: Optional[List[ModelInput]] = None): + """Log inputs to a run. + + **NOTE:** Experimental: This API may change or be removed in a future release without warning. + Logs inputs, such as datasets and models, to an MLflow Run. - + :param run_id: str ID of the run to log under :param datasets: List[:class:`DatasetInput`] (optional) Dataset inputs :param models: List[:class:`ModelInput`] (optional) Model inputs - - + + """ body = {} - if datasets is not None: - body["datasets"] = [v.as_dict() for v in datasets] - if models is not None: - body["models"] = [v.as_dict() for v in models] - if run_id is not None: - body["run_id"] = run_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if datasets is not None: body['datasets'] = [v.as_dict() for v in datasets] + if models is not None: body['models'] = [v.as_dict() for v in models] + if run_id is not None: body['run_id'] = run_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/log-inputs', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/runs/log-inputs", body=body, headers=headers) + + + - def log_logged_model_params(self, model_id: str, *, params: Optional[List[LoggedModelParameter]] = None): + def log_logged_model_params(self + , model_id: str + , * + , params: Optional[List[LoggedModelParameter]] = None): """Log params for a logged model. - + Logs params for a logged model. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training. A param can be logged only once for a logged model, and attempting to overwrite an existing param with a different value will result in an error - + :param model_id: str The ID of the logged model to log params for. :param params: List[:class:`LoggedModelParameter`] (optional) Parameters to attach to the model. 
- - + + """ body = {} - if params is not None: - body["params"] = [v.as_dict() for v in params] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", f"/api/2.0/mlflow/logged-models/{model_id}/params", body=body, headers=headers) - - def log_metric( - self, - key: str, - value: float, - timestamp: int, - *, - dataset_digest: Optional[str] = None, - dataset_name: Optional[str] = None, - model_id: Optional[str] = None, - run_id: Optional[str] = None, - run_uuid: Optional[str] = None, - step: Optional[int] = None, - ): - """Log a metric for a run. + if params is not None: body['params'] = [v.as_dict() for v in params] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST',f'/api/2.0/mlflow/logged-models/{model_id}/params', body=body + + , headers=headers + ) + + + + + + def log_metric(self + , key: str, value: float, timestamp: int + , * + , dataset_digest: Optional[str] = None, dataset_name: Optional[str] = None, model_id: Optional[str] = None, run_id: Optional[str] = None, run_uuid: Optional[str] = None, step: Optional[int] = None): + """Log a metric for a run. + Log a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be logged multiple times. - + :param key: str Name of the metric. :param value: float @@ -7878,92 +6862,103 @@ def log_metric( removed in a future MLflow version. :param step: int (optional) Step at which to log the metric - - + + """ body = {} - if dataset_digest is not None: - body["dataset_digest"] = dataset_digest - if dataset_name is not None: - body["dataset_name"] = dataset_name - if key is not None: - body["key"] = key - if model_id is not None: - body["model_id"] = model_id - if run_id is not None: - body["run_id"] = run_id - if run_uuid is not None: - body["run_uuid"] = run_uuid - if step is not None: - body["step"] = step - if timestamp is not None: - body["timestamp"] = timestamp - if value is not None: - body["value"] = value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/mlflow/runs/log-metric", body=body, headers=headers) - - def log_model(self, *, model_json: Optional[str] = None, run_id: Optional[str] = None): - """Log a model. + if dataset_digest is not None: body['dataset_digest'] = dataset_digest + if dataset_name is not None: body['dataset_name'] = dataset_name + if key is not None: body['key'] = key + if model_id is not None: body['model_id'] = model_id + if run_id is not None: body['run_id'] = run_id + if run_uuid is not None: body['run_uuid'] = run_uuid + if step is not None: body['step'] = step + if timestamp is not None: body['timestamp'] = timestamp + if value is not None: body['value'] = value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/log-metric', body=body + + , headers=headers + ) + - **NOTE:** Experimental: This API may change or be removed in a future release without warning. + + + + def log_model(self + + , * + , model_json: Optional[str] = None, run_id: Optional[str] = None): + """Log a model. + + **NOTE:** Experimental: This API may change or be removed in a future release without warning. + :param model_json: str (optional) MLmodel file in json format. 
:param run_id: str (optional) ID of the run to log under - - + + """ body = {} - if model_json is not None: - body["model_json"] = model_json - if run_id is not None: - body["run_id"] = run_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if model_json is not None: body['model_json'] = model_json + if run_id is not None: body['run_id'] = run_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/log-model', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/runs/log-model", body=body, headers=headers) + + + - def log_outputs(self, run_id: str, *, models: Optional[List[ModelOutput]] = None): + def log_outputs(self + , run_id: str + , * + , models: Optional[List[ModelOutput]] = None): """Log outputs from a run. - + **NOTE**: Experimental: This API may change or be removed in a future release without warning. - + Logs outputs, such as models, from an MLflow Run. - + :param run_id: str The ID of the Run from which to log outputs. :param models: List[:class:`ModelOutput`] (optional) The model outputs from the Run. - - + + """ body = {} - if models is not None: - body["models"] = [v.as_dict() for v in models] - if run_id is not None: - body["run_id"] = run_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if models is not None: body['models'] = [v.as_dict() for v in models] + if run_id is not None: body['run_id'] = run_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/outputs', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/runs/outputs", body=body, headers=headers) + + + - def log_param(self, key: str, value: str, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None): + def log_param(self + , key: str, value: str + , * + , run_id: Optional[str] = None, run_uuid: Optional[str] = None): """Log a param for a run. - + Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only once for a run. - + :param key: str Name of the param. Maximum size is 255 bytes. :param value: str @@ -7973,80 +6968,94 @@ def log_param(self, key: str, value: str, *, run_id: Optional[str] = None, run_u :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the param. This field will be removed in a future MLflow version. - - + + """ body = {} - if key is not None: - body["key"] = key - if run_id is not None: - body["run_id"] = run_id - if run_uuid is not None: - body["run_uuid"] = run_uuid - if value is not None: - body["value"] = value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/mlflow/runs/log-parameter", body=body, headers=headers) - - def restore_experiment(self, experiment_id: str): - """Restore an experiment. 
+ if key is not None: body['key'] = key + if run_id is not None: body['run_id'] = run_id + if run_uuid is not None: body['run_uuid'] = run_uuid + if value is not None: body['value'] = value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/log-parameter', body=body + + , headers=headers + ) + + + + + + def restore_experiment(self + , experiment_id: str + ): + """Restore an experiment. + Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are also restored. - + Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted. - + :param experiment_id: str ID of the associated experiment. - - + + """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if experiment_id is not None: body['experiment_id'] = experiment_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/experiments/restore', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/experiments/restore", body=body, headers=headers) + + + - def restore_run(self, run_id: str): + def restore_run(self + , run_id: str + ): """Restore a run. - + Restores a deleted run. This also restores associated metadata, runs, metrics, params, and tags. - + Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. - + :param run_id: str ID of the run to restore. - - + + """ body = {} - if run_id is not None: - body["run_id"] = run_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if run_id is not None: body['run_id'] = run_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/restore', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/runs/restore", body=body, headers=headers) + + + - def restore_runs( - self, experiment_id: str, min_timestamp_millis: int, *, max_runs: Optional[int] = None - ) -> RestoreRunsResponse: + def restore_runs(self + , experiment_id: str, min_timestamp_millis: int + , * + , max_runs: Optional[int] = None) -> RestoreRunsResponse: """Restore runs by deletion time. - + Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to restore. :param min_timestamp_millis: int @@ -8055,37 +7064,33 @@ def restore_runs( :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to restore. The maximum allowed value for max_runs is 10000. 
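Since params are write-once per run, re-logging the same key with a different value fails while repeating the identical pair is a no-op; a sketch of log_param:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.experiments.log_param("max_depth", "8", run_id="0123456789abcdef")  # hypothetical run ID
# Calling again with ("max_depth", "8") is idempotent;
# calling with ("max_depth", "10") raises an error.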
- + :returns: :class:`RestoreRunsResponse` """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - if max_runs is not None: - body["max_runs"] = max_runs - if min_timestamp_millis is not None: - body["min_timestamp_millis"] = min_timestamp_millis - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/databricks/runs/restore-runs", body=body, headers=headers) + if experiment_id is not None: body['experiment_id'] = experiment_id + if max_runs is not None: body['max_runs'] = max_runs + if min_timestamp_millis is not None: body['min_timestamp_millis'] = min_timestamp_millis + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/databricks/runs/restore-runs', body=body + + , headers=headers + ) return RestoreRunsResponse.from_dict(res) - def search_experiments( - self, - *, - filter: Optional[str] = None, - max_results: Optional[int] = None, - order_by: Optional[List[str]] = None, - page_token: Optional[str] = None, - view_type: Optional[ViewType] = None, - ) -> Iterator[Experiment]: - """Search experiments. + + + + def search_experiments(self + + , * + , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None, view_type: Optional[ViewType] = None) -> Iterator[Experiment]: + """Search experiments. + Searches for experiments that satisfy specified search criteria. - + :param filter: str (optional) String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'") :param max_results: int (optional) @@ -8098,48 +7103,44 @@ def search_experiments( Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. - + :returns: Iterator over :class:`Experiment` """ body = {} - if filter is not None: - body["filter"] = filter - if max_results is not None: - body["max_results"] = max_results - if order_by is not None: - body["order_by"] = [v for v in order_by] - if page_token is not None: - body["page_token"] = page_token - if view_type is not None: - body["view_type"] = view_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - + if filter is not None: body['filter'] = filter + if max_results is not None: body['max_results'] = max_results + if order_by is not None: body['order_by'] = [v for v in order_by] + if page_token is not None: body['page_token'] = page_token + if view_type is not None: body['view_type'] = view_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + + while True: - json = self._api.do("POST", "/api/2.0/mlflow/experiments/search", body=body, headers=headers) - if "experiments" in json: - for v in json["experiments"]: - yield Experiment.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - body["page_token"] = json["next_page_token"] - - def search_logged_models( - self, - *, - datasets: Optional[List[SearchLoggedModelsDataset]] = None, - experiment_ids: Optional[List[str]] = None, - filter: Optional[str] = None, - max_results: Optional[int] = None, - order_by: Optional[List[SearchLoggedModelsOrderBy]] = None, - page_token: Optional[str] = None, - ) -> SearchLoggedModelsResponse: - """Search logged models. 
+ json = self._api.do('POST','/api/2.0/mlflow/experiments/search', body=body + + , headers=headers + ) + if 'experiments' in json: + for v in json['experiments']: + yield Experiment.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + body['page_token'] = json['next_page_token'] + - Search for Logged Models that satisfy specified search criteria. + + + + def search_logged_models(self + + , * + , datasets: Optional[List[SearchLoggedModelsDataset]] = None, experiment_ids: Optional[List[str]] = None, filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[SearchLoggedModelsOrderBy]] = None, page_token: Optional[str] = None) -> SearchLoggedModelsResponse: + """Search logged models. + + Search for Logged Models that satisfy specified search criteria. + :param datasets: List[:class:`SearchLoggedModelsDataset`] (optional) List of datasets on which to apply the metrics filter clauses. For example, a filter with `metrics.accuracy > 0.9` and dataset info with name "test_dataset" means we will return all logged @@ -8151,7 +7152,7 @@ def search_logged_models( :param filter: str (optional) A filter expression over logged model info and data that allows returning a subset of logged models. The syntax is a subset of SQL that supports AND'ing together binary operations. - + Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``. :param max_results: int (optional) The maximum number of Logged Models to return. The maximum limit is 50. @@ -8159,58 +7160,50 @@ def search_logged_models( The list of columns for ordering the results, with additional fields for sorting criteria. :param page_token: str (optional) The token indicating the page of logged models to fetch. - + :returns: :class:`SearchLoggedModelsResponse` """ body = {} - if datasets is not None: - body["datasets"] = [v.as_dict() for v in datasets] - if experiment_ids is not None: - body["experiment_ids"] = [v for v in experiment_ids] - if filter is not None: - body["filter"] = filter - if max_results is not None: - body["max_results"] = max_results - if order_by is not None: - body["order_by"] = [v.as_dict() for v in order_by] - if page_token is not None: - body["page_token"] = page_token - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/logged-models/search", body=body, headers=headers) + if datasets is not None: body['datasets'] = [v.as_dict() for v in datasets] + if experiment_ids is not None: body['experiment_ids'] = [v for v in experiment_ids] + if filter is not None: body['filter'] = filter + if max_results is not None: body['max_results'] = max_results + if order_by is not None: body['order_by'] = [v.as_dict() for v in order_by] + if page_token is not None: body['page_token'] = page_token + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/logged-models/search', body=body + + , headers=headers + ) return SearchLoggedModelsResponse.from_dict(res) - def search_runs( - self, - *, - experiment_ids: Optional[List[str]] = None, - filter: Optional[str] = None, - max_results: Optional[int] = None, - order_by: Optional[List[str]] = None, - page_token: Optional[str] = None, - run_view_type: Optional[ViewType] = None, - ) -> Iterator[Run]: - """Search for runs. 
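A sketch of the logged-model search above; the filter grammar is the AND-only SQL subset from the docstring, and the experiment ID is a placeholder:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.experiments.search_logged_models(
    experiment_ids=["1234567890123456"],  # hypothetical experiment ID
    filter="params.alpha < 0.3 AND metrics.accuracy > 0.9",
    max_results=50,
)
print(resp)  # response carries the matching models and, per the schema, a next_page_token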
+ + + + def search_runs(self + + , * + , experiment_ids: Optional[List[str]] = None, filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None, run_view_type: Optional[ViewType] = None) -> Iterator[Run]: + """Search for runs. + Searches for runs that satisfy expressions. - + Search expressions can use `mlflowMetric` and `mlflowParam` keys. - + :param experiment_ids: List[str] (optional) List of experiment IDs to search over. :param filter: str (optional) A filter expression over params, metrics, and tags, that allows returning a subset of runs. The syntax is a subset of SQL that supports ANDing together binary operations between a param, metric, or tag and a constant. - + Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'` - + You can select columns with special characters (hyphen, space, period, etc.) by using double quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` - + Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`. :param max_results: int (optional) Maximum number of runs desired. Max threshold is 50000 @@ -8224,114 +7217,133 @@ def search_runs( Token for the current page of runs. :param run_view_type: :class:`ViewType` (optional) Whether to display only active, only deleted, or all runs. Defaults to only active runs. - + :returns: Iterator over :class:`Run` """ body = {} - if experiment_ids is not None: - body["experiment_ids"] = [v for v in experiment_ids] - if filter is not None: - body["filter"] = filter - if max_results is not None: - body["max_results"] = max_results - if order_by is not None: - body["order_by"] = [v for v in order_by] - if page_token is not None: - body["page_token"] = page_token - if run_view_type is not None: - body["run_view_type"] = run_view_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - + if experiment_ids is not None: body['experiment_ids'] = [v for v in experiment_ids] + if filter is not None: body['filter'] = filter + if max_results is not None: body['max_results'] = max_results + if order_by is not None: body['order_by'] = [v for v in order_by] + if page_token is not None: body['page_token'] = page_token + if run_view_type is not None: body['run_view_type'] = run_view_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + + while True: - json = self._api.do("POST", "/api/2.0/mlflow/runs/search", body=body, headers=headers) - if "runs" in json: - for v in json["runs"]: - yield Run.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - body["page_token"] = json["next_page_token"] - - def set_experiment_tag(self, experiment_id: str, key: str, value: str): - """Set a tag for an experiment. + json = self._api.do('POST','/api/2.0/mlflow/runs/search', body=body + + , headers=headers + ) + if 'runs' in json: + for v in json['runs']: + yield Run.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + body['page_token'] = json['next_page_token'] + - Sets a tag on an experiment. Experiment tags are metadata that can be updated. + + + + def set_experiment_tag(self + , experiment_id: str, key: str, value: str + ): + """Set a tag for an experiment. + + Sets a tag on an experiment. Experiment tags are metadata that can be updated. + :param experiment_id: str ID of the experiment under which to log the tag. Must be provided. :param key: str Name of the tag. 
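search_runs pages through results the same way as the other iterators; a sketch using the documented filter and ordering syntax (IDs are placeholders):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import ViewType

w = WorkspaceClient()
for run in w.experiments.search_runs(
    experiment_ids=["1234567890123456"],  # hypothetical experiment ID
    filter="metrics.rmse < 1 and params.model_class = 'LogisticRegression'",
    order_by=["metrics.rmse ASC"],
    run_view_type=ViewType.ACTIVE_ONLY,
):
    print(run.info.run_id, run.info.status)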
Keys up to 250 bytes in size are supported. :param value: str String value of the tag being logged. Values up to 64KB in size are supported. - - + + """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - if key is not None: - body["key"] = key - if value is not None: - body["value"] = value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if experiment_id is not None: body['experiment_id'] = experiment_id + if key is not None: body['key'] = key + if value is not None: body['value'] = value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/experiments/set-experiment-tag', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/experiments/set-experiment-tag", body=body, headers=headers) + + + - def set_logged_model_tags(self, model_id: str, *, tags: Optional[List[LoggedModelTag]] = None): + def set_logged_model_tags(self + , model_id: str + , * + , tags: Optional[List[LoggedModelTag]] = None): """Set a tag for a logged model. - + :param model_id: str The ID of the logged model to set the tags on. :param tags: List[:class:`LoggedModelTag`] (optional) The tags to set on the logged model. - - + + """ body = {} - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/mlflow/logged-models/{model_id}/tags', body=body + + , headers=headers + ) + - self._api.do("PATCH", f"/api/2.0/mlflow/logged-models/{model_id}/tags", body=body, headers=headers) + + + - def set_permissions( - self, experiment_id: str, *, access_control_list: Optional[List[ExperimentAccessControlRequest]] = None - ) -> ExperimentPermissions: + def set_permissions(self + , experiment_id: str + , * + , access_control_list: Optional[List[ExperimentAccessControlRequest]] = None) -> ExperimentPermissions: """Set experiment permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/experiments/{experiment_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/experiments/{experiment_id}', body=body + + , headers=headers + ) return ExperimentPermissions.from_dict(res) - def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None): - """Set a tag for a run. + + + + def set_tag(self + , key: str, value: str + , * + , run_id: Optional[str] = None, run_uuid: Optional[str] = None): + """Set a tag for a run. + Sets a tag on a run. 
Tags are run metadata that can be updated during a run and after a run completes. - + :param key: str Name of the tag. Keys up to 250 bytes in size are supported. :param value: str @@ -8341,86 +7353,92 @@ def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uui :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the tag. This field will be removed in a future MLflow version. - - + + """ body = {} - if key is not None: - body["key"] = key - if run_id is not None: - body["run_id"] = run_id - if run_uuid is not None: - body["run_uuid"] = run_uuid - if value is not None: - body["value"] = value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/mlflow/runs/set-tag", body=body, headers=headers) - - def update_experiment(self, experiment_id: str, *, new_name: Optional[str] = None): - """Update an experiment. + if key is not None: body['key'] = key + if run_id is not None: body['run_id'] = run_id + if run_uuid is not None: body['run_uuid'] = run_uuid + if value is not None: body['value'] = value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/runs/set-tag', body=body + + , headers=headers + ) + - Updates experiment metadata. + + + + def update_experiment(self + , experiment_id: str + , * + , new_name: Optional[str] = None): + """Update an experiment. + + Updates experiment metadata. + :param experiment_id: str ID of the associated experiment. :param new_name: str (optional) If provided, the experiment's name is changed to the new name. The new name must be unique. - - + + """ body = {} - if experiment_id is not None: - body["experiment_id"] = experiment_id - if new_name is not None: - body["new_name"] = new_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if experiment_id is not None: body['experiment_id'] = experiment_id + if new_name is not None: body['new_name'] = new_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/experiments/update', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/experiments/update", body=body, headers=headers) + + + - def update_permissions( - self, experiment_id: str, *, access_control_list: Optional[List[ExperimentAccessControlRequest]] = None - ) -> ExperimentPermissions: + def update_permissions(self + , experiment_id: str + , * + , access_control_list: Optional[List[ExperimentAccessControlRequest]] = None) -> ExperimentPermissions: """Update experiment permissions. - + Updates the permissions on an experiment. Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. 
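Renaming through update_experiment above; the new name must be unique in the workspace, and both values here are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.experiments.update_experiment(
    experiment_id="1234567890123456",  # hypothetical experiment ID
    new_name="/Users/someone@example.com/demo-experiment-v2",
)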
:param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/permissions/experiments/{experiment_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/experiments/{experiment_id}', body=body + + , headers=headers + ) return ExperimentPermissions.from_dict(res) - def update_run( - self, - *, - end_time: Optional[int] = None, - run_id: Optional[str] = None, - run_name: Optional[str] = None, - run_uuid: Optional[str] = None, - status: Optional[UpdateRunStatus] = None, - ) -> UpdateRunResponse: - """Update a run. + + + + def update_run(self + + , * + , end_time: Optional[int] = None, run_id: Optional[str] = None, run_name: Optional[str] = None, run_uuid: Optional[str] = None, status: Optional[UpdateRunStatus] = None) -> UpdateRunResponse: + """Update a run. + Updates run metadata. - + :param end_time: int (optional) Unix timestamp in milliseconds of when the run ended. :param run_id: str (optional) @@ -8432,95 +7450,75 @@ def update_run( MLflow version. :param status: :class:`UpdateRunStatus` (optional) Updated status of the run. - + :returns: :class:`UpdateRunResponse` """ body = {} - if end_time is not None: - body["end_time"] = end_time - if run_id is not None: - body["run_id"] = run_id - if run_name is not None: - body["run_name"] = run_name - if run_uuid is not None: - body["run_uuid"] = run_uuid - if status is not None: - body["status"] = status.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/runs/update", body=body, headers=headers) + if end_time is not None: body['end_time'] = end_time + if run_id is not None: body['run_id'] = run_id + if run_name is not None: body['run_name'] = run_name + if run_uuid is not None: body['run_uuid'] = run_uuid + if status is not None: body['status'] = status.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/runs/update', body=body + + , headers=headers + ) return UpdateRunResponse.from_dict(res) - + + class ForecastingAPI: """The Forecasting API allows you to create and get serverless forecasting experiments""" - + def __init__(self, api_client): self._api = api_client + - def wait_get_experiment_forecasting_succeeded( - self, - experiment_id: str, - timeout=timedelta(minutes=120), - callback: Optional[Callable[[ForecastingExperiment], None]] = None, - ) -> ForecastingExperiment: - deadline = time.time() + timeout.total_seconds() - target_states = (ForecastingExperimentState.SUCCEEDED,) - failure_states = ( - ForecastingExperimentState.FAILED, - ForecastingExperimentState.CANCELLED, - ) - status_message = "polling..." 
- attempt = 1 - while time.time() < deadline: - poll = self.get_experiment(experiment_id=experiment_id) - status = poll.state - status_message = f"current status: {status}" - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach SUCCEEDED, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"experiment_id={experiment_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def create_experiment( - self, - train_data_path: str, - target_column: str, - time_column: str, - forecast_granularity: str, - forecast_horizon: int, - *, - custom_weights_column: Optional[str] = None, - experiment_path: Optional[str] = None, - future_feature_data_path: Optional[str] = None, - holiday_regions: Optional[List[str]] = None, - include_features: Optional[List[str]] = None, - max_runtime: Optional[int] = None, - prediction_data_path: Optional[str] = None, - primary_metric: Optional[str] = None, - register_to: Optional[str] = None, - split_column: Optional[str] = None, - timeseries_identifier_columns: Optional[List[str]] = None, - training_frameworks: Optional[List[str]] = None, - ) -> Wait[ForecastingExperiment]: - """Create a forecasting experiment. + - Creates a serverless forecasting experiment. Returns the experiment ID. + + def wait_get_experiment_forecasting_succeeded(self, experiment_id: str, + timeout=timedelta(minutes=120), callback: Optional[Callable[[ForecastingExperiment], None]] = None) -> ForecastingExperiment: + deadline = time.time() + timeout.total_seconds() + target_states = (ForecastingExperimentState.SUCCEEDED, ) + failure_states = (ForecastingExperimentState.FAILED, ForecastingExperimentState.CANCELLED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get_experiment(experiment_id=experiment_id) + status = poll.state + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"experiment_id={experiment_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + def create_experiment(self + , train_data_path: str, target_column: str, time_column: str, forecast_granularity: str, forecast_horizon: int + , * + , custom_weights_column: Optional[str] = None, experiment_path: Optional[str] = None, future_feature_data_path: Optional[str] = None, holiday_regions: Optional[List[str]] = None, include_features: Optional[List[str]] = None, max_runtime: Optional[int] = None, prediction_data_path: Optional[str] = None, primary_metric: Optional[str] = None, register_to: Optional[str] = None, split_column: Optional[str] = None, timeseries_identifier_columns: Optional[List[str]] = None, training_frameworks: Optional[List[str]] = None) -> Wait[ForecastingExperiment]: + """Create a forecasting experiment. + + Creates a serverless forecasting experiment. Returns the experiment ID. 
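As the `:returns:` note below explains, the SDK wraps this call in a long-running-operation waiter. A sketch of the blocking pattern, assuming the client exposes this API as `w.forecasting` and using placeholder table, column, and granularity values:

from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

waiter = w.forecasting.create_experiment(
    train_data_path="main.forecasting.sales_history",  # placeholder Unity Catalog table
    target_column="units_sold",
    time_column="order_date",
    forecast_granularity="daily",  # placeholder granularity value
    forecast_horizon=14)

# Poll until the experiment reaches SUCCEEDED, or raise on FAILED/CANCELLED/timeout.
experiment = waiter.result(timeout=timedelta(minutes=120))
print(experiment.experiment_id)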
+ :param train_data_path: str The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used as training data for the forecasting model. @@ -8569,255 +7567,216 @@ def create_experiment( :param training_frameworks: List[str] (optional) List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', 'DeepAR'. An empty list includes all supported frameworks. - + :returns: Long-running operation waiter for :class:`ForecastingExperiment`. See :method:wait_get_experiment_forecasting_succeeded for more details. """ body = {} - if custom_weights_column is not None: - body["custom_weights_column"] = custom_weights_column - if experiment_path is not None: - body["experiment_path"] = experiment_path - if forecast_granularity is not None: - body["forecast_granularity"] = forecast_granularity - if forecast_horizon is not None: - body["forecast_horizon"] = forecast_horizon - if future_feature_data_path is not None: - body["future_feature_data_path"] = future_feature_data_path - if holiday_regions is not None: - body["holiday_regions"] = [v for v in holiday_regions] - if include_features is not None: - body["include_features"] = [v for v in include_features] - if max_runtime is not None: - body["max_runtime"] = max_runtime - if prediction_data_path is not None: - body["prediction_data_path"] = prediction_data_path - if primary_metric is not None: - body["primary_metric"] = primary_metric - if register_to is not None: - body["register_to"] = register_to - if split_column is not None: - body["split_column"] = split_column - if target_column is not None: - body["target_column"] = target_column - if time_column is not None: - body["time_column"] = time_column - if timeseries_identifier_columns is not None: - body["timeseries_identifier_columns"] = [v for v in timeseries_identifier_columns] - if train_data_path is not None: - body["train_data_path"] = train_data_path - if training_frameworks is not None: - body["training_frameworks"] = [v for v in training_frameworks] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.0/automl/create-forecasting-experiment", body=body, headers=headers) - return Wait( - self.wait_get_experiment_forecasting_succeeded, - response=CreateForecastingExperimentResponse.from_dict(op_response), - experiment_id=op_response["experiment_id"], - ) - - def create_experiment_and_wait( - self, - train_data_path: str, - target_column: str, - time_column: str, - forecast_granularity: str, - forecast_horizon: int, - *, - custom_weights_column: Optional[str] = None, - experiment_path: Optional[str] = None, - future_feature_data_path: Optional[str] = None, - holiday_regions: Optional[List[str]] = None, - include_features: Optional[List[str]] = None, - max_runtime: Optional[int] = None, - prediction_data_path: Optional[str] = None, - primary_metric: Optional[str] = None, - register_to: Optional[str] = None, - split_column: Optional[str] = None, - timeseries_identifier_columns: Optional[List[str]] = None, - training_frameworks: Optional[List[str]] = None, - timeout=timedelta(minutes=120), - ) -> ForecastingExperiment: - return self.create_experiment( - custom_weights_column=custom_weights_column, - experiment_path=experiment_path, - forecast_granularity=forecast_granularity, - forecast_horizon=forecast_horizon, - future_feature_data_path=future_feature_data_path, - holiday_regions=holiday_regions, - include_features=include_features, - 
max_runtime=max_runtime, - prediction_data_path=prediction_data_path, - primary_metric=primary_metric, - register_to=register_to, - split_column=split_column, - target_column=target_column, - time_column=time_column, - timeseries_identifier_columns=timeseries_identifier_columns, - train_data_path=train_data_path, - training_frameworks=training_frameworks, - ).result(timeout=timeout) - - def get_experiment(self, experiment_id: str) -> ForecastingExperiment: - """Get a forecasting experiment. + if custom_weights_column is not None: body['custom_weights_column'] = custom_weights_column + if experiment_path is not None: body['experiment_path'] = experiment_path + if forecast_granularity is not None: body['forecast_granularity'] = forecast_granularity + if forecast_horizon is not None: body['forecast_horizon'] = forecast_horizon + if future_feature_data_path is not None: body['future_feature_data_path'] = future_feature_data_path + if holiday_regions is not None: body['holiday_regions'] = [v for v in holiday_regions] + if include_features is not None: body['include_features'] = [v for v in include_features] + if max_runtime is not None: body['max_runtime'] = max_runtime + if prediction_data_path is not None: body['prediction_data_path'] = prediction_data_path + if primary_metric is not None: body['primary_metric'] = primary_metric + if register_to is not None: body['register_to'] = register_to + if split_column is not None: body['split_column'] = split_column + if target_column is not None: body['target_column'] = target_column + if time_column is not None: body['time_column'] = time_column + if timeseries_identifier_columns is not None: body['timeseries_identifier_columns'] = [v for v in timeseries_identifier_columns] + if train_data_path is not None: body['train_data_path'] = train_data_path + if training_frameworks is not None: body['training_frameworks'] = [v for v in training_frameworks] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.0/automl/create-forecasting-experiment', body=body + + , headers=headers + ) + return Wait(self.wait_get_experiment_forecasting_succeeded + , response = CreateForecastingExperimentResponse.from_dict(op_response) + , experiment_id=op_response['experiment_id']) - Public RPC to get forecasting experiment + + def create_experiment_and_wait(self + , train_data_path: str, target_column: str, time_column: str, forecast_granularity: str, forecast_horizon: int + , * + , custom_weights_column: Optional[str] = None, experiment_path: Optional[str] = None, future_feature_data_path: Optional[str] = None, holiday_regions: Optional[List[str]] = None, include_features: Optional[List[str]] = None, max_runtime: Optional[int] = None, prediction_data_path: Optional[str] = None, primary_metric: Optional[str] = None, register_to: Optional[str] = None, split_column: Optional[str] = None, timeseries_identifier_columns: Optional[List[str]] = None, training_frameworks: Optional[List[str]] = None, + timeout=timedelta(minutes=120)) -> ForecastingExperiment: + return self.create_experiment(custom_weights_column=custom_weights_column, experiment_path=experiment_path, forecast_granularity=forecast_granularity, forecast_horizon=forecast_horizon, future_feature_data_path=future_feature_data_path, holiday_regions=holiday_regions, include_features=include_features, max_runtime=max_runtime, prediction_data_path=prediction_data_path, primary_metric=primary_metric, register_to=register_to, split_column=split_column, 
target_column=target_column, time_column=time_column, timeseries_identifier_columns=timeseries_identifier_columns, train_data_path=train_data_path, training_frameworks=training_frameworks).result(timeout=timeout) + + + + def get_experiment(self + , experiment_id: str + ) -> ForecastingExperiment: + """Get a forecasting experiment. + + Public RPC to get forecasting experiment + :param experiment_id: str The unique ID of a forecasting experiment - + :returns: :class:`ForecastingExperiment` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/automl/get-forecasting-experiment/{experiment_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/automl/get-forecasting-experiment/{experiment_id}' + + , headers=headers + ) return ForecastingExperiment.from_dict(res) - + + class ModelRegistryAPI: """Note: This API reference documents APIs for the Workspace Model Registry. Databricks recommends using [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry will be deprecated in the future. - + The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.""" - + def __init__(self, api_client): self._api = api_client + - def approve_transition_request( - self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None - ) -> ApproveTransitionRequestResponse: - """Approve transition request. + - Approves a model version stage transition request. + + + + + def approve_transition_request(self + , name: str, version: str, stage: Stage, archive_existing_versions: bool + , * + , comment: Optional[str] = None) -> ApproveTransitionRequestResponse: + """Approve transition request. + + Approves a model version stage transition request. + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action. 
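The polling loop in `wait_get_experiment_forecasting_succeeded` above is built on `get_experiment`; the same status check can be done by hand, assuming the same `w.forecasting` accessor and a placeholder experiment ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

exp = w.forecasting.get_experiment(experiment_id="<forecasting-experiment-id>")
print(exp.state)  # e.g. SUCCEEDED, FAILED, CANCELLED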
- + :returns: :class:`ApproveTransitionRequestResponse` """ body = {} - if archive_existing_versions is not None: - body["archive_existing_versions"] = archive_existing_versions - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if stage is not None: - body["stage"] = stage.value - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/transition-requests/approve", body=body, headers=headers) + if archive_existing_versions is not None: body['archive_existing_versions'] = archive_existing_versions + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if stage is not None: body['stage'] = stage.value + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/transition-requests/approve', body=body + + , headers=headers + ) return ApproveTransitionRequestResponse.from_dict(res) - def create_comment(self, name: str, version: str, comment: str) -> CreateCommentResponse: - """Post a comment. + + + + def create_comment(self + , name: str, version: str, comment: str + ) -> CreateCommentResponse: + """Post a comment. + Posts a comment on a model version. A comment can be submitted either by a user or programmatically to display relevant information about the model. For example, test results or deployment errors. - + :param name: str Name of the model. :param version: str Version of the model. :param comment: str User-provided comment on the action. - + :returns: :class:`CreateCommentResponse` """ body = {} - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/comments/create", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/comments/create', body=body + + , headers=headers + ) return CreateCommentResponse.from_dict(res) - def create_model( - self, name: str, *, description: Optional[str] = None, tags: Optional[List[ModelTag]] = None - ) -> CreateModelResponse: - """Create a model. + + + + def create_model(self + , name: str + , * + , description: Optional[str] = None, tags: Optional[List[ModelTag]] = None) -> CreateModelResponse: + """Create a model. + Creates a new registered model with the name specified in the request body. - + Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. - + :param name: str Register models under this name :param description: str (optional) Optional description for registered model. :param tags: List[:class:`ModelTag`] (optional) Additional metadata for registered model. 
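A sketch tying together `approve_transition_request` and `create_comment` from above; the model name and version are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import Stage

w = WorkspaceClient()

# Approve a pending request to move version 3 into Staging, archiving
# whatever currently occupies that stage.
w.model_registry.approve_transition_request(
    name="churn-model", version="3", stage=Stage.STAGING,
    archive_existing_versions=True, comment="Validation metrics look good")

# Leave an audit comment on the same version.
w.model_registry.create_comment(name="churn-model", version="3",
                                comment="Approved after offline evaluation")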
- + :returns: :class:`CreateModelResponse` """ body = {} - if description is not None: - body["description"] = description - if name is not None: - body["name"] = name - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/registered-models/create", body=body, headers=headers) + if description is not None: body['description'] = description + if name is not None: body['name'] = name + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/registered-models/create', body=body + + , headers=headers + ) return CreateModelResponse.from_dict(res) - def create_model_version( - self, - name: str, - source: str, - *, - description: Optional[str] = None, - run_id: Optional[str] = None, - run_link: Optional[str] = None, - tags: Optional[List[ModelVersionTag]] = None, - ) -> CreateModelVersionResponse: - """Create a model version. + + + + def create_model_version(self + , name: str, source: str + , * + , description: Optional[str] = None, run_id: Optional[str] = None, run_link: Optional[str] = None, tags: Optional[List[ModelVersionTag]] = None) -> CreateModelVersionResponse: + """Create a model version. + Creates a model version. - + :param name: str Register model under this name :param source: str @@ -8832,249 +7791,270 @@ def create_model_version( hosted at another instance of MLflow. :param tags: List[:class:`ModelVersionTag`] (optional) Additional metadata for model version. - + :returns: :class:`CreateModelVersionResponse` """ body = {} - if description is not None: - body["description"] = description - if name is not None: - body["name"] = name - if run_id is not None: - body["run_id"] = run_id - if run_link is not None: - body["run_link"] = run_link - if source is not None: - body["source"] = source - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/model-versions/create", body=body, headers=headers) + if description is not None: body['description'] = description + if name is not None: body['name'] = name + if run_id is not None: body['run_id'] = run_id + if run_link is not None: body['run_link'] = run_link + if source is not None: body['source'] = source + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/model-versions/create', body=body + + , headers=headers + ) return CreateModelVersionResponse.from_dict(res) - def create_transition_request( - self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None - ) -> CreateTransitionRequestResponse: - """Make a transition request. + + + + def create_transition_request(self + , name: str, version: str, stage: Stage + , * + , comment: Optional[str] = None) -> CreateTransitionRequestResponse: + """Make a transition request. + Creates a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. 
- + * `Production`: Production stage. - + * `Archived`: Archived stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`CreateTransitionRequestResponse` """ body = {} - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if stage is not None: - body["stage"] = stage.value - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/transition-requests/create", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if stage is not None: body['stage'] = stage.value + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/transition-requests/create', body=body + + , headers=headers + ) return CreateTransitionRequestResponse.from_dict(res) - def create_webhook( - self, - events: List[RegistryWebhookEvent], - *, - description: Optional[str] = None, - http_url_spec: Optional[HttpUrlSpec] = None, - job_spec: Optional[JobSpec] = None, - model_name: Optional[str] = None, - status: Optional[RegistryWebhookStatus] = None, - ) -> CreateWebhookResponse: - """Create a webhook. + + + + def create_webhook(self + , events: List[RegistryWebhookEvent] + , * + , description: Optional[str] = None, http_url_spec: Optional[HttpUrlSpec] = None, job_spec: Optional[JobSpec] = None, model_name: Optional[str] = None, status: Optional[RegistryWebhookStatus] = None) -> CreateWebhookResponse: + """Create a webhook. + **NOTE**: This endpoint is in Public Preview. - + Creates a registry webhook. - + :param events: List[:class:`RegistryWebhookEvent`] Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. - + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version's stage was changed. - + * `TRANSITION_REQUEST_CREATED`: A user requested a model version's stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param description: str (optional) User-specified description for the webhook. :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param model_name: str (optional) - Name of the model whose events would trigger this webhook. + If model name is not specified, a registry-wide webhook is created that listens for the specified + events across all versions of all registered models.
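How `create_model`, `create_model_version`, and `create_transition_request` from above chain together in practice; the model name, run ID, and artifact source are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import ModelTag, Stage

w = WorkspaceClient()

# Register the model, then log a version from a run's artifact location.
w.model_registry.create_model(name="churn-model",
                              description="Weekly churn classifier",
                              tags=[ModelTag(key="owner", value="ml-platform")])
mv = w.model_registry.create_model_version(
    name="churn-model",
    source="dbfs:/databricks/mlflow-tracking/<exp-id>/<run-id>/artifacts/model",  # placeholder
    run_id="<run-id>")

# Request a stage transition for the new version.
w.model_registry.create_transition_request(name="churn-model",
                                           version=mv.model_version.version,
                                           stage=Stage.STAGING)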
:param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. - + :returns: :class:`CreateWebhookResponse` """ body = {} - if description is not None: - body["description"] = description - if events is not None: - body["events"] = [v.value for v in events] - if http_url_spec is not None: - body["http_url_spec"] = http_url_spec.as_dict() - if job_spec is not None: - body["job_spec"] = job_spec.as_dict() - if model_name is not None: - body["model_name"] = model_name - if status is not None: - body["status"] = status.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/registry-webhooks/create", body=body, headers=headers) + if description is not None: body['description'] = description + if events is not None: body['events'] = [v.value for v in events] + if http_url_spec is not None: body['http_url_spec'] = http_url_spec.as_dict() + if job_spec is not None: body['job_spec'] = job_spec.as_dict() + if model_name is not None: body['model_name'] = model_name + if status is not None: body['status'] = status.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/registry-webhooks/create', body=body + + , headers=headers + ) return CreateWebhookResponse.from_dict(res) - def delete_comment(self, id: str): - """Delete a comment. + + + + def delete_comment(self + , id: str + ): + """Delete a comment. + Deletes a comment on a model version. - + :param id: str - - + Unique identifier of an activity + + """ - + query = {} - if id is not None: - query["id"] = id - headers = { - "Accept": "application/json", - } + if id is not None: query['id'] = id + headers = {'Accept': 'application/json',} + + self._api.do('DELETE','/api/2.0/mlflow/comments/delete', query=query + + , headers=headers + ) + - self._api.do("DELETE", "/api/2.0/mlflow/comments/delete", query=query, headers=headers) + + + - def delete_model(self, name: str): + def delete_model(self + , name: str + ): """Delete a model. - + Deletes a registered model. - + :param name: str Registered model unique name identifier. - - + + """ - + query = {} - if name is not None: - query["name"] = name - headers = { - "Accept": "application/json", - } + if name is not None: query['name'] = name + headers = {'Accept': 'application/json',} + + self._api.do('DELETE','/api/2.0/mlflow/registered-models/delete', query=query + + , headers=headers + ) + - self._api.do("DELETE", "/api/2.0/mlflow/registered-models/delete", query=query, headers=headers) + + + - def delete_model_tag(self, name: str, key: str): + def delete_model_tag(self + , name: str, key: str + ): """Delete a model tag. - + Deletes the tag for a registered model. - + :param name: str Name of the registered model that the tag was logged under. :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. 
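A registry-wide webhook sketch for the `create_webhook` call above: omitting `model_name` makes the hook fire for every registered model; the endpoint URL is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import HttpUrlSpec, RegistryWebhookEvent

w = WorkspaceClient()

hook = w.model_registry.create_webhook(
    events=[RegistryWebhookEvent.MODEL_VERSION_CREATED,
            RegistryWebhookEvent.MODEL_VERSION_TRANSITIONED_STAGE],
    http_url_spec=HttpUrlSpec(url="https://example.com/mlflow-hook",  # placeholder endpoint
                              enable_ssl_verification=True),
    description="Notify CI on new model versions")
print(hook.webhook.id)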
- - + + """ - + query = {} - if key is not None: - query["key"] = key - if name is not None: - query["name"] = name - headers = { - "Accept": "application/json", - } + if key is not None: query['key'] = key + if name is not None: query['name'] = name + headers = {'Accept': 'application/json',} + + self._api.do('DELETE','/api/2.0/mlflow/registered-models/delete-tag', query=query + + , headers=headers + ) + - self._api.do("DELETE", "/api/2.0/mlflow/registered-models/delete-tag", query=query, headers=headers) + + + - def delete_model_version(self, name: str, version: str): + def delete_model_version(self + , name: str, version: str + ): """Delete a model version. - + Deletes a model version. - + :param name: str Name of the registered model :param version: str Model version number - - + + """ - + query = {} - if name is not None: - query["name"] = name - if version is not None: - query["version"] = version - headers = { - "Accept": "application/json", - } + if name is not None: query['name'] = name + if version is not None: query['version'] = version + headers = {'Accept': 'application/json',} + + self._api.do('DELETE','/api/2.0/mlflow/model-versions/delete', query=query + + , headers=headers + ) + - self._api.do("DELETE", "/api/2.0/mlflow/model-versions/delete", query=query, headers=headers) + + + - def delete_model_version_tag(self, name: str, version: str, key: str): + def delete_model_version_tag(self + , name: str, version: str, key: str + ): """Delete a model version tag. - + Deletes a model version tag. - + :param name: str Name of the registered model that the tag was logged under. :param version: str @@ -9082,308 +8062,361 @@ def delete_model_version_tag(self, name: str, version: str, key: str): :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. - - + + """ - + query = {} - if key is not None: - query["key"] = key - if name is not None: - query["name"] = name - if version is not None: - query["version"] = version - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", "/api/2.0/mlflow/model-versions/delete-tag", query=query, headers=headers) - - def delete_transition_request( - self, - name: str, - version: str, - stage: DeleteTransitionRequestStage, - creator: str, - *, - comment: Optional[str] = None, - ): - """Delete a transition request. + if key is not None: query['key'] = key + if name is not None: query['name'] = name + if version is not None: query['version'] = version + headers = {'Accept': 'application/json',} + + self._api.do('DELETE','/api/2.0/mlflow/model-versions/delete-tag', query=query + + , headers=headers + ) + - Cancels a model version stage transition request. + + + + def delete_transition_request(self + , name: str, version: str, stage: DeleteTransitionRequestStage, creator: str + , * + , comment: Optional[str] = None): + """Delete a transition request. + + Cancels a model version stage transition request. + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`DeleteTransitionRequestStage` Target stage of the transition request. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param creator: str Username of the user who created this request. Of the transition requests matching the specified details, only the one transition created by this user will be deleted. 
:param comment: str (optional) User-provided comment on the action. - - + + """ - + query = {} - if comment is not None: - query["comment"] = comment - if creator is not None: - query["creator"] = creator - if name is not None: - query["name"] = name - if stage is not None: - query["stage"] = stage.value - if version is not None: - query["version"] = version - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", "/api/2.0/mlflow/transition-requests/delete", query=query, headers=headers) - - def delete_webhook(self, *, id: Optional[str] = None): - """Delete a webhook. + if comment is not None: query['comment'] = comment + if creator is not None: query['creator'] = creator + if name is not None: query['name'] = name + if stage is not None: query['stage'] = stage.value + if version is not None: query['version'] = version + headers = {'Accept': 'application/json',} + + self._api.do('DELETE','/api/2.0/mlflow/transition-requests/delete', query=query + + , headers=headers + ) + - **NOTE:** This endpoint is in Public Preview. + + + + def delete_webhook(self + + , * + , id: Optional[str] = None): + """Delete a webhook. + + **NOTE:** This endpoint is in Public Preview. + Deletes a registry webhook. - + :param id: str (optional) Webhook ID required to delete a registry webhook. - - + + """ - + query = {} - if id is not None: - query["id"] = id - headers = { - "Accept": "application/json", - } + if id is not None: query['id'] = id + headers = {'Accept': 'application/json',} + + self._api.do('DELETE','/api/2.0/mlflow/registry-webhooks/delete', query=query + + , headers=headers + ) + - self._api.do("DELETE", "/api/2.0/mlflow/registry-webhooks/delete", query=query, headers=headers) + + + - def get_latest_versions(self, name: str, *, stages: Optional[List[str]] = None) -> Iterator[ModelVersion]: + def get_latest_versions(self + , name: str + , * + , stages: Optional[List[str]] = None) -> Iterator[ModelVersion]: """Get the latest version. - + Gets the latest version of a registered model. - + :param name: str Registered model unique name identifier. :param stages: List[str] (optional) List of stages. - + :returns: Iterator over :class:`ModelVersion` """ body = {} - if name is not None: - body["name"] = name - if stages is not None: - body["stages"] = [v for v in stages] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - json = self._api.do("POST", "/api/2.0/mlflow/registered-models/get-latest-versions", body=body, headers=headers) + if name is not None: body['name'] = name + if stages is not None: body['stages'] = [v for v in stages] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + json = self._api.do('POST','/api/2.0/mlflow/registered-models/get-latest-versions', body=body + + , headers=headers + ) parsed = GetLatestVersionsResponse.from_dict(json).model_versions return parsed if parsed is not None else [] + - def get_model(self, name: str) -> GetModelResponse: - """Get model. + + + + def get_model(self + , name: str + ) -> GetModelResponse: + """Get model. + Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel - + :param name: str Registered model unique name identifier. 
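The delete endpoints above compose into a simple tear-down sketch; every identifier here is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import DeleteTransitionRequestStage

w = WorkspaceClient()

# Remove a version tag, cancel a pending transition request, and drop a webhook.
w.model_registry.delete_model_version_tag(name="churn-model", version="3", key="owner")
w.model_registry.delete_transition_request(name="churn-model", version="3",
                                           stage=DeleteTransitionRequestStage.STAGING,
                                           creator="someone@example.com")
w.model_registry.delete_webhook(id="<webhook-id>")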
- + :returns: :class:`GetModelResponse` """ - + query = {} - if name is not None: - query["name"] = name - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/mlflow/databricks/registered-models/get", query=query, headers=headers) + if name is not None: query['name'] = name + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/mlflow/databricks/registered-models/get', query=query + + , headers=headers + ) return GetModelResponse.from_dict(res) - def get_model_version(self, name: str, version: str) -> GetModelVersionResponse: - """Get a model version. + + + + def get_model_version(self + , name: str, version: str + ) -> GetModelVersionResponse: + """Get a model version. + Get a model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionResponse` """ - + query = {} - if name is not None: - query["name"] = name - if version is not None: - query["version"] = version - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/mlflow/model-versions/get", query=query, headers=headers) + if name is not None: query['name'] = name + if version is not None: query['version'] = version + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/mlflow/model-versions/get', query=query + + , headers=headers + ) return GetModelVersionResponse.from_dict(res) - def get_model_version_download_uri(self, name: str, version: str) -> GetModelVersionDownloadUriResponse: - """Get a model version URI. + + + + def get_model_version_download_uri(self + , name: str, version: str + ) -> GetModelVersionDownloadUriResponse: + """Get a model version URI. + Gets a URI to download the model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionDownloadUriResponse` """ - + query = {} - if name is not None: - query["name"] = name - if version is not None: - query["version"] = version - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/mlflow/model-versions/get-download-uri", query=query, headers=headers) + if name is not None: query['name'] = name + if version is not None: query['version'] = version + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/mlflow/model-versions/get-download-uri', query=query + + , headers=headers + ) return GetModelVersionDownloadUriResponse.from_dict(res) - def get_permission_levels(self, registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse: - """Get registered model permission levels. + + + + def get_permission_levels(self + , registered_model_id: str + ) -> GetRegisteredModelPermissionLevelsResponse: + """Get registered model permission levels. + Gets the permission levels that a user can have on an object. - + :param registered_model_id: str The registered model for which to get or manage permissions. 
- + :returns: :class:`GetRegisteredModelPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/permissions/registered-models/{registered_model_id}/permissionLevels", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/registered-models/{registered_model_id}/permissionLevels' + + , headers=headers + ) return GetRegisteredModelPermissionLevelsResponse.from_dict(res) - def get_permissions(self, registered_model_id: str) -> RegisteredModelPermissions: - """Get registered model permissions. + + + + def get_permissions(self + , registered_model_id: str + ) -> RegisteredModelPermissions: + """Get registered model permissions. + Gets the permissions of a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. - + :returns: :class:`RegisteredModelPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/registered-models/{registered_model_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/registered-models/{registered_model_id}' + + , headers=headers + ) return RegisteredModelPermissions.from_dict(res) - def list_models(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Model]: - """List models. + + + + def list_models(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Model]: + """List models. + Lists all available registered models, up to the limit specified in __max_results__. - + :param max_results: int (optional) Maximum number of registered models desired. Max threshold is 1000. :param page_token: str (optional) Pagination token to go to the next page based on a previous query. - + :returns: Iterator over :class:`Model` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/mlflow/registered-models/list", query=query, headers=headers) - if "registered_models" in json: - for v in json["registered_models"]: - yield Model.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_transition_requests(self, name: str, version: str) -> Iterator[Activity]: - """List transition requests. + json = self._api.do('GET','/api/2.0/mlflow/registered-models/list', query=query + + , headers=headers + ) + if 'registered_models' in json: + for v in json['registered_models']: + yield Model.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Gets a list of all open stage transition requests for the model version. + + + + def list_transition_requests(self + , name: str, version: str + ) -> Iterator[Activity]: + """List transition requests. + + Gets a list of all open stage transition requests for the model version. + :param name: str Name of the model. :param version: str Version of the model. 
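A read-side sketch for `get_latest_versions` and `get_model_version_download_uri` above; the model name is a placeholder:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Latest version in each requested stage.
for mv in w.model_registry.get_latest_versions(name="churn-model", stages=["Staging"]):
    print(mv.version, mv.current_stage)

# Direct artifact URI for a specific version.
uri = w.model_registry.get_model_version_download_uri(name="churn-model", version="3")
print(uri.artifact_uri)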
- + :returns: Iterator over :class:`Activity` """ - + query = {} - if name is not None: - query["name"] = name - if version is not None: - query["version"] = version - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/mlflow/transition-requests/list", query=query, headers=headers) + if name is not None: query['name'] = name + if version is not None: query['version'] = version + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/mlflow/transition-requests/list', query=query + + , headers=headers + ) parsed = ListTransitionRequestsResponse.from_dict(json).requests return parsed if parsed is not None else [] + - def list_webhooks( - self, - *, - events: Optional[List[RegistryWebhookEvent]] = None, - model_name: Optional[str] = None, - page_token: Optional[str] = None, - ) -> Iterator[RegistryWebhook]: - """List registry webhooks. + + + + def list_webhooks(self + + , * + , events: Optional[List[RegistryWebhookEvent]] = None, model_name: Optional[str] = None, page_token: Optional[str] = None) -> Iterator[RegistryWebhook]: + """List registry webhooks. + **NOTE:** This endpoint is in Public Preview. - + Lists all registry webhooks. - + :param events: List[:class:`RegistryWebhookEvent`] (optional) If `events` is specified, any webhook with one or more of the specified trigger events is included in the output. If `events` is not specified, webhooks of all event types are included in the output. @@ -9392,110 +8425,117 @@ def list_webhooks( associated model. :param page_token: str (optional) Token indicating the page of artifact results to fetch - + :returns: Iterator over :class:`RegistryWebhook` """ - + query = {} - if events is not None: - query["events"] = [v.value for v in events] - if model_name is not None: - query["model_name"] = model_name - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if events is not None: query['events'] = [v.value for v in events] + if model_name is not None: query['model_name'] = model_name + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/mlflow/registry-webhooks/list", query=query, headers=headers) - if "webhooks" in json: - for v in json["webhooks"]: - yield RegistryWebhook.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def reject_transition_request( - self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None - ) -> RejectTransitionRequestResponse: - """Reject a transition request. + json = self._api.do('GET','/api/2.0/mlflow/registry-webhooks/list', query=query + + , headers=headers + ) + if 'webhooks' in json: + for v in json['webhooks']: + yield RegistryWebhook.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Rejects a model version stage transition request. + + + + def reject_transition_request(self + , name: str, version: str, stage: Stage + , * + , comment: Optional[str] = None) -> RejectTransitionRequestResponse: + """Reject a transition request. + + Rejects a model version stage transition request. + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. 
- + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`RejectTransitionRequestResponse` """ body = {} - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if stage is not None: - body["stage"] = stage.value - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/transition-requests/reject", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if stage is not None: body['stage'] = stage.value + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/transition-requests/reject', body=body + + , headers=headers + ) return RejectTransitionRequestResponse.from_dict(res) - def rename_model(self, name: str, *, new_name: Optional[str] = None) -> RenameModelResponse: - """Rename a model. + + + + def rename_model(self + , name: str + , * + , new_name: Optional[str] = None) -> RenameModelResponse: + """Rename a model. + Renames a registered model. - + :param name: str Registered model unique name identifier. :param new_name: str (optional) If provided, updates the name for this `registered_model`. - + :returns: :class:`RenameModelResponse` """ body = {} - if name is not None: - body["name"] = name - if new_name is not None: - body["new_name"] = new_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/registered-models/rename", body=body, headers=headers) + if name is not None: body['name'] = name + if new_name is not None: body['new_name'] = new_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/registered-models/rename', body=body + + , headers=headers + ) return RenameModelResponse.from_dict(res) - def search_model_versions( - self, - *, - filter: Optional[str] = None, - max_results: Optional[int] = None, - order_by: Optional[List[str]] = None, - page_token: Optional[str] = None, - ) -> Iterator[ModelVersion]: - """Searches model versions. + + + + def search_model_versions(self + + , * + , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[ModelVersion]: + """Searches model versions. + Searches for specific model versions based on the supplied __filter__. - + :param filter: str (optional) String filter condition, like "name='my-model-name'". Must be a single boolean condition, with string values wrapped in single quotes. @@ -9507,44 +8547,44 @@ def search_model_versions( timestamp, followed by name ASC, followed by version DESC. :param page_token: str (optional) Pagination token to go to next page based on previous search query. 
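Both list endpoints above return iterators that follow `next_page_token` internally, so a plain for-loop walks all pages; `rename_model` is shown alongside with placeholder names:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Iterate every registered model, 100 per underlying page request.
for model in w.model_registry.list_models(max_results=100):
    print(model.name)

w.model_registry.rename_model(name="churn-model", new_name="churn-model-v2")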
- + :returns: Iterator over :class:`ModelVersion` """ - + query = {} - if filter is not None: - query["filter"] = filter - if max_results is not None: - query["max_results"] = max_results - if order_by is not None: - query["order_by"] = [v for v in order_by] - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if filter is not None: query['filter'] = filter + if max_results is not None: query['max_results'] = max_results + if order_by is not None: query['order_by'] = [v for v in order_by] + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/mlflow/model-versions/search", query=query, headers=headers) - if "model_versions" in json: - for v in json["model_versions"]: - yield ModelVersion.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def search_models( - self, - *, - filter: Optional[str] = None, - max_results: Optional[int] = None, - order_by: Optional[List[str]] = None, - page_token: Optional[str] = None, - ) -> Iterator[Model]: - """Search models. + json = self._api.do('GET','/api/2.0/mlflow/model-versions/search', query=query + + , headers=headers + ) + if 'model_versions' in json: + for v in json['model_versions']: + yield ModelVersion.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Search for registered models based on the specified __filter__. + + + + def search_models(self + + , * + , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[Model]: + """Search models. + + Search for registered models based on the specified __filter__. + :param filter: str (optional) String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single @@ -9557,37 +8597,43 @@ def search_models( name ASC. :param page_token: str (optional) Pagination token to go to the next page based on a previous search query. - + :returns: Iterator over :class:`Model` """ - + query = {} - if filter is not None: - query["filter"] = filter - if max_results is not None: - query["max_results"] = max_results - if order_by is not None: - query["order_by"] = [v for v in order_by] - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if filter is not None: query['filter'] = filter + if max_results is not None: query['max_results'] = max_results + if order_by is not None: query['order_by'] = [v for v in order_by] + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/mlflow/registered-models/search", query=query, headers=headers) - if "registered_models" in json: - for v in json["registered_models"]: - yield Model.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def set_model_tag(self, name: str, key: str, value: str): - """Set a tag. 
+ json = self._api.do('GET','/api/2.0/mlflow/registered-models/search', query=query + + , headers=headers + ) + if 'registered_models' in json: + for v in json['registered_models']: + yield Model.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Sets a tag on a registered model. + + + + def set_model_tag(self + , name: str, key: str, value: str + ): + """Set a tag. + + Sets a tag on a registered model. + :param name: str Unique name of the model. :param key: str @@ -9597,28 +8643,32 @@ def set_model_tag(self, name: str, key: str, value: str): :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. - - + + """ body = {} - if key is not None: - body["key"] = key - if name is not None: - body["name"] = name - if value is not None: - body["value"] = value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if key is not None: body['key'] = key + if name is not None: body['name'] = name + if value is not None: body['value'] = value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/registered-models/set-tag', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/mlflow/registered-models/set-tag", body=body, headers=headers) + + + - def set_model_version_tag(self, name: str, version: str, key: str, value: str): + def set_model_version_tag(self + , name: str, version: str, key: str, value: str + ): """Set a version tag. - + Sets a model version tag. - + :param name: str Unique name of the model. :param version: str @@ -9630,260 +8680,269 @@ def set_model_version_tag(self, name: str, version: str, key: str, value: str): :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. - - + + """ body = {} - if key is not None: - body["key"] = key - if name is not None: - body["name"] = name - if value is not None: - body["value"] = value - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/mlflow/model-versions/set-tag", body=body, headers=headers) - - def set_permissions( - self, - registered_model_id: str, - *, - access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None, - ) -> RegisteredModelPermissions: - """Set registered model permissions. + if key is not None: body['key'] = key + if name is not None: body['name'] = name + if value is not None: body['value'] = value + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/mlflow/model-versions/set-tag', body=body + + , headers=headers + ) + + + + + + def set_permissions(self + , registered_model_id: str + , * + , access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None) -> RegisteredModelPermissions: + """Set registered model permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. 
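Filter syntax for the two search calls above, as a sketch with a placeholder model name; note the single-quoted string literals and the backend's `%...%` expansion for `LIKE`:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Single boolean condition, string value in single quotes.
for mv in w.model_registry.search_model_versions(filter="name='churn-model'"):
    print(mv.version, mv.status)

# "name LIKE 'churn'" is interpreted as "name LIKE '%churn%'".
for m in w.model_registry.search_models(filter="name LIKE 'churn'", max_results=10):
    print(m.name)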
:param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.0/permissions/registered-models/{registered_model_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/registered-models/{registered_model_id}', body=body + + , headers=headers + ) return RegisteredModelPermissions.from_dict(res) - def test_registry_webhook( - self, id: str, *, event: Optional[RegistryWebhookEvent] = None - ) -> TestRegistryWebhookResponse: - """Test a webhook. + + + + def test_registry_webhook(self + , id: str + , * + , event: Optional[RegistryWebhookEvent] = None) -> TestRegistryWebhookResponse: + """Test a webhook. + **NOTE:** This endpoint is in Public Preview. - + Tests a registry webhook. - + :param id: str Webhook ID :param event: :class:`RegistryWebhookEvent` (optional) If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the test trigger uses a randomly chosen event associated with the webhook. - + :returns: :class:`TestRegistryWebhookResponse` """ body = {} - if event is not None: - body["event"] = event.value - if id is not None: - body["id"] = id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/mlflow/registry-webhooks/test", body=body, headers=headers) + if event is not None: body['event'] = event.value + if id is not None: body['id'] = id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/registry-webhooks/test', body=body + + , headers=headers + ) return TestRegistryWebhookResponse.from_dict(res) - def transition_stage( - self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None - ) -> TransitionStageResponse: - """Transition a stage. + + + + def transition_stage(self + , name: str, version: str, stage: Stage, archive_existing_versions: bool + , * + , comment: Optional[str] = None) -> TransitionStageResponse: + """Transition a stage. + Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded. - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action.
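A sketch combining the tag setters with `set_permissions` above. Because `set_permissions` replaces direct grants wholesale, any grant not restated is dropped; the registered-model ID and group name are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import (RegisteredModelAccessControlRequest,
                                       RegisteredModelPermissionLevel)

w = WorkspaceClient()

w.model_registry.set_model_tag(name="churn-model", key="owner", value="ml-platform")
w.model_registry.set_model_version_tag(name="churn-model", version="3",
                                       key="validated", value="true")

w.model_registry.set_permissions(
    registered_model_id="<registered-model-id>",
    access_control_list=[
        RegisteredModelAccessControlRequest(
            group_name="data-science",
            permission_level=RegisteredModelPermissionLevel.CAN_MANAGE_STAGING_VERSIONS)
    ])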
- + :returns: :class:`TransitionStageResponse` """ body = {} - if archive_existing_versions is not None: - body["archive_existing_versions"] = archive_existing_versions - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if stage is not None: - body["stage"] = stage.value - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", "/api/2.0/mlflow/databricks/model-versions/transition-stage", body=body, headers=headers - ) + if archive_existing_versions is not None: body['archive_existing_versions'] = archive_existing_versions + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if stage is not None: body['stage'] = stage.value + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/mlflow/databricks/model-versions/transition-stage', body=body + + , headers=headers + ) return TransitionStageResponse.from_dict(res) - def update_comment(self, id: str, comment: str) -> UpdateCommentResponse: - """Update a comment. + + + + def update_comment(self + , id: str, comment: str + ) -> UpdateCommentResponse: + """Update a comment. + Post an edit to a comment on a model version. - + :param id: str Unique identifier of an activity :param comment: str User-provided comment on the action. - + :returns: :class:`UpdateCommentResponse` """ body = {} - if comment is not None: - body["comment"] = comment - if id is not None: - body["id"] = id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", "/api/2.0/mlflow/comments/update", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if id is not None: body['id'] = id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/mlflow/comments/update', body=body + + , headers=headers + ) return UpdateCommentResponse.from_dict(res) - def update_model(self, name: str, *, description: Optional[str] = None): - """Update model. + + + + def update_model(self + , name: str + , * + , description: Optional[str] = None): + """Update model. + Updates a registered model. - + :param name: str Registered model unique name identifier. :param description: str (optional) If provided, updates the description for this `registered_model`. - - + + """ body = {} - if description is not None: - body["description"] = description - if name is not None: - body["name"] = name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if description is not None: body['description'] = description + if name is not None: body['name'] = name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH','/api/2.0/mlflow/registered-models/update', body=body + + , headers=headers + ) + - self._api.do("PATCH", "/api/2.0/mlflow/registered-models/update", body=body, headers=headers) + + + - def update_model_version(self, name: str, version: str, *, description: Optional[str] = None): + def update_model_version(self + , name: str, version: str + , * + , description: Optional[str] = None): """Update model version. - + Updates the model version. 
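        For example, a hedged sketch (assumes an authenticated `WorkspaceClient` bound to
        `w` and that this method is exposed as `w.model_registry.update_model_version`):

            w.model_registry.update_model_version(
                name="my-model",
                version="1",
                description="Retrained on the latest snapshot",
            )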
- + :param name: str Name of the registered model :param version: str Model version number :param description: str (optional) If provided, updates the description for this `registered_model`. - - + + """ body = {} - if description is not None: - body["description"] = description - if name is not None: - body["name"] = name - if version is not None: - body["version"] = version - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", "/api/2.0/mlflow/model-versions/update", body=body, headers=headers) - - def update_permissions( - self, - registered_model_id: str, - *, - access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None, - ) -> RegisteredModelPermissions: - """Update registered model permissions. + if description is not None: body['description'] = description + if name is not None: body['name'] = name + if version is not None: body['version'] = version + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH','/api/2.0/mlflow/model-versions/update', body=body + + , headers=headers + ) + + + + + + def update_permissions(self + , registered_model_id: str + , * + , access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None) -> RegisteredModelPermissions: + """Update registered model permissions. + Updates the permissions on a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/permissions/registered-models/{registered_model_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/registered-models/{registered_model_id}', body=body + + , headers=headers + ) return RegisteredModelPermissions.from_dict(res) - def update_webhook( - self, - id: str, - *, - description: Optional[str] = None, - events: Optional[List[RegistryWebhookEvent]] = None, - http_url_spec: Optional[HttpUrlSpec] = None, - job_spec: Optional[JobSpec] = None, - status: Optional[RegistryWebhookStatus] = None, - ): - """Update a webhook. + + + + def update_webhook(self + , id: str + , * + , description: Optional[str] = None, events: Optional[List[RegistryWebhookEvent]] = None, http_url_spec: Optional[HttpUrlSpec] = None, job_spec: Optional[JobSpec] = None, status: Optional[RegistryWebhookStatus] = None): + """Update a webhook. + **NOTE:** This endpoint is in Public Preview. - + Updates a registry webhook. - + :param id: str Webhook ID :param description: str (optional) @@ -9891,61 +8950,59 @@ def update_webhook( :param events: List[:class:`RegistryWebhookEvent`] (optional) Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. - + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. 
- + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. - - + + """ body = {} - if description is not None: - body["description"] = description - if events is not None: - body["events"] = [v.value for v in events] - if http_url_spec is not None: - body["http_url_spec"] = http_url_spec.as_dict() - if id is not None: - body["id"] = id - if job_spec is not None: - body["job_spec"] = job_spec.as_dict() - if status is not None: - body["status"] = status.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", "/api/2.0/mlflow/registry-webhooks/update", body=body, headers=headers) + if description is not None: body['description'] = description + if events is not None: body['events'] = [v.value for v in events] + if http_url_spec is not None: body['http_url_spec'] = http_url_spec.as_dict() + if id is not None: body['id'] = id + if job_spec is not None: body['job_spec'] = job_spec.as_dict() + if status is not None: body['status'] = status.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH','/api/2.0/mlflow/registry-webhooks/update', body=body + + , headers=headers + ) + + + + \ No newline at end of file diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 030633eb8..0edf98fed 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -1,189 +1,173 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
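# The dataclasses below follow the generator's round-trip convention:
# `as_dict()` builds a JSON-ready request body and `from_dict()` rebuilds an
# object from a response dictionary. A minimal sketch (illustrative only; the
# field values below are assumptions, not defaults from the spec):
#
#   req = CreateCustomAppIntegration(
#       name="my-app",
#       confidential=True,
#       redirect_urls=["https://example.com/callback"],
#       scopes=["all-apis"],
#   )
#   body = req.as_dict()  # {'confidential': True, 'name': 'my-app', ...}
#   same = CreateCustomAppIntegration.from_dict(body)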
from __future__ import annotations - -import logging from dataclasses import dataclass -from typing import Any, Dict, Iterator, List, Optional +from datetime import timedelta +from enum import Enum +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token + +_LOG = logging.getLogger('databricks.sdk') -_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + + @dataclass class CreateCustomAppIntegration: confidential: Optional[bool] = None """This field indicates whether an OAuth client secret is required to authenticate this client.""" - + name: Optional[str] = None """Name of the custom OAuth app""" - + redirect_urls: Optional[List[str]] = None """List of OAuth redirect urls""" - + scopes: Optional[List[str]] = None """OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid, profile, email.""" - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + user_authorized_scopes: Optional[List[str]] = None """Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. Must be a subset of scopes.""" - + def as_dict(self) -> dict: """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.confidential is not None: - body["confidential"] = self.confidential - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: - body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] + if self.confidential is not None: body['confidential'] = self.confidential + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] + if self.scopes: body['scopes'] = [v for v in self.scopes] + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] return body def as_shallow_dict(self) -> dict: """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" body = {} - if self.confidential is not None: - body["confidential"] = self.confidential - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - if self.user_authorized_scopes: - body["user_authorized_scopes"] = self.user_authorized_scopes + if self.confidential is not None: body['confidential'] = self.confidential + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes + if self.token_access_policy: 
body['token_access_policy'] = self.token_access_policy + if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegration: """Deserializes the CreateCustomAppIntegration from a dictionary.""" - return cls( - confidential=d.get("confidential", None), - name=d.get("name", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - user_authorized_scopes=d.get("user_authorized_scopes", None), - ) + return cls(confidential=d.get('confidential', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), user_authorized_scopes=d.get('user_authorized_scopes', None)) + + @dataclass class CreateCustomAppIntegrationOutput: client_id: Optional[str] = None """OAuth client-id generated by the Databricks""" - + client_secret: Optional[str] = None """OAuth client-secret generated by the Databricks. If this is a confidential OAuth app client-secret will be generated.""" - + integration_id: Optional[str] = None """Unique integration id for the custom OAuth app""" - + def as_dict(self) -> dict: """Serializes the CreateCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.client_id is not None: - body["client_id"] = self.client_id - if self.client_secret is not None: - body["client_secret"] = self.client_secret - if self.integration_id is not None: - body["integration_id"] = self.integration_id + if self.client_id is not None: body['client_id'] = self.client_id + if self.client_secret is not None: body['client_secret'] = self.client_secret + if self.integration_id is not None: body['integration_id'] = self.integration_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.client_id is not None: - body["client_id"] = self.client_id - if self.client_secret is not None: - body["client_secret"] = self.client_secret - if self.integration_id is not None: - body["integration_id"] = self.integration_id + if self.client_id is not None: body['client_id'] = self.client_id + if self.client_secret is not None: body['client_secret'] = self.client_secret + if self.integration_id is not None: body['integration_id'] = self.integration_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegrationOutput: """Deserializes the CreateCustomAppIntegrationOutput from a dictionary.""" - return cls( - client_id=d.get("client_id", None), - client_secret=d.get("client_secret", None), - integration_id=d.get("integration_id", None), - ) + return cls(client_id=d.get('client_id', None), client_secret=d.get('client_secret', None), integration_id=d.get('integration_id', None)) + + @dataclass class CreatePublishedAppIntegration: app_id: Optional[str] = None """App id of the OAuth published app integration. 
For example power-bi, tableau-deskop""" - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + def as_dict(self) -> dict: """Serializes the CreatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() + if self.app_id is not None: body['app_id'] = self.app_id + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy + if self.app_id is not None: body['app_id'] = self.app_id + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegration: """Deserializes the CreatePublishedAppIntegration from a dictionary.""" - return cls( - app_id=d.get("app_id", None), token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy) - ) + return cls(app_id=d.get('app_id', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) + + @dataclass class CreatePublishedAppIntegrationOutput: integration_id: Optional[str] = None """Unique integration id for the published OAuth app""" - + def as_dict(self) -> dict: """Serializes the CreatePublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id + if self.integration_id is not None: body['integration_id'] = self.integration_id return body def as_shallow_dict(self) -> dict: """Serializes the CreatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id + if self.integration_id is not None: body['integration_id'] = self.integration_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegrationOutput: """Deserializes the CreatePublishedAppIntegrationOutput from a dictionary.""" - return cls(integration_id=d.get("integration_id", None)) + return cls(integration_id=d.get('integration_id', None)) + + + + + @dataclass @@ -191,107 +175,88 @@ class CreateServicePrincipalSecretRequest: lifetime: Optional[str] = None """The lifetime of the secret in seconds. 
If this parameter is not provided, the secret will have a default lifetime of 730 days (63072000s).""" - + service_principal_id: Optional[int] = None """The service principal ID.""" - + def as_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.lifetime is not None: - body["lifetime"] = self.lifetime - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id + if self.lifetime is not None: body['lifetime'] = self.lifetime + if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.lifetime is not None: - body["lifetime"] = self.lifetime - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id + if self.lifetime is not None: body['lifetime'] = self.lifetime + if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateServicePrincipalSecretRequest: """Deserializes the CreateServicePrincipalSecretRequest from a dictionary.""" - return cls(lifetime=d.get("lifetime", None), service_principal_id=d.get("service_principal_id", None)) + return cls(lifetime=d.get('lifetime', None), service_principal_id=d.get('service_principal_id', None)) + + @dataclass class CreateServicePrincipalSecretResponse: create_time: Optional[str] = None """UTC time when the secret was created""" - + expire_time: Optional[str] = None """UTC time when the secret will expire. 
If the field is not present, the secret does not expire.""" - + id: Optional[str] = None """ID of the secret""" - + secret: Optional[str] = None """Secret Value""" - + secret_hash: Optional[str] = None """Secret Hash""" - + status: Optional[str] = None """Status of the secret""" - + update_time: Optional[str] = None """UTC time when the secret was updated""" - + def as_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.expire_time is not None: - body["expire_time"] = self.expire_time - if self.id is not None: - body["id"] = self.id - if self.secret is not None: - body["secret"] = self.secret - if self.secret_hash is not None: - body["secret_hash"] = self.secret_hash - if self.status is not None: - body["status"] = self.status - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.expire_time is not None: body['expire_time'] = self.expire_time + if self.id is not None: body['id'] = self.id + if self.secret is not None: body['secret'] = self.secret + if self.secret_hash is not None: body['secret_hash'] = self.secret_hash + if self.status is not None: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.expire_time is not None: - body["expire_time"] = self.expire_time - if self.id is not None: - body["id"] = self.id - if self.secret is not None: - body["secret"] = self.secret - if self.secret_hash is not None: - body["secret_hash"] = self.secret_hash - if self.status is not None: - body["status"] = self.status - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.expire_time is not None: body['expire_time'] = self.expire_time + if self.id is not None: body['id'] = self.id + if self.secret is not None: body['secret'] = self.secret + if self.secret_hash is not None: body['secret_hash'] = self.secret_hash + if self.status is not None: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateServicePrincipalSecretResponse: """Deserializes the CreateServicePrincipalSecretResponse from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - expire_time=d.get("expire_time", None), - id=d.get("id", None), - secret=d.get("secret", None), - secret_hash=d.get("secret_hash", None), - status=d.get("status", None), - update_time=d.get("update_time", None), - ) + return cls(create_time=d.get('create_time', None), expire_time=d.get('expire_time', None), id=d.get('id', None), secret=d.get('secret', None), secret_hash=d.get('secret_hash', None), status=d.get('status', None), update_time=d.get('update_time', None)) + + + + + @dataclass @@ -310,6 +275,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCustomAppIntegrationOutput: """Deserializes the DeleteCustomAppIntegrationOutput from a dictionary.""" return cls() + + + + + @dataclass @@ -328,6 +298,11 @@ 
def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeletePublishedAppIntegrationOutput: """Deserializes the DeletePublishedAppIntegrationOutput from a dictionary.""" return cls() + + + + + @dataclass @@ -346,16 +321,24 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + + + + @dataclass class FederationPolicy: create_time: Optional[str] = None """Creation time of the federation policy.""" - + description: Optional[str] = None """Description of the federation policy.""" - + name: Optional[str] = None """Resource name for the federation policy. Example values include `accounts//federationPolicies/my-federation-policy` for Account Federation Policies, @@ -364,733 +347,603 @@ class FederationPolicy: for Service Principal Federation Policies. Typically an output parameter, which does not need to be specified in create or update requests. If specified in a request, must match the value in the request URL.""" - + oidc_policy: Optional[OidcFederationPolicy] = None """Specifies the policy to use for validating OIDC claims in your federated tokens.""" - + policy_id: Optional[str] = None """The ID of the federation policy.""" - + service_principal_id: Optional[int] = None """The service principal ID that this federation policy applies to. Only set for service principal federation policies.""" - + uid: Optional[str] = None """Unique, immutable id of the federation policy.""" - + update_time: Optional[str] = None """Last update time of the federation policy.""" - + def as_dict(self) -> dict: """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.oidc_policy: - body["oidc_policy"] = self.oidc_policy.as_dict() - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - if self.uid is not None: - body["uid"] = self.uid - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict() + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id + if self.uid is not None: body['uid'] = self.uid + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.oidc_policy: - body["oidc_policy"] = self.oidc_policy - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - if self.uid is not None: - body["uid"] = self.uid - 
if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.oidc_policy: body['oidc_policy'] = self.oidc_policy + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id + if self.uid is not None: body['uid'] = self.uid + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FederationPolicy: """Deserializes the FederationPolicy from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - description=d.get("description", None), - name=d.get("name", None), - oidc_policy=_from_dict(d, "oidc_policy", OidcFederationPolicy), - policy_id=d.get("policy_id", None), - service_principal_id=d.get("service_principal_id", None), - uid=d.get("uid", None), - update_time=d.get("update_time", None), - ) + return cls(create_time=d.get('create_time', None), description=d.get('description', None), name=d.get('name', None), oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy), policy_id=d.get('policy_id', None), service_principal_id=d.get('service_principal_id', None), uid=d.get('uid', None), update_time=d.get('update_time', None)) + + + + + @dataclass class GetCustomAppIntegrationOutput: client_id: Optional[str] = None """The client id of the custom OAuth app""" - + confidential: Optional[bool] = None """This field indicates whether an OAuth client secret is required to authenticate this client.""" - + create_time: Optional[str] = None - + created_by: Optional[int] = None - + creator_username: Optional[str] = None - + integration_id: Optional[str] = None """ID of this custom app""" - + name: Optional[str] = None """The display name of the custom OAuth app""" - + redirect_urls: Optional[List[str]] = None """List of OAuth redirect urls""" - + scopes: Optional[List[str]] = None - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + user_authorized_scopes: Optional[List[str]] = None """Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. 
Must be a subset of scopes.""" - + def as_dict(self) -> dict: """Serializes the GetCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.client_id is not None: - body["client_id"] = self.client_id - if self.confidential is not None: - body["confidential"] = self.confidential - if self.create_time is not None: - body["create_time"] = self.create_time - if self.created_by is not None: - body["created_by"] = self.created_by - if self.creator_username is not None: - body["creator_username"] = self.creator_username - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: - body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] + if self.client_id is not None: body['client_id'] = self.client_id + if self.confidential is not None: body['confidential'] = self.confidential + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.creator_username is not None: body['creator_username'] = self.creator_username + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] + if self.scopes: body['scopes'] = [v for v in self.scopes] + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] return body def as_shallow_dict(self) -> dict: """Serializes the GetCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.client_id is not None: - body["client_id"] = self.client_id - if self.confidential is not None: - body["confidential"] = self.confidential - if self.create_time is not None: - body["create_time"] = self.create_time - if self.created_by is not None: - body["created_by"] = self.created_by - if self.creator_username is not None: - body["creator_username"] = self.creator_username - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - if self.user_authorized_scopes: - body["user_authorized_scopes"] = self.user_authorized_scopes + if self.client_id is not None: body['client_id'] = self.client_id + if self.confidential is not None: body['confidential'] = self.confidential + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.creator_username is not None: body['creator_username'] = self.creator_username + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes + if 
self.token_access_policy: body['token_access_policy'] = self.token_access_policy + if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetCustomAppIntegrationOutput: """Deserializes the GetCustomAppIntegrationOutput from a dictionary.""" - return cls( - client_id=d.get("client_id", None), - confidential=d.get("confidential", None), - create_time=d.get("create_time", None), - created_by=d.get("created_by", None), - creator_username=d.get("creator_username", None), - integration_id=d.get("integration_id", None), - name=d.get("name", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - user_authorized_scopes=d.get("user_authorized_scopes", None), - ) + return cls(client_id=d.get('client_id', None), confidential=d.get('confidential', None), create_time=d.get('create_time', None), created_by=d.get('created_by', None), creator_username=d.get('creator_username', None), integration_id=d.get('integration_id', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), user_authorized_scopes=d.get('user_authorized_scopes', None)) + + + + + @dataclass class GetCustomAppIntegrationsOutput: apps: Optional[List[GetCustomAppIntegrationOutput]] = None """List of Custom OAuth App Integrations defined for the account.""" - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the GetCustomAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: - body["apps"] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetCustomAppIntegrationsOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: - body["apps"] = self.apps - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetCustomAppIntegrationsOutput: """Deserializes the GetCustomAppIntegrationsOutput from a dictionary.""" - return cls( - apps=_repeated_dict(d, "apps", GetCustomAppIntegrationOutput), - next_page_token=d.get("next_page_token", None), - ) + return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput), next_page_token=d.get('next_page_token', None)) + + @dataclass class GetPublishedAppIntegrationOutput: app_id: Optional[str] = None """App-id of the published app integration""" - + create_time: Optional[str] = None - + created_by: Optional[int] = None - + integration_id: Optional[str] = None """Unique integration id for the published OAuth app""" - + name: Optional[str] = None """Display name of the published OAuth app""" - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_id is 
not None: - body["app_id"] = self.app_id - if self.create_time is not None: - body["create_time"] = self.create_time - if self.created_by is not None: - body["created_by"] = self.created_by - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.name is not None: - body["name"] = self.name - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() + if self.app_id is not None: body['app_id'] = self.app_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.name is not None: body['name'] = self.name + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.create_time is not None: - body["create_time"] = self.create_time - if self.created_by is not None: - body["created_by"] = self.created_by - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.name is not None: - body["name"] = self.name - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy + if self.app_id is not None: body['app_id'] = self.app_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.name is not None: body['name'] = self.name + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedAppIntegrationOutput: """Deserializes the GetPublishedAppIntegrationOutput from a dictionary.""" - return cls( - app_id=d.get("app_id", None), - create_time=d.get("create_time", None), - created_by=d.get("created_by", None), - integration_id=d.get("integration_id", None), - name=d.get("name", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - ) + return cls(app_id=d.get('app_id', None), create_time=d.get('create_time', None), created_by=d.get('created_by', None), integration_id=d.get('integration_id', None), name=d.get('name', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) + + + + + @dataclass class GetPublishedAppIntegrationsOutput: apps: Optional[List[GetPublishedAppIntegrationOutput]] = None """List of Published OAuth App Integrations defined for the account.""" - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: - body["apps"] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationsOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: - body["apps"] = self.apps - if 
self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedAppIntegrationsOutput: """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary.""" - return cls( - apps=_repeated_dict(d, "apps", GetPublishedAppIntegrationOutput), - next_page_token=d.get("next_page_token", None), - ) + return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput), next_page_token=d.get('next_page_token', None)) + + @dataclass class GetPublishedAppsOutput: apps: Optional[List[PublishedAppOutput]] = None """List of Published OAuth Apps.""" - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. If not present, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the GetPublishedAppsOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: - body["apps"] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedAppsOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: - body["apps"] = self.apps - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.apps: body['apps'] = self.apps + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedAppsOutput: """Deserializes the GetPublishedAppsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, "apps", PublishedAppOutput), next_page_token=d.get("next_page_token", None)) + return cls(apps=_repeated_dict(d, 'apps', PublishedAppOutput), next_page_token=d.get('next_page_token', None)) + + + + + + + + + + + @dataclass class ListFederationPoliciesResponse: next_page_token: Optional[str] = None - + policies: Optional[List[FederationPolicy]] = None - + def as_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.policies: - body["policies"] = [v.as_dict() for v in self.policies] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = [v.as_dict() for v in self.policies] return body def as_shallow_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.policies: - body["policies"] = self.policies + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = self.policies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListFederationPoliciesResponse: """Deserializes the ListFederationPoliciesResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", 
FederationPolicy) - ) + return cls(next_page_token=d.get('next_page_token', None), policies=_repeated_dict(d, 'policies', FederationPolicy)) + + + + + + + + + + + + + + @dataclass class ListServicePrincipalSecretsResponse: next_page_token: Optional[str] = None """A token, which can be sent as `page_token` to retrieve the next page.""" - + secrets: Optional[List[SecretInfo]] = None """List of the secrets""" - + def as_dict(self) -> dict: """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.secrets: - body["secrets"] = [v.as_dict() for v in self.secrets] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets] return body def as_shallow_dict(self) -> dict: """Serializes the ListServicePrincipalSecretsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.secrets: - body["secrets"] = self.secrets + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.secrets: body['secrets'] = self.secrets return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalSecretsResponse: """Deserializes the ListServicePrincipalSecretsResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), secrets=_repeated_dict(d, "secrets", SecretInfo)) + return cls(next_page_token=d.get('next_page_token', None), secrets=_repeated_dict(d, 'secrets', SecretInfo)) + + @dataclass class OidcFederationPolicy: """Specifies the policy to use for validating OIDC claims in your federated tokens.""" - + audiences: Optional[List[str]] = None """The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience identifier is intended to represent the recipient of the token. Can be any non-empty string value. As long as the audience in the token matches at least one audience in the policy, the token is considered a match. If audiences is unspecified, defaults to your Databricks account id.""" - + issuer: Optional[str] = None """The required token issuer, as specified in the 'iss' claim of federated tokens.""" - + jwks_json: Optional[str] = None """The public keys used to validate the signature of federated tokens, in JWKS format. Most use cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys.""" - + jwks_uri: Optional[str] = None """URL of the public keys used to validate the signature of federated tokens, in JWKS format. Most use cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys.""" - + subject: Optional[str] = None """The required token subject, as specified in the subject claim of federated tokens. Must be specified for service principal federation policies. 
Must not be specified for account federation policies.""" - + subject_claim: Optional[str] = None """The claim that contains the subject of the token. If unspecified, the default value is 'sub'.""" - + def as_dict(self) -> dict: """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.audiences: - body["audiences"] = [v for v in self.audiences] - if self.issuer is not None: - body["issuer"] = self.issuer - if self.jwks_json is not None: - body["jwks_json"] = self.jwks_json - if self.jwks_uri is not None: - body["jwks_uri"] = self.jwks_uri - if self.subject is not None: - body["subject"] = self.subject - if self.subject_claim is not None: - body["subject_claim"] = self.subject_claim + if self.audiences: body['audiences'] = [v for v in self.audiences] + if self.issuer is not None: body['issuer'] = self.issuer + if self.jwks_json is not None: body['jwks_json'] = self.jwks_json + if self.jwks_uri is not None: body['jwks_uri'] = self.jwks_uri + if self.subject is not None: body['subject'] = self.subject + if self.subject_claim is not None: body['subject_claim'] = self.subject_claim return body def as_shallow_dict(self) -> dict: """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.audiences: - body["audiences"] = self.audiences - if self.issuer is not None: - body["issuer"] = self.issuer - if self.jwks_json is not None: - body["jwks_json"] = self.jwks_json - if self.jwks_uri is not None: - body["jwks_uri"] = self.jwks_uri - if self.subject is not None: - body["subject"] = self.subject - if self.subject_claim is not None: - body["subject_claim"] = self.subject_claim + if self.audiences: body['audiences'] = self.audiences + if self.issuer is not None: body['issuer'] = self.issuer + if self.jwks_json is not None: body['jwks_json'] = self.jwks_json + if self.jwks_uri is not None: body['jwks_uri'] = self.jwks_uri + if self.subject is not None: body['subject'] = self.subject + if self.subject_claim is not None: body['subject_claim'] = self.subject_claim return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OidcFederationPolicy: """Deserializes the OidcFederationPolicy from a dictionary.""" - return cls( - audiences=d.get("audiences", None), - issuer=d.get("issuer", None), - jwks_json=d.get("jwks_json", None), - jwks_uri=d.get("jwks_uri", None), - subject=d.get("subject", None), - subject_claim=d.get("subject_claim", None), - ) + return cls(audiences=d.get('audiences', None), issuer=d.get('issuer', None), jwks_json=d.get('jwks_json', None), jwks_uri=d.get('jwks_uri', None), subject=d.get('subject', None), subject_claim=d.get('subject_claim', None)) + + @dataclass class PublishedAppOutput: app_id: Optional[str] = None """Unique ID of the published OAuth app.""" - + client_id: Optional[str] = None """Client ID of the published OAuth app. It is the client_id in the OAuth flow""" - + description: Optional[str] = None """Description of the published OAuth app.""" - + is_confidential_client: Optional[bool] = None """Whether the published OAuth app is a confidential client. 
It is always false for published OAuth apps.""" - + name: Optional[str] = None """The display name of the published OAuth app.""" - + redirect_urls: Optional[List[str]] = None """Redirect URLs of the published OAuth app.""" - + scopes: Optional[List[str]] = None """Required scopes for the published OAuth app.""" - + def as_dict(self) -> dict: """Serializes the PublishedAppOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.client_id is not None: - body["client_id"] = self.client_id - if self.description is not None: - body["description"] = self.description - if self.is_confidential_client is not None: - body["is_confidential_client"] = self.is_confidential_client - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] + if self.app_id is not None: body['app_id'] = self.app_id + if self.client_id is not None: body['client_id'] = self.client_id + if self.description is not None: body['description'] = self.description + if self.is_confidential_client is not None: body['is_confidential_client'] = self.is_confidential_client + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] + if self.scopes: body['scopes'] = [v for v in self.scopes] return body def as_shallow_dict(self) -> dict: """Serializes the PublishedAppOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.client_id is not None: - body["client_id"] = self.client_id - if self.description is not None: - body["description"] = self.description - if self.is_confidential_client is not None: - body["is_confidential_client"] = self.is_confidential_client - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes + if self.app_id is not None: body['app_id'] = self.app_id + if self.client_id is not None: body['client_id'] = self.client_id + if self.description is not None: body['description'] = self.description + if self.is_confidential_client is not None: body['is_confidential_client'] = self.is_confidential_client + if self.name is not None: body['name'] = self.name + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PublishedAppOutput: """Deserializes the PublishedAppOutput from a dictionary.""" - return cls( - app_id=d.get("app_id", None), - client_id=d.get("client_id", None), - description=d.get("description", None), - is_confidential_client=d.get("is_confidential_client", None), - name=d.get("name", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - ) + return cls(app_id=d.get('app_id', None), client_id=d.get('client_id', None), description=d.get('description', None), is_confidential_client=d.get('is_confidential_client', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None)) + + @dataclass class SecretInfo: create_time: Optional[str] = None """UTC time when the secret was created""" - + expire_time: Optional[str] = None """UTC time when the secret will expire. 
If the field is not present, the secret does not expire.""" - + id: Optional[str] = None """ID of the secret""" - + secret_hash: Optional[str] = None """Secret Hash""" - + status: Optional[str] = None """Status of the secret""" - + update_time: Optional[str] = None """UTC time when the secret was updated""" - + def as_dict(self) -> dict: """Serializes the SecretInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.expire_time is not None: - body["expire_time"] = self.expire_time - if self.id is not None: - body["id"] = self.id - if self.secret_hash is not None: - body["secret_hash"] = self.secret_hash - if self.status is not None: - body["status"] = self.status - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.expire_time is not None: body['expire_time'] = self.expire_time + if self.id is not None: body['id'] = self.id + if self.secret_hash is not None: body['secret_hash'] = self.secret_hash + if self.status is not None: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the SecretInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.expire_time is not None: - body["expire_time"] = self.expire_time - if self.id is not None: - body["id"] = self.id - if self.secret_hash is not None: - body["secret_hash"] = self.secret_hash - if self.status is not None: - body["status"] = self.status - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.expire_time is not None: body['expire_time'] = self.expire_time + if self.id is not None: body['id'] = self.id + if self.secret_hash is not None: body['secret_hash'] = self.secret_hash + if self.status is not None: body['status'] = self.status + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SecretInfo: """Deserializes the SecretInfo from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - expire_time=d.get("expire_time", None), - id=d.get("id", None), - secret_hash=d.get("secret_hash", None), - status=d.get("status", None), - update_time=d.get("update_time", None), - ) + return cls(create_time=d.get('create_time', None), expire_time=d.get('expire_time', None), id=d.get('id', None), secret_hash=d.get('secret_hash', None), status=d.get('status', None), update_time=d.get('update_time', None)) + + @dataclass class TokenAccessPolicy: access_token_ttl_in_minutes: Optional[int] = None """access token time to live in minutes""" - + refresh_token_ttl_in_minutes: Optional[int] = None """refresh token time to live in minutes""" - + def as_dict(self) -> dict: """Serializes the TokenAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_token_ttl_in_minutes is not None: - body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes - if self.refresh_token_ttl_in_minutes is not None: - body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes + if self.access_token_ttl_in_minutes is not None: body['access_token_ttl_in_minutes'] = 
self.access_token_ttl_in_minutes + if self.refresh_token_ttl_in_minutes is not None: body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes return body def as_shallow_dict(self) -> dict: """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_token_ttl_in_minutes is not None: - body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes - if self.refresh_token_ttl_in_minutes is not None: - body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes + if self.access_token_ttl_in_minutes is not None: body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes + if self.refresh_token_ttl_in_minutes is not None: body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenAccessPolicy: """Deserializes the TokenAccessPolicy from a dictionary.""" - return cls( - access_token_ttl_in_minutes=d.get("access_token_ttl_in_minutes", None), - refresh_token_ttl_in_minutes=d.get("refresh_token_ttl_in_minutes", None), - ) + return cls(access_token_ttl_in_minutes=d.get('access_token_ttl_in_minutes', None), refresh_token_ttl_in_minutes=d.get('refresh_token_ttl_in_minutes', None)) + + + + + @dataclass class UpdateCustomAppIntegration: integration_id: Optional[str] = None - + redirect_urls: Optional[List[str]] = None """List of OAuth redirect urls to be updated in the custom OAuth app integration""" - + scopes: Optional[List[str]] = None """List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs this will fully replace the existing values instead of appending""" - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy to be updated in the custom OAuth app integration""" - + user_authorized_scopes: Optional[List[str]] = None """Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. 
Must be a subset of scopes.""" - + def as_dict(self) -> dict: """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: - body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] + if self.scopes: body['scopes'] = [v for v in self.scopes] + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - if self.user_authorized_scopes: - body["user_authorized_scopes"] = self.user_authorized_scopes + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.redirect_urls: body['redirect_urls'] = self.redirect_urls + if self.scopes: body['scopes'] = self.scopes + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegration: """Deserializes the UpdateCustomAppIntegration from a dictionary.""" - return cls( - integration_id=d.get("integration_id", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - user_authorized_scopes=d.get("user_authorized_scopes", None), - ) + return cls(integration_id=d.get('integration_id', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), user_authorized_scopes=d.get('user_authorized_scopes', None)) + + @dataclass @@ -1109,40 +962,37 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegrationOutput: """Deserializes the UpdateCustomAppIntegrationOutput from a dictionary.""" return cls() + + @dataclass class UpdatePublishedAppIntegration: integration_id: Optional[str] = None - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy to be updated in the published OAuth app integration""" - + def as_dict(self) -> dict: """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() + if self.integration_id is not None: body['integration_id'] 
= self.integration_id + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy + if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.token_access_policy: body['token_access_policy'] = self.token_access_policy return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePublishedAppIntegration: """Deserializes the UpdatePublishedAppIntegration from a dictionary.""" - return cls( - integration_id=d.get("integration_id", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - ) + return cls(integration_id=d.get('integration_id', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) + + @dataclass @@ -1161,24 +1011,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdatePublishedAppIntegrationOutput: """Deserializes the UpdatePublishedAppIntegrationOutput from a dictionary.""" return cls() + + + + + + + class AccountFederationPolicyAPI: """These APIs manage account federation policies. - + Account federation policies allow users and service principals in your Databricks account to securely access Databricks APIs using tokens from your trusted identity providers (IdPs). - + With token federation, your users and service principals can exchange tokens from your IdP for Databricks OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage Databricks secrets, and allows you to centralize management of token issuance policies in your IdP. Databricks token federation is typically used in combination with [SCIM], so users in your IdP are synchronized into your Databricks account. - + Token federation is configured in your Databricks account using an account federation policy. An account federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from * how to determine which Databricks user, or subject, a token is issued for - + To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to @@ -1189,122 +1046,149 @@ class AccountFederationPolicyAPI: public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys. 
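In SDK terms, the example policy that follows can be built from the generated dataclasses; a sketch, assuming the `OidcFederationPolicy` dataclass defined elsewhere in this module:

```python
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

# Mirrors the issuer/audiences/subject_claim example below; values are placeholders.
policy = FederationPolicy(
    oidc_policy=OidcFederationPolicy(
        issuer="https://idp.mycompany.com/oidc",  # must match the "iss" claim
        audiences=["databricks"],                 # allowed "aud" values
        subject_claim="sub",                      # claim that identifies the user
    )
)
```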
- + An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"] subject_claim: "sub" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks as user `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub": "username@mycompany.com" } ``` - + You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if your users do not already have the ability to generate tokens that are compatible with your federation policy. - + You do not need to configure an OAuth application in Databricks to use token federation. - + [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html""" - + def __init__(self, api_client): self._api = api_client + - def create(self, policy: FederationPolicy, *, policy_id: Optional[str] = None) -> FederationPolicy: - """Create account federation policy. + + + + + + + def create(self + , policy: FederationPolicy + , * + , policy_id: Optional[str] = None) -> FederationPolicy: + """Create account federation policy. + :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if policy_id is not None: - query["policy_id"] = policy_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/federationPolicies", - query=query, - body=body, - headers=headers, - ) + if policy_id is not None: query['policy_id'] = policy_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies', query=query, body=body + + , headers=headers + ) return FederationPolicy.from_dict(res) - def delete(self, policy_id: str): - """Delete account federation policy. + + + + def delete(self + , policy_id: str + ): + """Delete account federation policy. + :param policy_id: str The identifier for the federation policy. - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}", headers=headers - ) - - def get(self, policy_id: str) -> FederationPolicy: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}' + + , headers=headers + ) + + + + + + + def get(self + , policy_id: str + ) -> FederationPolicy: """Get account federation policy. - + :param policy_id: str The identifier for the federation policy. - + :returns: :class:`FederationPolicy` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}' + + , headers=headers + ) return FederationPolicy.from_dict(res) - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: - """List account federation policies. 
+ + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + """List account federation policies. + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/federationPolicies", query=query, headers=headers - ) - if "policies" in json: - for v in json["policies"]: - yield FederationPolicy.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, policy_id: str, policy: FederationPolicy, *, update_mask: Optional[str] = None - ) -> FederationPolicy: + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies', query=query + + , headers=headers + ) + if 'policies' in json: + for v in json['policies']: + yield FederationPolicy.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , policy_id: str, policy: FederationPolicy + , * + , update_mask: Optional[str] = None) -> FederationPolicy: """Update account federation policy. - + :param policy_id: str The identifier for the federation policy. :param policy: :class:`FederationPolicy` @@ -1314,51 +1198,47 @@ def update( should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. - + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if update_mask is not None: - query["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}", - query=query, - body=body, - headers=headers, - ) + if update_mask is not None: query['update_mask'] = update_mask + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}', query=query, body=body + + , headers=headers + ) return FederationPolicy.from_dict(res) - + + class CustomAppIntegrationAPI: """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - *, - confidential: Optional[bool] = None, - name: Optional[str] = None, - redirect_urls: Optional[List[str]] = None, - scopes: Optional[List[str]] = None, - token_access_policy: Optional[TokenAccessPolicy] = None, - user_authorized_scopes: Optional[List[str]] = None, - ) -> CreateCustomAppIntegrationOutput: - """Create Custom OAuth App Integration. + - Create Custom OAuth App Integration. 
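The regenerated `list()` keeps the `next_page_token` loop, so callers iterate without handling paging themselves; a sketch, assuming an `AccountClient` configured from the environment:

```python
from databricks.sdk import AccountClient

a = AccountClient()  # host, account_id and credentials come from the environment
for policy in a.federation_policy.list(page_size=20):
    # The generator follows next_page_token until the service omits it.
    print(policy.as_dict())
```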
+ - You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. + + + def create(self + + , * + , confidential: Optional[bool] = None, name: Optional[str] = None, redirect_urls: Optional[List[str]] = None, scopes: Optional[List[str]] = None, token_access_policy: Optional[TokenAccessPolicy] = None, user_authorized_scopes: Optional[List[str]] = None) -> CreateCustomAppIntegrationOutput: + """Create Custom OAuth App Integration. + + Create Custom OAuth App Integration. + + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. + :param confidential: bool (optional) This field indicates whether an OAuth client secret is required to authenticate this client. :param name: str (optional) @@ -1373,135 +1253,127 @@ def create( :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. Must be a subset of scopes. - + :returns: :class:`CreateCustomAppIntegrationOutput` """ body = {} - if confidential is not None: - body["confidential"] = confidential - if name is not None: - body["name"] = name - if redirect_urls is not None: - body["redirect_urls"] = [v for v in redirect_urls] - if scopes is not None: - body["scopes"] = [v for v in scopes] - if token_access_policy is not None: - body["token_access_policy"] = token_access_policy.as_dict() - if user_authorized_scopes is not None: - body["user_authorized_scopes"] = [v for v in user_authorized_scopes] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations", - body=body, - headers=headers, - ) + if confidential is not None: body['confidential'] = confidential + if name is not None: body['name'] = name + if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls] + if scopes is not None: body['scopes'] = [v for v in scopes] + if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() + if user_authorized_scopes is not None: body['user_authorized_scopes'] = [v for v in user_authorized_scopes] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', body=body + + , headers=headers + ) return CreateCustomAppIntegrationOutput.from_dict(res) - def delete(self, integration_id: str): - """Delete Custom OAuth App Integration. + + + + def delete(self + , integration_id: str + ): + """Delete Custom OAuth App Integration. + Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param integration_id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}", - headers=headers, - ) - - def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}' + + , headers=headers + ) + + + + + + + def get(self + , integration_id: str + ) -> GetCustomAppIntegrationOutput: """Get OAuth Custom App Integration. - + Gets the Custom OAuth App Integration for the given integration id. 
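A usage sketch for the custom-app `create()` above, reusing the `TokenAccessPolicy` dataclass defined earlier in this file (names, URLs and scopes are placeholders, and the output is assumed to expose `integration_id`):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import TokenAccessPolicy

a = AccountClient()
res = a.custom_app_integration.create(
    name="tableau-cloud-prod",                       # placeholder display name
    confidential=True,                               # request a client secret
    redirect_urls=["https://example.com/oauth/cb"],
    scopes=["all-apis"],
    token_access_policy=TokenAccessPolicy(
        access_token_ttl_in_minutes=60,
        refresh_token_ttl_in_minutes=7 * 24 * 60,
    ),
)
print(res.integration_id)
```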
- + :param integration_id: str The OAuth app integration ID. - + :returns: :class:`GetCustomAppIntegrationOutput` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}' + + , headers=headers + ) return GetCustomAppIntegrationOutput.from_dict(res) - def list( - self, - *, - include_creator_username: Optional[bool] = None, - page_size: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[GetCustomAppIntegrationOutput]: - """Get custom oauth app integrations. + + + + def list(self + + , * + , include_creator_username: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[GetCustomAppIntegrationOutput]: + """Get custom oauth app integrations. + Get the list of custom OAuth app integrations for the specified Databricks account - + :param include_creator_username: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetCustomAppIntegrationOutput` """ - + query = {} - if include_creator_username is not None: - query["include_creator_username"] = include_creator_username - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if include_creator_username is not None: query['include_creator_username'] = include_creator_username + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations", - query=query, - headers=headers, - ) - if "apps" in json: - for v in json["apps"]: - yield GetCustomAppIntegrationOutput.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - integration_id: str, - *, - redirect_urls: Optional[List[str]] = None, - scopes: Optional[List[str]] = None, - token_access_policy: Optional[TokenAccessPolicy] = None, - user_authorized_scopes: Optional[List[str]] = None, - ): + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', query=query + + , headers=headers + ) + if 'apps' in json: + for v in json['apps']: + yield GetCustomAppIntegrationOutput.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , integration_id: str + , * + , redirect_urls: Optional[List[str]] = None, scopes: Optional[List[str]] = None, token_access_policy: Optional[TokenAccessPolicy] = None, user_authorized_scopes: Optional[List[str]] = None): """Updates Custom OAuth App Integration. - + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. 
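Since `redirect_urls` and `scopes` are full replacements rather than appends, an update must send the complete desired list; a sketch with a placeholder integration id:

```python
from databricks.sdk import AccountClient

a = AccountClient()
# Send every URL you want to keep: the server replaces the list wholesale.
a.custom_app_integration.update(
    integration_id="abc123",  # placeholder
    redirect_urls=[
        "https://example.com/oauth/cb",
        "https://example.com/oauth/cb2",
    ],
)
```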
- + :param integration_id: str :param redirect_urls: List[str] (optional) List of OAuth redirect urls to be updated in the custom OAuth app integration @@ -1513,240 +1385,257 @@ def update( :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. Must be a subset of scopes. - - + + """ body = {} - if redirect_urls is not None: - body["redirect_urls"] = [v for v in redirect_urls] - if scopes is not None: - body["scopes"] = [v for v in scopes] - if token_access_policy is not None: - body["token_access_policy"] = token_access_policy.as_dict() - if user_authorized_scopes is not None: - body["user_authorized_scopes"] = [v for v in user_authorized_scopes] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}", - body=body, - headers=headers, - ) - - + if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls] + if scopes is not None: body['scopes'] = [v for v in scopes] + if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() + if user_authorized_scopes is not None: body['user_authorized_scopes'] = [v for v in user_authorized_scopes] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}', body=body + + , headers=headers + ) + + + + class OAuthPublishedAppsAPI: """These APIs enable administrators to view all the available published OAuth applications in Databricks. Administrators can add the published OAuth applications to their account through the OAuth Published App Integration APIs.""" - + def __init__(self, api_client): self._api = api_client + - def list( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[PublishedAppOutput]: - """Get all the published OAuth apps. + - Get all the available published OAuth apps in Databricks. + + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PublishedAppOutput]: + """Get all the published OAuth apps. + + Get all the available published OAuth apps in Databricks. + :param page_size: int (optional) The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. 
- + :returns: Iterator over :class:`PublishedAppOutput` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps", query=query, headers=headers - ) - if "apps" in json: - for v in json["apps"]: - yield PublishedAppOutput.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps', query=query + + , headers=headers + ) + if 'apps' in json: + for v in json['apps']: + yield PublishedAppOutput.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + class PublishedAppIntegrationAPI: """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, *, app_id: Optional[str] = None, token_access_policy: Optional[TokenAccessPolicy] = None - ) -> CreatePublishedAppIntegrationOutput: - """Create Published OAuth App Integration. + - Create Published OAuth App Integration. + - You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. + + + def create(self + + , * + , app_id: Optional[str] = None, token_access_policy: Optional[TokenAccessPolicy] = None) -> CreatePublishedAppIntegrationOutput: + """Create Published OAuth App Integration. + + Create Published OAuth App Integration. + + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. + :param app_id: str (optional) App id of the OAuth published app integration. For example power-bi, tableau-deskop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy - + :returns: :class:`CreatePublishedAppIntegrationOutput` """ body = {} - if app_id is not None: - body["app_id"] = app_id - if token_access_policy is not None: - body["token_access_policy"] = token_access_policy.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations", - body=body, - headers=headers, - ) + if app_id is not None: body['app_id'] = app_id + if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', body=body + + , headers=headers + ) return CreatePublishedAppIntegrationOutput.from_dict(res) - def delete(self, integration_id: str): - """Delete Published OAuth App Integration. + + + + def delete(self + , integration_id: str + ): + """Delete Published OAuth App Integration. + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. 
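The published-app flow is the same shape but keyed by a known `app_id`; a sketch (the id is one of the apps returned by the published-apps listing, and the output is assumed to expose `integration_id`):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import TokenAccessPolicy

a = AccountClient()
res = a.published_app_integration.create(
    app_id="power-bi",  # e.g. discovered via a.o_auth_published_apps.list()
    token_access_policy=TokenAccessPolicy(access_token_ttl_in_minutes=60),
)
print(res.integration_id)
```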
- + :param integration_id: str - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}", - headers=headers, - ) - - def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}' + + , headers=headers + ) + + + + + + + def get(self + , integration_id: str + ) -> GetPublishedAppIntegrationOutput: """Get OAuth Published App Integration. - + Gets the Published OAuth App Integration for the given integration id. - + :param integration_id: str - + :returns: :class:`GetPublishedAppIntegrationOutput` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}' + + , headers=headers + ) return GetPublishedAppIntegrationOutput.from_dict(res) - def list( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[GetPublishedAppIntegrationOutput]: - """Get published oauth app integrations. + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[GetPublishedAppIntegrationOutput]: + """Get published oauth app integrations. + Get the list of published OAuth app integrations for the specified Databricks account - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations", - query=query, - headers=headers, - ) - if "apps" in json: - for v in json["apps"]: - yield GetPublishedAppIntegrationOutput.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update(self, integration_id: str, *, token_access_policy: Optional[TokenAccessPolicy] = None): + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', query=query + + , headers=headers + ) + if 'apps' in json: + for v in json['apps']: + yield GetPublishedAppIntegrationOutput.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , integration_id: str + , * + , token_access_policy: Optional[TokenAccessPolicy] = None): """Updates Published OAuth App Integration. - + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. 
- + :param integration_id: str :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy to be updated in the published OAuth app integration - - + + """ body = {} - if token_access_policy is not None: - body["token_access_policy"] = token_access_policy.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}", - body=body, - headers=headers, - ) - - + if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}', body=body + + , headers=headers + ) + + + + class ServicePrincipalFederationPolicyAPI: """These APIs manage service principal federation policies. - + Service principal federation, also known as Workload Identity Federation, allows your automated workloads running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets. With Workload Identity Federation, your application (or workload) authenticates to Databricks as a Databricks service principal, using tokens provided by the workload runtime. - + Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever possible. Workload Identity Federation is supported by many popular services, including Github Actions, Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others. - + Workload identity federation is configured in your Databricks account using a service principal federation policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the Databricks service principal - + To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the workload identity provider. * The required token __subject__, as specified in the “sub” claim of @@ -1758,138 +1647,154 @@ class ServicePrincipalFederationPolicyAPI: of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on the issuer’s well known endpoint for discovering public keys. - + An example service principal federation policy, for a Github Actions workload, is: ``` issuer: "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject: "repo:my-github-org/my-repo:environment:prod" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ``` { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub": "repo:my-github-org/my-repo:environment:prod" } ``` - + You may also need to configure the workload runtime to generate tokens for your workloads. 
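The Github Actions example above maps onto the generated client like this; a sketch, assuming `OidcFederationPolicy` from this module and a service principal id you control:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

a = AccountClient()
a.service_principal_federation_policy.create(
    service_principal_id=12345,  # placeholder numeric id
    policy=FederationPolicy(
        oidc_policy=OidcFederationPolicy(
            issuer="https://token.actions.githubusercontent.com",
            audiences=["https://github.com/my-github-org"],
            subject="repo:my-github-org/my-repo:environment:prod",
        )
    ),
)
```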
- + You do not need to configure an OAuth application in Databricks to use token federation.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, service_principal_id: int, policy: FederationPolicy, *, policy_id: Optional[str] = None - ) -> FederationPolicy: - """Create service principal federation policy. + + + + + + + def create(self + , service_principal_id: int, policy: FederationPolicy + , * + , policy_id: Optional[str] = None) -> FederationPolicy: + """Create service principal federation policy. + :param service_principal_id: int The service principal id for the federation policy. :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if policy_id is not None: - query["policy_id"] = policy_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies", - query=query, - body=body, - headers=headers, - ) + if policy_id is not None: query['policy_id'] = policy_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies', query=query, body=body + + , headers=headers + ) return FederationPolicy.from_dict(res) - def delete(self, service_principal_id: int, policy_id: str): - """Delete service principal federation policy. + + + + def delete(self + , service_principal_id: int, policy_id: str + ): + """Delete service principal federation policy. + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. - - + + """ - - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}", - headers=headers, - ) - - def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy: + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}' + + , headers=headers + ) + + + + + + + def get(self + , service_principal_id: int, policy_id: str + ) -> FederationPolicy: """Get service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. 
- + :returns: :class:`FederationPolicy` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}' + + , headers=headers + ) return FederationPolicy.from_dict(res) - def list( - self, service_principal_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[FederationPolicy]: - """List service principal federation policies. + + + + def list(self + , service_principal_id: int + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + """List service principal federation policies. + :param service_principal_id: int The service principal id for the federation policy. :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies", - query=query, - headers=headers, - ) - if "policies" in json: - for v in json["policies"]: - yield FederationPolicy.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, service_principal_id: int, policy_id: str, policy: FederationPolicy, *, update_mask: Optional[str] = None - ) -> FederationPolicy: + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies', query=query + + , headers=headers + ) + if 'policies' in json: + for v in json['policies']: + yield FederationPolicy.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update(self + , service_principal_id: int, policy_id: str, policy: FederationPolicy + , * + , update_mask: Optional[str] = None) -> FederationPolicy: """Update service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str @@ -1901,103 +1806,112 @@ def update( should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. 
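Because the mask replaces the named fields wholesale, an update that only touches audiences should name exactly that path; a sketch with placeholder ids, assuming all `OidcFederationPolicy` fields are optional:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

a = AccountClient()
a.service_principal_federation_policy.update(
    service_principal_id=12345,   # placeholder
    policy_id="my-policy",        # placeholder
    policy=FederationPolicy(
        oidc_policy=OidcFederationPolicy(audiences=["https://github.com/my-github-org"])
    ),
    update_mask="oidc_policy.audiences",  # only this field is replaced
)
```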
- + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if update_mask is not None: - query["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}", - query=query, - body=body, - headers=headers, - ) + if update_mask is not None: query['update_mask'] = update_mask + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}', query=query, body=body + + , headers=headers + ) return FederationPolicy.from_dict(res) - + + class ServicePrincipalSecretsAPI: """These APIs enable administrators to manage service principal secrets. - + You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using OAuth tokens for service principals], - + In addition, the generated secrets can be used to configure the Databricks Terraform Provider to authenticate with the service principal. For more information, see [Databricks Terraform Provider]. - + [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html - [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal - """ - + [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal""" + def __init__(self, api_client): self._api = api_client + - def create( - self, service_principal_id: int, *, lifetime: Optional[str] = None - ) -> CreateServicePrincipalSecretResponse: - """Create service principal secret. + - Create a secret for the given service principal. + + + + + def create(self + , service_principal_id: int + , * + , lifetime: Optional[str] = None) -> CreateServicePrincipalSecretResponse: + """Create service principal secret. + + Create a secret for the given service principal. + :param service_principal_id: int The service principal ID. :param lifetime: str (optional) The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a default lifetime of 730 days (63072000s). - + :returns: :class:`CreateServicePrincipalSecretResponse` """ body = {} - if lifetime is not None: - body["lifetime"] = lifetime - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets", - body=body, - headers=headers, - ) + if lifetime is not None: body['lifetime'] = lifetime + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets', body=body + + , headers=headers + ) return CreateServicePrincipalSecretResponse.from_dict(res) - def delete(self, service_principal_id: int, secret_id: str): - """Delete service principal secret. 
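A sketch of minting a secret with a non-default lifetime, using the seconds-suffixed string format from the docstring (the id is a placeholder; the secret value itself is only returned by this create call):

```python
from databricks.sdk import AccountClient

a = AccountClient()
secret = a.service_principal_secrets.create(
    service_principal_id=12345,  # placeholder
    lifetime="604800s",          # one week instead of the 730-day default
)
print(secret.id)
```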
+ + + + def delete(self + , service_principal_id: int, secret_id: str + ): + """Delete service principal secret. + Delete a secret from the given service principal. - + :param service_principal_id: int The service principal ID. :param secret_id: str The secret ID. - - + + """ - + headers = {} - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}", - headers=headers, - ) - - def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]: + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}' + + , headers=headers + ) + + + + + + + def list(self + , service_principal_id: int + , * + , page_token: Optional[str] = None) -> Iterator[SecretInfo]: """List service principal secrets. - + List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the secret values. - + :param service_principal_id: int The service principal ID. :param page_token: str (optional) @@ -2007,27 +1921,28 @@ def list(self, service_principal_id: int, *, page_token: Optional[str] = None) - previous request. To list all of the secrets for a service principal, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete. - + :returns: Iterator over :class:`SecretInfo` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets", - query=query, - headers=headers, - ) - if "secrets" in json: - for v in json["secrets"]: - yield SecretInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets', query=query + + , headers=headers + ) + if 'secrets' in json: + for v in json['secrets']: + yield SecretInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + \ No newline at end of file diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index 943810a33..ad1af2456 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -1,99 +1,105 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
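The paging contract spelled out for secret listing above (keep following `next_page_token`; never use entry counts to detect the end) is exactly what the generated iterator implements, so client code reduces to a sketch like this, with a placeholder id:

```python
from databricks.sdk import AccountClient

a = AccountClient()
# SecretInfo carries metadata only; secret values are never returned by list().
for info in a.service_principal_secrets.list(service_principal_id=12345):
    print(info.id, info.status, info.expire_time)
```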
from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') +from databricks.sdk.service import compute +from databricks.sdk.service import compute +from databricks.sdk.service import compute +from databricks.sdk.service import compute +from databricks.sdk.service import compute from databricks.sdk.service import compute # all definitions in this file are in alphabetical order - @dataclass class CreatePipeline: allow_duplicate_names: Optional[bool] = None """If false, deployment will fail if name conflicts with that of another pipeline.""" - + budget_policy_id: Optional[str] = None """Budget policy of this pipeline.""" - + catalog: Optional[str] = None """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.""" - + channel: Optional[str] = None """DLT Release Channel that specifies which version to use.""" - + clusters: Optional[List[PipelineCluster]] = None """Cluster settings for this pipeline deployment.""" - - configuration: Optional[Dict[str, str]] = None + + configuration: Optional[Dict[str,str]] = None """String-String configuration for this pipeline execution.""" - + continuous: Optional[bool] = None """Whether the pipeline is continuous or triggered. This replaces `trigger`.""" - + deployment: Optional[PipelineDeployment] = None """Deployment type of this pipeline.""" - + development: Optional[bool] = None """Whether the pipeline is in Development mode. Defaults to false.""" - + dry_run: Optional[bool] = None - + edition: Optional[str] = None """Pipeline product edition.""" - + event_log: Optional[EventLogSpec] = None """Event log configuration for this pipeline""" - + filters: Optional[Filters] = None """Filters on which Pipeline packages to include in the deployed graph.""" - + gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None """The definition of a gateway pipeline to support change data capture.""" - + id: Optional[str] = None """Unique identifier for this pipeline.""" - + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. 
These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.""" - + libraries: Optional[List[PipelineLibrary]] = None """Libraries or code needed by this deployment.""" - + name: Optional[str] = None """Friendly identifier for this pipeline.""" - + notifications: Optional[List[Notifications]] = None """List of notification settings for this pipeline.""" - + photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" - + restart_window: Optional[RestartWindow] = None """Restart window of this pipeline.""" - + root_path: Optional[str] = None """Root path for this pipeline. This is used as the root directory when editing the pipeline in the Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution.""" - + run_as: Optional[RunAs] = None """Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created @@ -101,288 +107,206 @@ class CreatePipeline: Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.""" - + schema: Optional[str] = None """The default schema (database) where tables are read from or published to.""" - + serverless: Optional[bool] = None """Whether serverless compute is enabled for this pipeline.""" - + storage: Optional[str] = None """DBFS root directory for storing checkpoints and tables.""" - + + tags: Optional[Dict[str,str]] = None + """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, + and are therefore subject to the same limitations. A maximum of 25 tags can be added to the + pipeline.""" + target: Optional[str] = None """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.""" - + trigger: Optional[PipelineTrigger] = None """Which pipeline trigger to use. 
         Deprecated: Use `continuous` instead."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = [v.as_dict() for v in self.clusters]
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment.as_dict()
-        if self.development is not None:
-            body["development"] = self.development
-        if self.dry_run is not None:
-            body["dry_run"] = self.dry_run
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.event_log:
-            body["event_log"] = self.event_log.as_dict()
-        if self.filters:
-            body["filters"] = self.filters.as_dict()
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition.as_dict()
-        if self.libraries:
-            body["libraries"] = [v.as_dict() for v in self.libraries]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.restart_window:
-            body["restart_window"] = self.restart_window.as_dict()
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as.as_dict()
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger.as_dict()
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment.as_dict()
+        if self.development is not None: body['development'] = self.development
+        if self.dry_run is not None: body['dry_run'] = self.dry_run
+        if self.edition is not None: body['edition'] = self.edition
+        if self.event_log: body['event_log'] = self.event_log.as_dict()
+        if self.filters: body['filters'] = self.filters.as_dict()
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
+        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
+        if self.root_path is not None: body['root_path'] = self.root_path
+        if self.run_as: body['run_as'] = self.run_as.as_dict()
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.tags: body['tags'] = self.tags
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = self.clusters
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment
-        if self.development is not None:
-            body["development"] = self.development
-        if self.dry_run is not None:
-            body["dry_run"] = self.dry_run
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.event_log:
-            body["event_log"] = self.event_log
-        if self.filters:
-            body["filters"] = self.filters
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition
-        if self.libraries:
-            body["libraries"] = self.libraries
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.restart_window:
-            body["restart_window"] = self.restart_window
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.dry_run is not None: body['dry_run'] = self.dry_run
+        if self.edition is not None: body['edition'] = self.edition
+        if self.event_log: body['event_log'] = self.event_log
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.root_path is not None: body['root_path'] = self.root_path
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.tags: body['tags'] = self.tags
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreatePipeline:
         """Deserializes the CreatePipeline from a dictionary."""
-        return cls(
-            allow_duplicate_names=d.get("allow_duplicate_names", None),
-            budget_policy_id=d.get("budget_policy_id", None),
-            catalog=d.get("catalog", None),
-            channel=d.get("channel", None),
-            clusters=_repeated_dict(d, "clusters", PipelineCluster),
-            configuration=d.get("configuration", None),
-            continuous=d.get("continuous", None),
-            deployment=_from_dict(d, "deployment", PipelineDeployment),
-            development=d.get("development", None),
-            dry_run=d.get("dry_run", None),
-            edition=d.get("edition", None),
-            event_log=_from_dict(d, "event_log", EventLogSpec),
-            filters=_from_dict(d, "filters", Filters),
-            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
-            id=d.get("id", None),
-            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
-            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
-            name=d.get("name", None),
-            notifications=_repeated_dict(d, "notifications", Notifications),
-            photon=d.get("photon", None),
-            restart_window=_from_dict(d, "restart_window", RestartWindow),
-            root_path=d.get("root_path", None),
-            run_as=_from_dict(d, "run_as", RunAs),
-            schema=d.get("schema", None),
-            serverless=d.get("serverless", None),
-            storage=d.get("storage", None),
-            target=d.get("target", None),
-            trigger=_from_dict(d, "trigger", PipelineTrigger),
-        )
+        return cls(allow_duplicate_names=d.get('allow_duplicate_names', None), budget_policy_id=d.get('budget_policy_id', None), catalog=d.get('catalog', None), channel=d.get('channel', None), clusters=_repeated_dict(d, 'clusters', PipelineCluster), configuration=d.get('configuration', None), continuous=d.get('continuous', None), deployment=_from_dict(d, 'deployment', PipelineDeployment), development=d.get('development', None), dry_run=d.get('dry_run', None), edition=d.get('edition', None), event_log=_from_dict(d, 'event_log', EventLogSpec), filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), restart_window=_from_dict(d, 'restart_window', RestartWindow), root_path=d.get('root_path', None), run_as=_from_dict(d, 'run_as', RunAs), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), tags=d.get('tags', None), target=d.get('target', None), trigger=_from_dict(d, 'trigger', PipelineTrigger))
+
+
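The only functional change in this hunk is the new `tags` field; everything else is the move from black-style multi-line guards to the generator's single-line guards. A minimal round-trip sketch of the new field, assuming this build of databricks.sdk.service.pipelines is importable:

from databricks.sdk.service.pipelines import CreatePipeline

# tags travel through both serializers and the deserializer unchanged
spec = CreatePipeline(name='sales-ingest', serverless=True,
                      tags={'team': 'data-eng', 'cost-center': '42'})
body = spec.as_dict()
assert body['tags'] == {'team': 'data-eng', 'cost-center': '42'}
assert CreatePipeline.from_dict(body).tags == spec.tags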
 @dataclass
 class CreatePipelineResponse:
     effective_settings: Optional[PipelineSpec] = None
     """Only returned when dry_run is true."""
-
+    
     pipeline_id: Optional[str] = None
     """The unique identifier for the newly created pipeline. Only returned when dry_run is false."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the CreatePipelineResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.effective_settings:
-            body["effective_settings"] = self.effective_settings.as_dict()
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
+        if self.effective_settings: body['effective_settings'] = self.effective_settings.as_dict()
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.effective_settings:
-            body["effective_settings"] = self.effective_settings
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
+        if self.effective_settings: body['effective_settings'] = self.effective_settings
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreatePipelineResponse:
         """Deserializes the CreatePipelineResponse from a dictionary."""
-        return cls(
-            effective_settings=_from_dict(d, "effective_settings", PipelineSpec), pipeline_id=d.get("pipeline_id", None)
-        )
+        return cls(effective_settings=_from_dict(d, 'effective_settings', PipelineSpec), pipeline_id=d.get('pipeline_id', None))
+
+
 @dataclass
 class CronTrigger:
     quartz_cron_schedule: Optional[str] = None
-
+    
     timezone_id: Optional[str] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the CronTrigger into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.quartz_cron_schedule is not None:
-            body["quartz_cron_schedule"] = self.quartz_cron_schedule
-        if self.timezone_id is not None:
-            body["timezone_id"] = self.timezone_id
+        if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CronTrigger into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.quartz_cron_schedule is not None:
-            body["quartz_cron_schedule"] = self.quartz_cron_schedule
-        if self.timezone_id is not None:
-            body["timezone_id"] = self.timezone_id
+        if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CronTrigger:
         """Deserializes the CronTrigger from a dictionary."""
-        return cls(quartz_cron_schedule=d.get("quartz_cron_schedule", None), timezone_id=d.get("timezone_id", None))
+        return cls(quartz_cron_schedule=d.get('quartz_cron_schedule', None), timezone_id=d.get('timezone_id', None))
+
+
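CronTrigger pairs a Quartz cron expression with a timezone ID. A short construction sketch; the schedule string is illustrative, and PipelineTrigger's `cron` variant is assumed from its use elsewhere in this module:

from databricks.sdk.service.pipelines import CronTrigger, PipelineTrigger

# Quartz syntax: seconds minutes hours day-of-month month day-of-week.
# This fires daily at 06:30 UTC.
trigger = PipelineTrigger(cron=CronTrigger(quartz_cron_schedule='0 30 6 * * ?',
                                           timezone_id='UTC'))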
 @dataclass
 class DataPlaneId:
     instance: Optional[str] = None
     """The instance name of the data plane emitting an event."""
-
+    
     seq_no: Optional[int] = None
     """A sequence number, unique and increasing within the data plane instance."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the DataPlaneId into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.instance is not None:
-            body["instance"] = self.instance
-        if self.seq_no is not None:
-            body["seq_no"] = self.seq_no
+        if self.instance is not None: body['instance'] = self.instance
+        if self.seq_no is not None: body['seq_no'] = self.seq_no
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.instance is not None:
-            body["instance"] = self.instance
-        if self.seq_no is not None:
-            body["seq_no"] = self.seq_no
+        if self.instance is not None: body['instance'] = self.instance
+        if self.seq_no is not None: body['seq_no'] = self.seq_no
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId:
         """Deserializes the DataPlaneId from a dictionary."""
-        return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None))
+        return cls(instance=d.get('instance', None), seq_no=d.get('seq_no', None))
+
+
 class DayOfWeek(Enum):
     """Days of week in which the restart is allowed to happen (within a five-hour window starting at
     start_hour). If not specified all days of the week will be used."""
+    
+    FRIDAY = 'FRIDAY'
+    MONDAY = 'MONDAY'
+    SATURDAY = 'SATURDAY'
+    SUNDAY = 'SUNDAY'
+    THURSDAY = 'THURSDAY'
+    TUESDAY = 'TUESDAY'
+    WEDNESDAY = 'WEDNESDAY'
+    
-    FRIDAY = "FRIDAY"
-    MONDAY = "MONDAY"
-    SATURDAY = "SATURDAY"
-    SUNDAY = "SUNDAY"
-    THURSDAY = "THURSDAY"
-    TUESDAY = "TUESDAY"
-    WEDNESDAY = "WEDNESDAY"


 @dataclass
@@ -401,93 +325,94 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse:
         """Deserializes the DeletePipelineResponse from a dictionary."""
         return cls()
+
+
 class DeploymentKind(Enum):
     """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a
     Databricks Asset Bundle."""
-
-    BUNDLE = "BUNDLE"
-
+    
+    BUNDLE = 'BUNDLE'
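Per the DayOfWeek docstring above, restarts are confined to a five-hour window opening at a configured start hour on the listed days. A hedged sketch, assuming RestartWindow keeps the start_hour/days_of_week/time_zone_id shape it has in recent SDK releases:

from databricks.sdk.service.pipelines import DayOfWeek, RestartWindow

# Allow maintenance restarts only on weekend mornings, 02:00-07:00 local time.
window = RestartWindow(start_hour=2,
                       days_of_week=[DayOfWeek.SATURDAY, DayOfWeek.SUNDAY],
                       time_zone_id='America/Los_Angeles')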
 @dataclass
 class EditPipeline:
     allow_duplicate_names: Optional[bool] = None
     """If false, deployment will fail if name has changed and conflicts the name of another pipeline."""
-
+    
     budget_policy_id: Optional[str] = None
     """Budget policy of this pipeline."""
-
+    
     catalog: Optional[str] = None
     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
     `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity
     Catalog."""
-
+    
     channel: Optional[str] = None
     """DLT Release Channel that specifies which version to use."""
-
+    
     clusters: Optional[List[PipelineCluster]] = None
     """Cluster settings for this pipeline deployment."""
-
-    configuration: Optional[Dict[str, str]] = None
+    
+    configuration: Optional[Dict[str,str]] = None
     """String-String configuration for this pipeline execution."""
-
+    
     continuous: Optional[bool] = None
     """Whether the pipeline is continuous or triggered. This replaces `trigger`."""
-
+    
     deployment: Optional[PipelineDeployment] = None
     """Deployment type of this pipeline."""
-
+    
     development: Optional[bool] = None
     """Whether the pipeline is in Development mode. Defaults to false."""
-
+    
     edition: Optional[str] = None
     """Pipeline product edition."""
-
+    
     event_log: Optional[EventLogSpec] = None
     """Event log configuration for this pipeline"""
-
+    
     expected_last_modified: Optional[int] = None
     """If present, the last-modified time of the pipeline settings before the edit. If the settings
     were modified after that time, then the request will fail with a conflict."""
-
+    
     filters: Optional[Filters] = None
     """Filters on which Pipeline packages to include in the deployed graph."""
-
+    
     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
     """The definition of a gateway pipeline to support change data capture."""
-
+    
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
-
+    
     ingestion_definition: Optional[IngestionPipelineDefinition] = None
     """The configuration for a managed ingestion pipeline. These settings cannot be used with the
     'libraries', 'schema', 'target', or 'catalog' settings."""
-
+    
     libraries: Optional[List[PipelineLibrary]] = None
     """Libraries or code needed by this deployment."""
-
+    
     name: Optional[str] = None
     """Friendly identifier for this pipeline."""
-
+    
     notifications: Optional[List[Notifications]] = None
     """List of notification settings for this pipeline."""
-
+    
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
-
+    
     pipeline_id: Optional[str] = None
     """Unique identifier for this pipeline."""
-
+    
     restart_window: Optional[RestartWindow] = None
     """Restart window of this pipeline."""
-
+    
     root_path: Optional[str] = None
     """Root path for this pipeline. This is used as the root directory when editing the pipeline in
     the Databricks user interface and it is added to sys.path when executing Python sources during
     pipeline execution."""
-
+    
     run_as: Optional[RunAs] = None
     """Write-only setting, available only in Create/Update calls. Specifies the user or service
     principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
@@ -495,184 +420,105 @@ class EditPipeline:
     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
     is thrown."""
-
+    
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to."""
-
+    
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
-
+    
     storage: Optional[str] = None
     """DBFS root directory for storing checkpoints and tables."""
-
+    
+    tags: Optional[Dict[str,str]] = None
+    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags,
+    and are therefore subject to the same limitations. A maximum of 25 tags can be added to the
+    pipeline."""
+    
     target: Optional[str] = None
     """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or
     `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy
     field is deprecated for pipeline creation in favor of the `schema` field."""
-
+    
     trigger: Optional[PipelineTrigger] = None
     """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = [v.as_dict() for v in self.clusters]
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment.as_dict()
-        if self.development is not None:
-            body["development"] = self.development
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.event_log:
-            body["event_log"] = self.event_log.as_dict()
-        if self.expected_last_modified is not None:
-            body["expected_last_modified"] = self.expected_last_modified
-        if self.filters:
-            body["filters"] = self.filters.as_dict()
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition.as_dict()
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition.as_dict()
-        if self.libraries:
-            body["libraries"] = [v.as_dict() for v in self.libraries]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.restart_window:
-            body["restart_window"] = self.restart_window.as_dict()
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as.as_dict()
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger.as_dict()
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment.as_dict()
+        if self.development is not None: body['development'] = self.development
+        if self.edition is not None: body['edition'] = self.edition
+        if self.event_log: body['event_log'] = self.event_log.as_dict()
+        if self.expected_last_modified is not None: body['expected_last_modified'] = self.expected_last_modified
+        if self.filters: body['filters'] = self.filters.as_dict()
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
+        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
+        if self.photon is not None: body['photon'] = self.photon
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
+        if self.root_path is not None: body['root_path'] = self.root_path
+        if self.run_as: body['run_as'] = self.run_as.as_dict()
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.tags: body['tags'] = self.tags
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the EditPipeline into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = self.allow_duplicate_names
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = self.clusters
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment
-        if self.development is not None:
-            body["development"] = self.development
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.event_log:
-            body["event_log"] = self.event_log
-        if self.expected_last_modified is not None:
-            body["expected_last_modified"] = self.expected_last_modified
-        if self.filters:
-            body["filters"] = self.filters
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition
-        if self.libraries:
-            body["libraries"] = self.libraries
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.restart_window:
-            body["restart_window"] = self.restart_window
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.run_as:
-            body["run_as"] = self.run_as
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.edition is not None: body['edition'] = self.edition
+        if self.event_log: body['event_log'] = self.event_log
+        if self.expected_last_modified is not None: body['expected_last_modified'] = self.expected_last_modified
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.root_path is not None: body['root_path'] = self.root_path
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.tags: body['tags'] = self.tags
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> EditPipeline:
         """Deserializes the EditPipeline from a dictionary."""
-        return cls(
-            allow_duplicate_names=d.get("allow_duplicate_names", None),
-            budget_policy_id=d.get("budget_policy_id", None),
-            catalog=d.get("catalog", None),
-            channel=d.get("channel", None),
-            clusters=_repeated_dict(d, "clusters", PipelineCluster),
-            configuration=d.get("configuration", None),
-            continuous=d.get("continuous", None),
-            deployment=_from_dict(d, "deployment", PipelineDeployment),
-            development=d.get("development", None),
-            edition=d.get("edition", None),
-            event_log=_from_dict(d, "event_log", EventLogSpec),
-            expected_last_modified=d.get("expected_last_modified", None),
-            filters=_from_dict(d, "filters", Filters),
-            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
-            id=d.get("id", None),
-            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
-            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
-            name=d.get("name", None),
-            notifications=_repeated_dict(d, "notifications", Notifications),
-            photon=d.get("photon", None),
-            pipeline_id=d.get("pipeline_id", None),
-            restart_window=_from_dict(d, "restart_window", RestartWindow),
-            root_path=d.get("root_path", None),
-            run_as=_from_dict(d, "run_as", RunAs),
-            schema=d.get("schema", None),
-            serverless=d.get("serverless", None),
-            storage=d.get("storage", None),
-            target=d.get("target", None),
-            trigger=_from_dict(d, "trigger", PipelineTrigger),
-        )
+        return cls(allow_duplicate_names=d.get('allow_duplicate_names', None), budget_policy_id=d.get('budget_policy_id', None), catalog=d.get('catalog', None), channel=d.get('channel', None), clusters=_repeated_dict(d, 'clusters', PipelineCluster), configuration=d.get('configuration', None), continuous=d.get('continuous', None), deployment=_from_dict(d, 'deployment', PipelineDeployment), development=d.get('development', None), edition=d.get('edition', None), event_log=_from_dict(d, 'event_log', EventLogSpec), expected_last_modified=d.get('expected_last_modified', None), filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), pipeline_id=d.get('pipeline_id', None), restart_window=_from_dict(d, 'restart_window', RestartWindow), root_path=d.get('root_path', None), run_as=_from_dict(d, 'run_as', RunAs), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), tags=d.get('tags', None), target=d.get('target', None), trigger=_from_dict(d, 'trigger', PipelineTrigger))
+
+
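`expected_last_modified` gives EditPipeline optimistic concurrency: the edit is rejected with a conflict if the settings changed after the timestamp you read. A sketch of the read-modify-write pattern, assuming the pipelines client exposes get/update as in recent SDK releases (the pipeline ID is a placeholder):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
pipeline_id = '...'  # hypothetical pipeline ID
p = w.pipelines.get(pipeline_id=pipeline_id)
# Fails if someone else edited the settings between the get and the update.
w.pipelines.update(pipeline_id=pipeline_id,
                   name=p.spec.name,
                   expected_last_modified=p.last_modified)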
 @dataclass
@@ -691,360 +537,321 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> EditPipelineResponse:
         """Deserializes the EditPipelineResponse from a dictionary."""
         return cls()
+
+
 @dataclass
 class ErrorDetail:
     exceptions: Optional[List[SerializedException]] = None
     """The exception thrown for this error, with its chain of cause."""
-
+    
     fatal: Optional[bool] = None
     """Whether this error is considered fatal, that is, unrecoverable."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ErrorDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exceptions:
-            body["exceptions"] = [v.as_dict() for v in self.exceptions]
-        if self.fatal is not None:
-            body["fatal"] = self.fatal
+        if self.exceptions: body['exceptions'] = [v.as_dict() for v in self.exceptions]
+        if self.fatal is not None: body['fatal'] = self.fatal
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ErrorDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exceptions:
-            body["exceptions"] = self.exceptions
-        if self.fatal is not None:
-            body["fatal"] = self.fatal
+        if self.exceptions: body['exceptions'] = self.exceptions
+        if self.fatal is not None: body['fatal'] = self.fatal
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ErrorDetail:
         """Deserializes the ErrorDetail from a dictionary."""
-        return cls(exceptions=_repeated_dict(d, "exceptions", SerializedException), fatal=d.get("fatal", None))
+        return cls(exceptions=_repeated_dict(d, 'exceptions', SerializedException), fatal=d.get('fatal', None))
+
+
 class EventLevel(Enum):
     """The severity level of the event."""
-
-    ERROR = "ERROR"
-    INFO = "INFO"
-    METRICS = "METRICS"
-    WARN = "WARN"
-
+    
+    ERROR = 'ERROR'
+    INFO = 'INFO'
+    METRICS = 'METRICS'
+    WARN = 'WARN'


 @dataclass
 class EventLogSpec:
     """Configurable event log parameters."""
-
+    
     catalog: Optional[str] = None
     """The UC catalog the event log is published under."""
-
+    
     name: Optional[str] = None
     """The name the event log is published to in UC."""
-
+    
     schema: Optional[str] = None
     """The UC schema the event log is published under."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the EventLogSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema is not None:
-            body["schema"] = self.schema
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.name is not None: body['name'] = self.name
+        if self.schema is not None: body['schema'] = self.schema
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the EventLogSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.name is not None:
-            body["name"] = self.name
-        if self.schema is not None:
-            body["schema"] = self.schema
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.name is not None: body['name'] = self.name
+        if self.schema is not None: body['schema'] = self.schema
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> EventLogSpec:
         """Deserializes the EventLogSpec from a dictionary."""
-        return cls(catalog=d.get("catalog", None), name=d.get("name", None), schema=d.get("schema", None))
+        return cls(catalog=d.get('catalog', None), name=d.get('name', None), schema=d.get('schema', None))
+
+
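EventLogSpec routes a pipeline's event log to a Unity Catalog table. Constructing one is direct; the three-level name below is illustrative:

from databricks.sdk.service.pipelines import EventLogSpec

# Publish the event log as the UC table main.ops.sales_pipeline_events.
event_log = EventLogSpec(catalog='main', schema='ops', name='sales_pipeline_events')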
 @dataclass
 class FileLibrary:
     path: Optional[str] = None
     """The absolute path of the source code."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the FileLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None:
-            body["path"] = self.path
+        if self.path is not None: body['path'] = self.path
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FileLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None:
-            body["path"] = self.path
+        if self.path is not None: body['path'] = self.path
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FileLibrary:
         """Deserializes the FileLibrary from a dictionary."""
-        return cls(path=d.get("path", None))
+        return cls(path=d.get('path', None))
+
+
 @dataclass
 class Filters:
     exclude: Optional[List[str]] = None
     """Paths to exclude."""
-
+    
     include: Optional[List[str]] = None
     """Paths to include."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the Filters into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exclude:
-            body["exclude"] = [v for v in self.exclude]
-        if self.include:
-            body["include"] = [v for v in self.include]
+        if self.exclude: body['exclude'] = [v for v in self.exclude]
+        if self.include: body['include'] = [v for v in self.include]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Filters into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exclude:
-            body["exclude"] = self.exclude
-        if self.include:
-            body["include"] = self.include
+        if self.exclude: body['exclude'] = self.exclude
+        if self.include: body['include'] = self.include
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Filters:
         """Deserializes the Filters from a dictionary."""
-        return cls(exclude=d.get("exclude", None), include=d.get("include", None))
+        return cls(exclude=d.get('exclude', None), include=d.get('include', None))
+
+
+
+
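Filters simply carries two path lists; which pipeline packages they match is decided server-side. An illustrative instance with hypothetical paths:

from databricks.sdk.service.pipelines import Filters

filters = Filters(include=['/pipelines/prod'], exclude=['/pipelines/prod/scratch'])
assert filters.as_dict() == {'exclude': ['/pipelines/prod/scratch'], 'include': ['/pipelines/prod']}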
 @dataclass
 class GetPipelinePermissionLevelsResponse:
     permission_levels: Optional[List[PipelinePermissionsDescription]] = None
     """Specific permission levels"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the GetPipelinePermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels:
-            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetPipelinePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels:
-            body["permission_levels"] = self.permission_levels
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetPipelinePermissionLevelsResponse:
         """Deserializes the GetPipelinePermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, "permission_levels", PipelinePermissionsDescription))
+        return cls(permission_levels=_repeated_dict(d, 'permission_levels', PipelinePermissionsDescription))
+
+
+
+
+
+
+
 @dataclass
 class GetPipelineResponse:
     cause: Optional[str] = None
     """An optional message detailing the cause of the pipeline state."""
-
+    
     cluster_id: Optional[str] = None
     """The ID of the cluster that the pipeline is running on."""
-
+    
     creator_user_name: Optional[str] = None
     """The username of the pipeline creator."""
-
+    
     effective_budget_policy_id: Optional[str] = None
     """Serverless budget policy ID of this pipeline."""
-
+    
     health: Optional[GetPipelineResponseHealth] = None
     """The health of a pipeline."""
-
+    
     last_modified: Optional[int] = None
     """The last time the pipeline settings were modified or created."""
-
+    
     latest_updates: Optional[List[UpdateStateInfo]] = None
     """Status of the latest updates for the pipeline. Ordered with the newest update first."""
-
+    
     name: Optional[str] = None
     """A human friendly identifier for the pipeline, taken from the `spec`."""
-
+    
     pipeline_id: Optional[str] = None
     """The ID of the pipeline."""
-
+    
     run_as_user_name: Optional[str] = None
     """Username of the user that the pipeline will run on behalf of."""
-
+    
     spec: Optional[PipelineSpec] = None
     """The pipeline specification. This field is not returned when called by `ListPipelines`."""
-
+    
     state: Optional[PipelineState] = None
     """The pipeline state."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the GetPipelineResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.creator_user_name is not None:
-            body["creator_user_name"] = self.creator_user_name
-        if self.effective_budget_policy_id is not None:
-            body["effective_budget_policy_id"] = self.effective_budget_policy_id
-        if self.health is not None:
-            body["health"] = self.health.value
-        if self.last_modified is not None:
-            body["last_modified"] = self.last_modified
-        if self.latest_updates:
-            body["latest_updates"] = [v.as_dict() for v in self.latest_updates]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.run_as_user_name is not None:
-            body["run_as_user_name"] = self.run_as_user_name
-        if self.spec:
-            body["spec"] = self.spec.as_dict()
-        if self.state is not None:
-            body["state"] = self.state.value
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.health is not None: body['health'] = self.health.value
+        if self.last_modified is not None: body['last_modified'] = self.last_modified
+        if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates]
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.spec: body['spec'] = self.spec.as_dict()
+        if self.state is not None: body['state'] = self.state.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetPipelineResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.creator_user_name is not None:
-            body["creator_user_name"] = self.creator_user_name
-        if self.effective_budget_policy_id is not None:
-            body["effective_budget_policy_id"] = self.effective_budget_policy_id
-        if self.health is not None:
-            body["health"] = self.health
-        if self.last_modified is not None:
-            body["last_modified"] = self.last_modified
-        if self.latest_updates:
-            body["latest_updates"] = self.latest_updates
-        if self.name is not None:
-            body["name"] = self.name
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.run_as_user_name is not None:
-            body["run_as_user_name"] = self.run_as_user_name
-        if self.spec:
-            body["spec"] = self.spec
-        if self.state is not None:
-            body["state"] = self.state
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.health is not None: body['health'] = self.health
+        if self.last_modified is not None: body['last_modified'] = self.last_modified
+        if self.latest_updates: body['latest_updates'] = self.latest_updates
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.spec: body['spec'] = self.spec
+        if self.state is not None: body['state'] = self.state
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetPipelineResponse:
         """Deserializes the GetPipelineResponse from a dictionary."""
-        return cls(
-            cause=d.get("cause", None),
-            cluster_id=d.get("cluster_id", None),
-            creator_user_name=d.get("creator_user_name", None),
-            effective_budget_policy_id=d.get("effective_budget_policy_id", None),
-            health=_enum(d, "health", GetPipelineResponseHealth),
-            last_modified=d.get("last_modified", None),
-            latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo),
-            name=d.get("name", None),
-            pipeline_id=d.get("pipeline_id", None),
-            run_as_user_name=d.get("run_as_user_name", None),
-            spec=_from_dict(d, "spec", PipelineSpec),
-            state=_enum(d, "state", PipelineState),
-        )
+        return cls(cause=d.get('cause', None), cluster_id=d.get('cluster_id', None), creator_user_name=d.get('creator_user_name', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), health=_enum(d, 'health', GetPipelineResponseHealth), last_modified=d.get('last_modified', None), latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo), name=d.get('name', None), pipeline_id=d.get('pipeline_id', None), run_as_user_name=d.get('run_as_user_name', None), spec=_from_dict(d, 'spec', PipelineSpec), state=_enum(d, 'state', PipelineState))
+
+
 class GetPipelineResponseHealth(Enum):
     """The health of a pipeline."""
+    
+    HEALTHY = 'HEALTHY'
+    UNHEALTHY = 'UNHEALTHY'
+    
-    HEALTHY = "HEALTHY"
-    UNHEALTHY = "UNHEALTHY"
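GetPipelineResponse surfaces both a lifecycle `state` and a coarse `health` signal. A hedged polling sketch, assuming w.pipelines.get as in recent SDK releases (the ID is a placeholder):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import GetPipelineResponseHealth

w = WorkspaceClient()
resp = w.pipelines.get(pipeline_id='...')  # hypothetical pipeline ID
if resp.health == GetPipelineResponseHealth.UNHEALTHY:
    # cause carries an optional human-readable explanation of the state
    print(f'{resp.name} is unhealthy: {resp.cause}')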

 @dataclass
 class GetUpdateResponse:
     update: Optional[UpdateInfo] = None
     """The current update info."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the GetUpdateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.update:
-            body["update"] = self.update.as_dict()
+        if self.update: body['update'] = self.update.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetUpdateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.update:
-            body["update"] = self.update
+        if self.update: body['update'] = self.update
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetUpdateResponse:
         """Deserializes the GetUpdateResponse from a dictionary."""
-        return cls(update=_from_dict(d, "update", UpdateInfo))
+        return cls(update=_from_dict(d, 'update', UpdateInfo))
+
+
 @dataclass
 class IngestionConfig:
     report: Optional[ReportSpec] = None
     """Select a specific source report."""
-
+    
     schema: Optional[SchemaSpec] = None
     """Select all tables from a specific source schema."""
-
+    
     table: Optional[TableSpec] = None
     """Select a specific source table."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.report:
-            body["report"] = self.report.as_dict()
-        if self.schema:
-            body["schema"] = self.schema.as_dict()
-        if self.table:
-            body["table"] = self.table.as_dict()
+        if self.report: body['report'] = self.report.as_dict()
+        if self.schema: body['schema'] = self.schema.as_dict()
+        if self.table: body['table'] = self.table.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the IngestionConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.report:
-            body["report"] = self.report
-        if self.schema:
-            body["schema"] = self.schema
-        if self.table:
-            body["table"] = self.table
+        if self.report: body['report'] = self.report
+        if self.schema: body['schema'] = self.schema
+        if self.table: body['table'] = self.table
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
-        return cls(
-            report=_from_dict(d, "report", ReportSpec),
-            schema=_from_dict(d, "schema", SchemaSpec),
-            table=_from_dict(d, "table", TableSpec),
-        )
+        return cls(report=_from_dict(d, 'report', ReportSpec), schema=_from_dict(d, 'schema', SchemaSpec), table=_from_dict(d, 'table', TableSpec))
+
+
 @dataclass
@@ -1052,62 +859,48 @@ class IngestionGatewayPipelineDefinition:
     connection_name: str
     """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the
     source."""
-
+    
     gateway_storage_catalog: str
     """Required, Immutable. The name of the catalog for the gateway pipeline's storage location."""
-
+    
     gateway_storage_schema: str
     """Required, Immutable. The name of the schema for the gateway pipelines's storage location."""
-
+    
     connection_id: Optional[str] = None
     """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
     gateway pipeline uses to communicate with the source."""
-
+    
     gateway_storage_name: Optional[str] = None
     """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the
     destination to use for the data that is extracted by the gateway. Delta Live Tables system will
     automatically create the storage location under the catalog and schema."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connection_id is not None:
-            body["connection_id"] = self.connection_id
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.gateway_storage_catalog is not None:
-            body["gateway_storage_catalog"] = self.gateway_storage_catalog
-        if self.gateway_storage_name is not None:
-            body["gateway_storage_name"] = self.gateway_storage_name
-        if self.gateway_storage_schema is not None:
-            body["gateway_storage_schema"] = self.gateway_storage_schema
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.gateway_storage_catalog is not None: body['gateway_storage_catalog'] = self.gateway_storage_catalog
+        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
+        if self.gateway_storage_schema is not None: body['gateway_storage_schema'] = self.gateway_storage_schema
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_id is not None:
-            body["connection_id"] = self.connection_id
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.gateway_storage_catalog is not None:
-            body["gateway_storage_catalog"] = self.gateway_storage_catalog
-        if self.gateway_storage_name is not None:
-            body["gateway_storage_name"] = self.gateway_storage_name
-        if self.gateway_storage_schema is not None:
-            body["gateway_storage_schema"] = self.gateway_storage_schema
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.gateway_storage_catalog is not None: body['gateway_storage_catalog'] = self.gateway_storage_catalog
+        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
+        if self.gateway_storage_schema is not None: body['gateway_storage_schema'] = self.gateway_storage_schema
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> IngestionGatewayPipelineDefinition:
         """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
-        return cls(
-            connection_id=d.get("connection_id", None),
-            connection_name=d.get("connection_name", None),
-            gateway_storage_catalog=d.get("gateway_storage_catalog", None),
-            gateway_storage_name=d.get("gateway_storage_name", None),
-            gateway_storage_schema=d.get("gateway_storage_schema", None),
-        )
+        return cls(connection_id=d.get('connection_id', None), connection_name=d.get('connection_name', None), gateway_storage_catalog=d.get('gateway_storage_catalog', None), gateway_storage_name=d.get('gateway_storage_name', None), gateway_storage_schema=d.get('gateway_storage_schema', None))
+
+
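For database sources, the gateway pipeline is defined by a UC connection plus a catalog/schema pair for its staging storage; connection_id is the deprecated spelling of connection_name. A construction sketch with illustrative names:

from databricks.sdk.service.pipelines import IngestionGatewayPipelineDefinition

gateway = IngestionGatewayPipelineDefinition(
    connection_name='sqlserver-finance',   # hypothetical UC connection
    gateway_storage_catalog='main',
    gateway_storage_schema='cdc_staging',
)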
 @dataclass
@@ -1115,155 +908,139 @@ class IngestionPipelineDefinition:
     connection_name: Optional[str] = None
     """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with
     the source. This is used with connectors for applications like Salesforce, Workday, and so on."""
-
+    
     ingestion_gateway_id: Optional[str] = None
     """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
     with the source database. This is used with connectors to databases like SQL Server."""
-
+    
     objects: Optional[List[IngestionConfig]] = None
     """Required. Settings specifying tables to replicate and the destination for the replicated
     tables."""
-
+    
     source_type: Optional[IngestionSourceType] = None
     """The type of the foreign source. The source type will be inferred from the source connection or
     ingestion gateway. This field is output only and will be ignored if provided."""
-
+    
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings are applied to all
     tables in the pipeline."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the IngestionPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.ingestion_gateway_id is not None:
-            body["ingestion_gateway_id"] = self.ingestion_gateway_id
-        if self.objects:
-            body["objects"] = [v.as_dict() for v in self.objects]
-        if self.source_type is not None:
-            body["source_type"] = self.source_type.value
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration.as_dict()
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
+        if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
+        if self.source_type is not None: body['source_type'] = self.source_type.value
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the IngestionPipelineDefinition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_name is not None:
-            body["connection_name"] = self.connection_name
-        if self.ingestion_gateway_id is not None:
-            body["ingestion_gateway_id"] = self.ingestion_gateway_id
-        if self.objects:
-            body["objects"] = self.objects
-        if self.source_type is not None:
-            body["source_type"] = self.source_type
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
+        if self.objects: body['objects'] = self.objects
+        if self.source_type is not None: body['source_type'] = self.source_type
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition:
         """Deserializes the IngestionPipelineDefinition from a dictionary."""
-        return cls(
-            connection_name=d.get("connection_name", None),
-            ingestion_gateway_id=d.get("ingestion_gateway_id", None),
-            objects=_repeated_dict(d, "objects", IngestionConfig),
-            source_type=_enum(d, "source_type", IngestionSourceType),
-            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
-        )
+        return cls(connection_name=d.get('connection_name', None), ingestion_gateway_id=d.get('ingestion_gateway_id', None), objects=_repeated_dict(d, 'objects', IngestionConfig), source_type=_enum(d, 'source_type', IngestionSourceType), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
 class IngestionSourceType(Enum):
+    
+    
+    DYNAMICS365 = 'DYNAMICS365'
+    GA4_RAW_DATA = 'GA4_RAW_DATA'
+    MANAGED_POSTGRESQL = 'MANAGED_POSTGRESQL'
+    MYSQL = 'MYSQL'
+    NETSUITE = 'NETSUITE'
+    ORACLE = 'ORACLE'
+    POSTGRESQL = 'POSTGRESQL'
+    SALESFORCE = 'SALESFORCE'
+    SERVICENOW = 'SERVICENOW'
+    SHAREPOINT = 'SHAREPOINT'
+    SQLSERVER = 'SQLSERVER'
+    TERADATA = 'TERADATA'
+    WORKDAY_RAAS = 'WORKDAY_RAAS'
+    
-    DYNAMICS365 = "DYNAMICS365"
-    GA4_RAW_DATA = "GA4_RAW_DATA"
-    MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL"
-    MYSQL = "MYSQL"
-    NETSUITE = "NETSUITE"
-    ORACLE = "ORACLE"
-    POSTGRESQL = "POSTGRESQL"
-    SALESFORCE = "SALESFORCE"
-    SERVICENOW = "SERVICENOW"
-    SHAREPOINT = "SHAREPOINT"
-    SQLSERVER = "SQLSERVER"
-    WORKDAY_RAAS = "WORKDAY_RAAS"


 @dataclass
 class ListPipelineEventsResponse:
     events: Optional[List[PipelineEvent]] = None
     """The list of events matching the request criteria."""
-
+    
     next_page_token: Optional[str] = None
     """If present, a token to fetch the next page of events."""
-
+    
     prev_page_token: Optional[str] = None
     """If present, a token to fetch the previous page of events."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListPipelineEventsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.events:
-            body["events"] = [v.as_dict() for v in self.events]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.prev_page_token is not None:
-            body["prev_page_token"] = self.prev_page_token
+        if self.events: body['events'] = [v.as_dict() for v in self.events]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListPipelineEventsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.events:
-            body["events"] = self.events
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.prev_page_token is not None:
-            body["prev_page_token"] = self.prev_page_token
+        if self.events: body['events'] = self.events
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListPipelineEventsResponse:
         """Deserializes the ListPipelineEventsResponse from a dictionary."""
-        return cls(
-            events=_repeated_dict(d, "events", PipelineEvent),
-            next_page_token=d.get("next_page_token", None),
-            prev_page_token=d.get("prev_page_token", None),
-        )
+        return cls(events=_repeated_dict(d, 'events', PipelineEvent), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None))
+
+
+
+
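Callers rarely handle ListPipelineEventsResponse and its page tokens directly; the SDK's list_pipeline_events wrapper is expected to iterate pages for you. A hedged sketch, with a placeholder ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for event in w.pipelines.list_pipeline_events(pipeline_id='...'):  # hypothetical ID
    print(event.level, event.message)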
 @dataclass
 class ListPipelinesResponse:
     next_page_token: Optional[str] = None
     """If present, a token to fetch the next page of events."""
-
+    
     statuses: Optional[List[PipelineStateInfo]] = None
     """The list of events matching the request criteria."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListPipelinesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.statuses:
-            body["statuses"] = [v.as_dict() for v in self.statuses]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListPipelinesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.statuses:
-            body["statuses"] = self.statuses
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.statuses: body['statuses'] = self.statuses
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListPipelinesResponse:
         """Deserializes the ListPipelinesResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None), statuses=_repeated_dict(d, "statuses", PipelineStateInfo)
-        )
+        return cls(next_page_token=d.get('next_page_token', None), statuses=_repeated_dict(d, 'statuses', PipelineStateInfo))
+
+
+
+
 @dataclass
@@ -1271,42 +1048,34 @@ class ListUpdatesResponse:
     next_page_token: Optional[str] = None
     """If present, then there are more results, and this a token to be used in a subsequent request
     to fetch the next page."""
-
+    
     prev_page_token: Optional[str] = None
     """If present, then this token can be used in a subsequent request to fetch the previous page."""
-
+    
     updates: Optional[List[UpdateInfo]] = None
-
+    
     def as_dict(self) -> dict:
         """Serializes the ListUpdatesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.prev_page_token is not None:
-            body["prev_page_token"] = self.prev_page_token
-        if self.updates:
-            body["updates"] = [v.as_dict() for v in self.updates]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
        return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListUpdatesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.prev_page_token is not None:
-            body["prev_page_token"] = self.prev_page_token
-        if self.updates:
-            body["updates"] = self.updates
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.updates: body['updates'] = self.updates
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListUpdatesResponse:
         """Deserializes the ListUpdatesResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None),
-            prev_page_token=d.get("prev_page_token", None),
-            updates=_repeated_dict(d, "updates", UpdateInfo),
-        )
+        return cls(next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None), updates=_repeated_dict(d, 'updates', UpdateInfo))
+
+
 @dataclass
@@ -1325,39 +1094,40 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> ManualTrigger:
         """Deserializes the ManualTrigger from a dictionary."""
         return cls()
+
+
 class MaturityLevel(Enum):
     """Maturity level for EventDetails."""
-
-    DEPRECATED = "DEPRECATED"
-    EVOLVING = "EVOLVING"
-    STABLE = "STABLE"
-
+    
+    DEPRECATED = 'DEPRECATED'
+    EVOLVING = 'EVOLVING'
+    STABLE = 'STABLE'
 @dataclass
 class NotebookLibrary:
     path: Optional[str] = None
     """The absolute path of the source code."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the NotebookLibrary into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.path is not None:
-            body["path"] = self.path
+        if self.path is not None: body['path'] = self.path
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NotebookLibrary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.path is not None:
-            body["path"] = self.path
+        if self.path is not None: body['path'] = self.path
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NotebookLibrary:
         """Deserializes the NotebookLibrary from a dictionary."""
-        return cls(path=d.get("path", None))
+        return cls(path=d.get('path', None))
+
+
 @dataclass
@@ -1369,350 +1139,273 @@ class Notifications:
     * `on-update-success`: A pipeline update completes successfully. * `on-update-failure`: Each
     time a pipeline update fails. * `on-update-fatal-failure`: A pipeline update fails with a
    non-retryable (fatal) error. * `on-flow-failure`: A single data flow fails."""
-
+    
     email_recipients: Optional[List[str]] = None
     """A list of email addresses notified when a configured alert is triggered."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the Notifications into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alerts:
-            body["alerts"] = [v for v in self.alerts]
-        if self.email_recipients:
-            body["email_recipients"] = [v for v in self.email_recipients]
+        if self.alerts: body['alerts'] = [v for v in self.alerts]
+        if self.email_recipients: body['email_recipients'] = [v for v in self.email_recipients]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Notifications into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alerts:
-            body["alerts"] = self.alerts
-        if self.email_recipients:
-            body["email_recipients"] = self.email_recipients
+        if self.alerts: body['alerts'] = self.alerts
+        if self.email_recipients: body['email_recipients'] = self.email_recipients
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Notifications:
         """Deserializes the Notifications from a dictionary."""
-        return cls(alerts=d.get("alerts", None), email_recipients=d.get("email_recipients", None))
+        return cls(alerts=d.get('alerts', None), email_recipients=d.get('email_recipients', None))
+
+
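The alert names accepted by Notifications.alerts are exactly the four listed in the docstring above. A typical failure-only configuration (the recipient address is illustrative):

from databricks.sdk.service.pipelines import Notifications

notifications = [Notifications(
    alerts=['on-update-failure', 'on-update-fatal-failure', 'on-flow-failure'],
    email_recipients=['data-eng-oncall@example.com'],
)]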
Not unique.""" - + region: Optional[str] = None """The cloud region.""" - + request_id: Optional[str] = None """The id of the request that caused an update.""" - + table_id: Optional[str] = None """The id of a (delta) table. Globally unique.""" - + uc_resource_id: Optional[str] = None """The Unity Catalog id of the MV or ST being updated.""" - + update_id: Optional[str] = None """The id of an execution. Globally unique.""" - + def as_dict(self) -> dict: """Serializes the Origin into a dictionary suitable for use as a JSON request body.""" body = {} - if self.batch_id is not None: - body["batch_id"] = self.batch_id - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.flow_id is not None: - body["flow_id"] = self.flow_id - if self.flow_name is not None: - body["flow_name"] = self.flow_name - if self.host is not None: - body["host"] = self.host - if self.maintenance_id is not None: - body["maintenance_id"] = self.maintenance_id - if self.materialization_name is not None: - body["materialization_name"] = self.materialization_name - if self.org_id is not None: - body["org_id"] = self.org_id - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.pipeline_name is not None: - body["pipeline_name"] = self.pipeline_name - if self.region is not None: - body["region"] = self.region - if self.request_id is not None: - body["request_id"] = self.request_id - if self.table_id is not None: - body["table_id"] = self.table_id - if self.uc_resource_id is not None: - body["uc_resource_id"] = self.uc_resource_id - if self.update_id is not None: - body["update_id"] = self.update_id + if self.batch_id is not None: body['batch_id'] = self.batch_id + if self.cloud is not None: body['cloud'] = self.cloud + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.flow_id is not None: body['flow_id'] = self.flow_id + if self.flow_name is not None: body['flow_name'] = self.flow_name + if self.host is not None: body['host'] = self.host + if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id + if self.materialization_name is not None: body['materialization_name'] = self.materialization_name + if self.org_id is not None: body['org_id'] = self.org_id + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name + if self.region is not None: body['region'] = self.region + if self.request_id is not None: body['request_id'] = self.request_id + if self.table_id is not None: body['table_id'] = self.table_id + if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id + if self.update_id is not None: body['update_id'] = self.update_id return body def as_shallow_dict(self) -> dict: """Serializes the Origin into a shallow dictionary of its immediate attributes.""" body = {} - if self.batch_id is not None: - body["batch_id"] = self.batch_id - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.flow_id is not None: - body["flow_id"] = self.flow_id - if self.flow_name is not None: - body["flow_name"] = self.flow_name - if self.host is not 
None: - body["host"] = self.host - if self.maintenance_id is not None: - body["maintenance_id"] = self.maintenance_id - if self.materialization_name is not None: - body["materialization_name"] = self.materialization_name - if self.org_id is not None: - body["org_id"] = self.org_id - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.pipeline_name is not None: - body["pipeline_name"] = self.pipeline_name - if self.region is not None: - body["region"] = self.region - if self.request_id is not None: - body["request_id"] = self.request_id - if self.table_id is not None: - body["table_id"] = self.table_id - if self.uc_resource_id is not None: - body["uc_resource_id"] = self.uc_resource_id - if self.update_id is not None: - body["update_id"] = self.update_id + if self.batch_id is not None: body['batch_id'] = self.batch_id + if self.cloud is not None: body['cloud'] = self.cloud + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.flow_id is not None: body['flow_id'] = self.flow_id + if self.flow_name is not None: body['flow_name'] = self.flow_name + if self.host is not None: body['host'] = self.host + if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id + if self.materialization_name is not None: body['materialization_name'] = self.materialization_name + if self.org_id is not None: body['org_id'] = self.org_id + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name + if self.region is not None: body['region'] = self.region + if self.request_id is not None: body['request_id'] = self.request_id + if self.table_id is not None: body['table_id'] = self.table_id + if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id + if self.update_id is not None: body['update_id'] = self.update_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Origin: """Deserializes the Origin from a dictionary.""" - return cls( - batch_id=d.get("batch_id", None), - cloud=d.get("cloud", None), - cluster_id=d.get("cluster_id", None), - dataset_name=d.get("dataset_name", None), - flow_id=d.get("flow_id", None), - flow_name=d.get("flow_name", None), - host=d.get("host", None), - maintenance_id=d.get("maintenance_id", None), - materialization_name=d.get("materialization_name", None), - org_id=d.get("org_id", None), - pipeline_id=d.get("pipeline_id", None), - pipeline_name=d.get("pipeline_name", None), - region=d.get("region", None), - request_id=d.get("request_id", None), - table_id=d.get("table_id", None), - uc_resource_id=d.get("uc_resource_id", None), - update_id=d.get("update_id", None), - ) + return cls(batch_id=d.get('batch_id', None), cloud=d.get('cloud', None), cluster_id=d.get('cluster_id', None), dataset_name=d.get('dataset_name', None), flow_id=d.get('flow_id', None), flow_name=d.get('flow_name', None), host=d.get('host', None), maintenance_id=d.get('maintenance_id', None), materialization_name=d.get('materialization_name', None), org_id=d.get('org_id', None), pipeline_id=d.get('pipeline_id', None), pipeline_name=d.get('pipeline_name', None), region=d.get('region', None), request_id=d.get('request_id', None), table_id=d.get('table_id', None), uc_resource_id=d.get('uc_resource_id', None), update_id=d.get('update_id', None)) + + @dataclass class PathPattern: include: Optional[str] = None """The source code to include for 
pipelines""" - + def as_dict(self) -> dict: """Serializes the PathPattern into a dictionary suitable for use as a JSON request body.""" body = {} - if self.include is not None: - body["include"] = self.include + if self.include is not None: body['include'] = self.include return body def as_shallow_dict(self) -> dict: """Serializes the PathPattern into a shallow dictionary of its immediate attributes.""" body = {} - if self.include is not None: - body["include"] = self.include + if self.include is not None: body['include'] = self.include return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PathPattern: """Deserializes the PathPattern from a dictionary.""" - return cls(include=d.get("include", None)) + return cls(include=d.get('include', None)) + + @dataclass class PipelineAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[PipelinePermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the PipelineAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PipelineAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineAccessControlRequest: """Deserializes the PipelineAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", PipelinePermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PipelinePermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class PipelineAccessControlResponse: all_permissions: Optional[List[PipelinePermission]] = None """All permissions.""" - + 
display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the PipelineAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PipelineAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineAccessControlResponse: """Deserializes the PipelineAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", PipelinePermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', PipelinePermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class PipelineCluster: apply_policy_default_values: Optional[bool] = None """Note: This field won't be persisted. Only API users will check this field.""" - + autoscale: Optional[PipelineClusterAutoscale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + aws_attributes: Optional[compute.AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. 
If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[compute.AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + cluster_log_conf: Optional[compute.ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Only dbfs destinations are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -1720,41 +1413,41 @@ class PipelineCluster: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the driver of the cluster belongs. The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable local disk encryption for the cluster.""" - + gcp_attributes: Optional[compute.GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[compute.InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `<destination>/<cluster-ID>/init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + label: Optional[str] = None """A label for the cluster specification, either `default` to configure the default cluster, or `maintenance` to configure the maintenance cluster. This field is optional. The default value is `default`.""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -1764,15 +1457,15 @@ class PipelineCluster: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - - spark_conf: Optional[Dict[str, str]] = None + + spark_conf: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. See :method:clusters/create for more details.""" - - spark_env_vars: Optional[Dict[str, str]] = None + + spark_env_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -1784,122 +1477,66 @@ class PipelineCluster: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + def as_dict(self) -> dict: """Serializes the PipelineCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.label is not None: - body["label"] = self.label - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] + if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale.as_dict() + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: 
body['azure_attributes'] = self.azure_attributes.as_dict() + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.label is not None: body['label'] = self.label + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] return body def as_shallow_dict(self) -> dict: """Serializes the PipelineCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.label is not None: - body["label"] = self.label - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys + if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values + if self.autoscale: body['autoscale'] = self.autoscale + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id 
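# Note (editor's comment): unlike as_dict() above, which recursively calls
# .as_dict() on nested objects such as autoscale and cluster_log_conf, this
# shallow variant stores the nested dataclass instances themselves, so the
# result is not directly JSON-serializable.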
+ if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes + if self.init_scripts: body['init_scripts'] = self.init_scripts + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.label is not None: body['label'] = self.label + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineCluster: """Deserializes the PipelineCluster from a dictionary.""" - return cls( - apply_policy_default_values=d.get("apply_policy_default_values", None), - autoscale=_from_dict(d, "autoscale", PipelineClusterAutoscale), - aws_attributes=_from_dict(d, "aws_attributes", compute.AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", compute.AzureAttributes), - cluster_log_conf=_from_dict(d, "cluster_log_conf", compute.ClusterLogConf), - custom_tags=d.get("custom_tags", None), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", compute.GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", compute.InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", None), - label=d.get("label", None), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - spark_conf=d.get("spark_conf", None), - spark_env_vars=d.get("spark_env_vars", None), - ssh_public_keys=d.get("ssh_public_keys", None), - ) + return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', PipelineClusterAutoscale), aws_attributes=_from_dict(d, 'aws_attributes', compute.AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', compute.AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', compute.ClusterLogConf), custom_tags=d.get('custom_tags', None), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', compute.GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', compute.InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), label=d.get('label', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), ssh_public_keys=d.get('ssh_public_keys', None)) + + @dataclass @@ -1907,47 +1544,39 @@ class PipelineClusterAutoscale: min_workers: int """The minimum number of workers the cluster can scale down to when underutilized. 
It is also the initial number of workers the cluster will have after creation.""" - + max_workers: int """The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.""" - + mode: Optional[PipelineClusterAutoscaleMode] = None """Databricks Enhanced Autoscaling optimizes cluster utilization by automatically allocating cluster resources based on workload volume, with minimal impact to the data processing latency of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy autoscaling feature is used for `maintenance` clusters.""" - + def as_dict(self) -> dict: """Serializes the PipelineClusterAutoscale into a dictionary suitable for use as a JSON request body.""" body = {} - if self.max_workers is not None: - body["max_workers"] = self.max_workers - if self.min_workers is not None: - body["min_workers"] = self.min_workers - if self.mode is not None: - body["mode"] = self.mode.value + if self.max_workers is not None: body['max_workers'] = self.max_workers + if self.min_workers is not None: body['min_workers'] = self.min_workers + if self.mode is not None: body['mode'] = self.mode.value return body def as_shallow_dict(self) -> dict: """Serializes the PipelineClusterAutoscale into a shallow dictionary of its immediate attributes.""" body = {} - if self.max_workers is not None: - body["max_workers"] = self.max_workers - if self.min_workers is not None: - body["min_workers"] = self.min_workers - if self.mode is not None: - body["mode"] = self.mode + if self.max_workers is not None: body['max_workers'] = self.max_workers + if self.min_workers is not None: body['min_workers'] = self.min_workers + if self.mode is not None: body['mode'] = self.mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineClusterAutoscale: """Deserializes the PipelineClusterAutoscale from a dictionary.""" - return cls( - max_workers=d.get("max_workers", None), - min_workers=d.get("min_workers", None), - mode=_enum(d, "mode", PipelineClusterAutoscaleMode), - ) + return cls(max_workers=d.get('max_workers', None), min_workers=d.get('min_workers', None), mode=_enum(d, 'mode', PipelineClusterAutoscaleMode)) + + class PipelineClusterAutoscaleMode(Enum): @@ -1955,780 +1584,599 @@ class PipelineClusterAutoscaleMode(Enum): cluster resources based on workload volume, with minimal impact to the data processing latency of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. 
The legacy autoscaling feature is used for `maintenance` clusters.""" - - ENHANCED = "ENHANCED" - LEGACY = "LEGACY" - + + ENHANCED = 'ENHANCED' + LEGACY = 'LEGACY' @dataclass class PipelineDeployment: kind: DeploymentKind """The deployment method that manages the pipeline.""" - + metadata_file_path: Optional[str] = None """The path to the file containing metadata about the deployment.""" - + def as_dict(self) -> dict: """Serializes the PipelineDeployment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.kind is not None: - body["kind"] = self.kind.value - if self.metadata_file_path is not None: - body["metadata_file_path"] = self.metadata_file_path + if self.kind is not None: body['kind'] = self.kind.value + if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path return body def as_shallow_dict(self) -> dict: """Serializes the PipelineDeployment into a shallow dictionary of its immediate attributes.""" body = {} - if self.kind is not None: - body["kind"] = self.kind - if self.metadata_file_path is not None: - body["metadata_file_path"] = self.metadata_file_path + if self.kind is not None: body['kind'] = self.kind + if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineDeployment: """Deserializes the PipelineDeployment from a dictionary.""" - return cls(kind=_enum(d, "kind", DeploymentKind), metadata_file_path=d.get("metadata_file_path", None)) + return cls(kind=_enum(d, 'kind', DeploymentKind), metadata_file_path=d.get('metadata_file_path', None)) + + @dataclass class PipelineEvent: error: Optional[ErrorDetail] = None """Information about an error captured by the event.""" - + event_type: Optional[str] = None """The event type. 
Should always correspond to the details""" - + id: Optional[str] = None """A time-based, globally unique id.""" - + level: Optional[EventLevel] = None """The severity level of the event.""" - + maturity_level: Optional[MaturityLevel] = None """Maturity level for event_type.""" - + message: Optional[str] = None """The display message associated with the event.""" - + origin: Optional[Origin] = None """Describes where the event originates from.""" - + sequence: Optional[Sequencing] = None """A sequencing object to identify and order events.""" - + timestamp: Optional[str] = None """The time of the event.""" - + def as_dict(self) -> dict: """Serializes the PipelineEvent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error: - body["error"] = self.error.as_dict() - if self.event_type is not None: - body["event_type"] = self.event_type - if self.id is not None: - body["id"] = self.id - if self.level is not None: - body["level"] = self.level.value - if self.maturity_level is not None: - body["maturity_level"] = self.maturity_level.value - if self.message is not None: - body["message"] = self.message - if self.origin: - body["origin"] = self.origin.as_dict() - if self.sequence: - body["sequence"] = self.sequence.as_dict() - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.error: body['error'] = self.error.as_dict() + if self.event_type is not None: body['event_type'] = self.event_type + if self.id is not None: body['id'] = self.id + if self.level is not None: body['level'] = self.level.value + if self.maturity_level is not None: body['maturity_level'] = self.maturity_level.value + if self.message is not None: body['message'] = self.message + if self.origin: body['origin'] = self.origin.as_dict() + if self.sequence: body['sequence'] = self.sequence.as_dict() + if self.timestamp is not None: body['timestamp'] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the PipelineEvent into a shallow dictionary of its immediate attributes.""" body = {} - if self.error: - body["error"] = self.error - if self.event_type is not None: - body["event_type"] = self.event_type - if self.id is not None: - body["id"] = self.id - if self.level is not None: - body["level"] = self.level - if self.maturity_level is not None: - body["maturity_level"] = self.maturity_level - if self.message is not None: - body["message"] = self.message - if self.origin: - body["origin"] = self.origin - if self.sequence: - body["sequence"] = self.sequence - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.error: body['error'] = self.error + if self.event_type is not None: body['event_type'] = self.event_type + if self.id is not None: body['id'] = self.id + if self.level is not None: body['level'] = self.level + if self.maturity_level is not None: body['maturity_level'] = self.maturity_level + if self.message is not None: body['message'] = self.message + if self.origin: body['origin'] = self.origin + if self.sequence: body['sequence'] = self.sequence + if self.timestamp is not None: body['timestamp'] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineEvent: """Deserializes the PipelineEvent from a dictionary.""" - return cls( - error=_from_dict(d, "error", ErrorDetail), - event_type=d.get("event_type", None), - id=d.get("id", None), - level=_enum(d, "level", EventLevel), - maturity_level=_enum(d, "maturity_level", MaturityLevel), - message=d.get("message", None), - origin=_from_dict(d, 
"origin", Origin), - sequence=_from_dict(d, "sequence", Sequencing), - timestamp=d.get("timestamp", None), - ) + return cls(error=_from_dict(d, 'error', ErrorDetail), event_type=d.get('event_type', None), id=d.get('id', None), level=_enum(d, 'level', EventLevel), maturity_level=_enum(d, 'maturity_level', MaturityLevel), message=d.get('message', None), origin=_from_dict(d, 'origin', Origin), sequence=_from_dict(d, 'sequence', Sequencing), timestamp=d.get('timestamp', None)) + + @dataclass class PipelineLibrary: file: Optional[FileLibrary] = None """The path to a file that defines a pipeline and is stored in the Databricks Repos.""" - + glob: Optional[PathPattern] = None """The unified field to include source codes. Each entry can be a notebook path, a file path, or a folder path that ends `/**`. This field cannot be used together with `notebook` or `file`.""" - + jar: Optional[str] = None """URI of the jar to be installed. Currently only DBFS is supported.""" - + maven: Optional[compute.MavenLibrary] = None """Specification of a maven library to be installed.""" - + notebook: Optional[NotebookLibrary] = None """The path to a notebook that defines a pipeline and is stored in the Databricks workspace.""" - + whl: Optional[str] = None """URI of the whl to be installed.""" - + def as_dict(self) -> dict: """Serializes the PipelineLibrary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file: - body["file"] = self.file.as_dict() - if self.glob: - body["glob"] = self.glob.as_dict() - if self.jar is not None: - body["jar"] = self.jar - if self.maven: - body["maven"] = self.maven.as_dict() - if self.notebook: - body["notebook"] = self.notebook.as_dict() - if self.whl is not None: - body["whl"] = self.whl + if self.file: body['file'] = self.file.as_dict() + if self.glob: body['glob'] = self.glob.as_dict() + if self.jar is not None: body['jar'] = self.jar + if self.maven: body['maven'] = self.maven.as_dict() + if self.notebook: body['notebook'] = self.notebook.as_dict() + if self.whl is not None: body['whl'] = self.whl return body def as_shallow_dict(self) -> dict: """Serializes the PipelineLibrary into a shallow dictionary of its immediate attributes.""" body = {} - if self.file: - body["file"] = self.file - if self.glob: - body["glob"] = self.glob - if self.jar is not None: - body["jar"] = self.jar - if self.maven: - body["maven"] = self.maven - if self.notebook: - body["notebook"] = self.notebook - if self.whl is not None: - body["whl"] = self.whl + if self.file: body['file'] = self.file + if self.glob: body['glob'] = self.glob + if self.jar is not None: body['jar'] = self.jar + if self.maven: body['maven'] = self.maven + if self.notebook: body['notebook'] = self.notebook + if self.whl is not None: body['whl'] = self.whl return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineLibrary: """Deserializes the PipelineLibrary from a dictionary.""" - return cls( - file=_from_dict(d, "file", FileLibrary), - glob=_from_dict(d, "glob", PathPattern), - jar=d.get("jar", None), - maven=_from_dict(d, "maven", compute.MavenLibrary), - notebook=_from_dict(d, "notebook", NotebookLibrary), - whl=d.get("whl", None), - ) + return cls(file=_from_dict(d, 'file', FileLibrary), glob=_from_dict(d, 'glob', PathPattern), jar=d.get('jar', None), maven=_from_dict(d, 'maven', compute.MavenLibrary), notebook=_from_dict(d, 'notebook', NotebookLibrary), whl=d.get('whl', None)) + + @dataclass class PipelinePermission: inherited: Optional[bool] = None - + inherited_from_object: 
Optional[List[str]] = None - + permission_level: Optional[PipelinePermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the PipelinePermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PipelinePermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelinePermission: """Deserializes the PipelinePermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", PipelinePermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', PipelinePermissionLevel)) + + class PipelinePermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = "CAN_MANAGE" - CAN_RUN = "CAN_RUN" - CAN_VIEW = "CAN_VIEW" - IS_OWNER = "IS_OWNER" - + + CAN_MANAGE = 'CAN_MANAGE' + CAN_RUN = 'CAN_RUN' + CAN_VIEW = 'CAN_VIEW' + IS_OWNER = 'IS_OWNER' @dataclass class PipelinePermissions: access_control_list: Optional[List[PipelineAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the PipelinePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the PipelinePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if 
self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissions: """Deserializes the PipelinePermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', PipelineAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class PipelinePermissionsDescription: description: Optional[str] = None - + permission_level: Optional[PipelinePermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the PipelinePermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PipelinePermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsDescription: """Deserializes the PipelinePermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", PipelinePermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', PipelinePermissionLevel)) + + @dataclass class PipelinePermissionsRequest: access_control_list: Optional[List[PipelineAccessControlRequest]] = None - + pipeline_id: Optional[str] = None """The pipeline for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the PipelinePermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id return body def as_shallow_dict(self) -> dict: """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
PipelinePermissionsRequest: """Deserializes the PipelinePermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlRequest), - pipeline_id=d.get("pipeline_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', PipelineAccessControlRequest), pipeline_id=d.get('pipeline_id', None)) + + @dataclass class PipelineSpec: budget_policy_id: Optional[str] = None """Budget policy of this pipeline.""" - + catalog: Optional[str] = None """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.""" - + channel: Optional[str] = None """DLT Release Channel that specifies which version to use.""" - + clusters: Optional[List[PipelineCluster]] = None """Cluster settings for this pipeline deployment.""" - - configuration: Optional[Dict[str, str]] = None + + configuration: Optional[Dict[str,str]] = None """String-String configuration for this pipeline execution.""" - + continuous: Optional[bool] = None """Whether the pipeline is continuous or triggered. This replaces `trigger`.""" - + deployment: Optional[PipelineDeployment] = None """Deployment type of this pipeline.""" - + development: Optional[bool] = None """Whether the pipeline is in Development mode. Defaults to false.""" - + edition: Optional[str] = None """Pipeline product edition.""" - + event_log: Optional[EventLogSpec] = None """Event log configuration for this pipeline""" - + filters: Optional[Filters] = None """Filters on which Pipeline packages to include in the deployed graph.""" - + gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None """The definition of a gateway pipeline to support change data capture.""" - + id: Optional[str] = None """Unique identifier for this pipeline.""" - + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.""" - + libraries: Optional[List[PipelineLibrary]] = None """Libraries or code needed by this deployment.""" - + name: Optional[str] = None """Friendly identifier for this pipeline.""" - + notifications: Optional[List[Notifications]] = None """List of notification settings for this pipeline.""" - + photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" - + restart_window: Optional[RestartWindow] = None """Restart window of this pipeline.""" - + root_path: Optional[str] = None """Root path for this pipeline. This is used as the root directory when editing the pipeline in the Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution.""" - + schema: Optional[str] = None """The default schema (database) where tables are read from or published to.""" - + serverless: Optional[bool] = None """Whether serverless compute is enabled for this pipeline.""" - + storage: Optional[str] = None """DBFS root directory for storing checkpoints and tables.""" - + + tags: Optional[Dict[str,str]] = None + """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, + and are therefore subject to the same limitations. 
A maximum of 25 tags can be added to the + pipeline.""" + target: Optional[str] = None """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.""" - + trigger: Optional[PipelineTrigger] = None """Which pipeline trigger to use. Deprecated: Use `continuous` instead.""" - + def as_dict(self) -> dict: """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = [v.as_dict() for v in self.clusters] - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.development is not None: - body["development"] = self.development - if self.edition is not None: - body["edition"] = self.edition - if self.event_log: - body["event_log"] = self.event_log.as_dict() - if self.filters: - body["filters"] = self.filters.as_dict() - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition.as_dict() - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = self.ingestion_definition.as_dict() - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = [v.as_dict() for v in self.notifications] - if self.photon is not None: - body["photon"] = self.photon - if self.restart_window: - body["restart_window"] = self.restart_window.as_dict() - if self.root_path is not None: - body["root_path"] = self.root_path - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger.as_dict() + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.catalog is not None: body['catalog'] = self.catalog + if self.channel is not None: body['channel'] = self.channel + if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] + if self.configuration: body['configuration'] = self.configuration + if self.continuous is not None: body['continuous'] = self.continuous + if self.deployment: body['deployment'] = self.deployment.as_dict() + if self.development is not None: body['development'] = self.development + if self.edition is not None: body['edition'] = self.edition + if self.event_log: body['event_log'] = self.event_log.as_dict() + if self.filters: body['filters'] = self.filters.as_dict() + if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict() + if self.id is not None: body['id'] = self.id + if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict() + if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.name is not None: body['name'] = self.name + if self.notifications: 
body['notifications'] = [v.as_dict() for v in self.notifications]
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
+        if self.root_path is not None: body['root_path'] = self.root_path
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.tags: body['tags'] = self.tags
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.budget_policy_id is not None:
-            body["budget_policy_id"] = self.budget_policy_id
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.channel is not None:
-            body["channel"] = self.channel
-        if self.clusters:
-            body["clusters"] = self.clusters
-        if self.configuration:
-            body["configuration"] = self.configuration
-        if self.continuous is not None:
-            body["continuous"] = self.continuous
-        if self.deployment:
-            body["deployment"] = self.deployment
-        if self.development is not None:
-            body["development"] = self.development
-        if self.edition is not None:
-            body["edition"] = self.edition
-        if self.event_log:
-            body["event_log"] = self.event_log
-        if self.filters:
-            body["filters"] = self.filters
-        if self.gateway_definition:
-            body["gateway_definition"] = self.gateway_definition
-        if self.id is not None:
-            body["id"] = self.id
-        if self.ingestion_definition:
-            body["ingestion_definition"] = self.ingestion_definition
-        if self.libraries:
-            body["libraries"] = self.libraries
-        if self.name is not None:
-            body["name"] = self.name
-        if self.notifications:
-            body["notifications"] = self.notifications
-        if self.photon is not None:
-            body["photon"] = self.photon
-        if self.restart_window:
-            body["restart_window"] = self.restart_window
-        if self.root_path is not None:
-            body["root_path"] = self.root_path
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.serverless is not None:
-            body["serverless"] = self.serverless
-        if self.storage is not None:
-            body["storage"] = self.storage
-        if self.target is not None:
-            body["target"] = self.target
-        if self.trigger:
-            body["trigger"] = self.trigger
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.edition is not None: body['edition'] = self.edition
+        if self.event_log: body['event_log'] = self.event_log
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.root_path is not None: body['root_path'] = self.root_path
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.tags: body['tags'] = self.tags
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PipelineSpec:
         """Deserializes the PipelineSpec from a dictionary."""
-        return cls(
-            budget_policy_id=d.get("budget_policy_id", None),
-            catalog=d.get("catalog", None),
-            channel=d.get("channel", None),
-            clusters=_repeated_dict(d, "clusters", PipelineCluster),
-            configuration=d.get("configuration", None),
-            continuous=d.get("continuous", None),
-            deployment=_from_dict(d, "deployment", PipelineDeployment),
-            development=d.get("development", None),
-            edition=d.get("edition", None),
-            event_log=_from_dict(d, "event_log", EventLogSpec),
-            filters=_from_dict(d, "filters", Filters),
-            gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition),
-            id=d.get("id", None),
-            ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition),
-            libraries=_repeated_dict(d, "libraries", PipelineLibrary),
-            name=d.get("name", None),
-            notifications=_repeated_dict(d, "notifications", Notifications),
-            photon=d.get("photon", None),
-            restart_window=_from_dict(d, "restart_window", RestartWindow),
-            root_path=d.get("root_path", None),
-            schema=d.get("schema", None),
-            serverless=d.get("serverless", None),
-            storage=d.get("storage", None),
-            target=d.get("target", None),
-            trigger=_from_dict(d, "trigger", PipelineTrigger),
-        )
+        return cls(budget_policy_id=d.get('budget_policy_id', None), catalog=d.get('catalog', None), channel=d.get('channel', None), clusters=_repeated_dict(d, 'clusters', PipelineCluster), configuration=d.get('configuration', None), continuous=d.get('continuous', None), deployment=_from_dict(d, 'deployment', PipelineDeployment), development=d.get('development', None), edition=d.get('edition', None), event_log=_from_dict(d, 'event_log', EventLogSpec), filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), restart_window=_from_dict(d, 'restart_window', RestartWindow), root_path=d.get('root_path', None), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), tags=d.get('tags', None), target=d.get('target', None), trigger=_from_dict(d, 'trigger', PipelineTrigger))
+
+

 class PipelineState(Enum):
     """The pipeline state."""
-
-    DELETED = "DELETED"
-    DEPLOYING = "DEPLOYING"
-    FAILED = "FAILED"
-    IDLE = "IDLE"
-    RECOVERING = "RECOVERING"
-    RESETTING = "RESETTING"
-    RUNNING = "RUNNING"
-    STARTING = "STARTING"
-    STOPPING = "STOPPING"
-
+
+    DELETED = 'DELETED'
+    DEPLOYING = 'DEPLOYING'
+    FAILED = 'FAILED'
+    IDLE = 'IDLE'
+    RECOVERING = 'RECOVERING'
+    RESETTING = 'RESETTING'
+    RUNNING = 'RUNNING'
+    STARTING = 'STARTING'
+    STOPPING = 'STOPPING'
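# A minimal usage sketch (assuming the `databricks-sdk` package is
# importable): round-tripping a PipelineSpec through the dict helpers
# above, including the `tags` field introduced by this update. The
# pipeline name and tag values are hypothetical.
from databricks.sdk.service.pipelines import PipelineSpec, PipelineState

spec = PipelineSpec.from_dict({
    "name": "sales-ingest",
    "catalog": "main",
    "schema": "raw",
    "serverless": True,
    "tags": {"team": "data-eng"},
})
# as_dict() only emits keys that are set, so the request body stays minimal.
assert spec.as_dict()["tags"] == {"team": "data-eng"}
assert PipelineState("RUNNING") is PipelineState.RUNNING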

 @dataclass
 class PipelineStateInfo:
     cluster_id: Optional[str] = None
     """The unique identifier of the cluster running the pipeline."""
-
+
     creator_user_name: Optional[str] = None
     """The username of the pipeline creator."""
-
+
     health: Optional[PipelineStateInfoHealth] = None
     """The health of a pipeline."""
-
+
     latest_updates: Optional[List[UpdateStateInfo]] = None
     """Status of the latest updates for the pipeline. Ordered with the newest update first."""
-
+
     name: Optional[str] = None
     """The user-friendly name of the pipeline."""
-
+
     pipeline_id: Optional[str] = None
     """The unique identifier of the pipeline."""
-
+
     run_as_user_name: Optional[str] = None
     """The username that the pipeline runs as. This is a read only value derived from the pipeline
     owner."""
-
+
     state: Optional[PipelineState] = None
     """The pipeline state."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PipelineStateInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.creator_user_name is not None:
-            body["creator_user_name"] = self.creator_user_name
-        if self.health is not None:
-            body["health"] = self.health.value
-        if self.latest_updates:
-            body["latest_updates"] = [v.as_dict() for v in self.latest_updates]
-        if self.name is not None:
-            body["name"] = self.name
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.run_as_user_name is not None:
-            body["run_as_user_name"] = self.run_as_user_name
-        if self.state is not None:
-            body["state"] = self.state.value
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.health is not None: body['health'] = self.health.value
+        if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates]
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.state is not None: body['state'] = self.state.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineStateInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.creator_user_name is not None:
-            body["creator_user_name"] = self.creator_user_name
-        if self.health is not None:
-            body["health"] = self.health
-        if self.latest_updates:
-            body["latest_updates"] = self.latest_updates
-        if self.name is not None:
-            body["name"] = self.name
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.run_as_user_name is not None:
-            body["run_as_user_name"] = self.run_as_user_name
-        if self.state is not None:
-            body["state"] = self.state
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.health is not None: body['health'] = self.health
+        if self.latest_updates: body['latest_updates'] = self.latest_updates
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.state is not None: body['state'] = self.state
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PipelineStateInfo:
         """Deserializes the PipelineStateInfo from a dictionary."""
-        return cls(
-            cluster_id=d.get("cluster_id", None),
-            creator_user_name=d.get("creator_user_name", None),
-            health=_enum(d, "health", PipelineStateInfoHealth),
-            latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo),
-            name=d.get("name", None),
-            pipeline_id=d.get("pipeline_id", None),
-            run_as_user_name=d.get("run_as_user_name", None),
-            state=_enum(d, "state", PipelineState),
-        )
+        return cls(cluster_id=d.get('cluster_id', None), creator_user_name=d.get('creator_user_name', None), health=_enum(d, 'health', PipelineStateInfoHealth), latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo), name=d.get('name', None), pipeline_id=d.get('pipeline_id', None), run_as_user_name=d.get('run_as_user_name', None), state=_enum(d, 'state', PipelineState))
+
+

 class PipelineStateInfoHealth(Enum):
     """The health of a pipeline."""
-
-    HEALTHY = "HEALTHY"
-    UNHEALTHY = "UNHEALTHY"
-
+
+    HEALTHY = 'HEALTHY'
+    UNHEALTHY = 'UNHEALTHY'


 @dataclass
 class PipelineTrigger:
     cron: Optional[CronTrigger] = None
-
+
     manual: Optional[ManualTrigger] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the PipelineTrigger into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cron:
-            body["cron"] = self.cron.as_dict()
-        if self.manual:
-            body["manual"] = self.manual.as_dict()
+        if self.cron: body['cron'] = self.cron.as_dict()
+        if self.manual: body['manual'] = self.manual.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PipelineTrigger into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cron:
-            body["cron"] = self.cron
-        if self.manual:
-            body["manual"] = self.manual
+        if self.cron: body['cron'] = self.cron
+        if self.manual: body['manual'] = self.manual
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PipelineTrigger:
         """Deserializes the PipelineTrigger from a dictionary."""
-        return cls(cron=_from_dict(d, "cron", CronTrigger), manual=_from_dict(d, "manual", ManualTrigger))
+        return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger))
+
+
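# A sketch of the two trigger shapes. The CronTrigger field names
# (`quartz_cron_schedule`, `timezone_id`) are assumed from elsewhere in
# this module and are not shown in this hunk.
from databricks.sdk.service.pipelines import PipelineTrigger

cron_trigger = PipelineTrigger.from_dict(
    {"cron": {"quartz_cron_schedule": "0 0 6 * * ?", "timezone_id": "UTC"}}
)
manual_trigger = PipelineTrigger.from_dict({"manual": {}})
# Only one of `cron` or `manual` is expected to be populated at a time.
assert cron_trigger.cron is not None and cron_trigger.manual is None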

 @dataclass
 class ReportSpec:
     source_url: str
     """Required. Report URL in the source system."""
-
+
     destination_catalog: str
     """Required. Destination catalog to store table."""
-
+
     destination_schema: str
     """Required. Destination schema to store table."""
-
+
     destination_table: Optional[str] = None
     """Required. Destination table name. The pipeline fails if a table with that name already exists."""
-
+
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings override the
     table_configuration defined in the IngestionPipelineDefinition object."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ReportSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_catalog is not None:
-            body["destination_catalog"] = self.destination_catalog
-        if self.destination_schema is not None:
-            body["destination_schema"] = self.destination_schema
-        if self.destination_table is not None:
-            body["destination_table"] = self.destination_table
-        if self.source_url is not None:
-            body["source_url"] = self.source_url
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ReportSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_catalog is not None:
-            body["destination_catalog"] = self.destination_catalog
-        if self.destination_schema is not None:
-            body["destination_schema"] = self.destination_schema
-        if self.destination_table is not None:
-            body["destination_table"] = self.destination_table
-        if self.source_url is not None:
-            body["source_url"] = self.source_url
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ReportSpec:
         """Deserializes the ReportSpec from a dictionary."""
-        return cls(
-            destination_catalog=d.get("destination_catalog", None),
-            destination_schema=d.get("destination_schema", None),
-            destination_table=d.get("destination_table", None),
-            source_url=d.get("source_url", None),
-            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
-        )
+        return cls(destination_catalog=d.get('destination_catalog', None), destination_schema=d.get('destination_schema', None), destination_table=d.get('destination_table', None), source_url=d.get('source_url', None), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+

 @dataclass
@@ -2736,46 +2184,38 @@ class RestartWindow:
     start_hour: int
     """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
     Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
-
+
     days_of_week: Optional[List[DayOfWeek]] = None
     """Days of week in which the restart is allowed to happen (within a five-hour window starting at
     start_hour). If not specified all days of the week will be used."""
-
+
     time_zone_id: Optional[str] = None
     """Time zone id of restart window. See
     https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
     for details. If not specified, UTC will be used."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.days_of_week:
-            body["days_of_week"] = [v.value for v in self.days_of_week]
-        if self.start_hour is not None:
-            body["start_hour"] = self.start_hour
-        if self.time_zone_id is not None:
-            body["time_zone_id"] = self.time_zone_id
+        if self.days_of_week: body['days_of_week'] = [v.value for v in self.days_of_week]
+        if self.start_hour is not None: body['start_hour'] = self.start_hour
+        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RestartWindow into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.days_of_week:
-            body["days_of_week"] = self.days_of_week
-        if self.start_hour is not None:
-            body["start_hour"] = self.start_hour
-        if self.time_zone_id is not None:
-            body["time_zone_id"] = self.time_zone_id
+        if self.days_of_week: body['days_of_week'] = self.days_of_week
+        if self.start_hour is not None: body['start_hour'] = self.start_hour
+        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RestartWindow:
         """Deserializes the RestartWindow from a dictionary."""
-        return cls(
-            days_of_week=_repeated_enum(d, "days_of_week", DayOfWeek),
-            start_hour=d.get("start_hour", None),
-            time_zone_id=d.get("time_zone_id", None),
-        )
+        return cls(days_of_week=_repeated_enum(d, 'days_of_week', DayOfWeek), start_hour=d.get('start_hour', None), time_zone_id=d.get('time_zone_id', None))
+
+
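# A sketch of a restart window: restarts may begin within the five-hour
# window starting at `start_hour`, on the listed days, in the given zone.
# The DayOfWeek enum members are assumed from elsewhere in this module.
from databricks.sdk.service.pipelines import DayOfWeek, RestartWindow

window = RestartWindow(
    start_hour=2,  # window covers 02:00-07:00 in time_zone_id
    days_of_week=[DayOfWeek.SATURDAY, DayOfWeek.SUNDAY],
    time_zone_id="America/Los_Angeles",
)
# Enum members serialize to their string values.
assert window.as_dict()["days_of_week"] == ["SATURDAY", "SUNDAY"]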

 @dataclass
@@ -2783,338 +2223,280 @@ class RunAs:
     """Write-only setting, available only in Create/Update calls. Specifies the user or service
     principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
     the pipeline.
-
+
     Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
     is thrown."""
-
+
     service_principal_name: Optional[str] = None
     """Application ID of an active service principal. Setting this field requires the
     `servicePrincipal/user` role."""
-
+
     user_name: Optional[str] = None
     """The email of an active workspace user. Users can only set this field to their own email."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RunAs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RunAs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.service_principal_name is not None:
-            body["service_principal_name"] = self.service_principal_name
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
+        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RunAs:
         """Deserializes the RunAs from a dictionary."""
-        return cls(service_principal_name=d.get("service_principal_name", None), user_name=d.get("user_name", None))
+        return cls(service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
+
+
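# A sketch of the two mutually exclusive run-as identities; the service
# rejects requests that set both fields. The ID and email are hypothetical.
from databricks.sdk.service.pipelines import RunAs

as_service_principal = RunAs(service_principal_name="my-sp-application-id")
as_user = RunAs(user_name="jane.doe@example.com")
assert "user_name" not in as_service_principal.as_dict()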

 @dataclass
 class SchemaSpec:
     source_schema: str
     """Required. Schema name in the source database."""
-
+
     destination_catalog: str
     """Required. Destination catalog to store tables."""
-
+
     destination_schema: str
     """Required. Destination schema to store tables in. Tables with the same name as the source
     tables are created in this destination schema. The pipeline fails If a table with the same name
     already exists."""
-
+
     source_catalog: Optional[str] = None
     """The source catalog name. Might be optional depending on the type of source."""
-
+
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings are applied to all
     tables in this schema and override the table_configuration defined in the
     IngestionPipelineDefinition object."""
-
+
     def as_dict(self) -> dict:
         """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_catalog is not None:
-            body["destination_catalog"] = self.destination_catalog
-        if self.destination_schema is not None:
-            body["destination_schema"] = self.destination_schema
-        if self.source_catalog is not None:
-            body["source_catalog"] = self.source_catalog
-        if self.source_schema is not None:
-            body["source_schema"] = self.source_schema
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SchemaSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_catalog is not None:
-            body["destination_catalog"] = self.destination_catalog
-        if self.destination_schema is not None:
-            body["destination_schema"] = self.destination_schema
-        if self.source_catalog is not None:
-            body["source_catalog"] = self.source_catalog
-        if self.source_schema is not None:
-            body["source_schema"] = self.source_schema
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
        return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SchemaSpec:
         """Deserializes the SchemaSpec from a dictionary."""
-        return cls(
-            destination_catalog=d.get("destination_catalog", None),
-            destination_schema=d.get("destination_schema", None),
-            source_catalog=d.get("source_catalog", None),
-            source_schema=d.get("source_schema", None),
-            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
-        )
+        return cls(destination_catalog=d.get('destination_catalog', None), destination_schema=d.get('destination_schema', None), source_catalog=d.get('source_catalog', None), source_schema=d.get('source_schema', None), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
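# A sketch of a schema-level ingestion spec: every table in the source
# schema lands in the destination schema under its own name, and the
# table_configuration applies to all of them. Names are hypothetical.
from databricks.sdk.service.pipelines import SchemaSpec, TableSpecificConfig

schema_spec = SchemaSpec(
    source_schema="objects",
    destination_catalog="main",
    destination_schema="salesforce_raw",
    source_catalog="salesforce",  # optional for some source types
    table_configuration=TableSpecificConfig(primary_keys=["Id"]),
)
assert schema_spec.as_dict()["destination_schema"] == "salesforce_raw"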

 @dataclass
 class Sequencing:
     control_plane_seq_no: Optional[int] = None
     """A sequence number, unique and increasing within the control plane."""
-
+
     data_plane_id: Optional[DataPlaneId] = None
     """the ID assigned by the data plane."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Sequencing into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.control_plane_seq_no is not None:
-            body["control_plane_seq_no"] = self.control_plane_seq_no
-        if self.data_plane_id:
-            body["data_plane_id"] = self.data_plane_id.as_dict()
+        if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no
+        if self.data_plane_id: body['data_plane_id'] = self.data_plane_id.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Sequencing into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.control_plane_seq_no is not None:
-            body["control_plane_seq_no"] = self.control_plane_seq_no
-        if self.data_plane_id:
-            body["data_plane_id"] = self.data_plane_id
+        if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no
+        if self.data_plane_id: body['data_plane_id'] = self.data_plane_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Sequencing:
         """Deserializes the Sequencing from a dictionary."""
-        return cls(
-            control_plane_seq_no=d.get("control_plane_seq_no", None),
-            data_plane_id=_from_dict(d, "data_plane_id", DataPlaneId),
-        )
+        return cls(control_plane_seq_no=d.get('control_plane_seq_no', None), data_plane_id=_from_dict(d, 'data_plane_id', DataPlaneId))
+
+

 @dataclass
 class SerializedException:
     class_name: Optional[str] = None
     """Runtime class of the exception"""
-
+
     message: Optional[str] = None
     """Exception message"""
-
+
     stack: Optional[List[StackFrame]] = None
     """Stack trace consisting of a list of stack frames"""
-
+
     def as_dict(self) -> dict:
         """Serializes the SerializedException into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.class_name is not None:
-            body["class_name"] = self.class_name
-        if self.message is not None:
-            body["message"] = self.message
-        if self.stack:
-            body["stack"] = [v.as_dict() for v in self.stack]
+        if self.class_name is not None: body['class_name'] = self.class_name
+        if self.message is not None: body['message'] = self.message
+        if self.stack: body['stack'] = [v.as_dict() for v in self.stack]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SerializedException into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.class_name is not None:
-            body["class_name"] = self.class_name
-        if self.message is not None:
-            body["message"] = self.message
-        if self.stack:
-            body["stack"] = self.stack
+        if self.class_name is not None: body['class_name'] = self.class_name
+        if self.message is not None: body['message'] = self.message
+        if self.stack: body['stack'] = self.stack
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SerializedException:
         """Deserializes the SerializedException from a dictionary."""
-        return cls(
-            class_name=d.get("class_name", None),
-            message=d.get("message", None),
-            stack=_repeated_dict(d, "stack", StackFrame),
-        )
+        return cls(class_name=d.get('class_name', None), message=d.get('message', None), stack=_repeated_dict(d, 'stack', StackFrame))
+
+

 @dataclass
 class StackFrame:
     declaring_class: Optional[str] = None
     """Class from which the method call originated"""
-
+
     file_name: Optional[str] = None
     """File where the method is defined"""
-
+
     line_number: Optional[int] = None
     """Line from which the method was called"""
-
+
     method_name: Optional[str] = None
     """Name of the method which was called"""
-
+
     def as_dict(self) -> dict:
         """Serializes the StackFrame into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.declaring_class is not None:
-            body["declaring_class"] = self.declaring_class
-        if self.file_name is not None:
-            body["file_name"] = self.file_name
-        if self.line_number is not None:
-            body["line_number"] = self.line_number
-        if self.method_name is not None:
-            body["method_name"] = self.method_name
+        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
+        if self.file_name is not None: body['file_name'] = self.file_name
+        if self.line_number is not None: body['line_number'] = self.line_number
+        if self.method_name is not None: body['method_name'] = self.method_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the StackFrame into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.declaring_class is not None:
-            body["declaring_class"] = self.declaring_class
-        if self.file_name is not None:
-            body["file_name"] = self.file_name
-        if self.line_number is not None:
-            body["line_number"] = self.line_number
-        if self.method_name is not None:
-            body["method_name"] = self.method_name
+        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
+        if self.file_name is not None: body['file_name'] = self.file_name
+        if self.line_number is not None: body['line_number'] = self.line_number
+        if self.method_name is not None: body['method_name'] = self.method_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> StackFrame:
         """Deserializes the StackFrame from a dictionary."""
-        return cls(
-            declaring_class=d.get("declaring_class", None),
-            file_name=d.get("file_name", None),
-            line_number=d.get("line_number", None),
-            method_name=d.get("method_name", None),
-        )
+        return cls(declaring_class=d.get('declaring_class', None), file_name=d.get('file_name', None), line_number=d.get('line_number', None), method_name=d.get('method_name', None))
+
+

 @dataclass
 class StartUpdate:
     cause: Optional[StartUpdateCause] = None
     """What triggered this update."""
-
+
     full_refresh: Optional[bool] = None
     """If true, this update will reset all tables before running."""
-
+
     full_refresh_selection: Optional[List[str]] = None
     """A list of tables to update with fullRefresh. If both refresh_selection and
     full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
     that the states of the table will be reset before the refresh."""
-
+
     pipeline_id: Optional[str] = None
-
+
     refresh_selection: Optional[List[str]] = None
     """A list of tables to update without fullRefresh. If both refresh_selection and
     full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
     that the states of the table will be reset before the refresh."""
-
+
     validate_only: Optional[bool] = None
     """If true, this update only validates the correctness of pipeline source code but does not
     materialize or publish any datasets."""
-
+
     def as_dict(self) -> dict:
         """Serializes the StartUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause.value
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.full_refresh_selection:
-            body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.refresh_selection:
-            body["refresh_selection"] = [v for v in self.refresh_selection]
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
+        if self.cause is not None: body['cause'] = self.cause.value
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = [v for v in self.full_refresh_selection]
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = [v for v in self.refresh_selection]
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the StartUpdate into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.full_refresh_selection:
-            body["full_refresh_selection"] = self.full_refresh_selection
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.refresh_selection:
-            body["refresh_selection"] = self.refresh_selection
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
+        if self.cause is not None: body['cause'] = self.cause
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> StartUpdate:
         """Deserializes the StartUpdate from a dictionary."""
-        return cls(
-            cause=_enum(d, "cause", StartUpdateCause),
-            full_refresh=d.get("full_refresh", None),
-            full_refresh_selection=d.get("full_refresh_selection", None),
-            pipeline_id=d.get("pipeline_id", None),
-            refresh_selection=d.get("refresh_selection", None),
-            validate_only=d.get("validate_only", None),
-        )
+        return cls(cause=_enum(d, 'cause', StartUpdateCause), full_refresh=d.get('full_refresh', None), full_refresh_selection=d.get('full_refresh_selection', None), pipeline_id=d.get('pipeline_id', None), refresh_selection=d.get('refresh_selection', None), validate_only=d.get('validate_only', None))
+
+

 class StartUpdateCause(Enum):
     """What triggered this update."""
-
-    API_CALL = "API_CALL"
-    INFRASTRUCTURE_MAINTENANCE = "INFRASTRUCTURE_MAINTENANCE"
-    JOB_TASK = "JOB_TASK"
-    RETRY_ON_FAILURE = "RETRY_ON_FAILURE"
-    SCHEMA_CHANGE = "SCHEMA_CHANGE"
-    SERVICE_UPGRADE = "SERVICE_UPGRADE"
-    USER_ACTION = "USER_ACTION"
-
+
+    API_CALL = 'API_CALL'
+    INFRASTRUCTURE_MAINTENANCE = 'INFRASTRUCTURE_MAINTENANCE'
+    JOB_TASK = 'JOB_TASK'
+    RETRY_ON_FAILURE = 'RETRY_ON_FAILURE'
+    SCHEMA_CHANGE = 'SCHEMA_CHANGE'
+    SERVICE_UPGRADE = 'SERVICE_UPGRADE'
+    USER_ACTION = 'USER_ACTION'
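# A sketch of the refresh-selection semantics: leaving both lists empty
# requests a full-graph update, while tables named in
# `full_refresh_selection` have their state reset before the refresh.
# The pipeline ID is hypothetical.
from databricks.sdk.service.pipelines import StartUpdate, StartUpdateCause

update = StartUpdate(
    pipeline_id="1234-abcd",
    full_refresh_selection=["raw.orders"],
    refresh_selection=["raw.customers"],
    cause=StartUpdateCause.USER_ACTION,
)
assert update.as_dict()["cause"] == "USER_ACTION"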

 @dataclass
 class StartUpdateResponse:
     update_id: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the StartUpdateResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.update_id is not None:
-            body["update_id"] = self.update_id
+        if self.update_id is not None: body['update_id'] = self.update_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the StartUpdateResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.update_id is not None:
-            body["update_id"] = self.update_id
+        if self.update_id is not None: body['update_id'] = self.update_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> StartUpdateResponse:
         """Deserializes the StartUpdateResponse from a dictionary."""
-        return cls(update_id=d.get("update_id", None))
+        return cls(update_id=d.get('update_id', None))
+
+

 @dataclass
@@ -3133,83 +2515,68 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> StopPipelineResponse:
         """Deserializes the StopPipelineResponse from a dictionary."""
         return cls()
+
+
+
+
+
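# A sketch of starting an update and reading the returned ID, assuming a
# configured WorkspaceClient and an existing pipeline; `start_update` is
# the service method this request/response pair backs. ID is hypothetical.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.pipelines.start_update(pipeline_id="1234-abcd", full_refresh=False)
print(f"started update {resp.update_id}")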

 @dataclass
 class TableSpec:
     source_table: str
     """Required. Table name in the source database."""
-
+
     destination_catalog: str
     """Required. Destination catalog to store table."""
-
+
     destination_schema: str
     """Required. Destination schema to store table."""
-
+
     destination_table: Optional[str] = None
     """Optional. Destination table name. The pipeline fails if a table with that name already exists.
     If not set, the source table name is used."""
-
+
     source_catalog: Optional[str] = None
     """Source catalog name. Might be optional depending on the type of source."""
-
+
     source_schema: Optional[str] = None
     """Schema name in the source database. Might be optional depending on the type of source."""
-
+
     table_configuration: Optional[TableSpecificConfig] = None
     """Configuration settings to control the ingestion of tables. These settings override the
     table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TableSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.destination_catalog is not None:
-            body["destination_catalog"] = self.destination_catalog
-        if self.destination_schema is not None:
-            body["destination_schema"] = self.destination_schema
-        if self.destination_table is not None:
-            body["destination_table"] = self.destination_table
-        if self.source_catalog is not None:
-            body["source_catalog"] = self.source_catalog
-        if self.source_schema is not None:
-            body["source_schema"] = self.source_schema
-        if self.source_table is not None:
-            body["source_table"] = self.source_table
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.source_table is not None: body['source_table'] = self.source_table
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.destination_catalog is not None:
-            body["destination_catalog"] = self.destination_catalog
-        if self.destination_schema is not None:
-            body["destination_schema"] = self.destination_schema
-        if self.destination_table is not None:
-            body["destination_table"] = self.destination_table
-        if self.source_catalog is not None:
-            body["source_catalog"] = self.source_catalog
-        if self.source_schema is not None:
-            body["source_schema"] = self.source_schema
-        if self.source_table is not None:
-            body["source_table"] = self.source_table
-        if self.table_configuration:
-            body["table_configuration"] = self.table_configuration
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.source_table is not None: body['source_table'] = self.source_table
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableSpec:
         """Deserializes the TableSpec from a dictionary."""
-        return cls(
-            destination_catalog=d.get("destination_catalog", None),
-            destination_schema=d.get("destination_schema", None),
-            destination_table=d.get("destination_table", None),
-            source_catalog=d.get("source_catalog", None),
-            source_schema=d.get("source_schema", None),
-            source_table=d.get("source_table", None),
-            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
-        )
+        return cls(destination_catalog=d.get('destination_catalog', None), destination_schema=d.get('destination_schema', None), destination_table=d.get('destination_table', None), source_catalog=d.get('source_catalog', None), source_schema=d.get('source_schema', None), source_table=d.get('source_table', None), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+

 @dataclass
@@ -3219,363 +2586,280 @@ class TableSpecificConfig:
     fully controls what columns to be ingested. When specified, all other columns including future
     ones will be automatically included for ingestion. This field in mutually exclusive with
     `include_columns`."""
-
+
     include_columns: Optional[List[str]] = None
     """A list of column names to be included for the ingestion. When not specified, all columns
     except ones in exclude_columns will be included. Future columns will be automatically included.
     When specified, all other future columns will be automatically excluded from ingestion. This
     field in mutually exclusive with `exclude_columns`."""
-
+
     primary_keys: Optional[List[str]] = None
     """The primary key of the table used to apply changes."""
-
+
     salesforce_include_formula_fields: Optional[bool] = None
     """If true, formula fields defined in the table are included in the ingestion. This setting is
     only valid for the Salesforce connector"""
-
+
     scd_type: Optional[TableSpecificConfigScdType] = None
     """The SCD type to use to ingest the table."""
-
+
     sequence_by: Optional[List[str]] = None
     """The column names specifying the logical order of events in the source data. Delta Live Tables
     uses this sequencing to handle change events that arrive out of order."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exclude_columns:
-            body["exclude_columns"] = [v for v in self.exclude_columns]
-        if self.include_columns:
-            body["include_columns"] = [v for v in self.include_columns]
-        if self.primary_keys:
-            body["primary_keys"] = [v for v in self.primary_keys]
-        if self.salesforce_include_formula_fields is not None:
-            body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
-        if self.scd_type is not None:
-            body["scd_type"] = self.scd_type.value
-        if self.sequence_by:
-            body["sequence_by"] = [v for v in self.sequence_by]
+        if self.exclude_columns: body['exclude_columns'] = [v for v in self.exclude_columns]
+        if self.include_columns: body['include_columns'] = [v for v in self.include_columns]
+        if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys]
+        if self.salesforce_include_formula_fields is not None: body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
+        if self.scd_type is not None: body['scd_type'] = self.scd_type.value
+        if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exclude_columns:
-            body["exclude_columns"] = self.exclude_columns
-        if self.include_columns:
-            body["include_columns"] = self.include_columns
-        if self.primary_keys:
-            body["primary_keys"] = self.primary_keys
-        if self.salesforce_include_formula_fields is not None:
-            body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
-        if self.scd_type is not None:
-            body["scd_type"] = self.scd_type
-        if self.sequence_by:
-            body["sequence_by"] = self.sequence_by
+        if self.exclude_columns: body['exclude_columns'] = self.exclude_columns
+        if self.include_columns: body['include_columns'] = self.include_columns
+        if self.primary_keys: body['primary_keys'] = self.primary_keys
+        if self.salesforce_include_formula_fields is not None: body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
+        if self.scd_type is not None: body['scd_type'] = self.scd_type
+        if self.sequence_by: body['sequence_by'] = self.sequence_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
-        return cls(
-            exclude_columns=d.get("exclude_columns", None),
-            include_columns=d.get("include_columns", None),
-            primary_keys=d.get("primary_keys", None),
-            salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
-            scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
-            sequence_by=d.get("sequence_by", None),
-        )
+        return cls(exclude_columns=d.get('exclude_columns', None), include_columns=d.get('include_columns', None), primary_keys=d.get('primary_keys', None), salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None), scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType), sequence_by=d.get('sequence_by', None))
+
+

 class TableSpecificConfigScdType(Enum):
     """The SCD type to use to ingest the table."""
-
-    SCD_TYPE_1 = "SCD_TYPE_1"
-    SCD_TYPE_2 = "SCD_TYPE_2"
-
+
+    SCD_TYPE_1 = 'SCD_TYPE_1'
+    SCD_TYPE_2 = 'SCD_TYPE_2'
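# A sketch of per-table ingestion tuning: `include_columns` and
# `exclude_columns` are mutually exclusive, and SCD type 2 keeps history
# for rows ordered by `sequence_by`. Column names are hypothetical.
from databricks.sdk.service.pipelines import (TableSpecificConfig,
                                              TableSpecificConfigScdType)

config = TableSpecificConfig(
    primary_keys=["order_id"],
    include_columns=["order_id", "amount", "updated_at"],
    sequence_by=["updated_at"],
    scd_type=TableSpecificConfigScdType.SCD_TYPE_2,
)
assert config.as_dict()["scd_type"] == "SCD_TYPE_2"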

 @dataclass
 class UpdateInfo:
     cause: Optional[UpdateInfoCause] = None
     """What triggered this update."""
-
+
     cluster_id: Optional[str] = None
     """The ID of the cluster that the update is running on."""
-
+
     config: Optional[PipelineSpec] = None
     """The pipeline configuration with system defaults applied where unspecified by the user. Not
     returned by ListUpdates."""
-
+
     creation_time: Optional[int] = None
     """The time when this update was created."""
-
+
     full_refresh: Optional[bool] = None
     """If true, this update will reset all tables before running."""
-
+
     full_refresh_selection: Optional[List[str]] = None
     """A list of tables to update with fullRefresh. If both refresh_selection and
     full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
     that the states of the table will be reset before the refresh."""
-
+
     pipeline_id: Optional[str] = None
     """The ID of the pipeline."""
-
+
     refresh_selection: Optional[List[str]] = None
     """A list of tables to update without fullRefresh. If both refresh_selection and
     full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
     that the states of the table will be reset before the refresh."""
-
+
     state: Optional[UpdateInfoState] = None
     """The update state."""
-
+
     update_id: Optional[str] = None
     """The ID of this update."""
-
+
     validate_only: Optional[bool] = None
     """If true, this update only validates the correctness of pipeline source code but does not
     materialize or publish any datasets."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause.value
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.config:
-            body["config"] = self.config.as_dict()
-        if self.creation_time is not None:
-            body["creation_time"] = self.creation_time
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.full_refresh_selection:
-            body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.refresh_selection:
-            body["refresh_selection"] = [v for v in self.refresh_selection]
-        if self.state is not None:
-            body["state"] = self.state.value
-        if self.update_id is not None:
-            body["update_id"] = self.update_id
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
+        if self.cause is not None: body['cause'] = self.cause.value
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.config: body['config'] = self.config.as_dict()
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = [v for v in self.full_refresh_selection]
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = [v for v in self.refresh_selection]
+        if self.state is not None: body['state'] = self.state.value
+        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cause is not None:
-            body["cause"] = self.cause
-        if self.cluster_id is not None:
-            body["cluster_id"] = self.cluster_id
-        if self.config:
-            body["config"] = self.config
-        if self.creation_time is not None:
-            body["creation_time"] = self.creation_time
-        if self.full_refresh is not None:
-            body["full_refresh"] = self.full_refresh
-        if self.full_refresh_selection:
-            body["full_refresh_selection"] = self.full_refresh_selection
-        if self.pipeline_id is not None:
-            body["pipeline_id"] = self.pipeline_id
-        if self.refresh_selection:
-            body["refresh_selection"] = self.refresh_selection
-        if self.state is not None:
-            body["state"] = self.state
-        if self.update_id is not None:
-            body["update_id"] = self.update_id
-        if self.validate_only is not None:
-            body["validate_only"] = self.validate_only
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.config: body['config'] = self.config
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
+        if self.state is not None: body['state'] = self.state
+        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateInfo:
         """Deserializes the UpdateInfo from a dictionary."""
-        return cls(
-            cause=_enum(d, "cause", UpdateInfoCause),
-            cluster_id=d.get("cluster_id", None),
-            config=_from_dict(d, "config", PipelineSpec),
-            creation_time=d.get("creation_time", None),
-            full_refresh=d.get("full_refresh", None),
-            full_refresh_selection=d.get("full_refresh_selection", None),
-            pipeline_id=d.get("pipeline_id", None),
-            refresh_selection=d.get("refresh_selection", None),
-            state=_enum(d, "state", UpdateInfoState),
-            update_id=d.get("update_id", None),
-            validate_only=d.get("validate_only", None),
-        )
+        return cls(cause=_enum(d, 'cause', UpdateInfoCause), cluster_id=d.get('cluster_id', None), config=_from_dict(d, 'config', PipelineSpec), creation_time=d.get('creation_time', None), full_refresh=d.get('full_refresh', None), full_refresh_selection=d.get('full_refresh_selection', None), pipeline_id=d.get('pipeline_id', None), refresh_selection=d.get('refresh_selection', None), state=_enum(d, 'state', UpdateInfoState), update_id=d.get('update_id', None), validate_only=d.get('validate_only', None))
+
+

 class UpdateInfoCause(Enum):
     """What triggered this update."""
-
-    API_CALL = "API_CALL"
-    INFRASTRUCTURE_MAINTENANCE = "INFRASTRUCTURE_MAINTENANCE"
-    JOB_TASK = "JOB_TASK"
-    RETRY_ON_FAILURE = "RETRY_ON_FAILURE"
-    SCHEMA_CHANGE = "SCHEMA_CHANGE"
-    SERVICE_UPGRADE = "SERVICE_UPGRADE"
-    USER_ACTION = "USER_ACTION"
-
+
+    API_CALL = 'API_CALL'
+    INFRASTRUCTURE_MAINTENANCE = 'INFRASTRUCTURE_MAINTENANCE'
+    JOB_TASK = 'JOB_TASK'
+    RETRY_ON_FAILURE = 'RETRY_ON_FAILURE'
+    SCHEMA_CHANGE = 'SCHEMA_CHANGE'
+    SERVICE_UPGRADE = 'SERVICE_UPGRADE'
+    USER_ACTION = 'USER_ACTION'

 class UpdateInfoState(Enum):
     """The update state."""
-
-    CANCELED = "CANCELED"
-    COMPLETED = "COMPLETED"
-    CREATED = "CREATED"
-    FAILED = "FAILED"
-    INITIALIZING = "INITIALIZING"
-    QUEUED = "QUEUED"
-    RESETTING = "RESETTING"
-    RUNNING = "RUNNING"
-    SETTING_UP_TABLES = "SETTING_UP_TABLES"
-    STOPPING = "STOPPING"
-    WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES"
-
+
+    CANCELED = 'CANCELED'
+    COMPLETED = 'COMPLETED'
+    CREATED = 'CREATED'
+    FAILED = 'FAILED'
+    INITIALIZING = 'INITIALIZING'
+    QUEUED = 'QUEUED'
+    RESETTING = 'RESETTING'
+    RUNNING = 'RUNNING'
+    SETTING_UP_TABLES = 'SETTING_UP_TABLES'
+    STOPPING = 'STOPPING'
+    WAITING_FOR_RESOURCES = 'WAITING_FOR_RESOURCES'

 @dataclass
 class UpdateStateInfo:
     creation_time: Optional[str] = None
-
+
     state: Optional[UpdateStateInfoState] = None
     """The update state."""
-
+
     update_id: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateStateInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.creation_time is not None:
-            body["creation_time"] = self.creation_time
-        if self.state is not None:
-            body["state"] = self.state.value
-        if self.update_id is not None:
-            body["update_id"] = self.update_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.state is not None: body['state'] = self.state.value
+        if self.update_id is not None: body['update_id'] = self.update_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateStateInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.creation_time is not None:
-            body["creation_time"] = self.creation_time
-        if self.state is not None:
-            body["state"] = self.state
-        if self.update_id is not None:
-            body["update_id"] = self.update_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.state is not None: body['state'] = self.state
+        if self.update_id is not None: body['update_id'] = self.update_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateStateInfo:
         """Deserializes the UpdateStateInfo from a dictionary."""
-        return cls(
-            creation_time=d.get("creation_time", None),
-            state=_enum(d, "state", UpdateStateInfoState),
-            update_id=d.get("update_id", None),
-        )
+        return cls(creation_time=d.get('creation_time', None), state=_enum(d, 'state', UpdateStateInfoState), update_id=d.get('update_id', None))
+
+

 class UpdateStateInfoState(Enum):
     """The update state."""
+
+    CANCELED = 'CANCELED'
+    COMPLETED = 'COMPLETED'
+    CREATED = 'CREATED'
+    FAILED = 'FAILED'
+    INITIALIZING = 'INITIALIZING'
+    QUEUED = 'QUEUED'
+    RESETTING = 'RESETTING'
+    RUNNING = 'RUNNING'
+    SETTING_UP_TABLES = 'SETTING_UP_TABLES'
+    STOPPING = 'STOPPING'
+    WAITING_FOR_RESOURCES = 'WAITING_FOR_RESOURCES'

-    CANCELED = "CANCELED"
-    COMPLETED = "COMPLETED"
-    CREATED = "CREATED"
-    FAILED = "FAILED"
-    INITIALIZING = "INITIALIZING"
-    QUEUED = "QUEUED"
-    RESETTING = "RESETTING"
-    RUNNING = "RUNNING"
-    SETTING_UP_TABLES = "SETTING_UP_TABLES"
-    STOPPING = "STOPPING"
-    WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES"

 class PipelinesAPI:
     """The Delta Live Tables API allows you to create, edit, delete, start, and view details about
     pipelines.
-
+
     Delta Live Tables is a framework for building reliable, maintainable, and testable data
     processing pipelines. You define the transformations to perform on your data, and Delta Live
     Tables manages task orchestration, cluster management, monitoring, data quality, and error
     handling.
-
+
     Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta
     Live Tables manages how your data is transformed based on a target schema you define for each
     processing step. You can also enforce data quality with Delta Live Tables expectations.
     Expectations allow you to define expected data quality and specify how to handle records that
     fail those expectations."""
-
+
     def __init__(self, api_client):
         self._api = api_client
+
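# A sketch of enumerating pipelines through a configured client; the
# `list_pipelines` method is assumed from this API's full definition and
# yields the PipelineStateInfo records defined above.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for info in w.pipelines.list_pipelines(max_results=10):
    print(info.pipeline_id, info.name, info.state)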
-    def wait_get_pipeline_idle(
-        self,
-        pipeline_id: str,
-        timeout=timedelta(minutes=20),
-        callback: Optional[Callable[[GetPipelineResponse], None]] = None,
-    ) -> GetPipelineResponse:
-        deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.IDLE,)
-        failure_states = (PipelineState.FAILED,)
-        status_message = "polling..."
-        attempt = 1
-        while time.time() < deadline:
-            poll = self.get(pipeline_id=pipeline_id)
-            status = poll.state
-            status_message = poll.cause
-            if status in target_states:
-                return poll
-            if callback:
-                callback(poll)
-            if status in failure_states:
-                msg = f"failed to reach IDLE, got {status}: {status_message}"
-                raise OperationFailed(msg)
-            prefix = f"pipeline_id={pipeline_id}"
-            sleep = attempt
-            if sleep > 10:
-                # sleep 10s max per attempt
-                sleep = 10
-            _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)")
-            time.sleep(sleep + random.random())
-            attempt += 1
-        raise TimeoutError(f"timed out after {timeout}: {status_message}")
-
-    def create(
-        self,
-        *,
-        allow_duplicate_names: Optional[bool] = None,
-        budget_policy_id: Optional[str] = None,
-        catalog: Optional[str] = None,
-        channel: Optional[str] = None,
-        clusters: Optional[List[PipelineCluster]] = None,
-        configuration: Optional[Dict[str, str]] = None,
-        continuous: Optional[bool] = None,
-        deployment: Optional[PipelineDeployment] = None,
-        development: Optional[bool] = None,
-        dry_run: Optional[bool] = None,
-        edition: Optional[str] = None,
-        event_log: Optional[EventLogSpec] = None,
-        filters: Optional[Filters] = None,
-        gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None,
-        id: Optional[str] = None,
-        ingestion_definition: Optional[IngestionPipelineDefinition] = None,
-        libraries: Optional[List[PipelineLibrary]] = None,
-        name: Optional[str] = None,
-        notifications: Optional[List[Notifications]] = None,
-        photon: Optional[bool] = None,
-        restart_window: Optional[RestartWindow] = None,
-        root_path: Optional[str] = None,
-        run_as: Optional[RunAs] = None,
-        schema: Optional[str] = None,
-        serverless: Optional[bool] = None,
-        storage: Optional[str] = None,
-        target: Optional[str] = None,
-        trigger: Optional[PipelineTrigger] = None,
-    ) -> CreatePipelineResponse:
-        """Create a pipeline.
+
+
+
+    def wait_get_pipeline_idle(self, pipeline_id: str,
+        timeout=timedelta(minutes=20), callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (PipelineState.IDLE, )
+        failure_states = (PipelineState.FAILED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get(pipeline_id=pipeline_id)
+            status = poll.state
+            status_message = poll.cause
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach IDLE, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"pipeline_id={pipeline_id}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
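# A sketch of the waiter above: polling backs off roughly 1s, 2s, ...,
# capped at 10s plus jitter, until the pipeline reaches IDLE, enters
# FAILED, or the timeout elapses. The pipeline ID is hypothetical.
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
pipeline = w.pipelines.wait_get_pipeline_idle(
    pipeline_id="1234-abcd",
    timeout=timedelta(minutes=30),
    callback=lambda p: print(p.state),  # invoked once per poll
)
print(pipeline.state)  # PipelineState.IDLE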
+
+
+
+    def create(self
+
+               , *
+               , allow_duplicate_names: Optional[bool] = None, budget_policy_id: Optional[str] = None, catalog: Optional[str] = None, channel: Optional[str] = None, clusters: Optional[List[PipelineCluster]] = None, configuration: Optional[Dict[str,str]] = None, continuous: Optional[bool] = None, deployment: Optional[PipelineDeployment] = None, development: Optional[bool] = None, dry_run: Optional[bool] = None, edition: Optional[str] = None, event_log: Optional[EventLogSpec] = None, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, restart_window: Optional[RestartWindow] = None, root_path: Optional[str] = None, run_as: Optional[RunAs] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, tags: Optional[Dict[str,str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None) -> CreatePipelineResponse:
+        """Create a pipeline.
+
         Creates a new data processing pipeline based on the requested configuration. If successful, this
         method returns the ID of the new pipeline.
-
+
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name conflicts with that of another pipeline.
         :param budget_policy_id: str (optional)
@@ -3627,7 +2911,7 @@ def create(
         :param run_as: :class:`RunAs` (optional)
           Write-only setting, available only in Create/Update calls. Specifies the user or service principal
           that the pipeline runs as. If not specified, the pipeline runs as the user who created the
          pipeline.
-
+
           Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
           thrown.
         :param schema: str (optional)
@@ -3636,187 +2920,199 @@ def create(
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
           DBFS root directory for storing checkpoints and tables.
+        :param tags: Dict[str,str] (optional)
+          A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and
+          are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline.
         :param target: str (optional)
           Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target`
           must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is
           deprecated for pipeline creation in favor of the `schema` field.
         :param trigger: :class:`PipelineTrigger` (optional)
           Which pipeline trigger to use. Deprecated: Use `continuous` instead.
-
+
         :returns: :class:`CreatePipelineResponse`
         """
         body = {}
-        if allow_duplicate_names is not None:
-            body["allow_duplicate_names"] = allow_duplicate_names
-        if budget_policy_id is not None:
-            body["budget_policy_id"] = budget_policy_id
-        if catalog is not None:
-            body["catalog"] = catalog
-        if channel is not None:
-            body["channel"] = channel
-        if clusters is not None:
-            body["clusters"] = [v.as_dict() for v in clusters]
-        if configuration is not None:
-            body["configuration"] = configuration
-        if continuous is not None:
-            body["continuous"] = continuous
-        if deployment is not None:
-            body["deployment"] = deployment.as_dict()
-        if development is not None:
-            body["development"] = development
-        if dry_run is not None:
-            body["dry_run"] = dry_run
-        if edition is not None:
-            body["edition"] = edition
-        if event_log is not None:
-            body["event_log"] = event_log.as_dict()
-        if filters is not None:
-            body["filters"] = filters.as_dict()
-        if gateway_definition is not None:
-            body["gateway_definition"] = gateway_definition.as_dict()
-        if id is not None:
-            body["id"] = id
-        if ingestion_definition is not None:
-            body["ingestion_definition"] = ingestion_definition.as_dict()
-        if libraries is not None:
-            body["libraries"] = [v.as_dict() for v in libraries]
-        if name is not None:
-            body["name"] = name
-        if notifications is not None:
-            body["notifications"] = [v.as_dict() for v in notifications]
-        if photon is not None:
-            body["photon"] = photon
-        if restart_window is not None:
-            body["restart_window"] = restart_window.as_dict()
-        if root_path is not None:
-            body["root_path"] = root_path
-        if run_as is not None:
-            body["run_as"] = run_as.as_dict()
-        if schema is not None:
-            body["schema"] = schema
-        if serverless is not None:
-            body["serverless"] = serverless
-        if storage is not None:
-            body["storage"] = storage
-        if target is not None:
-            body["target"] = target
-        if trigger is not None:
-            body["trigger"] = trigger.as_dict()
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/pipelines", body=body, headers=headers)
+        if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
+        if catalog is not None: body['catalog'] = catalog
+        if channel is not None: body['channel'] = channel
+        if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters]
+        if configuration is not None: body['configuration'] = configuration
+        if continuous is not None: body['continuous'] = continuous
+        if deployment is not None: body['deployment'] = deployment.as_dict()
+        if development is not None: body['development'] = development
+        if dry_run is not None: body['dry_run'] = dry_run
+        if edition is not None: body['edition'] = edition
+        if event_log is not None: body['event_log'] = event_log.as_dict()
+        if filters is not None: body['filters'] = filters.as_dict()
+        if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict()
+        if id is not None: body['id'] = id
+        if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict()
+        if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries]
+        if name is not None: body['name'] = name
+        if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
+        if photon is not None: body['photon'] = photon
+        if restart_window is not None: body['restart_window'] = restart_window.as_dict()
+        if
root_path is not None: body['root_path'] = root_path + if run_as is not None: body['run_as'] = run_as.as_dict() + if schema is not None: body['schema'] = schema + if serverless is not None: body['serverless'] = serverless + if storage is not None: body['storage'] = storage + if tags is not None: body['tags'] = tags + if target is not None: body['target'] = target + if trigger is not None: body['trigger'] = trigger.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/pipelines', body=body + + , headers=headers + ) return CreatePipelineResponse.from_dict(res) - def delete(self, pipeline_id: str): - """Delete a pipeline. - - Deletes a pipeline. + + + + def delete(self + , pipeline_id: str + ): + """Delete a pipeline. + + Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and + its tables. You cannot undo this action. + :param pipeline_id: str - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/pipelines/{pipeline_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/pipelines/{pipeline_id}", headers=headers) + + + - def get(self, pipeline_id: str) -> GetPipelineResponse: + def get(self + , pipeline_id: str + ) -> GetPipelineResponse: """Get a pipeline. - + :param pipeline_id: str - + :returns: :class:`GetPipelineResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}' + + , headers=headers + ) return GetPipelineResponse.from_dict(res) - def get_permission_levels(self, pipeline_id: str) -> GetPipelinePermissionLevelsResponse: - """Get pipeline permission levels. + + + + def get_permission_levels(self + , pipeline_id: str + ) -> GetPipelinePermissionLevelsResponse: + """Get pipeline permission levels. + Gets the permission levels that a user can have on an object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. - + :returns: :class:`GetPipelinePermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/pipelines/{pipeline_id}/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/pipelines/{pipeline_id}/permissionLevels' + + , headers=headers + ) return GetPipelinePermissionLevelsResponse.from_dict(res) - def get_permissions(self, pipeline_id: str) -> PipelinePermissions: - """Get pipeline permissions. + + + + def get_permissions(self + , pipeline_id: str + ) -> PipelinePermissions: + """Get pipeline permissions. + Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. - + :returns: :class:`PipelinePermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/pipelines/{pipeline_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/pipelines/{pipeline_id}' + + , headers=headers + ) return PipelinePermissions.from_dict(res) - def get_update(self, pipeline_id: str, update_id: str) -> GetUpdateResponse: - """Get a pipeline update. 
+ + + + def get_update(self + , pipeline_id: str, update_id: str + ) -> GetUpdateResponse: + """Get a pipeline update. + Gets an update from an active pipeline. - + :param pipeline_id: str The ID of the pipeline. :param update_id: str The ID of the update. - + :returns: :class:`GetUpdateResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates/{update_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}/updates/{update_id}' + + , headers=headers + ) return GetUpdateResponse.from_dict(res) - def list_pipeline_events( - self, - pipeline_id: str, - *, - filter: Optional[str] = None, - max_results: Optional[int] = None, - order_by: Optional[List[str]] = None, - page_token: Optional[str] = None, - ) -> Iterator[PipelineEvent]: - """List pipeline events. + + + + def list_pipeline_events(self + , pipeline_id: str + , * + , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[PipelineEvent]: + """List pipeline events. + Retrieves events for a pipeline. - + :param pipeline_id: str The pipeline to return events for. :param filter: str (optional) Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp > 'TIMESTAMP' (or >=,<,<=,=) - + Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp> '2021-07-22T06:37:33.083Z' :param max_results: int (optional) @@ -3830,51 +3126,51 @@ def list_pipeline_events( Page token returned by previous call. This field is mutually exclusive with all fields in this request except max_results. An error is returned if any fields other than max_results are set when this field is set. - + :returns: Iterator over :class:`PipelineEvent` """ - + query = {} - if filter is not None: - query["filter"] = filter - if max_results is not None: - query["max_results"] = max_results - if order_by is not None: - query["order_by"] = [v for v in order_by] - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if filter is not None: query['filter'] = filter + if max_results is not None: query['max_results'] = max_results + if order_by is not None: query['order_by'] = [v for v in order_by] + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/events", query=query, headers=headers) - if "events" in json: - for v in json["events"]: - yield PipelineEvent.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_pipelines( - self, - *, - filter: Optional[str] = None, - max_results: Optional[int] = None, - order_by: Optional[List[str]] = None, - page_token: Optional[str] = None, - ) -> Iterator[PipelineStateInfo]: - """List pipelines. 
+ json = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}/events', query=query + + , headers=headers + ) + if 'events' in json: + for v in json['events']: + yield PipelineEvent.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Lists pipelines defined in the Delta Live Tables system. + + + + def list_pipelines(self + + , * + , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[PipelineStateInfo]: + """List pipelines. + + Lists pipelines defined in the Delta Live Tables system. + :param filter: str (optional) Select a subset of results based on the specified criteria. The supported filters are: - + * `notebook='<path>'` to select pipelines that reference the provided notebook path. * `name LIKE '[pattern]'` to select pipelines with a name that matches pattern. Wildcards are supported, for example: `name LIKE '%shopping%'` - + Composite filters are not supported. This field is optional. :param max_results: int (optional) The maximum number of entries to return in a single page. The system may return fewer than @@ -3886,44 +3182,44 @@ def list_pipelines( default is id asc. This field is optional. :param page_token: str (optional) Page token returned by previous call - + :returns: Iterator over :class:`PipelineStateInfo` """ - + query = {} - if filter is not None: - query["filter"] = filter - if max_results is not None: - query["max_results"] = max_results - if order_by is not None: - query["order_by"] = [v for v in order_by] - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if filter is not None: query['filter'] = filter + if max_results is not None: query['max_results'] = max_results + if order_by is not None: query['order_by'] = [v for v in order_by] + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/pipelines", query=query, headers=headers) - if "statuses" in json: - for v in json["statuses"]: - yield PipelineStateInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_updates( - self, - pipeline_id: str, - *, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - until_update_id: Optional[str] = None, - ) -> ListUpdatesResponse: - """List pipeline updates. + json = self._api.do('GET','/api/2.0/pipelines', query=query + + , headers=headers + ) + if 'statuses' in json: + for v in json['statuses']: + yield PipelineStateInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - List updates for an active pipeline. + + + + def list_updates(self + , pipeline_id: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None, until_update_id: Optional[str] = None) -> ListUpdatesResponse: + """List pipeline updates. + + List updates for an active pipeline. + :param pipeline_id: str The pipeline to return updates for. :param max_results: int (optional) @@ -3932,64 +3228,64 @@ def list_updates( Page token returned by previous call :param until_update_id: str (optional) If present, returns updates until and including this update_id.
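As an illustration of the event iterator above, a sketch of streaming ERROR/WARN events for one pipeline; `w` and the pipeline ID are placeholders as before, and pagination is handled internally by the generator:

    for event in w.pipelines.list_pipeline_events(
        pipeline_id="1234-abcd",  # placeholder
        filter="level in ('ERROR', 'WARN')",
        max_results=100,
    ):
        print(event.timestamp, event.message)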
- + :returns: :class:`ListUpdatesResponse` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - if until_update_id is not None: - query["until_update_id"] = until_update_id - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates", query=query, headers=headers) + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + if until_update_id is not None: query['until_update_id'] = until_update_id + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}/updates', query=query + + , headers=headers + ) return ListUpdatesResponse.from_dict(res) - def set_permissions( - self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None - ) -> PipelinePermissions: - """Set pipeline permissions. + + + + def set_permissions(self + , pipeline_id: str + , * + , access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions: + """Set pipeline permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional) - + :returns: :class:`PipelinePermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/pipelines/{pipeline_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/pipelines/{pipeline_id}', body=body + + , headers=headers + ) return PipelinePermissions.from_dict(res) - def start_update( - self, - pipeline_id: str, - *, - cause: Optional[StartUpdateCause] = None, - full_refresh: Optional[bool] = None, - full_refresh_selection: Optional[List[str]] = None, - refresh_selection: Optional[List[str]] = None, - validate_only: Optional[bool] = None, - ) -> StartUpdateResponse: - """Start a pipeline. + + + + def start_update(self + , pipeline_id: str + , * + , cause: Optional[StartUpdateCause] = None, full_refresh: Optional[bool] = None, full_refresh_selection: Optional[List[str]] = None, refresh_selection: Optional[List[str]] = None, validate_only: Optional[bool] = None) -> StartUpdateResponse: + """Start a pipeline. + Starts a new update for the pipeline. If there is already an active update for the pipeline, the request will fail and the active update will remain running. - + :param pipeline_id: str :param cause: :class:`StartUpdateCause` (optional) What triggered this update. @@ -4006,90 +3302,70 @@ def start_update( :param validate_only: bool (optional) If true, this update only validates the correctness of pipeline source code but does not materialize or publish any datasets. 
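A sketch of a validation-only run through `start_update` as documented above, under the same placeholder assumptions:

    resp = w.pipelines.start_update(
        pipeline_id="1234-abcd",  # placeholder
        validate_only=True,  # check source code without materializing datasets
    )
    print(resp.update_id)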
- + :returns: :class:`StartUpdateResponse` """ body = {} - if cause is not None: - body["cause"] = cause.value - if full_refresh is not None: - body["full_refresh"] = full_refresh - if full_refresh_selection is not None: - body["full_refresh_selection"] = [v for v in full_refresh_selection] - if refresh_selection is not None: - body["refresh_selection"] = [v for v in refresh_selection] - if validate_only is not None: - body["validate_only"] = validate_only - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/updates", body=body, headers=headers) + if cause is not None: body['cause'] = cause.value + if full_refresh is not None: body['full_refresh'] = full_refresh + if full_refresh_selection is not None: body['full_refresh_selection'] = [v for v in full_refresh_selection] + if refresh_selection is not None: body['refresh_selection'] = [v for v in refresh_selection] + if validate_only is not None: body['validate_only'] = validate_only + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/pipelines/{pipeline_id}/updates', body=body + + , headers=headers + ) return StartUpdateResponse.from_dict(res) - def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]: - """Stop a pipeline. + + + + def stop(self + , pipeline_id: str + ) -> Wait[GetPipelineResponse]: + """Stop a pipeline. + Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this request is a no-op. - + :param pipeline_id: str - + :returns: Long-running operation waiter for :class:`GetPipelineResponse`. See :method:wait_get_pipeline_idle for more details. """ + + headers = {'Accept': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/pipelines/{pipeline_id}/stop' + + , headers=headers + ) + return Wait(self.wait_get_pipeline_idle + , response = StopPipelineResponse.from_dict(op_response) + , pipeline_id=pipeline_id) - headers = { - "Accept": "application/json", - } - - op_response = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/stop", headers=headers) - return Wait( - self.wait_get_pipeline_idle, response=StopPipelineResponse.from_dict(op_response), pipeline_id=pipeline_id - ) - - def stop_and_wait(self, pipeline_id: str, timeout=timedelta(minutes=20)) -> GetPipelineResponse: + + def stop_and_wait(self + , pipeline_id: str + , + timeout=timedelta(minutes=20)) -> GetPipelineResponse: return self.stop(pipeline_id=pipeline_id).result(timeout=timeout) + + + - def update( - self, - pipeline_id: str, - *, - allow_duplicate_names: Optional[bool] = None, - budget_policy_id: Optional[str] = None, - catalog: Optional[str] = None, - channel: Optional[str] = None, - clusters: Optional[List[PipelineCluster]] = None, - configuration: Optional[Dict[str, str]] = None, - continuous: Optional[bool] = None, - deployment: Optional[PipelineDeployment] = None, - development: Optional[bool] = None, - edition: Optional[str] = None, - event_log: Optional[EventLogSpec] = None, - expected_last_modified: Optional[int] = None, - filters: Optional[Filters] = None, - gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, - id: Optional[str] = None, - ingestion_definition: Optional[IngestionPipelineDefinition] = None, - libraries: Optional[List[PipelineLibrary]] = None, - name: Optional[str] = None, - notifications: Optional[List[Notifications]] = None, - photon: Optional[bool] = None, - restart_window: 
Optional[RestartWindow] = None, - root_path: Optional[str] = None, - run_as: Optional[RunAs] = None, - schema: Optional[str] = None, - serverless: Optional[bool] = None, - storage: Optional[str] = None, - target: Optional[str] = None, - trigger: Optional[PipelineTrigger] = None, - ): + def update(self + , pipeline_id: str + , * + , allow_duplicate_names: Optional[bool] = None, budget_policy_id: Optional[str] = None, catalog: Optional[str] = None, channel: Optional[str] = None, clusters: Optional[List[PipelineCluster]] = None, configuration: Optional[Dict[str,str]] = None, continuous: Optional[bool] = None, deployment: Optional[PipelineDeployment] = None, development: Optional[bool] = None, edition: Optional[str] = None, event_log: Optional[EventLogSpec] = None, expected_last_modified: Optional[int] = None, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, restart_window: Optional[RestartWindow] = None, root_path: Optional[str] = None, run_as: Optional[RunAs] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, tags: Optional[Dict[str,str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None): """Edit a pipeline. - + Updates a pipeline with the supplied configuration. - + :param pipeline_id: str Unique identifier for this pipeline. :param allow_duplicate_names: bool (optional) @@ -4145,7 +3421,7 @@ def update( :param run_as: :class:`RunAs` (optional) Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. :param schema: str (optional) @@ -4154,99 +3430,83 @@ def update( Whether serverless compute is enabled for this pipeline. :param storage: str (optional) DBFS root directory for storing checkpoints and tables. + :param tags: Dict[str,str] (optional) + A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and + are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline. :param target: str (optional) Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. 
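Since `stop` returns a `Wait` handle rather than blocking, the two usage styles sketched below are equivalent; the timeout and pipeline ID are placeholders:

    from datetime import timedelta

    # Convenience wrapper defined above:
    idle = w.pipelines.stop_and_wait(pipeline_id="1234-abcd", timeout=timedelta(minutes=5))

    # Keeping the Wait handle explicitly:
    idle = w.pipelines.stop(pipeline_id="1234-abcd").result(timeout=timedelta(minutes=5))
    print(idle.state)  # PipelineState.IDLE once the active update is cancelled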
- - + + """ body = {} - if allow_duplicate_names is not None: - body["allow_duplicate_names"] = allow_duplicate_names - if budget_policy_id is not None: - body["budget_policy_id"] = budget_policy_id - if catalog is not None: - body["catalog"] = catalog - if channel is not None: - body["channel"] = channel - if clusters is not None: - body["clusters"] = [v.as_dict() for v in clusters] - if configuration is not None: - body["configuration"] = configuration - if continuous is not None: - body["continuous"] = continuous - if deployment is not None: - body["deployment"] = deployment.as_dict() - if development is not None: - body["development"] = development - if edition is not None: - body["edition"] = edition - if event_log is not None: - body["event_log"] = event_log.as_dict() - if expected_last_modified is not None: - body["expected_last_modified"] = expected_last_modified - if filters is not None: - body["filters"] = filters.as_dict() - if gateway_definition is not None: - body["gateway_definition"] = gateway_definition.as_dict() - if id is not None: - body["id"] = id - if ingestion_definition is not None: - body["ingestion_definition"] = ingestion_definition.as_dict() - if libraries is not None: - body["libraries"] = [v.as_dict() for v in libraries] - if name is not None: - body["name"] = name - if notifications is not None: - body["notifications"] = [v.as_dict() for v in notifications] - if photon is not None: - body["photon"] = photon - if restart_window is not None: - body["restart_window"] = restart_window.as_dict() - if root_path is not None: - body["root_path"] = root_path - if run_as is not None: - body["run_as"] = run_as.as_dict() - if schema is not None: - body["schema"] = schema - if serverless is not None: - body["serverless"] = serverless - if storage is not None: - body["storage"] = storage - if target is not None: - body["target"] = target - if trigger is not None: - body["trigger"] = trigger.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/pipelines/{pipeline_id}", body=body, headers=headers) - - def update_permissions( - self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None - ) -> PipelinePermissions: - """Update pipeline permissions. 
+ if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names + if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id + if catalog is not None: body['catalog'] = catalog + if channel is not None: body['channel'] = channel + if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters] + if configuration is not None: body['configuration'] = configuration + if continuous is not None: body['continuous'] = continuous + if deployment is not None: body['deployment'] = deployment.as_dict() + if development is not None: body['development'] = development + if edition is not None: body['edition'] = edition + if event_log is not None: body['event_log'] = event_log.as_dict() + if expected_last_modified is not None: body['expected_last_modified'] = expected_last_modified + if filters is not None: body['filters'] = filters.as_dict() + if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict() + if id is not None: body['id'] = id + if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict() + if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] + if name is not None: body['name'] = name + if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] + if photon is not None: body['photon'] = photon + if restart_window is not None: body['restart_window'] = restart_window.as_dict() + if root_path is not None: body['root_path'] = root_path + if run_as is not None: body['run_as'] = run_as.as_dict() + if schema is not None: body['schema'] = schema + if serverless is not None: body['serverless'] = serverless + if storage is not None: body['storage'] = storage + if tags is not None: body['tags'] = tags + if target is not None: body['target'] = target + if trigger is not None: body['trigger'] = trigger.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/pipelines/{pipeline_id}', body=body + + , headers=headers + ) + - Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object. + + + + def update_permissions(self + , pipeline_id: str + , * + , access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions: + """Update pipeline permissions. + + Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object. + :param pipeline_id: str The pipeline for which to get or manage permissions. 
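An illustrative `update_permissions` call that grants one user CAN_RUN without replacing existing grants (contrast with `set_permissions` above, which replaces them); the user name is a placeholder, and the access-control types are assumed to come from this same module:

    from databricks.sdk.service.pipelines import (PipelineAccessControlRequest,
                                                  PipelinePermissionLevel)

    perms = w.pipelines.update_permissions(
        pipeline_id="1234-abcd",  # placeholder
        access_control_list=[
            PipelineAccessControlRequest(
                user_name="someone@example.com",  # placeholder
                permission_level=PipelinePermissionLevel.CAN_RUN,
            )
        ],
    )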
:param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional) - + :returns: :class:`PipelinePermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/permissions/pipelines/{pipeline_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/pipelines/{pipeline_id}', body=body + + , headers=headers + ) return PipelinePermissions.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 42feb57bc..91063317c 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -1,158 +1,147 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AwsCredentials: sts_role: Optional[StsRole] = None - + def as_dict(self) -> dict: """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sts_role: - body["sts_role"] = self.sts_role.as_dict() + if self.sts_role: body['sts_role'] = self.sts_role.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.sts_role: - body["sts_role"] = self.sts_role + if self.sts_role: body['sts_role'] = self.sts_role return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsCredentials: """Deserializes the AwsCredentials from a dictionary.""" - return cls(sts_role=_from_dict(d, "sts_role", StsRole)) + return cls(sts_role=_from_dict(d, 'sts_role', StsRole)) + + @dataclass class AwsKeyInfo: key_arn: str """The AWS KMS key's Amazon Resource Name (ARN).""" - + key_region: str """The AWS KMS key region.""" - + key_alias: Optional[str] = None """The AWS KMS key alias.""" - + reuse_key_for_cluster_volumes: Optional[bool] = None """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to `true` or omitted, the key is also used to encrypt cluster EBS volumes. 
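Every dataclass in this regenerated module keeps the same serialization contract: `as_dict` emits only the fields that are set, and `from_dict` rebuilds the instance. A minimal round trip with placeholder values:

    info = AwsKeyInfo(
        key_arn="arn:aws:kms:us-west-2:111122223333:key/placeholder",  # placeholder ARN
        key_region="us-west-2",
        reuse_key_for_cluster_volumes=False,
    )
    payload = info.as_dict()  # key_alias is unset, so it is omitted
    assert AwsKeyInfo.from_dict(payload) == info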
If you do not want to use this key for encrypting EBS volumes, set to `false`.""" - + def as_dict(self) -> dict: """Serializes the AwsKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key_alias is not None: - body["key_alias"] = self.key_alias - if self.key_arn is not None: - body["key_arn"] = self.key_arn - if self.key_region is not None: - body["key_region"] = self.key_region - if self.reuse_key_for_cluster_volumes is not None: - body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: body['key_alias'] = self.key_alias + if self.key_arn is not None: body['key_arn'] = self.key_arn + if self.key_region is not None: body['key_region'] = self.key_region + if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes return body def as_shallow_dict(self) -> dict: """Serializes the AwsKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.key_alias is not None: - body["key_alias"] = self.key_alias - if self.key_arn is not None: - body["key_arn"] = self.key_arn - if self.key_region is not None: - body["key_region"] = self.key_region - if self.reuse_key_for_cluster_volumes is not None: - body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: body['key_alias'] = self.key_alias + if self.key_arn is not None: body['key_arn'] = self.key_arn + if self.key_region is not None: body['key_region'] = self.key_region + if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsKeyInfo: """Deserializes the AwsKeyInfo from a dictionary.""" - return cls( - key_alias=d.get("key_alias", None), - key_arn=d.get("key_arn", None), - key_region=d.get("key_region", None), - reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None), - ) + return cls(key_alias=d.get('key_alias', None), key_arn=d.get('key_arn', None), key_region=d.get('key_region', None), reuse_key_for_cluster_volumes=d.get('reuse_key_for_cluster_volumes', None)) + + @dataclass class AzureWorkspaceInfo: resource_group: Optional[str] = None """Azure Resource Group name""" - + subscription_id: Optional[str] = None """Azure Subscription ID""" - + def as_dict(self) -> dict: """Serializes the AzureWorkspaceInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.resource_group is not None: - body["resource_group"] = self.resource_group - if self.subscription_id is not None: - body["subscription_id"] = self.subscription_id + if self.resource_group is not None: body['resource_group'] = self.resource_group + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureWorkspaceInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.resource_group is not None: - body["resource_group"] = self.resource_group - if self.subscription_id is not None: - body["subscription_id"] = self.subscription_id + if self.resource_group is not None: body['resource_group'] = self.resource_group + if self.subscription_id is not None: body['subscription_id'] = self.subscription_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureWorkspaceInfo: """Deserializes the AzureWorkspaceInfo from a dictionary.""" - return 
cls(resource_group=d.get("resource_group", None), subscription_id=d.get("subscription_id", None)) + return cls(resource_group=d.get('resource_group', None), subscription_id=d.get('subscription_id', None)) + + @dataclass class CloudResourceContainer: """The general workspace configurations that are specific to cloud providers.""" - + gcp: Optional[CustomerFacingGcpCloudResourceContainer] = None """The general workspace configurations that are specific to Google Cloud.""" - + def as_dict(self) -> dict: """Serializes the CloudResourceContainer into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gcp: - body["gcp"] = self.gcp.as_dict() + if self.gcp: body['gcp'] = self.gcp.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CloudResourceContainer into a shallow dictionary of its immediate attributes.""" body = {} - if self.gcp: - body["gcp"] = self.gcp + if self.gcp: body['gcp'] = self.gcp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CloudResourceContainer: """Deserializes the CloudResourceContainer from a dictionary.""" - return cls(gcp=_from_dict(d, "gcp", CustomerFacingGcpCloudResourceContainer)) + return cls(gcp=_from_dict(d, 'gcp', CustomerFacingGcpCloudResourceContainer)) + + @dataclass @@ -160,379 +149,325 @@ class CreateAwsKeyInfo: key_arn: str """The AWS KMS key's Amazon Resource Name (ARN). Note that the key's AWS region is inferred from the ARN.""" - + key_alias: Optional[str] = None """The AWS KMS key alias.""" - + reuse_key_for_cluster_volumes: Optional[bool] = None """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to `true` or omitted, the key is also used to encrypt cluster EBS volumes. To not use this key also for encrypting EBS volumes, set this to `false`.""" - + def as_dict(self) -> dict: """Serializes the CreateAwsKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key_alias is not None: - body["key_alias"] = self.key_alias - if self.key_arn is not None: - body["key_arn"] = self.key_arn - if self.reuse_key_for_cluster_volumes is not None: - body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: body['key_alias'] = self.key_alias + if self.key_arn is not None: body['key_arn'] = self.key_arn + if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes return body def as_shallow_dict(self) -> dict: """Serializes the CreateAwsKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.key_alias is not None: - body["key_alias"] = self.key_alias - if self.key_arn is not None: - body["key_arn"] = self.key_arn - if self.reuse_key_for_cluster_volumes is not None: - body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: body['key_alias'] = self.key_alias + if self.key_arn is not None: body['key_arn'] = self.key_arn + if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAwsKeyInfo: """Deserializes the CreateAwsKeyInfo from a dictionary.""" - return cls( - key_alias=d.get("key_alias", None), - key_arn=d.get("key_arn", None), - reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None), - ) + return cls(key_alias=d.get('key_alias', None), key_arn=d.get('key_arn', None), 
reuse_key_for_cluster_volumes=d.get('reuse_key_for_cluster_volumes', None)) + + @dataclass class CreateCredentialAwsCredentials: sts_role: Optional[CreateCredentialStsRole] = None - + def as_dict(self) -> dict: """Serializes the CreateCredentialAwsCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sts_role: - body["sts_role"] = self.sts_role.as_dict() + if self.sts_role: body['sts_role'] = self.sts_role.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialAwsCredentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.sts_role: - body["sts_role"] = self.sts_role + if self.sts_role: body['sts_role'] = self.sts_role return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialAwsCredentials: """Deserializes the CreateCredentialAwsCredentials from a dictionary.""" - return cls(sts_role=_from_dict(d, "sts_role", CreateCredentialStsRole)) + return cls(sts_role=_from_dict(d, 'sts_role', CreateCredentialStsRole)) + + @dataclass class CreateCredentialRequest: credentials_name: str """The human-readable name of the credential configuration object.""" - + aws_credentials: CreateCredentialAwsCredentials - + def as_dict(self) -> dict: """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials.as_dict() - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name + if self.aws_credentials: body['aws_credentials'] = self.aws_credentials.as_dict() + if self.credentials_name is not None: body['credentials_name'] = self.credentials_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name + if self.aws_credentials: body['aws_credentials'] = self.aws_credentials + if self.credentials_name is not None: body['credentials_name'] = self.credentials_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest: """Deserializes the CreateCredentialRequest from a dictionary.""" - return cls( - aws_credentials=_from_dict(d, "aws_credentials", CreateCredentialAwsCredentials), - credentials_name=d.get("credentials_name", None), - ) + return cls(aws_credentials=_from_dict(d, 'aws_credentials', CreateCredentialAwsCredentials), credentials_name=d.get('credentials_name', None)) + + @dataclass class CreateCredentialStsRole: role_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the cross account role.""" - + def as_dict(self) -> dict: """Serializes the CreateCredentialStsRole into a dictionary suitable for use as a JSON request body.""" body = {} - if self.role_arn is not None: - body["role_arn"] = self.role_arn + if self.role_arn is not None: body['role_arn'] = self.role_arn return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialStsRole into a shallow dictionary of its immediate attributes.""" body = {} - if self.role_arn is not None: - body["role_arn"] = self.role_arn + if self.role_arn is not None: body['role_arn'] = self.role_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialStsRole: """Deserializes the CreateCredentialStsRole from a dictionary.""" - return 
cls(role_arn=d.get("role_arn", None)) + return cls(role_arn=d.get('role_arn', None)) + + @dataclass class CreateCustomerManagedKeyRequest: use_cases: List[KeyUseCase] """The cases that the key can be used for.""" - + aws_key_info: Optional[CreateAwsKeyInfo] = None - + gcp_key_info: Optional[CreateGcpKeyInfo] = None - + def as_dict(self) -> dict: """Serializes the CreateCustomerManagedKeyRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info.as_dict() - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info.as_dict() - if self.use_cases: - body["use_cases"] = [v.value for v in self.use_cases] + if self.aws_key_info: body['aws_key_info'] = self.aws_key_info.as_dict() + if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info.as_dict() + if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases] return body def as_shallow_dict(self) -> dict: """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info - if self.use_cases: - body["use_cases"] = self.use_cases + if self.aws_key_info: body['aws_key_info'] = self.aws_key_info + if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info + if self.use_cases: body['use_cases'] = self.use_cases return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCustomerManagedKeyRequest: """Deserializes the CreateCustomerManagedKeyRequest from a dictionary.""" - return cls( - aws_key_info=_from_dict(d, "aws_key_info", CreateAwsKeyInfo), - gcp_key_info=_from_dict(d, "gcp_key_info", CreateGcpKeyInfo), - use_cases=_repeated_enum(d, "use_cases", KeyUseCase), - ) + return cls(aws_key_info=_from_dict(d, 'aws_key_info', CreateAwsKeyInfo), gcp_key_info=_from_dict(d, 'gcp_key_info', CreateGcpKeyInfo), use_cases=_repeated_enum(d, 'use_cases', KeyUseCase)) + + @dataclass class CreateGcpKeyInfo: kms_key_id: str """The GCP KMS key's resource name""" - + def as_dict(self) -> dict: """Serializes the CreateGcpKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.kms_key_id is not None: - body["kms_key_id"] = self.kms_key_id + if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.kms_key_id is not None: - body["kms_key_id"] = self.kms_key_id + if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateGcpKeyInfo: """Deserializes the CreateGcpKeyInfo from a dictionary.""" - return cls(kms_key_id=d.get("kms_key_id", None)) + return cls(kms_key_id=d.get('kms_key_id', None)) + + @dataclass class CreateNetworkRequest: network_name: str """The human-readable name of the network configuration.""" - + gcp_network_info: Optional[GcpNetworkInfo] = None """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and secondary IP ranges).""" - + security_group_ids: Optional[List[str]] = None """IDs of one to five security groups associated with this network. Security group IDs **cannot** be used in multiple network configurations.""" - + subnet_ids: Optional[List[str]] = None """IDs of at least two subnets associated with this network. 
Subnet IDs **cannot** be used in multiple network configurations.""" - + vpc_endpoints: Optional[NetworkVpcEndpoints] = None """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - + vpc_id: Optional[str] = None """The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations.""" - + def as_dict(self) -> dict: """Serializes the CreateNetworkRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info.as_dict() - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = [v for v in self.security_group_ids] - if self.subnet_ids: - body["subnet_ids"] = [v for v in self.subnet_ids] - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints.as_dict() - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id + if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info.as_dict() + if self.network_name is not None: body['network_name'] = self.network_name + if self.security_group_ids: body['security_group_ids'] = [v for v in self.security_group_ids] + if self.subnet_ids: body['subnet_ids'] = [v for v in self.subnet_ids] + if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints.as_dict() + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = self.security_group_ids - if self.subnet_ids: - body["subnet_ids"] = self.subnet_ids - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id + if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info + if self.network_name is not None: body['network_name'] = self.network_name + if self.security_group_ids: body['security_group_ids'] = self.security_group_ids + if self.subnet_ids: body['subnet_ids'] = self.subnet_ids + if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkRequest: """Deserializes the CreateNetworkRequest from a dictionary.""" - return cls( - gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo), - network_name=d.get("network_name", None), - security_group_ids=d.get("security_group_ids", None), - subnet_ids=d.get("subnet_ids", None), - vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints), - vpc_id=d.get("vpc_id", None), - ) + return cls(gcp_network_info=_from_dict(d, 'gcp_network_info', GcpNetworkInfo), network_name=d.get('network_name', None), security_group_ids=d.get('security_group_ids', None), subnet_ids=d.get('subnet_ids', None), vpc_endpoints=_from_dict(d, 'vpc_endpoints', NetworkVpcEndpoints), vpc_id=d.get('vpc_id', None)) + + @dataclass class CreateStorageConfigurationRequest: storage_configuration_name: str """The human-readable name of the storage configuration.""" - + root_bucket_info: RootBucketInfo """Root S3 bucket 
information.""" - + def as_dict(self) -> dict: """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info.as_dict() - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name + if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info.as_dict() + if self.storage_configuration_name is not None: body['storage_configuration_name'] = self.storage_configuration_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name + if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info + if self.storage_configuration_name is not None: body['storage_configuration_name'] = self.storage_configuration_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateStorageConfigurationRequest: """Deserializes the CreateStorageConfigurationRequest from a dictionary.""" - return cls( - root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo), - storage_configuration_name=d.get("storage_configuration_name", None), - ) + return cls(root_bucket_info=_from_dict(d, 'root_bucket_info', RootBucketInfo), storage_configuration_name=d.get('storage_configuration_name', None)) + + @dataclass class CreateVpcEndpointRequest: vpc_endpoint_name: str """The human-readable name of the storage configuration.""" - + aws_vpc_endpoint_id: Optional[str] = None """The ID of the VPC endpoint object in AWS.""" - + gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None """The Google Cloud specific information for this Private Service Connect endpoint.""" - + region: Optional[str] = None """The AWS region in which this VPC endpoint object exists.""" - + def as_dict(self) -> dict: """Serializes the CreateVpcEndpointRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict() - if self.region is not None: - body["region"] = self.region - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name + if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict() + if self.region is not None: body['region'] = self.region + if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info - if self.region is not None: - body["region"] = self.region - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name + if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = 
self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info + if self.region is not None: body['region'] = self.region + if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateVpcEndpointRequest: """Deserializes the CreateVpcEndpointRequest from a dictionary.""" - return cls( - aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None), - gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo), - region=d.get("region", None), - vpc_endpoint_name=d.get("vpc_endpoint_name", None), - ) + return cls(aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None), gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo), region=d.get('region', None), vpc_endpoint_name=d.get('vpc_endpoint_name', None)) + + @dataclass class CreateWorkspaceRequest: workspace_name: str """The workspace's human-readable name.""" - + aws_region: Optional[str] = None """The AWS region of the workspace's data plane.""" - + cloud: Optional[str] = None """The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to `gcp`.""" - + cloud_resource_container: Optional[CloudResourceContainer] = None """The general workspace configurations that are specific to cloud providers.""" - + credentials_id: Optional[str] = None """ID of the workspace's credential configuration object.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The key can be of maximum length of 127 characters, and cannot be empty.""" - + deployment_name: Optional[str] = None """The deployment name defines part of the subdomain for the workspace. The workspace URL for the web application and REST APIs is `<deployment-name>.cloud.databricks.com`. For @@ -557,7 +492,7 @@ class CreateWorkspaceRequest: If a new workspace omits this property, the server generates a unique deployment name for you with the pattern `dbc-xxxxxxxx-xxxx`.""" - + gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None """The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP @@ -577,30 +512,30 @@ class CreateWorkspaceRequest: Excel spreadsheet. See [calculate subnet sizes for a new workspace]. [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html""" - + gke_config: Optional[GkeConfig] = None """The configurations for the GKE cluster of a Databricks workspace.""" - + is_no_public_ip_enabled: Optional[bool] = None """Whether no public IP is enabled for the workspace.""" - + location: Optional[str] = None """The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.""" - + managed_services_customer_managed_key_id: Optional[str] = None """The ID of the workspace's managed services encryption key configuration object. This is used to help protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query history.
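A sketch of assembling the request body above for an AWS workspace; every ID is a placeholder, and only `workspace_name` is required by the dataclass:

    req = CreateWorkspaceRequest(
        workspace_name="demo-workspace",  # required
        aws_region="us-west-2",  # placeholders below
        credentials_id="<credential-configuration-id>",
        storage_configuration_id="<storage-configuration-id>",
        custom_tags={"cost-center": "1234"},
    )
    body = req.as_dict()  # suitable as the JSON request body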
The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.""" - + network_id: Optional[str] = None - + pricing_tier: Optional[PricingTier] = None """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. [AWS Pricing]: https://databricks.com/product/aws-pricing""" - + private_access_settings_id: Optional[str] = None """ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace @@ -610,269 +545,197 @@ class CreateWorkspaceRequest: [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" - + storage_configuration_id: Optional[str] = None """The ID of the workspace's storage configuration object.""" - + storage_customer_managed_key_id: Optional[str] = None """The ID of the workspace's storage encryption key configuration object. This is used to encrypt the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The provided key configuration object property `use_cases` must contain `STORAGE`.""" - + def as_dict(self) -> dict: """Serializes the CreateWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container.as_dict() - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict() - if self.gke_config: - body["gke_config"] = self.gke_config.as_dict() - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_name is not None: - body["workspace_name"] = self.workspace_name + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.cloud is not None: body['cloud'] = self.cloud + if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container.as_dict() + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.deployment_name is not None: body['deployment_name'] = self.deployment_name + if self.gcp_managed_network_config: 
body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict() + if self.gke_config: body['gke_config'] = self.gke_config.as_dict() + if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled + if self.location is not None: body['location'] = self.location + if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier.value + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_name is not None: body['workspace_name'] = self.workspace_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config - if self.gke_config: - body["gke_config"] = self.gke_config - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_name is not None: - body["workspace_name"] = self.workspace_name + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.cloud is not None: body['cloud'] = self.cloud + if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.deployment_name is not None: body['deployment_name'] = self.deployment_name + if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config + if self.gke_config: body['gke_config'] = self.gke_config + if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = 
self.is_no_public_ip_enabled + if self.location is not None: body['location'] = self.location + if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_name is not None: body['workspace_name'] = self.workspace_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWorkspaceRequest: """Deserializes the CreateWorkspaceRequest from a dictionary.""" - return cls( - aws_region=d.get("aws_region", None), - cloud=d.get("cloud", None), - cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer), - credentials_id=d.get("credentials_id", None), - custom_tags=d.get("custom_tags", None), - deployment_name=d.get("deployment_name", None), - gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig), - gke_config=_from_dict(d, "gke_config", GkeConfig), - is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None), - location=d.get("location", None), - managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), - network_id=d.get("network_id", None), - pricing_tier=_enum(d, "pricing_tier", PricingTier), - private_access_settings_id=d.get("private_access_settings_id", None), - storage_configuration_id=d.get("storage_configuration_id", None), - storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), - workspace_name=d.get("workspace_name", None), - ) + return cls(aws_region=d.get('aws_region', None), cloud=d.get('cloud', None), cloud_resource_container=_from_dict(d, 'cloud_resource_container', CloudResourceContainer), credentials_id=d.get('credentials_id', None), custom_tags=d.get('custom_tags', None), deployment_name=d.get('deployment_name', None), gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config', GcpManagedNetworkConfig), gke_config=_from_dict(d, 'gke_config', GkeConfig), is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None), location=d.get('location', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), network_id=d.get('network_id', None), pricing_tier=_enum(d, 'pricing_tier', PricingTier), private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_name=d.get('workspace_name', None)) + + @dataclass class Credential: account_id: Optional[str] = None """The Databricks account ID that hosts the credential.""" - + aws_credentials: Optional[AwsCredentials] = None - + creation_time: Optional[int] = None """Time in epoch milliseconds when the credential was created.""" - + credentials_id: Optional[str] = None """Databricks credential configuration ID.""" - + credentials_name: Optional[str] = None """The human-readable name of the credential configuration object.""" - + def as_dict(self) 
-> dict: """Serializes the Credential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials.as_dict() - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_credentials: body['aws_credentials'] = self.aws_credentials.as_dict() + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.credentials_name is not None: body['credentials_name'] = self.credentials_name return body def as_shallow_dict(self) -> dict: """Serializes the Credential into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_credentials: body['aws_credentials'] = self.aws_credentials + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.credentials_name is not None: body['credentials_name'] = self.credentials_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Credential: """Deserializes the Credential from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - aws_credentials=_from_dict(d, "aws_credentials", AwsCredentials), - creation_time=d.get("creation_time", None), - credentials_id=d.get("credentials_id", None), - credentials_name=d.get("credentials_name", None), - ) - + return cls(account_id=d.get('account_id', None), aws_credentials=_from_dict(d, 'aws_credentials', AwsCredentials), creation_time=d.get('creation_time', None), credentials_id=d.get('credentials_id', None), credentials_name=d.get('credentials_name', None)) + -CustomTags = Dict[str, str] @dataclass class CustomerFacingGcpCloudResourceContainer: """The general workspace configurations that are specific to Google Cloud.""" - + project_id: Optional[str] = None """The Google Cloud project ID, which the workspace uses to instantiate cloud resources for your workspace.""" - + def as_dict(self) -> dict: """Serializes the CustomerFacingGcpCloudResourceContainer into a dictionary suitable for use as a JSON request body.""" body = {} - if self.project_id is not None: - body["project_id"] = self.project_id + if self.project_id is not None: body['project_id'] = self.project_id return body def as_shallow_dict(self) -> dict: """Serializes the CustomerFacingGcpCloudResourceContainer into a shallow dictionary of its immediate attributes.""" body = {} - if self.project_id is not None: - body["project_id"] = self.project_id + if self.project_id is not None: body['project_id'] = self.project_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
CustomerFacingGcpCloudResourceContainer: """Deserializes the CustomerFacingGcpCloudResourceContainer from a dictionary.""" - return cls(project_id=d.get("project_id", None)) + return cls(project_id=d.get('project_id', None)) + + @dataclass class CustomerManagedKey: account_id: Optional[str] = None """The Databricks account ID that holds the customer-managed key.""" - + aws_key_info: Optional[AwsKeyInfo] = None - + creation_time: Optional[int] = None """Time in epoch milliseconds when the customer key was created.""" - + customer_managed_key_id: Optional[str] = None """ID of the encryption key configuration object.""" - + gcp_key_info: Optional[GcpKeyInfo] = None - + use_cases: Optional[List[KeyUseCase]] = None """The cases that the key can be used for.""" - + def as_dict(self) -> dict: """Serializes the CustomerManagedKey into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info.as_dict() - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.customer_managed_key_id is not None: - body["customer_managed_key_id"] = self.customer_managed_key_id - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info.as_dict() - if self.use_cases: - body["use_cases"] = [v.value for v in self.use_cases] + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_key_info: body['aws_key_info'] = self.aws_key_info.as_dict() + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.customer_managed_key_id is not None: body['customer_managed_key_id'] = self.customer_managed_key_id + if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info.as_dict() + if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases] return body def as_shallow_dict(self) -> dict: """Serializes the CustomerManagedKey into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.customer_managed_key_id is not None: - body["customer_managed_key_id"] = self.customer_managed_key_id - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info - if self.use_cases: - body["use_cases"] = self.use_cases + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_key_info: body['aws_key_info'] = self.aws_key_info + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.customer_managed_key_id is not None: body['customer_managed_key_id'] = self.customer_managed_key_id + if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info + if self.use_cases: body['use_cases'] = self.use_cases return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomerManagedKey: """Deserializes the CustomerManagedKey from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - aws_key_info=_from_dict(d, "aws_key_info", AwsKeyInfo), - creation_time=d.get("creation_time", None), - customer_managed_key_id=d.get("customer_managed_key_id", None), - gcp_key_info=_from_dict(d, "gcp_key_info", GcpKeyInfo), - use_cases=_repeated_enum(d, "use_cases", KeyUseCase), - ) + return cls(account_id=d.get('account_id', None), aws_key_info=_from_dict(d, 'aws_key_info', AwsKeyInfo), 
creation_time=d.get('creation_time', None), customer_managed_key_id=d.get('customer_managed_key_id', None), gcp_key_info=_from_dict(d, 'gcp_key_info', GcpKeyInfo), use_cases=_repeated_enum(d, 'use_cases', KeyUseCase)) + + + + + + + + + + + + + + @dataclass @@ -891,95 +754,96 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + + + + + + + class EndpointUseCase(Enum): """This enumeration represents the type of Databricks VPC [endpoint service] that was used when creating this VPC endpoint. - + [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" - - DATAPLANE_RELAY_ACCESS = "DATAPLANE_RELAY_ACCESS" - WORKSPACE_ACCESS = "WORKSPACE_ACCESS" - + + DATAPLANE_RELAY_ACCESS = 'DATAPLANE_RELAY_ACCESS' + WORKSPACE_ACCESS = 'WORKSPACE_ACCESS' class ErrorType(Enum): """The AWS resource associated with this error: credentials, VPC, subnet, security group, or network ACL.""" - - CREDENTIALS = "credentials" - NETWORK_ACL = "networkAcl" - SECURITY_GROUP = "securityGroup" - SUBNET = "subnet" - VPC = "vpc" - + + CREDENTIALS = 'credentials' + NETWORK_ACL = 'networkAcl' + SECURITY_GROUP = 'securityGroup' + SUBNET = 'subnet' + VPC = 'vpc' @dataclass class ExternalCustomerInfo: authoritative_user_email: Optional[str] = None """Email of the authoritative user.""" - + authoritative_user_full_name: Optional[str] = None """The authoritative user full name.""" - + customer_name: Optional[str] = None """The legal entity name for the external workspace""" - + def as_dict(self) -> dict: """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authoritative_user_email is not None: - body["authoritative_user_email"] = self.authoritative_user_email - if self.authoritative_user_full_name is not None: - body["authoritative_user_full_name"] = self.authoritative_user_full_name - if self.customer_name is not None: - body["customer_name"] = self.customer_name + if self.authoritative_user_email is not None: body['authoritative_user_email'] = self.authoritative_user_email + if self.authoritative_user_full_name is not None: body['authoritative_user_full_name'] = self.authoritative_user_full_name + if self.customer_name is not None: body['customer_name'] = self.customer_name return body def as_shallow_dict(self) -> dict: """Serializes the ExternalCustomerInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.authoritative_user_email is not None: - body["authoritative_user_email"] = self.authoritative_user_email - if self.authoritative_user_full_name is not None: - body["authoritative_user_full_name"] = self.authoritative_user_full_name - if self.customer_name is not None: - body["customer_name"] = self.customer_name + if self.authoritative_user_email is not None: body['authoritative_user_email'] = self.authoritative_user_email + if self.authoritative_user_full_name is not None: body['authoritative_user_full_name'] = self.authoritative_user_full_name + if self.customer_name is not None: body['customer_name'] = self.customer_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalCustomerInfo: """Deserializes the ExternalCustomerInfo from a dictionary.""" - return cls( - authoritative_user_email=d.get("authoritative_user_email", None), - authoritative_user_full_name=d.get("authoritative_user_full_name", None), - customer_name=d.get("customer_name", None), - ) 
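# --- Usage sketch: the serialization convention generated above. `as_dict`
# recursively serializes nested dataclasses, maps enums to their `.value`, and
# omits unset (None) fields, while `from_dict` tolerates missing keys. A minimal
# round trip, assuming AwsKeyInfo (defined earlier in this module) takes
# `key_arn` and `key_region`; the ARN below is a placeholder.
from databricks.sdk.service.provisioning import AwsKeyInfo, CustomerManagedKey, KeyUseCase

key = CustomerManagedKey(
    aws_key_info=AwsKeyInfo(key_arn="arn:aws:kms:us-west-2:111122223333:key/example",
                            key_region="us-west-2"),
    use_cases=[KeyUseCase.MANAGED_SERVICES, KeyUseCase.STORAGE],
)
body = key.as_dict()
assert body["use_cases"] == ["MANAGED_SERVICES", "STORAGE"]  # enums serialized by value
assert "account_id" not in body                              # None fields are dropped
assert CustomerManagedKey.from_dict(body) == key             # lossless round trip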
+ return cls(authoritative_user_email=d.get('authoritative_user_email', None), authoritative_user_full_name=d.get('authoritative_user_full_name', None), customer_name=d.get('customer_name', None)) + + @dataclass class GcpKeyInfo: kms_key_id: str """The GCP KMS key's resource name""" - + def as_dict(self) -> dict: """Serializes the GcpKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.kms_key_id is not None: - body["kms_key_id"] = self.kms_key_id + if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id return body def as_shallow_dict(self) -> dict: """Serializes the GcpKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.kms_key_id is not None: - body["kms_key_id"] = self.kms_key_id + if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpKeyInfo: """Deserializes the GcpKeyInfo from a dictionary.""" - return cls(kms_key_id=d.get("kms_key_id", None)) + return cls(kms_key_id=d.get('kms_key_id', None)) + + @dataclass @@ -988,204 +852,185 @@ class GcpManagedNetworkConfig: It is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP range configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks detects an IP range overlap. - + Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - + The sizes of these IP ranges affect the maximum number of nodes for the workspace. - + **Important**: Confirm the IP ranges used by your Databricks workspace before creating the workspace. You cannot change them after your workspace is deployed. If the IP address ranges for your Databricks workspace are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To determine the address range sizes that you need, Databricks provides a calculator as a Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - - [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html - """ - + + [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html""" + gke_cluster_pod_ip_range: Optional[str] = None """The IP range from which to allocate GKE cluster pods. No bigger than `/9` and no smaller than `/21`.""" - + gke_cluster_service_ip_range: Optional[str] = None """The IP range from which to allocate GKE cluster services. No bigger than `/16` and no smaller than `/27`.""" - + subnet_cidr: Optional[str] = None """The IP range from which to allocate GKE cluster nodes.
No bigger than `/9` and no smaller than `/29`.""" - + def as_dict(self) -> dict: """Serializes the GcpManagedNetworkConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gke_cluster_pod_ip_range is not None: - body["gke_cluster_pod_ip_range"] = self.gke_cluster_pod_ip_range - if self.gke_cluster_service_ip_range is not None: - body["gke_cluster_service_ip_range"] = self.gke_cluster_service_ip_range - if self.subnet_cidr is not None: - body["subnet_cidr"] = self.subnet_cidr + if self.gke_cluster_pod_ip_range is not None: body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range + if self.gke_cluster_service_ip_range is not None: body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range + if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr return body def as_shallow_dict(self) -> dict: """Serializes the GcpManagedNetworkConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.gke_cluster_pod_ip_range is not None: - body["gke_cluster_pod_ip_range"] = self.gke_cluster_pod_ip_range - if self.gke_cluster_service_ip_range is not None: - body["gke_cluster_service_ip_range"] = self.gke_cluster_service_ip_range - if self.subnet_cidr is not None: - body["subnet_cidr"] = self.subnet_cidr + if self.gke_cluster_pod_ip_range is not None: body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range + if self.gke_cluster_service_ip_range is not None: body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range + if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpManagedNetworkConfig: """Deserializes the GcpManagedNetworkConfig from a dictionary.""" - return cls( - gke_cluster_pod_ip_range=d.get("gke_cluster_pod_ip_range", None), - gke_cluster_service_ip_range=d.get("gke_cluster_service_ip_range", None), - subnet_cidr=d.get("subnet_cidr", None), - ) + return cls(gke_cluster_pod_ip_range=d.get('gke_cluster_pod_ip_range', None), gke_cluster_service_ip_range=d.get('gke_cluster_service_ip_range', None), subnet_cidr=d.get('subnet_cidr', None)) + + @dataclass class GcpNetworkInfo: """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and secondary IP ranges).""" - + network_project_id: str """The Google Cloud project ID of the VPC network.""" - + vpc_id: str """The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations.""" - + subnet_id: str """The ID of the subnet associated with this network.""" - + subnet_region: str """The Google Cloud region of the workspace data plane (for example, `us-east4`).""" - + pod_ip_range_name: str """The name of the secondary IP range for pods. A Databricks-managed GKE cluster uses this IP range for its pods. This secondary IP range can be used by only one workspace.""" - + service_ip_range_name: str """The name of the secondary IP range for services. A Databricks-managed GKE cluster uses this IP range for its services. 
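# --- Usage sketch: a fully specified GcpNetworkInfo, per the field docs above.
# All six parameters are required; the project, VPC, subnet, and secondary IP
# range names below are placeholders.
from databricks.sdk.service.provisioning import GcpNetworkInfo

network_info = GcpNetworkInfo(
    network_project_id="my-gcp-project",   # project that owns the VPC
    vpc_id="databricks-vpc",
    subnet_id="databricks-subnet",
    subnet_region="us-east4",
    pod_ip_range_name="pods",              # secondary range reserved for GKE pods
    service_ip_range_name="services",      # secondary range reserved for GKE services
)
assert network_info.as_dict()["subnet_region"] == "us-east4"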
This secondary IP range can be used by only one workspace.""" - + def as_dict(self) -> dict: """Serializes the GcpNetworkInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.network_project_id is not None: - body["network_project_id"] = self.network_project_id - if self.pod_ip_range_name is not None: - body["pod_ip_range_name"] = self.pod_ip_range_name - if self.service_ip_range_name is not None: - body["service_ip_range_name"] = self.service_ip_range_name - if self.subnet_id is not None: - body["subnet_id"] = self.subnet_id - if self.subnet_region is not None: - body["subnet_region"] = self.subnet_region - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id + if self.network_project_id is not None: body['network_project_id'] = self.network_project_id + if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name + if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name + if self.subnet_id is not None: body['subnet_id'] = self.subnet_id + if self.subnet_region is not None: body['subnet_region'] = self.subnet_region + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id return body def as_shallow_dict(self) -> dict: """Serializes the GcpNetworkInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.network_project_id is not None: - body["network_project_id"] = self.network_project_id - if self.pod_ip_range_name is not None: - body["pod_ip_range_name"] = self.pod_ip_range_name - if self.service_ip_range_name is not None: - body["service_ip_range_name"] = self.service_ip_range_name - if self.subnet_id is not None: - body["subnet_id"] = self.subnet_id - if self.subnet_region is not None: - body["subnet_region"] = self.subnet_region - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id + if self.network_project_id is not None: body['network_project_id'] = self.network_project_id + if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name + if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name + if self.subnet_id is not None: body['subnet_id'] = self.subnet_id + if self.subnet_region is not None: body['subnet_region'] = self.subnet_region + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpNetworkInfo: """Deserializes the GcpNetworkInfo from a dictionary.""" - return cls( - network_project_id=d.get("network_project_id", None), - pod_ip_range_name=d.get("pod_ip_range_name", None), - service_ip_range_name=d.get("service_ip_range_name", None), - subnet_id=d.get("subnet_id", None), - subnet_region=d.get("subnet_region", None), - vpc_id=d.get("vpc_id", None), - ) + return cls(network_project_id=d.get('network_project_id', None), pod_ip_range_name=d.get('pod_ip_range_name', None), service_ip_range_name=d.get('service_ip_range_name', None), subnet_id=d.get('subnet_id', None), subnet_region=d.get('subnet_region', None), vpc_id=d.get('vpc_id', None)) + + @dataclass class GcpVpcEndpointInfo: """The Google Cloud specific information for this Private Service Connect endpoint.""" - + project_id: str """The Google Cloud project ID of the VPC network where the PSC connection resides.""" - + psc_endpoint_name: str """The name of the PSC endpoint in the Google Cloud project.""" - + endpoint_region: str """Region of the PSC endpoint.""" - + psc_connection_id: Optional[str] = None """The unique ID of this PSC 
connection.""" - + service_attachment_id: Optional[str] = None """The service attachment this PSC connection connects to.""" - + def as_dict(self) -> dict: """Serializes the GcpVpcEndpointInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoint_region is not None: - body["endpoint_region"] = self.endpoint_region - if self.project_id is not None: - body["project_id"] = self.project_id - if self.psc_connection_id is not None: - body["psc_connection_id"] = self.psc_connection_id - if self.psc_endpoint_name is not None: - body["psc_endpoint_name"] = self.psc_endpoint_name - if self.service_attachment_id is not None: - body["service_attachment_id"] = self.service_attachment_id + if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region + if self.project_id is not None: body['project_id'] = self.project_id + if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id + if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name + if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id return body def as_shallow_dict(self) -> dict: """Serializes the GcpVpcEndpointInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoint_region is not None: - body["endpoint_region"] = self.endpoint_region - if self.project_id is not None: - body["project_id"] = self.project_id - if self.psc_connection_id is not None: - body["psc_connection_id"] = self.psc_connection_id - if self.psc_endpoint_name is not None: - body["psc_endpoint_name"] = self.psc_endpoint_name - if self.service_attachment_id is not None: - body["service_attachment_id"] = self.service_attachment_id + if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region + if self.project_id is not None: body['project_id'] = self.project_id + if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id + if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name + if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpVpcEndpointInfo: """Deserializes the GcpVpcEndpointInfo from a dictionary.""" - return cls( - endpoint_region=d.get("endpoint_region", None), - project_id=d.get("project_id", None), - psc_connection_id=d.get("psc_connection_id", None), - psc_endpoint_name=d.get("psc_endpoint_name", None), - service_attachment_id=d.get("service_attachment_id", None), - ) + return cls(endpoint_region=d.get('endpoint_region', None), project_id=d.get('project_id', None), psc_connection_id=d.get('psc_connection_id', None), psc_endpoint_name=d.get('psc_endpoint_name', None), service_attachment_id=d.get('service_attachment_id', None)) + + + + + + + + + + + + + + + + + + + + + + + @dataclass class GkeConfig: """The configurations for the GKE cluster of a Databricks workspace.""" - + connectivity_type: Optional[GkeConfigConnectivityType] = None """Specifies the network connectivity types for the GKE nodes and the GKE master network. @@ -1194,304 +1039,252 @@ class GkeConfig: Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster have public IP addresses.""" - + master_ip_range: Optional[str] = None """The IP range from which to allocate GKE cluster master resources. This field will be ignored if GKE private cluster is not enabled. 
It must be exactly as big as `/28`.""" - + def as_dict(self) -> dict: """Serializes the GkeConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connectivity_type is not None: - body["connectivity_type"] = self.connectivity_type.value - if self.master_ip_range is not None: - body["master_ip_range"] = self.master_ip_range + if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type.value + if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range return body def as_shallow_dict(self) -> dict: """Serializes the GkeConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.connectivity_type is not None: - body["connectivity_type"] = self.connectivity_type - if self.master_ip_range is not None: - body["master_ip_range"] = self.master_ip_range + if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type + if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GkeConfig: """Deserializes the GkeConfig from a dictionary.""" - return cls( - connectivity_type=_enum(d, "connectivity_type", GkeConfigConnectivityType), - master_ip_range=d.get("master_ip_range", None), - ) + return cls(connectivity_type=_enum(d, 'connectivity_type', GkeConfigConnectivityType), master_ip_range=d.get('master_ip_range', None)) + + class GkeConfigConnectivityType(Enum): """Specifies the network connectivity types for the GKE nodes and the GKE master network. - + Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes will not have public IPs. - + Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster have public IP addresses.""" - - PRIVATE_NODE_PUBLIC_MASTER = "PRIVATE_NODE_PUBLIC_MASTER" - PUBLIC_NODE_PUBLIC_MASTER = "PUBLIC_NODE_PUBLIC_MASTER" - + + PRIVATE_NODE_PUBLIC_MASTER = 'PRIVATE_NODE_PUBLIC_MASTER' + PUBLIC_NODE_PUBLIC_MASTER = 'PUBLIC_NODE_PUBLIC_MASTER' class KeyUseCase(Enum): """Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes.""" - - MANAGED_SERVICES = "MANAGED_SERVICES" - STORAGE = "STORAGE" - + + MANAGED_SERVICES = 'MANAGED_SERVICES' + STORAGE = 'STORAGE' @dataclass class Network: account_id: Optional[str] = None """The Databricks account ID associated with this network configuration.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when the network was created.""" - + error_messages: Optional[List[NetworkHealth]] = None """Array of error messages about the network configuration.""" - + gcp_network_info: Optional[GcpNetworkInfo] = None """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and secondary IP ranges).""" - + network_id: Optional[str] = None """The Databricks network configuration ID.""" - + network_name: Optional[str] = None """The human-readable name of the network configuration.""" - + security_group_ids: Optional[List[str]] = None - + subnet_ids: Optional[List[str]] = None - + vpc_endpoints: Optional[NetworkVpcEndpoints] = None """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. 
[AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - + vpc_id: Optional[str] = None """The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple networks.""" - + vpc_status: Optional[VpcStatus] = None """The status of this network configuration object in terms of its use in a workspace: * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned.""" - + warning_messages: Optional[List[NetworkWarning]] = None """Array of warning messages about the network configuration.""" - + workspace_id: Optional[int] = None """Workspace ID associated with this network configuration.""" - + def as_dict(self) -> dict: """Serializes the Network into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.error_messages: - body["error_messages"] = [v.as_dict() for v in self.error_messages] - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info.as_dict() - if self.network_id is not None: - body["network_id"] = self.network_id - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = [v for v in self.security_group_ids] - if self.subnet_ids: - body["subnet_ids"] = [v for v in self.subnet_ids] - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints.as_dict() - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id - if self.vpc_status is not None: - body["vpc_status"] = self.vpc_status.value - if self.warning_messages: - body["warning_messages"] = [v.as_dict() for v in self.warning_messages] - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.error_messages: body['error_messages'] = [v.as_dict() for v in self.error_messages] + if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info.as_dict() + if self.network_id is not None: body['network_id'] = self.network_id + if self.network_name is not None: body['network_name'] = self.network_name + if self.security_group_ids: body['security_group_ids'] = [v for v in self.security_group_ids] + if self.subnet_ids: body['subnet_ids'] = [v for v in self.subnet_ids] + if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints.as_dict() + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + if self.vpc_status is not None: body['vpc_status'] = self.vpc_status.value + if self.warning_messages: body['warning_messages'] = [v.as_dict() for v in self.warning_messages] + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the Network into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.error_messages: - body["error_messages"] = self.error_messages - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info - if self.network_id is not None: - body["network_id"] = self.network_id - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = 
self.security_group_ids - if self.subnet_ids: - body["subnet_ids"] = self.subnet_ids - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id - if self.vpc_status is not None: - body["vpc_status"] = self.vpc_status - if self.warning_messages: - body["warning_messages"] = self.warning_messages - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.error_messages: body['error_messages'] = self.error_messages + if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info + if self.network_id is not None: body['network_id'] = self.network_id + if self.network_name is not None: body['network_name'] = self.network_name + if self.security_group_ids: body['security_group_ids'] = self.security_group_ids + if self.subnet_ids: body['subnet_ids'] = self.subnet_ids + if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints + if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + if self.vpc_status is not None: body['vpc_status'] = self.vpc_status + if self.warning_messages: body['warning_messages'] = self.warning_messages + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Network: """Deserializes the Network from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - creation_time=d.get("creation_time", None), - error_messages=_repeated_dict(d, "error_messages", NetworkHealth), - gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo), - network_id=d.get("network_id", None), - network_name=d.get("network_name", None), - security_group_ids=d.get("security_group_ids", None), - subnet_ids=d.get("subnet_ids", None), - vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints), - vpc_id=d.get("vpc_id", None), - vpc_status=_enum(d, "vpc_status", VpcStatus), - warning_messages=_repeated_dict(d, "warning_messages", NetworkWarning), - workspace_id=d.get("workspace_id", None), - ) + return cls(account_id=d.get('account_id', None), creation_time=d.get('creation_time', None), error_messages=_repeated_dict(d, 'error_messages', NetworkHealth), gcp_network_info=_from_dict(d, 'gcp_network_info', GcpNetworkInfo), network_id=d.get('network_id', None), network_name=d.get('network_name', None), security_group_ids=d.get('security_group_ids', None), subnet_ids=d.get('subnet_ids', None), vpc_endpoints=_from_dict(d, 'vpc_endpoints', NetworkVpcEndpoints), vpc_id=d.get('vpc_id', None), vpc_status=_enum(d, 'vpc_status', VpcStatus), warning_messages=_repeated_dict(d, 'warning_messages', NetworkWarning), workspace_id=d.get('workspace_id', None)) + + @dataclass class NetworkHealth: error_message: Optional[str] = None """Details of the error.""" - + error_type: Optional[ErrorType] = None """The AWS resource associated with this error: credentials, VPC, subnet, security group, or network ACL.""" - + def as_dict(self) -> dict: """Serializes the NetworkHealth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error_message is not None: - body["error_message"] = self.error_message - if self.error_type is not None: - body["error_type"] = self.error_type.value + if self.error_message is not None: body['error_message'] = self.error_message + if self.error_type is not None: body['error_type'] = 
self.error_type.value return body def as_shallow_dict(self) -> dict: """Serializes the NetworkHealth into a shallow dictionary of its immediate attributes.""" body = {} - if self.error_message is not None: - body["error_message"] = self.error_message - if self.error_type is not None: - body["error_type"] = self.error_type + if self.error_message is not None: body['error_message'] = self.error_message + if self.error_type is not None: body['error_type'] = self.error_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NetworkHealth: """Deserializes the NetworkHealth from a dictionary.""" - return cls(error_message=d.get("error_message", None), error_type=_enum(d, "error_type", ErrorType)) + return cls(error_message=d.get('error_message', None), error_type=_enum(d, 'error_type', ErrorType)) + + @dataclass class NetworkVpcEndpoints: """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - + rest_api: List[str] """The VPC endpoint ID used by this network to access the Databricks REST API.""" - + dataplane_relay: List[str] """The VPC endpoint ID used by this network to access the Databricks secure cluster connectivity relay.""" - + def as_dict(self) -> dict: """Serializes the NetworkVpcEndpoints into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataplane_relay: - body["dataplane_relay"] = [v for v in self.dataplane_relay] - if self.rest_api: - body["rest_api"] = [v for v in self.rest_api] + if self.dataplane_relay: body['dataplane_relay'] = [v for v in self.dataplane_relay] + if self.rest_api: body['rest_api'] = [v for v in self.rest_api] return body def as_shallow_dict(self) -> dict: """Serializes the NetworkVpcEndpoints into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataplane_relay: - body["dataplane_relay"] = self.dataplane_relay - if self.rest_api: - body["rest_api"] = self.rest_api + if self.dataplane_relay: body['dataplane_relay'] = self.dataplane_relay + if self.rest_api: body['rest_api'] = self.rest_api return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NetworkVpcEndpoints: """Deserializes the NetworkVpcEndpoints from a dictionary.""" - return cls(dataplane_relay=d.get("dataplane_relay", None), rest_api=d.get("rest_api", None)) + return cls(dataplane_relay=d.get('dataplane_relay', None), rest_api=d.get('rest_api', None)) + + @dataclass class NetworkWarning: warning_message: Optional[str] = None """Details of the warning.""" - + warning_type: Optional[WarningType] = None """The AWS resource associated with this warning: a subnet or a security group.""" - + def as_dict(self) -> dict: """Serializes the NetworkWarning into a dictionary suitable for use as a JSON request body.""" body = {} - if self.warning_message is not None: - body["warning_message"] = self.warning_message - if self.warning_type is not None: - body["warning_type"] = self.warning_type.value + if self.warning_message is not None: body['warning_message'] = self.warning_message + if self.warning_type is not None: body['warning_type'] = self.warning_type.value return body def as_shallow_dict(self) -> dict: """Serializes the NetworkWarning into a shallow dictionary of its immediate attributes.""" body = {} - if self.warning_message is not None: - body["warning_message"] = self.warning_message - if self.warning_type is not None: - body["warning_type"] = self.warning_type + if self.warning_message is not None: 
body['warning_message'] = self.warning_message + if self.warning_type is not None: body['warning_type'] = self.warning_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NetworkWarning: """Deserializes the NetworkWarning from a dictionary.""" - return cls(warning_message=d.get("warning_message", None), warning_type=_enum(d, "warning_type", WarningType)) + return cls(warning_message=d.get('warning_message', None), warning_type=_enum(d, 'warning_type', WarningType)) + + class PricingTier(Enum): """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - + [AWS Pricing]: https://databricks.com/product/aws-pricing""" - - COMMUNITY_EDITION = "COMMUNITY_EDITION" - DEDICATED = "DEDICATED" - ENTERPRISE = "ENTERPRISE" - PREMIUM = "PREMIUM" - STANDARD = "STANDARD" - UNKNOWN = "UNKNOWN" - + + COMMUNITY_EDITION = 'COMMUNITY_EDITION' + DEDICATED = 'DEDICATED' + ENTERPRISE = 'ENTERPRISE' + PREMIUM = 'PREMIUM' + STANDARD = 'STANDARD' + UNKNOWN = 'UNKNOWN' class PrivateAccessLevel(Enum): """The private access level controls which VPC endpoints can connect to the UI or API of any @@ -1499,91 +1292,70 @@ class PrivateAccessLevel(Enum): default) allows only VPC endpoints that are registered in your Databricks account to connect to your workspace. * `ENDPOINT` level access allows only specified VPC endpoints to connect to your workspace. For details, see `allowed_vpc_endpoint_ids`.""" - - ACCOUNT = "ACCOUNT" - ENDPOINT = "ENDPOINT" - + + ACCOUNT = 'ACCOUNT' + ENDPOINT = 'ENDPOINT' @dataclass class PrivateAccessSettings: account_id: Optional[str] = None """The Databricks account ID that hosts the credential.""" - + allowed_vpc_endpoint_ids: Optional[List[str]] = None """An array of Databricks VPC endpoint IDs.""" - + private_access_level: Optional[PrivateAccessLevel] = None """The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object. * `ACCOUNT` level access (the default) allows only VPC endpoints that are registered in your Databricks account to connect to your workspace. * `ENDPOINT` level access allows only specified VPC endpoints to connect to your workspace. For details, see `allowed_vpc_endpoint_ids`.""" - + private_access_settings_id: Optional[str] = None """Databricks private access settings ID.""" - + private_access_settings_name: Optional[str] = None """The human-readable name of the private access settings object.""" - + public_access_enabled: Optional[bool] = None """Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections.
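# --- Usage sketch: an endpoint-scoped private access settings object as
# described above; only the listed, Databricks-registered VPC endpoint IDs
# (placeholders here) may connect, and public access stays disabled.
from databricks.sdk.service.provisioning import PrivateAccessLevel, PrivateAccessSettings

pas = PrivateAccessSettings(
    private_access_settings_name="prod-private-link",
    region="us-west-2",
    private_access_level=PrivateAccessLevel.ENDPOINT,
    allowed_vpc_endpoint_ids=["dbx-vpce-example"],
    public_access_enabled=False,
)
assert pas.as_dict()["private_access_level"] == "ENDPOINT"  # enum serialized by value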
Otherwise, specify `true`, which means that public access is enabled.""" - + region: Optional[str] = None """The cloud region for workspaces attached to this private access settings object.""" - + def as_dict(self) -> dict: """Serializes the PrivateAccessSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids] - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region + if self.account_id is not None: body['account_id'] = self.account_id + if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = [v for v in self.allowed_vpc_endpoint_ids] + if self.private_access_level is not None: body['private_access_level'] = self.private_access_level.value + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name + if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled + if self.region is not None: body['region'] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the PrivateAccessSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region + if self.account_id is not None: body['account_id'] = self.account_id + if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids + if self.private_access_level is not None: body['private_access_level'] = self.private_access_level + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name + if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled + if self.region is not None: body['region'] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivateAccessSettings: """Deserializes the PrivateAccessSettings from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None), - 
private_access_level=_enum(d, "private_access_level", PrivateAccessLevel), - private_access_settings_id=d.get("private_access_settings_id", None), - private_access_settings_name=d.get("private_access_settings_name", None), - public_access_enabled=d.get("public_access_enabled", None), - region=d.get("region", None), - ) + return cls(account_id=d.get('account_id', None), allowed_vpc_endpoint_ids=d.get('allowed_vpc_endpoint_ids', None), private_access_level=_enum(d, 'private_access_level', PrivateAccessLevel), private_access_settings_id=d.get('private_access_settings_id', None), private_access_settings_name=d.get('private_access_settings_name', None), public_access_enabled=d.get('public_access_enabled', None), region=d.get('region', None)) + + @dataclass @@ -1602,92 +1374,80 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse: """Deserializes the ReplaceResponse from a dictionary.""" return cls() + + @dataclass class RootBucketInfo: """Root S3 bucket information.""" - + bucket_name: Optional[str] = None """The name of the S3 bucket.""" - + def as_dict(self) -> dict: """Serializes the RootBucketInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bucket_name is not None: - body["bucket_name"] = self.bucket_name + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name return body def as_shallow_dict(self) -> dict: """Serializes the RootBucketInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.bucket_name is not None: - body["bucket_name"] = self.bucket_name + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RootBucketInfo: """Deserializes the RootBucketInfo from a dictionary.""" - return cls(bucket_name=d.get("bucket_name", None)) + return cls(bucket_name=d.get('bucket_name', None)) + + @dataclass class StorageConfiguration: account_id: Optional[str] = None """The Databricks account ID that hosts the credential.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when the storage configuration was created.""" - + root_bucket_info: Optional[RootBucketInfo] = None """Root S3 bucket information.""" - + storage_configuration_id: Optional[str] = None """Databricks storage configuration ID.""" - + storage_configuration_name: Optional[str] = None """The human-readable name of the storage configuration.""" - + def as_dict(self) -> dict: """Serializes the StorageConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info.as_dict() - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info.as_dict() + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_configuration_name is not None: body['storage_configuration_name'] = 
self.storage_configuration_name return body def as_shallow_dict(self) -> dict: """Serializes the StorageConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_configuration_name is not None: body['storage_configuration_name'] = self.storage_configuration_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StorageConfiguration: """Deserializes the StorageConfiguration from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - creation_time=d.get("creation_time", None), - root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo), - storage_configuration_id=d.get("storage_configuration_id", None), - storage_configuration_name=d.get("storage_configuration_name", None), - ) + return cls(account_id=d.get('account_id', None), creation_time=d.get('creation_time', None), root_bucket_info=_from_dict(d, 'root_bucket_info', RootBucketInfo), storage_configuration_id=d.get('storage_configuration_id', None), storage_configuration_name=d.get('storage_configuration_name', None)) + + @dataclass @@ -1695,32 +1455,30 @@ class StsRole: external_id: Optional[str] = None """The external ID that needs to be trusted by the cross-account role. 
This is always your Databricks account ID.""" - + role_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the cross account role.""" - + def as_dict(self) -> dict: """Serializes the StsRole into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id - if self.role_arn is not None: - body["role_arn"] = self.role_arn + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn return body def as_shallow_dict(self) -> dict: """Serializes the StsRole into a shallow dictionary of its immediate attributes.""" body = {} - if self.external_id is not None: - body["external_id"] = self.external_id - if self.role_arn is not None: - body["role_arn"] = self.role_arn + if self.external_id is not None: body['external_id'] = self.external_id + if self.role_arn is not None: body['role_arn'] = self.role_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StsRole: """Deserializes the StsRole from a dictionary.""" - return cls(external_id=d.get("external_id", None), role_arn=d.get("role_arn", None)) + return cls(external_id=d.get('external_id', None), role_arn=d.get('role_arn', None)) + + @dataclass @@ -1739,6 +1497,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() + + @dataclass @@ -1746,117 +1506,88 @@ class UpdateWorkspaceRequest: aws_region: Optional[str] = None """The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is available only for updating failed workspaces.""" - + credentials_id: Optional[str] = None """ID of the workspace's credential configuration object. This parameter is available for updating both failed and running workspaces.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The key can be of maximum length of 127 characters, and cannot be empty.""" - + managed_services_customer_managed_key_id: Optional[str] = None """The ID of the workspace's managed services encryption key configuration object. This parameter is available only for updating failed workspaces.""" - + network_connectivity_config_id: Optional[str] = None - + network_id: Optional[str] = None """The ID of the workspace's network configuration object. Used only if you already use a customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a customer-managed VPC by updating the workspace to add a network configuration ID.""" - + private_access_settings_id: Optional[str] = None """The ID of the workspace's private access settings configuration object. This parameter is available only for updating failed workspaces.""" - + storage_configuration_id: Optional[str] = None """The ID of the workspace's storage configuration object. This parameter is available only for updating failed workspaces.""" - + storage_customer_managed_key_id: Optional[str] = None """The ID of the key configuration object for workspace storage. 
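# --- Usage sketch: a minimal UpdateWorkspaceRequest. Per the field docs above,
# `credentials_id` and `storage_customer_managed_key_id` may be updated on both
# failed and running workspaces; the IDs below are placeholders.
from databricks.sdk.service.provisioning import UpdateWorkspaceRequest

req = UpdateWorkspaceRequest(
    workspace_id=1234567890,
    credentials_id="credential-config-id",
    storage_customer_managed_key_id="storage-key-config-id",
)
assert "aws_region" not in req.as_dict()  # unset (None) fields are omitted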
This parameter is available for updating both failed and running workspaces.""" - + workspace_id: Optional[int] = None """Workspace ID.""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if 
self.aws_region is not None: body['aws_region'] = self.aws_region + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceRequest: """Deserializes the UpdateWorkspaceRequest from a dictionary.""" - return cls( - aws_region=d.get("aws_region", None), - credentials_id=d.get("credentials_id", None), - custom_tags=d.get("custom_tags", None), - managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), - network_connectivity_config_id=d.get("network_connectivity_config_id", None), - network_id=d.get("network_id", None), - private_access_settings_id=d.get("private_access_settings_id", None), - storage_configuration_id=d.get("storage_configuration_id", None), - storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), - workspace_id=d.get("workspace_id", None), - ) + return cls(aws_region=d.get('aws_region', None), credentials_id=d.get('credentials_id', None), custom_tags=d.get('custom_tags', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), network_id=d.get('network_id', None), private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_id=d.get('workspace_id', None)) + + @dataclass class UpsertPrivateAccessSettingsRequest: private_access_settings_name: str """The human-readable name of the private access settings object.""" - + region: str """The cloud region for workspaces associated with this private access settings object.""" - + allowed_vpc_endpoint_ids: Optional[List[str]] = None """An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the @@ -1870,78 +1601,61 @@ class UpsertPrivateAccessSettingsRequest: public internet, see [IP access lists]. [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html""" - + private_access_level: Optional[PrivateAccessLevel] = None """The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object. * `ACCOUNT` level access (the default) allows only VPC endpoints that are registered in your Databricks account connect to your workspace. 
* `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details, see `allowed_vpc_endpoint_ids`.""" - + private_access_settings_id: Optional[str] = None """Databricks Account API private access settings ID.""" - + public_access_enabled: Optional[bool] = None """Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.""" - + def as_dict(self) -> dict: """Serializes the UpsertPrivateAccessSettingsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids] - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region + if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = [v for v in self.allowed_vpc_endpoint_ids] + if self.private_access_level is not None: body['private_access_level'] = self.private_access_level.value + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name + if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled + if self.region is not None: body['region'] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region + if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids + if self.private_access_level is not None: body['private_access_level'] = self.private_access_level + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name + if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled + if self.region is not None: body['region'] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpsertPrivateAccessSettingsRequest: """Deserializes the 
UpsertPrivateAccessSettingsRequest from a dictionary.""" - return cls( - allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None), - private_access_level=_enum(d, "private_access_level", PrivateAccessLevel), - private_access_settings_id=d.get("private_access_settings_id", None), - private_access_settings_name=d.get("private_access_settings_name", None), - public_access_enabled=d.get("public_access_enabled", None), - region=d.get("region", None), - ) + return cls(allowed_vpc_endpoint_ids=d.get('allowed_vpc_endpoint_ids', None), private_access_level=_enum(d, 'private_access_level', PrivateAccessLevel), private_access_settings_id=d.get('private_access_settings_id', None), private_access_settings_name=d.get('private_access_settings_name', None), public_access_enabled=d.get('public_access_enabled', None), region=d.get('region', None)) + + @dataclass class VpcEndpoint: account_id: Optional[str] = None """The Databricks account ID that hosts the VPC endpoint configuration.""" - + aws_account_id: Optional[str] = None """The AWS Account in which the VPC endpoint object exists.""" - + aws_endpoint_service_id: Optional[str] = None """The ID of the Databricks [endpoint service] that this VPC endpoint is connected to. For a list of endpoint service IDs for each supported AWS region, see the [Databricks PrivateLink @@ -1949,156 +1663,125 @@ class VpcEndpoint: [Databricks PrivateLink documentation]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" - + aws_vpc_endpoint_id: Optional[str] = None """The ID of the VPC endpoint object in AWS.""" - + gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None """The Google Cloud specific information for this Private Service Connect endpoint.""" - + region: Optional[str] = None """The AWS region in which this VPC endpoint object exists.""" - + state: Optional[str] = None """The current state (such as `available` or `rejected`) of the VPC endpoint. Derived from AWS. For the full set of values, see [AWS DescribeVpcEndpoint documentation]. [AWS DescribeVpcEndpoint documentation]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-vpc-endpoints.html""" - + use_case: Optional[EndpointUseCase] = None """This enumeration represents the type of Databricks VPC [endpoint service] that was used when creating this VPC endpoint. [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" - + vpc_endpoint_id: Optional[str] = None """Databricks VPC endpoint ID. This is the Databricks-specific name of the VPC endpoint. 
Do not confuse this with the `aws_vpc_endpoint_id`, which is the ID within AWS of the VPC endpoint.""" - + vpc_endpoint_name: Optional[str] = None """The human-readable name of the storage configuration.""" - + def as_dict(self) -> dict: """Serializes the VpcEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_account_id is not None: - body["aws_account_id"] = self.aws_account_id - if self.aws_endpoint_service_id is not None: - body["aws_endpoint_service_id"] = self.aws_endpoint_service_id - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict() - if self.region is not None: - body["region"] = self.region - if self.state is not None: - body["state"] = self.state - if self.use_case is not None: - body["use_case"] = self.use_case.value - if self.vpc_endpoint_id is not None: - body["vpc_endpoint_id"] = self.vpc_endpoint_id - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id + if self.aws_endpoint_service_id is not None: body['aws_endpoint_service_id'] = self.aws_endpoint_service_id + if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict() + if self.region is not None: body['region'] = self.region + if self.state is not None: body['state'] = self.state + if self.use_case is not None: body['use_case'] = self.use_case.value + if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name return body def as_shallow_dict(self) -> dict: """Serializes the VpcEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_account_id is not None: - body["aws_account_id"] = self.aws_account_id - if self.aws_endpoint_service_id is not None: - body["aws_endpoint_service_id"] = self.aws_endpoint_service_id - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info - if self.region is not None: - body["region"] = self.region - if self.state is not None: - body["state"] = self.state - if self.use_case is not None: - body["use_case"] = self.use_case - if self.vpc_endpoint_id is not None: - body["vpc_endpoint_id"] = self.vpc_endpoint_id - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id + if self.aws_endpoint_service_id is not None: body['aws_endpoint_service_id'] = self.aws_endpoint_service_id + if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info + if self.region is not None: body['region'] = self.region + if self.state is not None: body['state'] = 
self.state + if self.use_case is not None: body['use_case'] = self.use_case + if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VpcEndpoint: """Deserializes the VpcEndpoint from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - aws_account_id=d.get("aws_account_id", None), - aws_endpoint_service_id=d.get("aws_endpoint_service_id", None), - aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None), - gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo), - region=d.get("region", None), - state=d.get("state", None), - use_case=_enum(d, "use_case", EndpointUseCase), - vpc_endpoint_id=d.get("vpc_endpoint_id", None), - vpc_endpoint_name=d.get("vpc_endpoint_name", None), - ) + return cls(account_id=d.get('account_id', None), aws_account_id=d.get('aws_account_id', None), aws_endpoint_service_id=d.get('aws_endpoint_service_id', None), aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None), gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo), region=d.get('region', None), state=d.get('state', None), use_case=_enum(d, 'use_case', EndpointUseCase), vpc_endpoint_id=d.get('vpc_endpoint_id', None), vpc_endpoint_name=d.get('vpc_endpoint_name', None)) + + class VpcStatus(Enum): """The status of this network configuration object in terms of its use in a workspace: * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned.""" - - BROKEN = "BROKEN" - UNATTACHED = "UNATTACHED" - VALID = "VALID" - WARNED = "WARNED" - + + BROKEN = 'BROKEN' + UNATTACHED = 'UNATTACHED' + VALID = 'VALID' + WARNED = 'WARNED' class WarningType(Enum): """The AWS resource associated with this warning: a subnet or a security group.""" - - SECURITY_GROUP = "securityGroup" - SUBNET = "subnet" - + + SECURITY_GROUP = 'securityGroup' + SUBNET = 'subnet' @dataclass class Workspace: account_id: Optional[str] = None """Databricks account ID.""" - + aws_region: Optional[str] = None """The AWS region of the workspace data plane (for example, `us-west-2`).""" - + azure_workspace_info: Optional[AzureWorkspaceInfo] = None - + cloud: Optional[str] = None """The cloud name. This field always has the value `gcp`.""" - + cloud_resource_container: Optional[CloudResourceContainer] = None """The general workspace configurations that are specific to cloud providers.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when the workspace was created.""" - + credentials_id: Optional[str] = None """ID of the workspace's credential configuration object.""" - - custom_tags: Optional[Dict[str, str]] = None + + custom_tags: Optional[Dict[str,str]] = None """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The key can be of maximum length of 127 characters, and cannot be empty.""" - + deployment_name: Optional[str] = None """The deployment name defines part of the subdomain for the workspace. The workspace URL for web application and REST APIs is `.cloud.databricks.com`. This value must be unique across all non-deleted deployments across all AWS regions.""" - + external_customer_info: Optional[ExternalCustomerInfo] = None """If this workspace is for a external customer, then external_customer_info is populated. 
If this workspace is not for a external customer, then external_customer_info is empty.""" - + gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None """The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP @@ -2118,29 +1801,29 @@ class Workspace: Excel spreadsheet. See [calculate subnet sizes for a new workspace]. [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html""" - + gke_config: Optional[GkeConfig] = None """The configurations for the GKE cluster of a Databricks workspace.""" - + is_no_public_ip_enabled: Optional[bool] = None """Whether no public IP is enabled for the workspace.""" - + location: Optional[str] = None """The Google Cloud region of the workspace data plane in your Google account (for example, `us-east4`).""" - + managed_services_customer_managed_key_id: Optional[str] = None """ID of the key configuration for encrypting managed services.""" - + network_id: Optional[str] = None """The network configuration ID that is attached to the workspace. This field is available only if the network is a customer-managed network.""" - + pricing_tier: Optional[PricingTier] = None """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. [AWS Pricing]: https://databricks.com/product/aws-pricing""" - + private_access_settings_id: Optional[str] = None """ID of the workspace's private access settings object. Only used for PrivateLink. You must specify this ID if you are using [AWS PrivateLink] for either front-end (user-to-workspace @@ -2150,173 +1833,103 @@ class Workspace: [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" - + storage_configuration_id: Optional[str] = None """ID of the workspace's storage configuration object.""" - + storage_customer_managed_key_id: Optional[str] = None """ID of the key configuration for encrypting workspace storage.""" - + workspace_id: Optional[int] = None """A unique integer ID for the workspace""" - + workspace_name: Optional[str] = None """The human-readable name of the workspace.""" - + workspace_status: Optional[WorkspaceStatus] = None """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` initially. 
Continue to check the status until the status is `RUNNING`.""" - + workspace_status_message: Optional[str] = None """Message describing the current workspace status.""" - + def as_dict(self) -> dict: """Serializes the Workspace into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.azure_workspace_info: - body["azure_workspace_info"] = self.azure_workspace_info.as_dict() - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container.as_dict() - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.external_customer_info: - body["external_customer_info"] = self.external_customer_info.as_dict() - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict() - if self.gke_config: - body["gke_config"] = self.gke_config.as_dict() - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - if self.workspace_name is not None: - body["workspace_name"] = self.workspace_name - if self.workspace_status is not None: - body["workspace_status"] = self.workspace_status.value - if self.workspace_status_message is not None: - body["workspace_status_message"] = self.workspace_status_message + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info.as_dict() + if self.cloud is not None: body['cloud'] = self.cloud + if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container.as_dict() + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.deployment_name is not None: body['deployment_name'] = self.deployment_name + if self.external_customer_info: body['external_customer_info'] = self.external_customer_info.as_dict() + if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict() + if self.gke_config: 
body['gke_config'] = self.gke_config.as_dict() + if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled + if self.location is not None: body['location'] = self.location + if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier.value + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + if self.workspace_status is not None: body['workspace_status'] = self.workspace_status.value + if self.workspace_status_message is not None: body['workspace_status_message'] = self.workspace_status_message return body def as_shallow_dict(self) -> dict: """Serializes the Workspace into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.azure_workspace_info: - body["azure_workspace_info"] = self.azure_workspace_info - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.external_customer_info: - body["external_customer_info"] = self.external_customer_info - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config - if self.gke_config: - body["gke_config"] = self.gke_config - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - if self.workspace_name is not None: - body["workspace_name"] = self.workspace_name - if self.workspace_status is not None: - body["workspace_status"] = self.workspace_status - if self.workspace_status_message 
is not None: - body["workspace_status_message"] = self.workspace_status_message + if self.account_id is not None: body['account_id'] = self.account_id + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info + if self.cloud is not None: body['cloud'] = self.cloud + if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.credentials_id is not None: body['credentials_id'] = self.credentials_id + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.deployment_name is not None: body['deployment_name'] = self.deployment_name + if self.external_customer_info: body['external_customer_info'] = self.external_customer_info + if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config + if self.gke_config: body['gke_config'] = self.gke_config + if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled + if self.location is not None: body['location'] = self.location + if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id + if self.network_id is not None: body['network_id'] = self.network_id + if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier + if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id + if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + if self.workspace_status is not None: body['workspace_status'] = self.workspace_status + if self.workspace_status_message is not None: body['workspace_status_message'] = self.workspace_status_message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Workspace: """Deserializes the Workspace from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - aws_region=d.get("aws_region", None), - azure_workspace_info=_from_dict(d, "azure_workspace_info", AzureWorkspaceInfo), - cloud=d.get("cloud", None), - cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer), - creation_time=d.get("creation_time", None), - credentials_id=d.get("credentials_id", None), - custom_tags=d.get("custom_tags", None), - deployment_name=d.get("deployment_name", None), - external_customer_info=_from_dict(d, "external_customer_info", ExternalCustomerInfo), - gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig), - gke_config=_from_dict(d, "gke_config", GkeConfig), - is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None), - location=d.get("location", None), - managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), - network_id=d.get("network_id", None), - pricing_tier=_enum(d, "pricing_tier", PricingTier), - private_access_settings_id=d.get("private_access_settings_id", None), - storage_configuration_id=d.get("storage_configuration_id", None), - 
storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), - workspace_id=d.get("workspace_id", None), - workspace_name=d.get("workspace_name", None), - workspace_status=_enum(d, "workspace_status", WorkspaceStatus), - workspace_status_message=d.get("workspace_status_message", None), - ) + return cls(account_id=d.get('account_id', None), aws_region=d.get('aws_region', None), azure_workspace_info=_from_dict(d, 'azure_workspace_info', AzureWorkspaceInfo), cloud=d.get('cloud', None), cloud_resource_container=_from_dict(d, 'cloud_resource_container', CloudResourceContainer), creation_time=d.get('creation_time', None), credentials_id=d.get('credentials_id', None), custom_tags=d.get('custom_tags', None), deployment_name=d.get('deployment_name', None), external_customer_info=_from_dict(d, 'external_customer_info', ExternalCustomerInfo), gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config', GcpManagedNetworkConfig), gke_config=_from_dict(d, 'gke_config', GkeConfig), is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None), location=d.get('location', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), network_id=d.get('network_id', None), pricing_tier=_enum(d, 'pricing_tier', PricingTier), private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_id=d.get('workspace_id', None), workspace_name=d.get('workspace_name', None), workspace_status=_enum(d, 'workspace_status', WorkspaceStatus), workspace_status_message=d.get('workspace_status_message', None)) + + class WorkspaceStatus(Enum): """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING` initially. Continue to check the status until the status is `RUNNING`.""" + + BANNED = 'BANNED' + CANCELLING = 'CANCELLING' + FAILED = 'FAILED' + NOT_PROVISIONED = 'NOT_PROVISIONED' + PROVISIONING = 'PROVISIONING' + RUNNING = 'RUNNING' - BANNED = "BANNED" - CANCELLING = "CANCELLING" - FAILED = "FAILED" - NOT_PROVISIONED = "NOT_PROVISIONED" - PROVISIONING = "PROVISIONING" - RUNNING = "RUNNING" class CredentialsAPI: @@ -2324,128 +1937,157 @@ class CredentialsAPI: service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the new workspace. A credential configuration encapsulates this role information, and its ID is used when creating a new workspace.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential: - """Create credential configuration. + + + + + + + def create(self + , credentials_name: str, aws_credentials: CreateCredentialAwsCredentials + ) -> Credential: + """Create credential configuration. + Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account ID) in the returned credential object, and configure the required access policy. - + Save the response's `credentials_id` field, which is the ID for your new credential configuration object. 
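        For illustration only, a minimal usage sketch (the name and role ARN are hypothetical, and `CreateCredentialStsRole` is assumed to carry the role ARN, as elsewhere in this provisioning module):

            from databricks.sdk import AccountClient
            from databricks.sdk.service import provisioning

            a = AccountClient()  # account-level client; credentials come from the environment
            creds = a.credentials.create(
                credentials_name="sdk-example-credentials",  # hypothetical name
                aws_credentials=provisioning.CreateCredentialAwsCredentials(
                    sts_role=provisioning.CreateCredentialStsRole(
                        role_arn="arn:aws:iam::123456789012:role/cross-account-role")))  # hypothetical ARN
            # Save this ID; it is required later when creating a workspace.
            print(creds.credentials_id)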
- + For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param credentials_name: str The human-readable name of the credential configuration object. :param aws_credentials: :class:`CreateCredentialAwsCredentials` - + :returns: :class:`Credential` """ body = {} - if aws_credentials is not None: - body["aws_credentials"] = aws_credentials.as_dict() - if credentials_name is not None: - body["credentials_name"] = credentials_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/credentials", body=body, headers=headers) + if aws_credentials is not None: body['aws_credentials'] = aws_credentials.as_dict() + if credentials_name is not None: body['credentials_name'] = credentials_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/credentials', body=body + + , headers=headers + ) return Credential.from_dict(res) - def delete(self, credentials_id: str): - """Delete credential configuration. + + + + def delete(self + , credentials_id: str + ): + """Delete credential configuration. + Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any workspace. - + :param credentials_id: str Databricks Account API credential configuration ID - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}", headers=headers - ) + + + - def get(self, credentials_id: str) -> Credential: + def get(self + , credentials_id: str + ) -> Credential: """Get credential configuration. - + Gets a Databricks credential configuration object for an account, both specified by ID. - + :param credentials_id: str Databricks Account API credential configuration ID - + :returns: :class:`Credential` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}' + + , headers=headers + ) return Credential.from_dict(res) + + + + def list(self) -> Iterator[Credential]: """Get all credential configurations. - + Gets all Databricks credential configurations associated with an account specified by ID. - + :returns: Iterator over :class:`Credential` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/credentials", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/credentials' + , headers=headers + ) return [Credential.from_dict(v) for v in res] - + + class EncryptionKeysAPI: """These APIs manage encryption key configurations for this workspace (optional). 
A key configuration encapsulates the AWS KMS key information and some information about how the key configuration can be used. There are two possible uses for key configurations: - + * Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can be used to encrypt a workspace's DBFS and EBS data in the data plane. - + In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview feature is available if your account is on the E2 version of the platform. Updating a running workspace with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you have an older workspace, it might not be on the E2 version of the platform. If you are not sure, contact your Databricks representative.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - use_cases: List[KeyUseCase], - *, - aws_key_info: Optional[CreateAwsKeyInfo] = None, - gcp_key_info: Optional[CreateGcpKeyInfo] = None, - ) -> CustomerManagedKey: - """Create encryption key configuration. + + + + + + + def create(self + , use_cases: List[KeyUseCase] + , * + , aws_key_info: Optional[CreateAwsKeyInfo] = None, gcp_key_info: Optional[CreateGcpKeyInfo] = None) -> CustomerManagedKey: + """Create encryption key configuration. + Creates a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks @@ -2453,62 +2095,67 @@ def create( specified as a workspace's customer-managed key for workspace storage, the key encrypts the workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume data. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions that currently support creation of Databricks workspaces. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :param use_cases: List[:class:`KeyUseCase`] The cases that the key can be used for. 
:param aws_key_info: :class:`CreateAwsKeyInfo` (optional) :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional) - + :returns: :class:`CustomerManagedKey` """ body = {} - if aws_key_info is not None: - body["aws_key_info"] = aws_key_info.as_dict() - if gcp_key_info is not None: - body["gcp_key_info"] = gcp_key_info.as_dict() - if use_cases is not None: - body["use_cases"] = [v.value for v in use_cases] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys", body=body, headers=headers - ) + if aws_key_info is not None: body['aws_key_info'] = aws_key_info.as_dict() + if gcp_key_info is not None: body['gcp_key_info'] = gcp_key_info.as_dict() + if use_cases is not None: body['use_cases'] = [v.value for v in use_cases] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys', body=body + + , headers=headers + ) return CustomerManagedKey.from_dict(res) - def delete(self, customer_managed_key_id: str): - """Delete encryption key configuration. + + + + def delete(self + , customer_managed_key_id: str + ): + """Delete encryption key configuration. + Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. - + :param customer_managed_key_id: str Databricks encryption key configuration ID. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}", - headers=headers, - ) + + + - def get(self, customer_managed_key_id: str) -> CustomerManagedKey: + def get(self + , customer_managed_key_id: str + ) -> CustomerManagedKey: """Get encryption key configuration. - + Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks @@ -2516,76 +2163,80 @@ def get(self, customer_managed_key_id: str) -> CustomerManagedKey: specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume data. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions. - + This operation is available only if your account is on the E2 version of the platform.", - + :param customer_managed_key_id: str Databricks encryption key configuration ID. 
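        A minimal retrieval sketch (the key configuration ID is hypothetical; assumes an `AccountClient` instance `a`):

            from databricks.sdk import AccountClient

            a = AccountClient()
            key = a.encryption_keys.get(customer_managed_key_id="<key-configuration-id>")  # hypothetical ID
            print(key.use_cases)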
- + :returns: :class:`CustomerManagedKey` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}' + + , headers=headers + ) return CustomerManagedKey.from_dict(res) + + + + def list(self) -> Iterator[CustomerManagedKey]: """Get all encryption key configurations. - + Gets all customer-managed key configuration objects for an account. If the key is specified as a workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions. - + This operation is available only if your account is on the E2 version of the platform. - + :returns: Iterator over :class:`CustomerManagedKey` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys' + , headers=headers + ) return [CustomerManagedKey.from_dict(v) for v in res] - + + class NetworksAPI: """These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used when creating a new workspace if you use customer-managed VPCs.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - network_name: str, - *, - gcp_network_info: Optional[GcpNetworkInfo] = None, - security_group_ids: Optional[List[str]] = None, - subnet_ids: Optional[List[str]] = None, - vpc_endpoints: Optional[NetworkVpcEndpoints] = None, - vpc_id: Optional[str] = None, - ) -> Network: - """Create network configuration. + + + + + + + def create(self + , network_name: str + , * + , gcp_network_info: Optional[GcpNetworkInfo] = None, security_group_ids: Optional[List[str]] = None, subnet_ids: Optional[List[str]] = None, vpc_endpoints: Optional[NetworkVpcEndpoints] = None, vpc_id: Optional[str] = None) -> Network: + """Create network configuration. + Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be used for new Databricks clusters. This requires a pre-existing VPC and subnets. - + :param network_name: str The human-readable name of the network configuration. :param gcp_network_info: :class:`GcpNetworkInfo` (optional) @@ -2600,121 +2251,138 @@ def create( :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional) If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink/ :param vpc_id: str (optional) The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations. 
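        A minimal sketch of registering a customer-managed VPC (all AWS IDs are hypothetical; assumes an `AccountClient` instance `a`):

            from databricks.sdk import AccountClient

            a = AccountClient()
            network = a.networks.create(
                network_name="sdk-example-network",  # hypothetical name
                vpc_id="vpc-0abc1234def567890",      # pre-existing VPC
                subnet_ids=["subnet-0aaa1111", "subnet-0bbb2222"],
                security_group_ids=["sg-0ccc3333"])
            print(network.network_id)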
- + :returns: :class:`Network` """ body = {} - if gcp_network_info is not None: - body["gcp_network_info"] = gcp_network_info.as_dict() - if network_name is not None: - body["network_name"] = network_name - if security_group_ids is not None: - body["security_group_ids"] = [v for v in security_group_ids] - if subnet_ids is not None: - body["subnet_ids"] = [v for v in subnet_ids] - if vpc_endpoints is not None: - body["vpc_endpoints"] = vpc_endpoints.as_dict() - if vpc_id is not None: - body["vpc_id"] = vpc_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/networks", body=body, headers=headers) + if gcp_network_info is not None: body['gcp_network_info'] = gcp_network_info.as_dict() + if network_name is not None: body['network_name'] = network_name + if security_group_ids is not None: body['security_group_ids'] = [v for v in security_group_ids] + if subnet_ids is not None: body['subnet_ids'] = [v for v in subnet_ids] + if vpc_endpoints is not None: body['vpc_endpoints'] = vpc_endpoints.as_dict() + if vpc_id is not None: body['vpc_id'] = vpc_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/networks', body=body + + , headers=headers + ) return Network.from_dict(res) - def delete(self, network_id: str): - """Delete a network configuration. + + + + def delete(self + , network_id: str + ): + """Delete a network configuration. + Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. - + This operation is available only if your account is on the E2 version of the platform. - + :param network_id: str Databricks Account API network configuration ID. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/networks/{network_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) + + + - def get(self, network_id: str) -> Network: + def get(self + , network_id: str + ) -> Network: """Get a network configuration. - + Gets a Databricks network configuration, which represents a cloud VPC and its resources. - + :param network_id: str Databricks Account API network configuration ID. - + :returns: :class:`Network` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/networks/{network_id}' + + , headers=headers + ) return Network.from_dict(res) + + + + def list(self) -> Iterator[Network]: """Get all network configurations. - + Gets a list of all Databricks network configurations for an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform. 
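        For illustration, iterating over the returned configurations (assumes an `AccountClient` instance `a`; `vpc_status` is assumed to be populated by the API):

            from databricks.sdk import AccountClient

            a = AccountClient()
            for network in a.networks.list():
                print(network.network_id, network.vpc_status)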
- + :returns: Iterator over :class:`Network` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/networks", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/networks' + , headers=headers + ) return [Network.from_dict(v) for v in res] - + + class PrivateAccessAPI: """These APIs manage private access settings for this account.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - private_access_settings_name: str, - region: str, - *, - allowed_vpc_endpoint_ids: Optional[List[str]] = None, - private_access_level: Optional[PrivateAccessLevel] = None, - public_access_enabled: Optional[bool] = None, - ) -> PrivateAccessSettings: - """Create private access settings. + + + + + + + def create(self + , private_access_settings_name: str, region: str + , * + , allowed_vpc_endpoint_ids: Optional[List[str]] = None, private_access_level: Optional[PrivateAccessLevel] = None, public_access_enabled: Optional[bool] = None) -> PrivateAccessSettings: + """Create private access settings. + Creates a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. - + You can share one private access settings with multiple workspaces in a single account. However, private access settings are specific to AWS regions, so only workspaces in the same AWS region can use a given private access settings object. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_name: str The human-readable name of the private access settings object. :param region: str @@ -2723,14 +2391,14 @@ def create( An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. - + Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints that in your account that can connect to your workspace over AWS PrivateLink. - + If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this control only works for PrivateLink connections. To control how your workspace is accessed via public internet, see [IP access lists]. - + [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) The private access level controls which VPC endpoints can connect to the UI or API of any workspace @@ -2742,130 +2410,132 @@ def create( Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. 
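        A minimal sketch (all values are hypothetical; assumes the account client exposes this API as `a.private_access`):

            from databricks.sdk import AccountClient

            a = AccountClient()
            pas = a.private_access.create(
                private_access_settings_name="sdk-example-pas",  # hypothetical name
                region="us-west-2",                              # hypothetical region
                public_access_enabled=False)
            # Reference this ID from a workspace's `private_access_settings_id` property.
            print(pas.private_access_settings_id)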
- + :returns: :class:`PrivateAccessSettings` """ body = {} - if allowed_vpc_endpoint_ids is not None: - body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids] - if private_access_level is not None: - body["private_access_level"] = private_access_level.value - if private_access_settings_name is not None: - body["private_access_settings_name"] = private_access_settings_name - if public_access_enabled is not None: - body["public_access_enabled"] = public_access_enabled - if region is not None: - body["region"] = region - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings", body=body, headers=headers - ) + if allowed_vpc_endpoint_ids is not None: body['allowed_vpc_endpoint_ids'] = [v for v in allowed_vpc_endpoint_ids] + if private_access_level is not None: body['private_access_level'] = private_access_level.value + if private_access_settings_name is not None: body['private_access_settings_name'] = private_access_settings_name + if public_access_enabled is not None: body['public_access_enabled'] = public_access_enabled + if region is not None: body['region'] = region + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings', body=body + + , headers=headers + ) return PrivateAccessSettings.from_dict(res) - def delete(self, private_access_settings_id: str): - """Delete a private access settings object. + + + + def delete(self + , private_access_settings_id: str + ): + """Delete a private access settings object. + Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", - headers=headers, - ) + + + - def get(self, private_access_settings_id: str) -> PrivateAccessSettings: + def get(self + , private_access_settings_id: str + ) -> PrivateAccessSettings: """Get a private access settings object. - + Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. 
- + :returns: :class:`PrivateAccessSettings` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}' + + , headers=headers + ) return PrivateAccessSettings.from_dict(res) + + + + def list(self) -> Iterator[PrivateAccessSettings]: """Get all private access settings objects. - + Gets a list of all private access settings objects for an account, specified by ID. - + :returns: Iterator over :class:`PrivateAccessSettings` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings' + , headers=headers + ) return [PrivateAccessSettings.from_dict(v) for v in res] - def replace( - self, - private_access_settings_id: str, - private_access_settings_name: str, - region: str, - *, - allowed_vpc_endpoint_ids: Optional[List[str]] = None, - private_access_level: Optional[PrivateAccessLevel] = None, - public_access_enabled: Optional[bool] = None, - ): - """Replace private access settings. + + + + def replace(self + , private_access_settings_id: str, private_access_settings_name: str, region: str + , * + , allowed_vpc_endpoint_ids: Optional[List[str]] = None, private_access_level: Optional[PrivateAccessLevel] = None, public_access_enabled: Optional[bool] = None): + """Replace private access settings. + Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. - + This operation completely overwrites your existing private access settings object attached to your workspaces. All workspaces attached to the private access settings are affected by any change. If `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of these changes might take several minutes to propagate to the workspace API. - + You can share one private access settings object with multiple workspaces in a single account. However, private access settings are specific to AWS regions, so only workspaces in the same AWS region can use a given private access settings object. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. :param private_access_settings_name: str @@ -2876,14 +2546,14 @@ def replace( An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. - + Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints that in your account that can connect to your workspace over AWS PrivateLink. 
- + If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this control only works for PrivateLink connections. To control how your workspace is accessed via public internet, see [IP access lists]. - + [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) The private access level controls which VPC endpoints can connect to the UI or API of any workspace @@ -2895,166 +2565,180 @@ def replace( Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. - - + + """ body = {} - if allowed_vpc_endpoint_ids is not None: - body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids] - if private_access_level is not None: - body["private_access_level"] = private_access_level.value - if private_access_settings_name is not None: - body["private_access_settings_name"] = private_access_settings_name - if public_access_enabled is not None: - body["public_access_enabled"] = public_access_enabled - if region is not None: - body["region"] = region - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", - body=body, - headers=headers, - ) - + if allowed_vpc_endpoint_ids is not None: body['allowed_vpc_endpoint_ids'] = [v for v in allowed_vpc_endpoint_ids] + if private_access_level is not None: body['private_access_level'] = private_access_level.value + if private_access_settings_name is not None: body['private_access_settings_name'] = private_access_settings_name + if public_access_enabled is not None: body['public_access_enabled'] = public_access_enabled + if region is not None: body['region'] = region + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}', body=body + + , headers=headers + ) + + + class StorageAPI: """These APIs manage storage configurations for this workspace. A root storage S3 bucket in your account is required to store objects like cluster logs, notebook revisions, and job results. You can also use the root storage S3 bucket for storage of non-production DBFS data. A storage configuration encapsulates this bucket information, and its ID is used when creating a new workspace.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration: - """Create new storage configuration. + + + + + + + def create(self + , storage_configuration_name: str, root_bucket_info: RootBucketInfo + ) -> StorageConfiguration: + """Create new storage configuration. + Creates new storage configuration for an account, specified by ID. Uploads a storage configuration object that represents the root AWS S3 bucket in your account. Databricks stores related workspace assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the required bucket policy. 
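Before going further into the storage configuration docs, a minimal usage sketch for the private access settings methods above. It assumes the standard `AccountClient` entry point and that the generated service is surfaced as `a.private_access`; the settings name and region below are illustrative:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import provisioning

    a = AccountClient()  # account credentials resolved from the environment

    # Create a private access settings object in the same AWS region as the
    # workspaces that will reference it.
    pas = a.private_access.create(
        private_access_settings_name="pl-settings-us-east-1",
        region="us-east-1",
        private_access_level=provisioning.PrivateAccessLevel.ACCOUNT,
        public_access_enabled=False,
    )
    # Wire pas.private_access_settings_id into the workspace's
    # `private_access_settings_id` property to enable PrivateLink.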
- + For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param storage_configuration_name: str The human-readable name of the storage configuration. :param root_bucket_info: :class:`RootBucketInfo` Root S3 bucket information. - + :returns: :class:`StorageConfiguration` """ body = {} - if root_bucket_info is not None: - body["root_bucket_info"] = root_bucket_info.as_dict() - if storage_configuration_name is not None: - body["storage_configuration_name"] = storage_configuration_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/storage-configurations", body=body, headers=headers - ) + if root_bucket_info is not None: body['root_bucket_info'] = root_bucket_info.as_dict() + if storage_configuration_name is not None: body['storage_configuration_name'] = storage_configuration_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations', body=body + + , headers=headers + ) return StorageConfiguration.from_dict(res) - def delete(self, storage_configuration_id: str): - """Delete storage configuration. + + + + def delete(self + , storage_configuration_id: str + ): + """Delete storage configuration. + Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}", - headers=headers, - ) + + + - def get(self, storage_configuration_id: str) -> StorageConfiguration: + def get(self + , storage_configuration_id: str + ) -> StorageConfiguration: """Get storage configuration. - + Gets a Databricks storage configuration for an account, both specified by ID. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - + :returns: :class:`StorageConfiguration` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}' + + , headers=headers + ) return StorageConfiguration.from_dict(res) + + + + def list(self) -> Iterator[StorageConfiguration]: """Get all storage configurations. - + Gets a list of all Databricks storage configurations for your account, specified by ID. 
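A short sketch of the storage configuration workflow described above, assuming an `AccountClient` `a` exposes this service as `a.storage` (bucket and configuration names are illustrative):

    from databricks.sdk import AccountClient
    from databricks.sdk.service import provisioning

    a = AccountClient()

    # Register the root S3 bucket that holds workspace assets
    # (DBFS, cluster logs, job results).
    storage = a.storage.create(
        storage_configuration_name="main-root-bucket",
        root_bucket_info=provisioning.RootBucketInfo(bucket_name="my-root-bucket"),
    )

    # Enumerate every storage configuration in the account.
    for sc in a.storage.list():
        print(sc.storage_configuration_id, sc.storage_configuration_name)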
- + :returns: Iterator over :class:`StorageConfiguration` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/storage-configurations", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations' + , headers=headers + ) return [StorageConfiguration.from_dict(v) for v in res] - + + class VpcEndpointsAPI: """These APIs manage VPC endpoint configurations for this account.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - vpc_endpoint_name: str, - *, - aws_vpc_endpoint_id: Optional[str] = None, - gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None, - region: Optional[str] = None, - ) -> VpcEndpoint: - """Create VPC endpoint configuration. + + + + + + + def create(self + , vpc_endpoint_name: str + , * + , aws_vpc_endpoint_id: Optional[str] = None, gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None, region: Optional[str] = None) -> VpcEndpoint: + """Create VPC endpoint configuration. + Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically accepts the VPC endpoint. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html - + :param vpc_endpoint_name: str The human-readable name of the VPC endpoint configuration. :param aws_vpc_endpoint_id: str (optional) @@ -3063,173 +2747,170 @@ def create( The Google Cloud specific information for this Private Service Connect endpoint. :param region: str (optional) The AWS region in which this VPC endpoint object exists. - + :returns: :class:`VpcEndpoint` """ body = {} - if aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = aws_vpc_endpoint_id - if gcp_vpc_endpoint_info is not None: - body["gcp_vpc_endpoint_info"] = gcp_vpc_endpoint_info.as_dict() - if region is not None: - body["region"] = region - if vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = vpc_endpoint_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints", body=body, headers=headers - ) + if aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = aws_vpc_endpoint_id + if gcp_vpc_endpoint_info is not None: body['gcp_vpc_endpoint_info'] = gcp_vpc_endpoint_info.as_dict() + if region is not None: body['region'] = region + if vpc_endpoint_name is not None: body['vpc_endpoint_name'] = vpc_endpoint_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints', body=body + + , headers=headers + ) return VpcEndpoint.from_dict(res) - def delete(self, vpc_endpoint_id: str): - """Delete VPC endpoint configuration.
+ Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}", headers=headers - ) + + + - def get(self, vpc_endpoint_id: str) -> VpcEndpoint: + def get(self + , vpc_endpoint_id: str + ) -> VpcEndpoint: """Get a VPC endpoint configuration. - + Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - + :returns: :class:`VpcEndpoint` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}' + + , headers=headers + ) return VpcEndpoint.from_dict(res) + + + + def list(self) -> Iterator[VpcEndpoint]: """Get all VPC endpoint configurations. - + Gets a list of all VPC endpoints for an account, specified by ID. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :returns: Iterator over :class:`VpcEndpoint` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints' + , headers=headers + ) return [VpcEndpoint.from_dict(v) for v in res] - + + class WorkspacesAPI: """These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into folders, and provides access to data and computational resources such as clusters and jobs. 
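Before the workspace APIs, a sketch of the VPC endpoint registration flow above, assuming an `AccountClient` `a` exposes it as `a.vpc_endpoints` (the endpoint ID and region are illustrative):

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Register an existing AWS VPC endpoint; the Databricks endpoint service
    # then accepts it automatically, as the create() docstring notes.
    vpce = a.vpc_endpoints.create(
        vpc_endpoint_name="frontend-vpce",
        aws_vpc_endpoint_id="vpce-0f1e2d3c4b5a69788",
        region="us-east-1",
    )
    print(vpce.vpc_endpoint_id)  # Databricks-side ID, used in PrivateLink allow lists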
- + These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account.""" - + def __init__(self, api_client): self._api = api_client + - def wait_get_workspace_running( - self, workspace_id: int, timeout=timedelta(minutes=20), callback: Optional[Callable[[Workspace], None]] = None - ) -> Workspace: - deadline = time.time() + timeout.total_seconds() - target_states = (WorkspaceStatus.RUNNING,) - failure_states = ( - WorkspaceStatus.BANNED, - WorkspaceStatus.FAILED, - ) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get(workspace_id=workspace_id) - status = poll.workspace_status - status_message = poll.workspace_status_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach RUNNING, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"workspace_id={workspace_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def create( - self, - workspace_name: str, - *, - aws_region: Optional[str] = None, - cloud: Optional[str] = None, - cloud_resource_container: Optional[CloudResourceContainer] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - deployment_name: Optional[str] = None, - gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, - gke_config: Optional[GkeConfig] = None, - is_no_public_ip_enabled: Optional[bool] = None, - location: Optional[str] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_id: Optional[str] = None, - pricing_tier: Optional[PricingTier] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, - ) -> Wait[Workspace]: - """Create a new workspace. + - Creates a new workspace. + + def wait_get_workspace_running(self, workspace_id: int, + timeout=timedelta(minutes=20), callback: Optional[Callable[[Workspace], None]] = None) -> Workspace: + deadline = time.time() + timeout.total_seconds() + target_states = (WorkspaceStatus.RUNNING, ) + failure_states = (WorkspaceStatus.BANNED, WorkspaceStatus.FAILED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(workspace_id=workspace_id) + status = poll.workspace_status + status_message = poll.workspace_status_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach RUNNING, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"workspace_id={workspace_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + def create(self + , workspace_name: str + , * + , aws_region: Optional[str] = None, cloud: Optional[str] = None, cloud_resource_container: Optional[CloudResourceContainer] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, pricing_tier: Optional[PricingTier] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]: + """Create a new workspace. + + Creates a new workspace. + **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request has been accepted and is in progress, but does not mean that the workspace deployed successfully and is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + :param workspace_name: str The workspace's human-readable name. :param aws_region: str (optional) @@ -3251,22 +2932,22 @@ def create( deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the set of characters that are allowed in a subdomain. - + To set this value, you must have a deployment name prefix. Contact your Databricks account team to add an account deployment name prefix to your account. - + Workspace deployment names follow the account prefix and a hyphen. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be `acme-workspace-1.cloud.databricks.com`. - + You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment name to only include the deployment prefix. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and the workspace URL is `acme.cloud.databricks.com`. - + This value must be unique across all non-deleted deployments across all AWS regions. - + If a new workspace omits this property, the server generates a unique deployment name for you with the pattern `dbc-xxxxxxxx-xxxx`. 
:param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional) @@ -3274,19 +2955,19 @@ def create( is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP range configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks detects an IP range overlap. - + Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - + The sizes of these IP ranges affect the maximum number of nodes for the workspace. - + **Important**: Confirm the IP ranges used by your Databricks workspace before creating the workspace. You cannot change them after your workspace is deployed. If the IP address ranges for your Databricks workspace are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To determine the address range sizes that you need, Databricks provides a calculator as a Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - + [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html :param gke_config: :class:`GkeConfig` (optional) The configurations for the GKE cluster of a Databricks workspace. @@ -3301,15 +2982,15 @@ def create( :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - + [AWS Pricing]: https://databricks.com/product/aws-pricing :param private_access_settings_id: str (optional) ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), back-end (data plane to control plane connection), or both connection types. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html :param storage_configuration_id: str (optional) :param storage_customer_managed_key_id: str (optional) The ID of the workspace's storage encryption key configuration object. This is used to encrypt the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The provided key configuration object property `use_cases` must contain `STORAGE`. - + :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details.
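A sketch of how the returned waiter is consumed, assuming an `AccountClient` `a` and previously created credential and storage configuration IDs (all literals below are placeholders):

    from datetime import timedelta
    from databricks.sdk import AccountClient

    a = AccountClient()

    # create() returns Wait[Workspace]; result() polls get() via
    # wait_get_workspace_running until the status reaches RUNNING.
    workspace = a.workspaces.create(
        workspace_name="analytics-prod",
        aws_region="us-west-2",
        credentials_id="<credential-configuration-id>",         # e.g. from a.credentials.create(...)
        storage_configuration_id="<storage-configuration-id>",  # e.g. from a.storage.create(...)
    ).result(timeout=timedelta(minutes=30))

    # create_and_wait(...) bundles the same two steps into a single call.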
""" body = {} - if aws_region is not None: - body["aws_region"] = aws_region - if cloud is not None: - body["cloud"] = cloud - if cloud_resource_container is not None: - body["cloud_resource_container"] = cloud_resource_container.as_dict() - if credentials_id is not None: - body["credentials_id"] = credentials_id - if custom_tags is not None: - body["custom_tags"] = custom_tags - if deployment_name is not None: - body["deployment_name"] = deployment_name - if gcp_managed_network_config is not None: - body["gcp_managed_network_config"] = gcp_managed_network_config.as_dict() - if gke_config is not None: - body["gke_config"] = gke_config.as_dict() - if is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = is_no_public_ip_enabled - if location is not None: - body["location"] = location - if managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id - if network_id is not None: - body["network_id"] = network_id - if pricing_tier is not None: - body["pricing_tier"] = pricing_tier.value - if private_access_settings_id is not None: - body["private_access_settings_id"] = private_access_settings_id - if storage_configuration_id is not None: - body["storage_configuration_id"] = storage_configuration_id - if storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = storage_customer_managed_key_id - if workspace_name is not None: - body["workspace_name"] = workspace_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/workspaces", body=body, headers=headers - ) - return Wait( - self.wait_get_workspace_running, - response=Workspace.from_dict(op_response), - workspace_id=op_response["workspace_id"], - ) - - def create_and_wait( - self, - workspace_name: str, - *, - aws_region: Optional[str] = None, - cloud: Optional[str] = None, - cloud_resource_container: Optional[CloudResourceContainer] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - deployment_name: Optional[str] = None, - gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, - gke_config: Optional[GkeConfig] = None, - is_no_public_ip_enabled: Optional[bool] = None, - location: Optional[str] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_id: Optional[str] = None, - pricing_tier: Optional[PricingTier] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, - timeout=timedelta(minutes=20), - ) -> Workspace: - return self.create( - aws_region=aws_region, - cloud=cloud, - cloud_resource_container=cloud_resource_container, - credentials_id=credentials_id, - custom_tags=custom_tags, - deployment_name=deployment_name, - gcp_managed_network_config=gcp_managed_network_config, - gke_config=gke_config, - is_no_public_ip_enabled=is_no_public_ip_enabled, - location=location, - managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, - network_id=network_id, - pricing_tier=pricing_tier, - private_access_settings_id=private_access_settings_id, - storage_configuration_id=storage_configuration_id, - storage_customer_managed_key_id=storage_customer_managed_key_id, - workspace_name=workspace_name, - ).result(timeout=timeout) - - def delete(self, workspace_id: int): - 
"""Delete a workspace. + if aws_region is not None: body['aws_region'] = aws_region + if cloud is not None: body['cloud'] = cloud + if cloud_resource_container is not None: body['cloud_resource_container'] = cloud_resource_container.as_dict() + if credentials_id is not None: body['credentials_id'] = credentials_id + if custom_tags is not None: body['custom_tags'] = custom_tags + if deployment_name is not None: body['deployment_name'] = deployment_name + if gcp_managed_network_config is not None: body['gcp_managed_network_config'] = gcp_managed_network_config.as_dict() + if gke_config is not None: body['gke_config'] = gke_config.as_dict() + if is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = is_no_public_ip_enabled + if location is not None: body['location'] = location + if managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id + if network_id is not None: body['network_id'] = network_id + if pricing_tier is not None: body['pricing_tier'] = pricing_tier.value + if private_access_settings_id is not None: body['private_access_settings_id'] = private_access_settings_id + if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id + if storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = storage_customer_managed_key_id + if workspace_name is not None: body['workspace_name'] = workspace_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/workspaces', body=body + + , headers=headers + ) + return Wait(self.wait_get_workspace_running + , response = Workspace.from_dict(op_response) + , workspace_id=op_response['workspace_id']) + + + def create_and_wait(self + , workspace_name: str + , * + , aws_region: Optional[str] = None, cloud: Optional[str] = None, cloud_resource_container: Optional[CloudResourceContainer] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, pricing_tier: Optional[PricingTier] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, + timeout=timedelta(minutes=20)) -> Workspace: + return self.create(aws_region=aws_region, cloud=cloud, cloud_resource_container=cloud_resource_container, credentials_id=credentials_id, custom_tags=custom_tags, deployment_name=deployment_name, gcp_managed_network_config=gcp_managed_network_config, gke_config=gke_config, is_no_public_ip_enabled=is_no_public_ip_enabled, location=location, managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, network_id=network_id, pricing_tier=pricing_tier, private_access_settings_id=private_access_settings_id, storage_configuration_id=storage_configuration_id, storage_customer_managed_key_id=storage_customer_managed_key_id, workspace_name=workspace_name).result(timeout=timeout) + + + + def delete(self + , workspace_id: int + ): + """Delete a workspace. + Terminates and deletes a Databricks workspace. 
From an API perspective, deletion is immediate. However, it might take a few minutes for all workspace resources to be deleted, depending on the size and number of workspace resources. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :param workspace_id: int Workspace ID. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers) + + + - def get(self, workspace_id: int) -> Workspace: + def get(self + , workspace_id: int + ) -> Workspace: """Get a workspace. - + Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + For information about how to create a new workspace with this API **including error handling**, see [Create a new workspace using the Account API]. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. - + :returns: :class:`Workspace` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}' + + , headers=headers + ) return Workspace.from_dict(res) + + + + def list(self) -> Iterator[Workspace]: """Get all workspaces. - + Gets a list of all workspaces associated with an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :returns: Iterator over :class:`Workspace` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces' + , headers=headers + ) return [Workspace.from_dict(v) for v in res] - def update( - self, - workspace_id: int, - *, - aws_region: Optional[str] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_connectivity_config_id: Optional[str] = None, - network_id: Optional[str] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, - ) -> Wait[Workspace]: - """Update workspace configuration.
+ + + + def update(self + , workspace_id: int + , * + , aws_region: Optional[str] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]: + """Update workspace configuration. + Updates a workspace configuration for either a running workspace or a failed workspace. The elements that can be updated vary between these two use cases. - + ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace deployment for some fields, but not all fields. For a failed workspace, this request supports updates to the following fields only: - Credential configuration ID - Storage configuration ID - Network @@ -3525,14 +3161,14 @@ def update( update the network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from the workspace once attached, you can only switch to another one. - + After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` requests with the workspace ID and check the workspace status. The update is successful if the status changes to `RUNNING`. - + For information about how to create a new workspace with this API **including error handling**, see [Create a new workspace using the Account API]. - + ### Update a running workspace You can update a Databricks workspace configuration for running workspaces for some fields, but not all fields. For a running workspace, this request supports updating the following fields only: - Credential configuration ID - Network configuration ID. Used @@ -3558,12 +3194,12 @@ def update( network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from the workspace once attached, you can only switch to another one. - + **Important**: To update a running workspace, your workspace must have no running compute resources that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not terminate all cluster instances in the workspace before calling this API, the request will fail. - + ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` requests with the workspace ID and check the workspace status and the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes @@ -3579,22 +3215,22 @@ def update( silently to its original configuration. After the workspace has been updated, you cannot use or create clusters for another 20 minutes. If you create or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could cause other unexpected behavior. - + If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to the DBFS API.
If you are modifying _only the managed services key configuration_, you can omit the 20 minute wait. - + **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment types and subscription types. If you have questions about availability, contact your Databricks representative. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. :param aws_region: str (optional) @@ -3624,66 +3260,37 @@ def update( :param storage_customer_managed_key_id: str (optional) The ID of the key configuration object for workspace storage. This parameter is available for updating both failed and running workspaces. - + :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. """ body = {} - if aws_region is not None: - body["aws_region"] = aws_region - if credentials_id is not None: - body["credentials_id"] = credentials_id - if custom_tags is not None: - body["custom_tags"] = custom_tags - if managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id - if network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = network_connectivity_config_id - if network_id is not None: - body["network_id"] = network_id - if private_access_settings_id is not None: - body["private_access_settings_id"] = private_access_settings_id - if storage_configuration_id is not None: - body["storage_configuration_id"] = storage_configuration_id - if storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = storage_customer_managed_key_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do( - "PATCH", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", body=body, headers=headers - ) - return Wait( - self.wait_get_workspace_running, response=UpdateResponse.from_dict(op_response), workspace_id=workspace_id - ) - - def update_and_wait( - self, - workspace_id: int, - *, - aws_region: Optional[str] = None, - credentials_id: Optional[str] = None, - custom_tags: Optional[Dict[str, str]] = None, - managed_services_customer_managed_key_id: Optional[str] = None, - network_connectivity_config_id: Optional[str] = None, - network_id: Optional[str] = None, - private_access_settings_id: Optional[str] = None, - storage_configuration_id: Optional[str] = None, - storage_customer_managed_key_id: Optional[str] = None, - timeout=timedelta(minutes=20), - ) -> Workspace: - return self.update( - aws_region=aws_region, - credentials_id=credentials_id, - custom_tags=custom_tags, - managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, - network_connectivity_config_id=network_connectivity_config_id, - network_id=network_id, - private_access_settings_id=private_access_settings_id, - storage_configuration_id=storage_configuration_id, - storage_customer_managed_key_id=storage_customer_managed_key_id, - workspace_id=workspace_id, - ).result(timeout=timeout) + if aws_region is not None: body['aws_region'] = aws_region + if 
credentials_id is not None: body['credentials_id'] = credentials_id + if custom_tags is not None: body['custom_tags'] = custom_tags + if managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id + if network_connectivity_config_id is not None: body['network_connectivity_config_id'] = network_connectivity_config_id + if network_id is not None: body['network_id'] = network_id + if private_access_settings_id is not None: body['private_access_settings_id'] = private_access_settings_id + if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id + if storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = storage_customer_managed_key_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}', body=body + + , headers=headers + ) + return Wait(self.wait_get_workspace_running + , response = UpdateResponse.from_dict(op_response) + , workspace_id=workspace_id) + + + def update_and_wait(self + , workspace_id: int + , * + , aws_region: Optional[str] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, + timeout=timedelta(minutes=20)) -> Workspace: + return self.update(aws_region=aws_region, credentials_id=credentials_id, custom_tags=custom_tags, managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, network_connectivity_config_id=network_connectivity_config_id, network_id=network_id, private_access_settings_id=private_access_settings_id, storage_configuration_id=storage_configuration_id, storage_customer_managed_key_id=storage_customer_managed_key_id, workspace_id=workspace_id).result(timeout=timeout) + + \ No newline at end of file diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py new file mode 100755 index 000000000..c304fd95d --- /dev/null +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -0,0 +1,322 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
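The new qualitymonitorv2 service added below is small enough to preview in one usage sketch. It assumes the generated API is surfaced on `WorkspaceClient` as `w.quality_monitor_v2` (the service naming convention suggests this, but the accessor name is an assumption), with an illustrative schema UUID:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.qualitymonitorv2 import QualityMonitor

    w = WorkspaceClient()

    # Attach an anomaly-detection quality monitor to a Unity Catalog schema.
    monitor = w.quality_monitor_v2.create_quality_monitor(  # assumed accessor name
        QualityMonitor(object_type="schema", object_id="01234567-89ab-cdef-0123-456789abcdef")
    )

    # list_quality_monitor() pages transparently via next_page_token.
    for m in w.quality_monitor_v2.list_quality_monitor():
        print(m.object_type, m.object_id, m.anomaly_detection_config)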
+ +from __future__ import annotations +from dataclasses import dataclass +from datetime import timedelta +from enum import Enum +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading + +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token + +_LOG = logging.getLogger('databricks.sdk') + + + +# all definitions in this file are in alphabetical order + +@dataclass +class AnomalyDetectionConfig: + last_run_id: Optional[str] = None + """Run id of the last run of the workflow""" + + latest_run_status: Optional[AnomalyDetectionRunStatus] = None + """The status of the last run of the workflow.""" + + def as_dict(self) -> dict: + """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_run_id is not None: body['last_run_id'] = self.last_run_id + if self.latest_run_status is not None: body['latest_run_status'] = self.latest_run_status.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_run_id is not None: body['last_run_id'] = self.last_run_id + if self.latest_run_status is not None: body['latest_run_status'] = self.latest_run_status + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: + """Deserializes the AnomalyDetectionConfig from a dictionary.""" + return cls(last_run_id=d.get('last_run_id', None), latest_run_status=_enum(d, 'latest_run_status', AnomalyDetectionRunStatus)) + + + + +class AnomalyDetectionRunStatus(Enum): + """Status of Anomaly Detection Job Run""" + + ANOMALY_DETECTION_RUN_STATUS_CANCELED = 'ANOMALY_DETECTION_RUN_STATUS_CANCELED' + ANOMALY_DETECTION_RUN_STATUS_FAILED = 'ANOMALY_DETECTION_RUN_STATUS_FAILED' + ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED = 'ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED' + ANOMALY_DETECTION_RUN_STATUS_PENDING = 'ANOMALY_DETECTION_RUN_STATUS_PENDING' + ANOMALY_DETECTION_RUN_STATUS_RUNNING = 'ANOMALY_DETECTION_RUN_STATUS_RUNNING' + ANOMALY_DETECTION_RUN_STATUS_SUCCESS = 'ANOMALY_DETECTION_RUN_STATUS_SUCCESS' + ANOMALY_DETECTION_RUN_STATUS_UNKNOWN = 'ANOMALY_DETECTION_RUN_STATUS_UNKNOWN' + ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR = 'ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR' + + + + + + + +@dataclass +class DeleteQualityMonitorResponse: + def as_dict(self) -> dict: + """Serializes the DeleteQualityMonitorResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteQualityMonitorResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteQualityMonitorResponse: + """Deserializes the DeleteQualityMonitorResponse from a dictionary.""" + return cls() + + + + + + + + + + +@dataclass +class ListQualityMonitorResponse: + next_page_token: Optional[str] = None + + quality_monitors: Optional[List[QualityMonitor]] = None + + def as_dict(self) -> dict: + """Serializes the ListQualityMonitorResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token 
+ if self.quality_monitors: body['quality_monitors'] = [v.as_dict() for v in self.quality_monitors] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListQualityMonitorResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.quality_monitors: body['quality_monitors'] = self.quality_monitors + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListQualityMonitorResponse: + """Deserializes the ListQualityMonitorResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), quality_monitors=_repeated_dict(d, 'quality_monitors', QualityMonitor)) + + + + +@dataclass +class QualityMonitor: + object_type: str + """The type of the monitored object. Can be one of the following: schema.""" + + object_id: str + """The uuid of the request object. For example, schema id.""" + + anomaly_detection_config: Optional[AnomalyDetectionConfig] = None + + def as_dict(self) -> dict: + """Serializes the QualityMonitor into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.anomaly_detection_config: body['anomaly_detection_config'] = self.anomaly_detection_config.as_dict() + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + + def as_shallow_dict(self) -> dict: + """Serializes the QualityMonitor into a shallow dictionary of its immediate attributes.""" + body = {} + if self.anomaly_detection_config: body['anomaly_detection_config'] = self.anomaly_detection_config + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> QualityMonitor: + """Deserializes the QualityMonitor from a dictionary.""" + return cls(anomaly_detection_config=_from_dict(d, 'anomaly_detection_config', AnomalyDetectionConfig), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + + + + + + + + +class QualityMonitorV2API: + """Manage data quality of UC objects (currently support `schema`)""" + + def __init__(self, api_client): + self._api = api_client + + + + + + + + + + def create_quality_monitor(self + , quality_monitor: QualityMonitor + ) -> QualityMonitor: + """Create a quality monitor. + + Create a quality monitor on UC object + + :param quality_monitor: :class:`QualityMonitor` + + :returns: :class:`QualityMonitor` + """ + body = quality_monitor.as_dict() + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/quality-monitors', body=body + + , headers=headers + ) + return QualityMonitor.from_dict(res) + + + + + + def delete_quality_monitor(self + , object_type: str, object_id: str + ): + """Delete a quality monitor. + + Delete a quality monitor on UC object + + :param object_type: str + The type of the monitored object. Can be one of the following: schema. + :param object_id: str + The uuid of the request object. For example, schema id. + + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/quality-monitors/{object_type}/{object_id}' + + , headers=headers + ) + + + + + + + def get_quality_monitor(self + , object_type: str, object_id: str + ) -> QualityMonitor: + """Read a quality monitor. 
+ + Read a quality monitor on UC object + + :param object_type: str + The type of the monitored object. Can be one of the following: schema. + :param object_id: str + The uuid of the request object. For example, schema id. + + :returns: :class:`QualityMonitor` + """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/quality-monitors/{object_type}/{object_id}' + + , headers=headers + ) + return QualityMonitor.from_dict(res) + + + + + + def list_quality_monitor(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[QualityMonitor]: + """List quality monitors. + + (Unimplemented) List quality monitors + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`QualityMonitor` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + + while True: + json = self._api.do('GET','/api/2.0/quality-monitors', query=query + + , headers=headers + ) + if 'quality_monitors' in json: + for v in json['quality_monitors']: + yield QualityMonitor.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + + def update_quality_monitor(self + , object_type: str, object_id: str, quality_monitor: QualityMonitor + ) -> QualityMonitor: + """Update a quality monitor. + + (Unimplemented) Update a quality monitor on UC object + + :param object_type: str + The type of the monitored object. Can be one of the following: schema. + :param object_id: str + The uuid of the request object. For example, schema id. + :param quality_monitor: :class:`QualityMonitor` + + :returns: :class:`QualityMonitor` + """ + body = quality_monitor.as_dict() + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/quality-monitors/{object_type}/{object_id}', body=body + + , headers=headers + ) + return QualityMonitor.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 6feb1fa01..3d4e9ed1c 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -1,26 +1,25 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging -import random -import threading -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, BinaryIO, Callable, Dict, Iterator, List, Optional - +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class Ai21LabsConfig: @@ -28,37 +27,32 @@ class Ai21LabsConfig: """The Databricks secret key reference for an AI21 Labs API key. 
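The secret-reference-or-plaintext pattern described here repeats across every provider config in this file. A sketch of the preferred form, assuming the usual `{{secrets/<scope>/<key>}}` reference syntax for Databricks secrets (scope and key names are illustrative):

    from databricks.sdk.service.serving import Ai21LabsConfig

    # Reference a Databricks secret instead of pasting the key inline;
    # exactly one of the two fields should be provided.
    config = Ai21LabsConfig(ai21labs_api_key="{{secrets/llm-keys/ai21labs}}")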
If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" - + ai21labs_api_key_plaintext: Optional[str] = None """An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the Ai21LabsConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai21labs_api_key is not None: - body["ai21labs_api_key"] = self.ai21labs_api_key - if self.ai21labs_api_key_plaintext is not None: - body["ai21labs_api_key_plaintext"] = self.ai21labs_api_key_plaintext + if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key + if self.ai21labs_api_key_plaintext is not None: body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the Ai21LabsConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai21labs_api_key is not None: - body["ai21labs_api_key"] = self.ai21labs_api_key - if self.ai21labs_api_key_plaintext is not None: - body["ai21labs_api_key_plaintext"] = self.ai21labs_api_key_plaintext + if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key + if self.ai21labs_api_key_plaintext is not None: body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Ai21LabsConfig: """Deserializes the Ai21LabsConfig from a dictionary.""" - return cls( - ai21labs_api_key=d.get("ai21labs_api_key", None), - ai21labs_api_key_plaintext=d.get("ai21labs_api_key_plaintext", None), - ) + return cls(ai21labs_api_key=d.get('ai21labs_api_key', None), ai21labs_api_key_plaintext=d.get('ai21labs_api_key_plaintext', None)) + + @dataclass @@ -66,62 +60,48 @@ class AiGatewayConfig: fallback_config: Optional[FallbackConfig] = None """Configuration for traffic fallback which auto fallbacks to other served entities if the request to a served entity fails with certain error codes, to increase availability.""" - + guardrails: Optional[AiGatewayGuardrails] = None """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.""" - + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None """Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.""" - + rate_limits: Optional[List[AiGatewayRateLimit]] = None """Configuration for rate limits which can be set to limit endpoint traffic.""" - + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None """Configuration to enable usage tracking using system tables. 
These tables allow you to monitor operational usage on endpoints and their associated costs.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config.as_dict() - if self.guardrails: - body["guardrails"] = self.guardrails.as_dict() - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config.as_dict() - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config.as_dict() + if self.fallback_config: body['fallback_config'] = self.fallback_config.as_dict() + if self.guardrails: body['guardrails'] = self.guardrails.as_dict() + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict() + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config - if self.guardrails: - body["guardrails"] = self.guardrails - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config + if self.fallback_config: body['fallback_config'] = self.fallback_config + if self.guardrails: body['guardrails'] = self.guardrails + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayConfig: """Deserializes the AiGatewayConfig from a dictionary.""" - return cls( - fallback_config=_from_dict(d, "fallback_config", FallbackConfig), - guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), - inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig), - rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), - usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), - ) + return cls(fallback_config=_from_dict(d, 'fallback_config', FallbackConfig), guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails), inference_table_config=_from_dict(d, 'inference_table_config', AiGatewayInferenceTableConfig), rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit), usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig)) + + @dataclass @@ -129,118 +109,102 @@ class AiGatewayGuardrailParameters: invalid_keywords: Optional[List[str]] = None """List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.""" - + pii: Optional[AiGatewayGuardrailPiiBehavior] = None """Configuration for guardrail PII filter.""" - + safety: Optional[bool] = None """Indicates whether the safety filter is enabled.""" - + valid_topics: Optional[List[str]] = None """The list of allowed topics. 
Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayGuardrailParameters into a dictionary suitable for use as a JSON request body.""" body = {} - if self.invalid_keywords: - body["invalid_keywords"] = [v for v in self.invalid_keywords] - if self.pii: - body["pii"] = self.pii.as_dict() - if self.safety is not None: - body["safety"] = self.safety - if self.valid_topics: - body["valid_topics"] = [v for v in self.valid_topics] + if self.invalid_keywords: body['invalid_keywords'] = [v for v in self.invalid_keywords] + if self.pii: body['pii'] = self.pii.as_dict() + if self.safety is not None: body['safety'] = self.safety + if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics] return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayGuardrailParameters into a shallow dictionary of its immediate attributes.""" body = {} - if self.invalid_keywords: - body["invalid_keywords"] = self.invalid_keywords - if self.pii: - body["pii"] = self.pii - if self.safety is not None: - body["safety"] = self.safety - if self.valid_topics: - body["valid_topics"] = self.valid_topics + if self.invalid_keywords: body['invalid_keywords'] = self.invalid_keywords + if self.pii: body['pii'] = self.pii + if self.safety is not None: body['safety'] = self.safety + if self.valid_topics: body['valid_topics'] = self.valid_topics return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayGuardrailParameters: """Deserializes the AiGatewayGuardrailParameters from a dictionary.""" - return cls( - invalid_keywords=d.get("invalid_keywords", None), - pii=_from_dict(d, "pii", AiGatewayGuardrailPiiBehavior), - safety=d.get("safety", None), - valid_topics=d.get("valid_topics", None), - ) + return cls(invalid_keywords=d.get('invalid_keywords', None), pii=_from_dict(d, 'pii', AiGatewayGuardrailPiiBehavior), safety=d.get('safety', None), valid_topics=d.get('valid_topics', None)) + + @dataclass class AiGatewayGuardrailPiiBehavior: behavior: Optional[AiGatewayGuardrailPiiBehaviorBehavior] = None """Configuration for input guardrail filters.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body.""" body = {} - if self.behavior is not None: - body["behavior"] = self.behavior.value + if self.behavior is not None: body['behavior'] = self.behavior.value return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayGuardrailPiiBehavior into a shallow dictionary of its immediate attributes.""" body = {} - if self.behavior is not None: - body["behavior"] = self.behavior + if self.behavior is not None: body['behavior'] = self.behavior return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayGuardrailPiiBehavior: """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary.""" - return cls(behavior=_enum(d, "behavior", AiGatewayGuardrailPiiBehaviorBehavior)) - + return cls(behavior=_enum(d, 'behavior', AiGatewayGuardrailPiiBehaviorBehavior)) + -class AiGatewayGuardrailPiiBehaviorBehavior(Enum): - BLOCK = "BLOCK" - NONE = "NONE" +class AiGatewayGuardrailPiiBehaviorBehavior(Enum): + + + BLOCK = 'BLOCK' + NONE = 'NONE' @dataclass class AiGatewayGuardrails: input: Optional[AiGatewayGuardrailParameters] = None """Configuration for input guardrail filters.""" - + output: Optional[AiGatewayGuardrailParameters] = None """Configuration for output guardrail filters.""" - + def 
as_dict(self) -> dict: """Serializes the AiGatewayGuardrails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.input: - body["input"] = self.input.as_dict() - if self.output: - body["output"] = self.output.as_dict() + if self.input: body['input'] = self.input.as_dict() + if self.output: body['output'] = self.output.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayGuardrails into a shallow dictionary of its immediate attributes.""" body = {} - if self.input: - body["input"] = self.input - if self.output: - body["output"] = self.output + if self.input: body['input'] = self.input + if self.output: body['output'] = self.output return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayGuardrails: """Deserializes the AiGatewayGuardrails from a dictionary.""" - return cls( - input=_from_dict(d, "input", AiGatewayGuardrailParameters), - output=_from_dict(d, "output", AiGatewayGuardrailParameters), - ) + return cls(input=_from_dict(d, 'input', AiGatewayGuardrailParameters), output=_from_dict(d, 'output', AiGatewayGuardrailParameters)) + + @dataclass @@ -248,233 +212,194 @@ class AiGatewayInferenceTableConfig: catalog_name: Optional[str] = None """The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.""" - + enabled: Optional[bool] = None """Indicates whether the inference table is enabled.""" - + schema_name: Optional[str] = None """The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.""" - + table_name_prefix: Optional[str] = None """The prefix of the table in Unity Catalog. 
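# A minimal sketch of composing the guardrail dataclasses above into a request
# body. The keyword and topic values are placeholders; this only exercises
# as_dict() and calls no endpoint.
from databricks.sdk.service.serving import (AiGatewayGuardrailParameters,
                                            AiGatewayGuardrailPiiBehavior,
                                            AiGatewayGuardrailPiiBehaviorBehavior,
                                            AiGatewayGuardrails)

guardrails = AiGatewayGuardrails(
    input=AiGatewayGuardrailParameters(
        invalid_keywords=["internal_codename"],
        pii=AiGatewayGuardrailPiiBehavior(
            behavior=AiGatewayGuardrailPiiBehaviorBehavior.BLOCK),
        safety=True),
    output=AiGatewayGuardrailParameters(valid_topics=["support", "billing"]))
assert guardrails.as_dict()["input"]["pii"]["behavior"] == "BLOCK"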
NOTE: On update, you have to disable inference table first in order to change the prefix name.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayInferenceTableConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.enabled is not None: - body["enabled"] = self.enabled - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.table_name_prefix is not None: - body["table_name_prefix"] = self.table_name_prefix + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayInferenceTableConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.enabled is not None: - body["enabled"] = self.enabled - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.table_name_prefix is not None: - body["table_name_prefix"] = self.table_name_prefix + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayInferenceTableConfig: """Deserializes the AiGatewayInferenceTableConfig from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - enabled=d.get("enabled", None), - schema_name=d.get("schema_name", None), - table_name_prefix=d.get("table_name_prefix", None), - ) + return cls(catalog_name=d.get('catalog_name', None), enabled=d.get('enabled', None), schema_name=d.get('schema_name', None), table_name_prefix=d.get('table_name_prefix', None)) + + @dataclass class AiGatewayRateLimit: calls: int """Used to specify how many calls are allowed for a key within the renewal_period.""" - + renewal_period: AiGatewayRateLimitRenewalPeriod """Renewal period field for a rate limit. Currently, only 'minute' is supported.""" - + key: Optional[AiGatewayRateLimitKey] = None """Key field for a rate limit. 
Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayRateLimit into a dictionary suitable for use as a JSON request body.""" body = {} - if self.calls is not None: - body["calls"] = self.calls - if self.key is not None: - body["key"] = self.key.value - if self.renewal_period is not None: - body["renewal_period"] = self.renewal_period.value + if self.calls is not None: body['calls'] = self.calls + if self.key is not None: body['key'] = self.key.value + if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayRateLimit into a shallow dictionary of its immediate attributes.""" body = {} - if self.calls is not None: - body["calls"] = self.calls - if self.key is not None: - body["key"] = self.key - if self.renewal_period is not None: - body["renewal_period"] = self.renewal_period + if self.calls is not None: body['calls'] = self.calls + if self.key is not None: body['key'] = self.key + if self.renewal_period is not None: body['renewal_period'] = self.renewal_period return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayRateLimit: """Deserializes the AiGatewayRateLimit from a dictionary.""" - return cls( - calls=d.get("calls", None), - key=_enum(d, "key", AiGatewayRateLimitKey), - renewal_period=_enum(d, "renewal_period", AiGatewayRateLimitRenewalPeriod), - ) - + return cls(calls=d.get('calls', None), key=_enum(d, 'key', AiGatewayRateLimitKey), renewal_period=_enum(d, 'renewal_period', AiGatewayRateLimitRenewalPeriod)) + -class AiGatewayRateLimitKey(Enum): - ENDPOINT = "endpoint" - USER = "user" +class AiGatewayRateLimitKey(Enum): + + + ENDPOINT = 'endpoint' + USER = 'user' class AiGatewayRateLimitRenewalPeriod(Enum): - - MINUTE = "minute" - + + + MINUTE = 'minute' @dataclass class AiGatewayUsageTrackingConfig: enabled: Optional[bool] = None """Whether to enable usage tracking.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayUsageTrackingConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.enabled is not None: body['enabled'] = self.enabled return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayUsageTrackingConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.enabled is not None: body['enabled'] = self.enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayUsageTrackingConfig: """Deserializes the AiGatewayUsageTrackingConfig from a dictionary.""" - return cls(enabled=d.get("enabled", None)) + return cls(enabled=d.get('enabled', None)) + + @dataclass class AmazonBedrockConfig: aws_region: str """The AWS region to use. Bedrock has to be enabled there.""" - + bedrock_provider: AmazonBedrockConfigBedrockProvider """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.""" - + aws_access_key_id: Optional[str] = None """The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id_plaintext`. 
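# A minimal sketch of an AiGatewayConfig combining the rate-limit and
# usage-tracking dataclasses above. The limit of 100 calls per user per minute
# is a placeholder value.
from databricks.sdk.service.serving import (AiGatewayConfig, AiGatewayRateLimit,
                                            AiGatewayRateLimitKey,
                                            AiGatewayRateLimitRenewalPeriod,
                                            AiGatewayUsageTrackingConfig)

gateway = AiGatewayConfig(
    rate_limits=[AiGatewayRateLimit(
        calls=100,
        renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
        key=AiGatewayRateLimitKey.USER)],
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True))
assert gateway.as_dict()["rate_limits"][0]["renewal_period"] == "minute"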
You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.""" - + aws_access_key_id_plaintext: Optional[str] = None """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.""" - + aws_secret_access_key: Optional[str] = None """The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" - + aws_secret_access_key_plaintext: Optional[str] = None """An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the external model will use to access AWS resources. You must authenticate using an instance profile or access keys. If you prefer to authenticate using access keys, see `aws_access_key_id`, `aws_access_key_id_plaintext`, `aws_secret_access_key` and `aws_secret_access_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_access_key_id is not None: - body["aws_access_key_id"] = self.aws_access_key_id - if self.aws_access_key_id_plaintext is not None: - body["aws_access_key_id_plaintext"] = self.aws_access_key_id_plaintext - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.aws_secret_access_key is not None: - body["aws_secret_access_key"] = self.aws_secret_access_key - if self.aws_secret_access_key_plaintext is not None: - body["aws_secret_access_key_plaintext"] = self.aws_secret_access_key_plaintext - if self.bedrock_provider is not None: - body["bedrock_provider"] = self.bedrock_provider.value - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn + if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id + if self.aws_access_key_id_plaintext is not None: body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key + if self.aws_secret_access_key_plaintext is not None: body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext + if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn return body def as_shallow_dict(self) -> dict: """Serializes the AmazonBedrockConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_access_key_id is not None: - body["aws_access_key_id"] = self.aws_access_key_id - if 
self.aws_access_key_id_plaintext is not None: - body["aws_access_key_id_plaintext"] = self.aws_access_key_id_plaintext - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.aws_secret_access_key is not None: - body["aws_secret_access_key"] = self.aws_secret_access_key - if self.aws_secret_access_key_plaintext is not None: - body["aws_secret_access_key_plaintext"] = self.aws_secret_access_key_plaintext - if self.bedrock_provider is not None: - body["bedrock_provider"] = self.bedrock_provider - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn + if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id + if self.aws_access_key_id_plaintext is not None: body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext + if self.aws_region is not None: body['aws_region'] = self.aws_region + if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key + if self.aws_secret_access_key_plaintext is not None: body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext + if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AmazonBedrockConfig: """Deserializes the AmazonBedrockConfig from a dictionary.""" - return cls( - aws_access_key_id=d.get("aws_access_key_id", None), - aws_access_key_id_plaintext=d.get("aws_access_key_id_plaintext", None), - aws_region=d.get("aws_region", None), - aws_secret_access_key=d.get("aws_secret_access_key", None), - aws_secret_access_key_plaintext=d.get("aws_secret_access_key_plaintext", None), - bedrock_provider=_enum(d, "bedrock_provider", AmazonBedrockConfigBedrockProvider), - instance_profile_arn=d.get("instance_profile_arn", None), - ) - + return cls(aws_access_key_id=d.get('aws_access_key_id', None), aws_access_key_id_plaintext=d.get('aws_access_key_id_plaintext', None), aws_region=d.get('aws_region', None), aws_secret_access_key=d.get('aws_secret_access_key', None), aws_secret_access_key_plaintext=d.get('aws_secret_access_key_plaintext', None), bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider), instance_profile_arn=d.get('instance_profile_arn', None)) + -class AmazonBedrockConfigBedrockProvider(Enum): - AI21LABS = "ai21labs" - AMAZON = "amazon" - ANTHROPIC = "anthropic" - COHERE = "cohere" +class AmazonBedrockConfigBedrockProvider(Enum): + + + AI21LABS = 'ai21labs' + AMAZON = 'amazon' + ANTHROPIC = 'anthropic' + COHERE = 'cohere' @dataclass class AnthropicConfig: @@ -482,78 +407,69 @@ class AnthropicConfig: """The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" - + anthropic_api_key_plaintext: Optional[str] = None """The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. 
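# A minimal sketch of an AmazonBedrockConfig that references AWS keys through
# Databricks secrets rather than the plaintext fields. The {{secrets/...}}
# scope and key names are placeholders.
from databricks.sdk.service.serving import (AmazonBedrockConfig,
                                            AmazonBedrockConfigBedrockProvider)

bedrock = AmazonBedrockConfig(
    aws_region="us-east-1",
    bedrock_provider=AmazonBedrockConfigBedrockProvider.ANTHROPIC,
    aws_access_key_id="{{secrets/my_scope/aws_access_key_id}}",
    aws_secret_access_key="{{secrets/my_scope/aws_secret_access_key}}")
assert bedrock.as_dict()["bedrock_provider"] == "anthropic"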
You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.anthropic_api_key is not None: - body["anthropic_api_key"] = self.anthropic_api_key - if self.anthropic_api_key_plaintext is not None: - body["anthropic_api_key_plaintext"] = self.anthropic_api_key_plaintext + if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key + if self.anthropic_api_key_plaintext is not None: body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the AnthropicConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.anthropic_api_key is not None: - body["anthropic_api_key"] = self.anthropic_api_key - if self.anthropic_api_key_plaintext is not None: - body["anthropic_api_key_plaintext"] = self.anthropic_api_key_plaintext + if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key + if self.anthropic_api_key_plaintext is not None: body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AnthropicConfig: """Deserializes the AnthropicConfig from a dictionary.""" - return cls( - anthropic_api_key=d.get("anthropic_api_key", None), - anthropic_api_key_plaintext=d.get("anthropic_api_key_plaintext", None), - ) + return cls(anthropic_api_key=d.get('anthropic_api_key', None), anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None)) + + @dataclass class ApiKeyAuth: key: str """The name of the API key parameter used for authentication.""" - + value: Optional[str] = None """The Databricks secret key reference for an API Key. If you prefer to paste your token directly, see `value_plaintext`.""" - + value_plaintext: Optional[str] = None """The API Key provided as a plaintext string. 
If you prefer to reference your token using Databricks Secrets, see `value`.""" - + def as_dict(self) -> dict: """Serializes the ApiKeyAuth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - if self.value_plaintext is not None: - body["value_plaintext"] = self.value_plaintext + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + if self.value_plaintext is not None: body['value_plaintext'] = self.value_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the ApiKeyAuth into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - if self.value_plaintext is not None: - body["value_plaintext"] = self.value_plaintext + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value + if self.value_plaintext is not None: body['value_plaintext'] = self.value_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApiKeyAuth: """Deserializes the ApiKeyAuth from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None), value_plaintext=d.get("value_plaintext", None)) + return cls(key=d.get('key', None), value=d.get('value', None), value_plaintext=d.get('value_plaintext', None)) + + @dataclass @@ -561,53 +477,42 @@ class AutoCaptureConfigInput: catalog_name: Optional[str] = None """The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.""" - + enabled: Optional[bool] = None """Indicates whether the inference table is enabled.""" - + schema_name: Optional[str] = None """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.""" - + table_name_prefix: Optional[str] = None """The prefix of the table in Unity Catalog. 
NOTE: On update, you cannot change the prefix name if the inference table is already enabled.""" - + def as_dict(self) -> dict: """Serializes the AutoCaptureConfigInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.enabled is not None: - body["enabled"] = self.enabled - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.table_name_prefix is not None: - body["table_name_prefix"] = self.table_name_prefix + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body def as_shallow_dict(self) -> dict: """Serializes the AutoCaptureConfigInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.enabled is not None: - body["enabled"] = self.enabled - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.table_name_prefix is not None: - body["table_name_prefix"] = self.table_name_prefix + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoCaptureConfigInput: """Deserializes the AutoCaptureConfigInput from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - enabled=d.get("enabled", None), - schema_name=d.get("schema_name", None), - table_name_prefix=d.get("table_name_prefix", None), - ) + return cls(catalog_name=d.get('catalog_name', None), enabled=d.get('enabled', None), schema_name=d.get('schema_name', None), table_name_prefix=d.get('table_name_prefix', None)) + + @dataclass @@ -615,84 +520,70 @@ class AutoCaptureConfigOutput: catalog_name: Optional[str] = None """The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.""" - + enabled: Optional[bool] = None """Indicates whether the inference table is enabled.""" - + schema_name: Optional[str] = None """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.""" - + state: Optional[AutoCaptureState] = None - + table_name_prefix: Optional[str] = None """The prefix of the table in Unity Catalog. 
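# A minimal sketch of an AutoCaptureConfigInput enabling an inference table
# under a placeholder Unity Catalog location. Per the docstrings above, the
# catalog and schema cannot change once the table is enabled.
from databricks.sdk.service.serving import AutoCaptureConfigInput

capture = AutoCaptureConfigInput(
    catalog_name="main",
    schema_name="serving_logs",
    table_name_prefix="chat_endpoint",
    enabled=True)
assert capture.as_dict()["enabled"] is True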
NOTE: On update, you cannot change the prefix name if the inference table is already enabled.""" - + def as_dict(self) -> dict: """Serializes the AutoCaptureConfigOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.enabled is not None: - body["enabled"] = self.enabled - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.state: - body["state"] = self.state.as_dict() - if self.table_name_prefix is not None: - body["table_name_prefix"] = self.table_name_prefix + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.state: body['state'] = self.state.as_dict() + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body def as_shallow_dict(self) -> dict: """Serializes the AutoCaptureConfigOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.enabled is not None: - body["enabled"] = self.enabled - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.state: - body["state"] = self.state - if self.table_name_prefix is not None: - body["table_name_prefix"] = self.table_name_prefix + if self.catalog_name is not None: body['catalog_name'] = self.catalog_name + if self.enabled is not None: body['enabled'] = self.enabled + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.state: body['state'] = self.state + if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoCaptureConfigOutput: """Deserializes the AutoCaptureConfigOutput from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - enabled=d.get("enabled", None), - schema_name=d.get("schema_name", None), - state=_from_dict(d, "state", AutoCaptureState), - table_name_prefix=d.get("table_name_prefix", None), - ) + return cls(catalog_name=d.get('catalog_name', None), enabled=d.get('enabled', None), schema_name=d.get('schema_name', None), state=_from_dict(d, 'state', AutoCaptureState), table_name_prefix=d.get('table_name_prefix', None)) + + @dataclass class AutoCaptureState: payload_table: Optional[PayloadTable] = None - + def as_dict(self) -> dict: """Serializes the AutoCaptureState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.payload_table: - body["payload_table"] = self.payload_table.as_dict() + if self.payload_table: body['payload_table'] = self.payload_table.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AutoCaptureState into a shallow dictionary of its immediate attributes.""" body = {} - if self.payload_table: - body["payload_table"] = self.payload_table + if self.payload_table: body['payload_table'] = self.payload_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoCaptureState: """Deserializes the AutoCaptureState from a dictionary.""" - return cls(payload_table=_from_dict(d, "payload_table", PayloadTable)) + return cls(payload_table=_from_dict(d, 'payload_table', PayloadTable)) + + @dataclass @@ -700,146 +591,136 @@ class BearerTokenAuth: token: Optional[str] = None """The Databricks secret key reference for a 
token. If you prefer to paste your token directly, see `token_plaintext`.""" - + token_plaintext: Optional[str] = None """The token provided as a plaintext string. If you prefer to reference your token using Databricks Secrets, see `token`.""" - + def as_dict(self) -> dict: """Serializes the BearerTokenAuth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token is not None: - body["token"] = self.token - if self.token_plaintext is not None: - body["token_plaintext"] = self.token_plaintext + if self.token is not None: body['token'] = self.token + if self.token_plaintext is not None: body['token_plaintext'] = self.token_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the BearerTokenAuth into a shallow dictionary of its immediate attributes.""" body = {} - if self.token is not None: - body["token"] = self.token - if self.token_plaintext is not None: - body["token_plaintext"] = self.token_plaintext + if self.token is not None: body['token'] = self.token + if self.token_plaintext is not None: body['token_plaintext'] = self.token_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BearerTokenAuth: """Deserializes the BearerTokenAuth from a dictionary.""" - return cls(token=d.get("token", None), token_plaintext=d.get("token_plaintext", None)) + return cls(token=d.get('token', None), token_plaintext=d.get('token_plaintext', None)) + + + + + @dataclass class BuildLogsResponse: logs: str """The logs associated with building the served entity's environment.""" - + def as_dict(self) -> dict: """Serializes the BuildLogsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.logs is not None: - body["logs"] = self.logs + if self.logs is not None: body['logs'] = self.logs return body def as_shallow_dict(self) -> dict: """Serializes the BuildLogsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.logs is not None: - body["logs"] = self.logs + if self.logs is not None: body['logs'] = self.logs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BuildLogsResponse: """Deserializes the BuildLogsResponse from a dictionary.""" - return cls(logs=d.get("logs", None)) + return cls(logs=d.get('logs', None)) + + @dataclass class ChatMessage: content: Optional[str] = None """The content of the message.""" - + role: Optional[ChatMessageRole] = None """The role of the message. 
One of [system, user, assistant].""" - + def as_dict(self) -> dict: """Serializes the ChatMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.role is not None: - body["role"] = self.role.value + if self.content is not None: body['content'] = self.content + if self.role is not None: body['role'] = self.role.value return body def as_shallow_dict(self) -> dict: """Serializes the ChatMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.role is not None: - body["role"] = self.role + if self.content is not None: body['content'] = self.content + if self.role is not None: body['role'] = self.role return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ChatMessage: """Deserializes the ChatMessage from a dictionary.""" - return cls(content=d.get("content", None), role=_enum(d, "role", ChatMessageRole)) + return cls(content=d.get('content', None), role=_enum(d, 'role', ChatMessageRole)) + + class ChatMessageRole(Enum): """The role of the message. One of [system, user, assistant].""" - - ASSISTANT = "assistant" - SYSTEM = "system" - USER = "user" - + + ASSISTANT = 'assistant' + SYSTEM = 'system' + USER = 'user' @dataclass class CohereConfig: cohere_api_base: Optional[str] = None """This is an optional field to provide a customized base URL for the Cohere API. If left unspecified, the standard Cohere base URL is used.""" - + cohere_api_key: Optional[str] = None """The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" - + cohere_api_key_plaintext: Optional[str] = None """The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. 
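# A minimal sketch of building a chat payload from the ChatMessage dataclass
# above; roles serialize to their enum values.
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

messages = [
    ChatMessage(role=ChatMessageRole.SYSTEM, content="You are a terse assistant."),
    ChatMessage(role=ChatMessageRole.USER, content="Summarize inference tables."),
]
payload = {"messages": [m.as_dict() for m in messages]}
assert payload["messages"][0]["role"] == "system"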
You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the CohereConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cohere_api_base is not None: - body["cohere_api_base"] = self.cohere_api_base - if self.cohere_api_key is not None: - body["cohere_api_key"] = self.cohere_api_key - if self.cohere_api_key_plaintext is not None: - body["cohere_api_key_plaintext"] = self.cohere_api_key_plaintext + if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base + if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key + if self.cohere_api_key_plaintext is not None: body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the CohereConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.cohere_api_base is not None: - body["cohere_api_base"] = self.cohere_api_base - if self.cohere_api_key is not None: - body["cohere_api_key"] = self.cohere_api_key - if self.cohere_api_key_plaintext is not None: - body["cohere_api_key_plaintext"] = self.cohere_api_key_plaintext + if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base + if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key + if self.cohere_api_key_plaintext is not None: body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CohereConfig: """Deserializes the CohereConfig from a dictionary.""" - return cls( - cohere_api_base=d.get("cohere_api_base", None), - cohere_api_key=d.get("cohere_api_key", None), - cohere_api_key_plaintext=d.get("cohere_api_key_plaintext", None), - ) + return cls(cohere_api_base=d.get('cohere_api_base', None), cohere_api_key=d.get('cohere_api_key', None), cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None)) + + @dataclass @@ -847,59 +728,45 @@ class CreatePtEndpointRequest: name: str """The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. 
An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - + config: PtEndpointCoreConfig """The core config of the serving endpoint.""" - + ai_gateway: Optional[AiGatewayConfig] = None """The AI Gateway configuration for the serving endpoint.""" - + budget_policy_id: Optional[str] = None """The budget policy associated with the endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - + def as_dict(self) -> dict: """Serializes the CreatePtEndpointRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config.as_dict() + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreatePtEndpointRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config + if self.name is not None: body['name'] = self.name + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePtEndpointRequest: """Deserializes the CreatePtEndpointRequest from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", PtEndpointCoreConfig), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", EndpointTag), - ) + return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', PtEndpointCoreConfig), name=d.get('name', None), tags=_repeated_dict(d, 'tags', EndpointTag)) + + @dataclass @@ -907,159 +774,129 @@ class CreateServingEndpoint: name: str """The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - + ai_gateway: Optional[AiGatewayConfig] = None """The AI Gateway configuration for the serving endpoint. 
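# A minimal sketch of a CreatePtEndpointRequest. PtEndpointCoreConfig and
# EndpointTag are defined elsewhere in this module; constructing the config
# empty here, and the tag key/value, are assumptions for illustration only.
from databricks.sdk.service.serving import (CreatePtEndpointRequest, EndpointTag,
                                            PtEndpointCoreConfig)

request = CreatePtEndpointRequest(
    name="my-pt-endpoint",
    config=PtEndpointCoreConfig(),
    tags=[EndpointTag(key="team", value="mlops")])
body = request.as_dict()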
NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.""" - + budget_policy_id: Optional[str] = None """The budget policy to be applied to the serving endpoint.""" - + config: Optional[EndpointCoreConfigInput] = None """The core config of the serving endpoint.""" - + rate_limits: Optional[List[RateLimit]] = None """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.""" - + route_optimized: Optional[bool] = None """Enable route optimization for the serving endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - + def as_dict(self) -> dict: """Serializes the CreateServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config.as_dict() + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + if self.route_optimized is not None: body['route_optimized'] = self.route_optimized + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.tags: - body["tags"] = self.tags + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.route_optimized is not None: body['route_optimized'] = self.route_optimized + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateServingEndpoint: """Deserializes the CreateServingEndpoint from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", EndpointCoreConfigInput), - name=d.get("name", None), - rate_limits=_repeated_dict(d, "rate_limits", RateLimit), - route_optimized=d.get("route_optimized", None), - tags=_repeated_dict(d, "tags", EndpointTag), - ) + return 
cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', EndpointCoreConfigInput), name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit), route_optimized=d.get('route_optimized', None), tags=_repeated_dict(d, 'tags', EndpointTag)) + + @dataclass class CustomProviderConfig: """Configs needed to create a custom provider model route.""" - + custom_provider_url: str """This is a field to provide the URL of the custom provider API.""" - + api_key_auth: Optional[ApiKeyAuth] = None """This is a field to provide API key authentication for the custom provider API. You can only specify one authentication method.""" - + bearer_token_auth: Optional[BearerTokenAuth] = None """This is a field to provide bearer token authentication for the custom provider API. You can only specify one authentication method.""" - + def as_dict(self) -> dict: """Serializes the CustomProviderConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.api_key_auth: - body["api_key_auth"] = self.api_key_auth.as_dict() - if self.bearer_token_auth: - body["bearer_token_auth"] = self.bearer_token_auth.as_dict() - if self.custom_provider_url is not None: - body["custom_provider_url"] = self.custom_provider_url + if self.api_key_auth: body['api_key_auth'] = self.api_key_auth.as_dict() + if self.bearer_token_auth: body['bearer_token_auth'] = self.bearer_token_auth.as_dict() + if self.custom_provider_url is not None: body['custom_provider_url'] = self.custom_provider_url return body def as_shallow_dict(self) -> dict: """Serializes the CustomProviderConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.api_key_auth: - body["api_key_auth"] = self.api_key_auth - if self.bearer_token_auth: - body["bearer_token_auth"] = self.bearer_token_auth - if self.custom_provider_url is not None: - body["custom_provider_url"] = self.custom_provider_url + if self.api_key_auth: body['api_key_auth'] = self.api_key_auth + if self.bearer_token_auth: body['bearer_token_auth'] = self.bearer_token_auth + if self.custom_provider_url is not None: body['custom_provider_url'] = self.custom_provider_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomProviderConfig: """Deserializes the CustomProviderConfig from a dictionary.""" - return cls( - api_key_auth=_from_dict(d, "api_key_auth", ApiKeyAuth), - bearer_token_auth=_from_dict(d, "bearer_token_auth", BearerTokenAuth), - custom_provider_url=d.get("custom_provider_url", None), - ) + return cls(api_key_auth=_from_dict(d, 'api_key_auth', ApiKeyAuth), bearer_token_auth=_from_dict(d, 'bearer_token_auth', BearerTokenAuth), custom_provider_url=d.get('custom_provider_url', None)) + + @dataclass class DataPlaneInfo: """Details necessary to query this object's API through the DataPlane APIs.""" - + authorization_details: Optional[str] = None """Authorization details as a string.""" - + endpoint_url: Optional[str] = None """The URL of the endpoint for this operation in the dataplane.""" - + def as_dict(self) -> dict: """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authorization_details is not None: - body["authorization_details"] = self.authorization_details - if self.endpoint_url is not None: - body["endpoint_url"] = self.endpoint_url + if self.authorization_details is not None: body['authorization_details'] = self.authorization_details + if self.endpoint_url is 
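# A minimal sketch of a CustomProviderConfig using the BearerTokenAuth
# dataclass above, with a placeholder URL and secret reference. Only one of
# api_key_auth / bearer_token_auth may be set.
from databricks.sdk.service.serving import BearerTokenAuth, CustomProviderConfig

provider = CustomProviderConfig(
    custom_provider_url="https://llm.example.com/v1",
    bearer_token_auth=BearerTokenAuth(token="{{secrets/my_scope/provider_token}}"))
assert "api_key_auth" not in provider.as_dict()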
not None: body['endpoint_url'] = self.endpoint_url return body def as_shallow_dict(self) -> dict: """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.authorization_details is not None: - body["authorization_details"] = self.authorization_details - if self.endpoint_url is not None: - body["endpoint_url"] = self.endpoint_url + if self.authorization_details is not None: body['authorization_details'] = self.authorization_details + if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataPlaneInfo: """Deserializes the DataPlaneInfo from a dictionary.""" - return cls(authorization_details=d.get("authorization_details", None), endpoint_url=d.get("endpoint_url", None)) + return cls(authorization_details=d.get('authorization_details', None), endpoint_url=d.get('endpoint_url', None)) + + @dataclass @@ -1067,87 +904,75 @@ class DatabricksModelServingConfig: databricks_workspace_url: str """The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.""" - + databricks_api_token: Optional[str] = None """The Databricks secret key reference for a Databricks API token that corresponds to a user or service principal with Can Query access to the model serving endpoint pointed to by this external model. If you prefer to paste your API key directly, see `databricks_api_token_plaintext`. You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.""" - + databricks_api_token_plaintext: Optional[str] = None """The Databricks API token that corresponds to a user or service principal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`. 
You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the DatabricksModelServingConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.databricks_api_token is not None: - body["databricks_api_token"] = self.databricks_api_token - if self.databricks_api_token_plaintext is not None: - body["databricks_api_token_plaintext"] = self.databricks_api_token_plaintext - if self.databricks_workspace_url is not None: - body["databricks_workspace_url"] = self.databricks_workspace_url + if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token + if self.databricks_api_token_plaintext is not None: body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext + if self.databricks_workspace_url is not None: body['databricks_workspace_url'] = self.databricks_workspace_url return body def as_shallow_dict(self) -> dict: """Serializes the DatabricksModelServingConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.databricks_api_token is not None: - body["databricks_api_token"] = self.databricks_api_token - if self.databricks_api_token_plaintext is not None: - body["databricks_api_token_plaintext"] = self.databricks_api_token_plaintext - if self.databricks_workspace_url is not None: - body["databricks_workspace_url"] = self.databricks_workspace_url + if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token + if self.databricks_api_token_plaintext is not None: body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext + if self.databricks_workspace_url is not None: body['databricks_workspace_url'] = self.databricks_workspace_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabricksModelServingConfig: """Deserializes the DatabricksModelServingConfig from a dictionary.""" - return cls( - databricks_api_token=d.get("databricks_api_token", None), - databricks_api_token_plaintext=d.get("databricks_api_token_plaintext", None), - databricks_workspace_url=d.get("databricks_workspace_url", None), - ) + return cls(databricks_api_token=d.get('databricks_api_token', None), databricks_api_token_plaintext=d.get('databricks_api_token_plaintext', None), databricks_workspace_url=d.get('databricks_workspace_url', None)) + + @dataclass class DataframeSplitInput: columns: Optional[List[Any]] = None - + data: Optional[List[Any]] = None - + index: Optional[List[int]] = None - + def as_dict(self) -> dict: """Serializes the DataframeSplitInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v for v in self.columns] - if self.data: - body["data"] = [v for v in self.data] - if self.index: - body["index"] = [v for v in self.index] + if self.columns: body['columns'] = [v for v in self.columns] + if self.data: body['data'] = [v for v in self.data] + if self.index: body['index'] = [v for v in self.index] return body def as_shallow_dict(self) -> dict: """Serializes the DataframeSplitInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns - if self.data: - body["data"] = self.data - if self.index: - body["index"] = self.index + if self.columns: body['columns'] = self.columns + if self.data: body['data'] = self.data + if self.index: body['index'] = self.index return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> DataframeSplitInput: """Deserializes the DataframeSplitInput from a dictionary.""" - return cls(columns=d.get("columns", None), data=d.get("data", None), index=d.get("index", None)) + return cls(columns=d.get('columns', None), data=d.get('data', None), index=d.get('index', None)) + + @dataclass @@ -1166,55 +991,51 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + @dataclass class EmbeddingsV1ResponseEmbeddingElement: embedding: Optional[List[float]] = None - + index: Optional[int] = None """The index of the embedding in the response.""" - + object: Optional[EmbeddingsV1ResponseEmbeddingElementObject] = None """This will always be 'embedding'.""" - + def as_dict(self) -> dict: """Serializes the EmbeddingsV1ResponseEmbeddingElement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding: - body["embedding"] = [v for v in self.embedding] - if self.index is not None: - body["index"] = self.index - if self.object is not None: - body["object"] = self.object.value + if self.embedding: body['embedding'] = [v for v in self.embedding] + if self.index is not None: body['index'] = self.index + if self.object is not None: body['object'] = self.object.value return body def as_shallow_dict(self) -> dict: """Serializes the EmbeddingsV1ResponseEmbeddingElement into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding: - body["embedding"] = self.embedding - if self.index is not None: - body["index"] = self.index - if self.object is not None: - body["object"] = self.object + if self.embedding: body['embedding'] = self.embedding + if self.index is not None: body['index'] = self.index + if self.object is not None: body['object'] = self.object return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingsV1ResponseEmbeddingElement: """Deserializes the EmbeddingsV1ResponseEmbeddingElement from a dictionary.""" - return cls( - embedding=d.get("embedding", None), - index=d.get("index", None), - object=_enum(d, "object", EmbeddingsV1ResponseEmbeddingElementObject), - ) + return cls(embedding=d.get('embedding', None), index=d.get('index', None), object=_enum(d, 'object', EmbeddingsV1ResponseEmbeddingElementObject)) + + class EmbeddingsV1ResponseEmbeddingElementObject(Enum): """This will always be 'embedding'.""" - - EMBEDDING = "embedding" - + + EMBEDDING = 'embedding' @dataclass class EndpointCoreConfigInput: @@ -1223,60 +1044,46 @@ class EndpointCoreConfigInput: Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - + name: Optional[str] = None """The name of the serving endpoint to update. 
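# A minimal sketch of a split-orient scoring payload built from the
# DataframeSplitInput dataclass above; the layout mirrors pandas'
# DataFrame.to_dict(orient="split").
from databricks.sdk.service.serving import DataframeSplitInput

frame = DataframeSplitInput(
    columns=["feature_a", "feature_b"],
    index=[0, 1],
    data=[[1.0, 2.0], [3.0, 4.0]])
assert frame.as_dict()["columns"] == ["feature_a", "feature_b"]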
This field is required.""" - + served_entities: Optional[List[ServedEntityInput]] = None """The list of served entities under the serving endpoint config.""" - + served_models: Optional[List[ServedModelInput]] = None """(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.""" - + traffic_config: Optional[TrafficConfig] = None """The traffic configuration associated with the serving endpoint config.""" - + def as_dict(self) -> dict: """Serializes the EndpointCoreConfigInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_capture_config: - body["auto_capture_config"] = self.auto_capture_config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.served_entities: - body["served_entities"] = [v.as_dict() for v in self.served_entities] - if self.served_models: - body["served_models"] = [v.as_dict() for v in self.served_models] - if self.traffic_config: - body["traffic_config"] = self.traffic_config.as_dict() + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() + if self.name is not None: body['name'] = self.name + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] + if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] + if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EndpointCoreConfigInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_capture_config: - body["auto_capture_config"] = self.auto_capture_config - if self.name is not None: - body["name"] = self.name - if self.served_entities: - body["served_entities"] = self.served_entities - if self.served_models: - body["served_models"] = self.served_models - if self.traffic_config: - body["traffic_config"] = self.traffic_config + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config + if self.name is not None: body['name'] = self.name + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models + if self.traffic_config: body['traffic_config'] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointCoreConfigInput: """Deserializes the EndpointCoreConfigInput from a dictionary.""" - return cls( - auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigInput), - name=d.get("name", None), - served_entities=_repeated_dict(d, "served_entities", ServedEntityInput), - served_models=_repeated_dict(d, "served_models", ServedModelInput), - traffic_config=_from_dict(d, "traffic_config", TrafficConfig), - ) + return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigInput), name=d.get('name', None), served_entities=_repeated_dict(d, 'served_entities', ServedEntityInput), served_models=_repeated_dict(d, 'served_models', ServedModelInput), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) + + @dataclass @@ -1286,96 +1093,77 @@ class EndpointCoreConfigOutput: Catalog. 
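As a usage sketch for the EndpointCoreConfigInput type completed above, the following round-trips a config through the as_dict/from_dict serializers defined in this patch; the endpoint name and the ServedEntityInput fields (entity_name, entity_version, scale_to_zero_enabled) are illustrative assumptions, not values taken from this patch.

    config = EndpointCoreConfigInput(
        name="my-endpoint",  # hypothetical endpoint name
        served_entities=[
            ServedEntityInput(
                entity_name="main.default.my_model",  # assumed catalog.schema.model path
                entity_version="1",
                scale_to_zero_enabled=True,
            )
        ],
    )
    body = config.as_dict()  # nested dataclasses are serialized via their own as_dict
    assert EndpointCoreConfigInput.from_dict(body).name == config.name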
Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - + config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" - + served_entities: Optional[List[ServedEntityOutput]] = None """The list of served entities under the serving endpoint config.""" - + served_models: Optional[List[ServedModelOutput]] = None """(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.""" - + traffic_config: Optional[TrafficConfig] = None """The traffic configuration associated with the serving endpoint config.""" - + def as_dict(self) -> dict: """Serializes the EndpointCoreConfigOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_capture_config: - body["auto_capture_config"] = self.auto_capture_config.as_dict() - if self.config_version is not None: - body["config_version"] = self.config_version - if self.served_entities: - body["served_entities"] = [v.as_dict() for v in self.served_entities] - if self.served_models: - body["served_models"] = [v.as_dict() for v in self.served_models] - if self.traffic_config: - body["traffic_config"] = self.traffic_config.as_dict() + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() + if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] + if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] + if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EndpointCoreConfigOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_capture_config: - body["auto_capture_config"] = self.auto_capture_config - if self.config_version is not None: - body["config_version"] = self.config_version - if self.served_entities: - body["served_entities"] = self.served_entities - if self.served_models: - body["served_models"] = self.served_models - if self.traffic_config: - body["traffic_config"] = self.traffic_config + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config + if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models + if self.traffic_config: body['traffic_config'] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointCoreConfigOutput: """Deserializes the EndpointCoreConfigOutput from a dictionary.""" - return cls( - auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigOutput), - config_version=d.get("config_version", None), - served_entities=_repeated_dict(d, "served_entities", ServedEntityOutput), - served_models=_repeated_dict(d, "served_models", ServedModelOutput), - traffic_config=_from_dict(d, "traffic_config", TrafficConfig), - ) + return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput), config_version=d.get('config_version', None), served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput), served_models=_repeated_dict(d, 
'served_models', ServedModelOutput), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) + + @dataclass class EndpointCoreConfigSummary: served_entities: Optional[List[ServedEntitySpec]] = None """The list of served entities under the serving endpoint config.""" - + served_models: Optional[List[ServedModelSpec]] = None """(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.""" - + def as_dict(self) -> dict: """Serializes the EndpointCoreConfigSummary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.served_entities: - body["served_entities"] = [v.as_dict() for v in self.served_entities] - if self.served_models: - body["served_models"] = [v.as_dict() for v in self.served_models] + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] + if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] return body def as_shallow_dict(self) -> dict: """Serializes the EndpointCoreConfigSummary into a shallow dictionary of its immediate attributes.""" body = {} - if self.served_entities: - body["served_entities"] = self.served_entities - if self.served_models: - body["served_models"] = self.served_models + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointCoreConfigSummary: """Deserializes the EndpointCoreConfigSummary from a dictionary.""" - return cls( - served_entities=_repeated_dict(d, "served_entities", ServedEntitySpec), - served_models=_repeated_dict(d, "served_models", ServedModelSpec), - ) + return cls(served_entities=_repeated_dict(d, 'served_entities', ServedEntitySpec), served_models=_repeated_dict(d, 'served_models', ServedModelSpec)) + + @dataclass @@ -1385,68 +1173,51 @@ class EndpointPendingConfig: Catalog. 
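On the read path, an API response body can be lifted back into EndpointCoreConfigOutput with from_dict, as the classes above define; the payload below is a hypothetical fragment using only fields shown in this patch.

    payload = {"config_version": 2}  # hypothetical response fragment
    out = EndpointCoreConfigOutput.from_dict(payload)
    assert out.config_version == 2
    assert out.auto_capture_config is None  # absent keys deserialize to None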
Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - + config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" - + served_entities: Optional[List[ServedEntityOutput]] = None """The list of served entities belonging to the last issued update to the serving endpoint.""" - + served_models: Optional[List[ServedModelOutput]] = None """(Deprecated, use served_entities instead) The list of served models belonging to the last issued update to the serving endpoint.""" - + start_time: Optional[int] = None """The timestamp when the update to the pending config started.""" - + traffic_config: Optional[TrafficConfig] = None """The traffic config defining how invocations to the serving endpoint should be routed.""" - + def as_dict(self) -> dict: """Serializes the EndpointPendingConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_capture_config: - body["auto_capture_config"] = self.auto_capture_config.as_dict() - if self.config_version is not None: - body["config_version"] = self.config_version - if self.served_entities: - body["served_entities"] = [v.as_dict() for v in self.served_entities] - if self.served_models: - body["served_models"] = [v.as_dict() for v in self.served_models] - if self.start_time is not None: - body["start_time"] = self.start_time - if self.traffic_config: - body["traffic_config"] = self.traffic_config.as_dict() + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() + if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] + if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] + if self.start_time is not None: body['start_time'] = self.start_time + if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EndpointPendingConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_capture_config: - body["auto_capture_config"] = self.auto_capture_config - if self.config_version is not None: - body["config_version"] = self.config_version - if self.served_entities: - body["served_entities"] = self.served_entities - if self.served_models: - body["served_models"] = self.served_models - if self.start_time is not None: - body["start_time"] = self.start_time - if self.traffic_config: - body["traffic_config"] = self.traffic_config + if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config + if self.config_version is not None: body['config_version'] = self.config_version + if self.served_entities: body['served_entities'] = self.served_entities + if self.served_models: body['served_models'] = self.served_models + if self.start_time is not None: body['start_time'] = self.start_time + if self.traffic_config: body['traffic_config'] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointPendingConfig: """Deserializes the EndpointPendingConfig from a dictionary.""" - return cls( - auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigOutput), - config_version=d.get("config_version", None), - 
served_entities=_repeated_dict(d, "served_entities", ServedEntityOutput), - served_models=_repeated_dict(d, "served_models", ServedModelOutput), - start_time=d.get("start_time", None), - traffic_config=_from_dict(d, "traffic_config", TrafficConfig), - ) + return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput), config_version=d.get('config_version', None), served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput), served_models=_repeated_dict(d, 'served_models', ServedModelOutput), start_time=d.get('start_time', None), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) + + @dataclass @@ -1456,211 +1227,190 @@ class EndpointState: progress, if the update failed, or if there is no update in progress. Note that if the endpoint's config_update state value is IN_PROGRESS, another update can not be made until the update completes or fails.""" - + ready: Optional[EndpointStateReady] = None """The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is READY if all of the served entities in its active configuration are ready. If any of the actively served entities are in a non-ready state, the endpoint state will be NOT_READY.""" - + def as_dict(self) -> dict: """Serializes the EndpointState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config_update is not None: - body["config_update"] = self.config_update.value - if self.ready is not None: - body["ready"] = self.ready.value + if self.config_update is not None: body['config_update'] = self.config_update.value + if self.ready is not None: body['ready'] = self.ready.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointState into a shallow dictionary of its immediate attributes.""" body = {} - if self.config_update is not None: - body["config_update"] = self.config_update - if self.ready is not None: - body["ready"] = self.ready + if self.config_update is not None: body['config_update'] = self.config_update + if self.ready is not None: body['ready'] = self.ready return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointState: """Deserializes the EndpointState from a dictionary.""" - return cls( - config_update=_enum(d, "config_update", EndpointStateConfigUpdate), - ready=_enum(d, "ready", EndpointStateReady), - ) - + return cls(config_update=_enum(d, 'config_update', EndpointStateConfigUpdate), ready=_enum(d, 'ready', EndpointStateReady)) + -class EndpointStateConfigUpdate(Enum): - IN_PROGRESS = "IN_PROGRESS" - NOT_UPDATING = "NOT_UPDATING" - UPDATE_CANCELED = "UPDATE_CANCELED" - UPDATE_FAILED = "UPDATE_FAILED" +class EndpointStateConfigUpdate(Enum): + + + IN_PROGRESS = 'IN_PROGRESS' + NOT_UPDATING = 'NOT_UPDATING' + UPDATE_CANCELED = 'UPDATE_CANCELED' + UPDATE_FAILED = 'UPDATE_FAILED' class EndpointStateReady(Enum): - - NOT_READY = "NOT_READY" - READY = "READY" - + + + NOT_READY = 'NOT_READY' + READY = 'READY' @dataclass class EndpointTag: key: str """Key field for a serving endpoint tag.""" - + value: Optional[str] = None """Optional value field for a serving endpoint tag.""" - + def as_dict(self) -> dict: """Serializes the EndpointTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: 
"""Serializes the EndpointTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTag: """Deserializes the EndpointTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class EndpointTags: tags: Optional[List[EndpointTag]] = None - + def as_dict(self) -> dict: """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body.""" body = {} - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the EndpointTags into a shallow dictionary of its immediate attributes.""" body = {} - if self.tags: - body["tags"] = self.tags + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTags: """Deserializes the EndpointTags from a dictionary.""" - return cls(tags=_repeated_dict(d, "tags", EndpointTag)) + return cls(tags=_repeated_dict(d, 'tags', EndpointTag)) + + + + + @dataclass class ExportMetricsResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the ExportMetricsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the ExportMetricsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExportMetricsResponse: """Deserializes the ExportMetricsResponse from a dictionary.""" - return cls(contents=d.get("contents", None)) + return cls(contents=d.get('contents', None)) + + @dataclass class ExternalFunctionRequest: """Simple Proto message for testing""" - + connection_name: str """The connection name to use. This is required to identify the external connection.""" - + method: ExternalFunctionRequestHttpMethod """The HTTP method to use (e.g., 'GET', 'POST').""" - + path: str """The relative path for the API endpoint. This is required.""" - + headers: Optional[str] = None """Additional headers for the request. 
If not provided, only auth headers from connections would be passed.""" - + json: Optional[str] = None """The JSON payload to send in the request body.""" - + params: Optional[str] = None """Query parameters for the request.""" - + def as_dict(self) -> dict: """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.headers is not None: - body["headers"] = self.headers - if self.json is not None: - body["json"] = self.json - if self.method is not None: - body["method"] = self.method.value - if self.params is not None: - body["params"] = self.params - if self.path is not None: - body["path"] = self.path + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.headers is not None: body['headers'] = self.headers + if self.json is not None: body['json'] = self.json + if self.method is not None: body['method'] = self.method.value + if self.params is not None: body['params'] = self.params + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.headers is not None: - body["headers"] = self.headers - if self.json is not None: - body["json"] = self.json - if self.method is not None: - body["method"] = self.method - if self.params is not None: - body["params"] = self.params - if self.path is not None: - body["path"] = self.path + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.headers is not None: body['headers'] = self.headers + if self.json is not None: body['json'] = self.json + if self.method is not None: body['method'] = self.method + if self.params is not None: body['params'] = self.params + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalFunctionRequest: """Deserializes the ExternalFunctionRequest from a dictionary.""" - return cls( - connection_name=d.get("connection_name", None), - headers=d.get("headers", None), - json=d.get("json", None), - method=_enum(d, "method", ExternalFunctionRequestHttpMethod), - params=d.get("params", None), - path=d.get("path", None), - ) - + return cls(connection_name=d.get('connection_name', None), headers=d.get('headers', None), json=d.get('json', None), method=_enum(d, 'method', ExternalFunctionRequestHttpMethod), params=d.get('params', None), path=d.get('path', None)) + -class ExternalFunctionRequestHttpMethod(Enum): - DELETE = "DELETE" - GET = "GET" - PATCH = "PATCH" - POST = "POST" - PUT = "PUT" +class ExternalFunctionRequestHttpMethod(Enum): + + + DELETE = 'DELETE' + GET = 'GET' + PATCH = 'PATCH' + POST = 'POST' + PUT = 'PUT' @dataclass class ExternalModel: @@ -1668,173 +1418,128 @@ class ExternalModel: """The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.""" - + name: str """The name of the external model.""" - + task: str """The task type of the external model.""" - + ai21labs_config: Optional[Ai21LabsConfig] = None """AI21Labs Config. 
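Because ExternalFunctionRequest carries its headers, json, and params fields as strings, callers are expected to JSON-encode them. A hedged sketch follows; the connection name and path are hypothetical, and the stdlib json module is aliased to avoid shadowing the dataclass field of the same name.

    import json as _json

    req = ExternalFunctionRequest(
        connection_name="my_uc_connection",  # hypothetical UC connection name
        method=ExternalFunctionRequestHttpMethod.POST,
        path="/v1/items",  # hypothetical relative path
        json=_json.dumps({"name": "widget"}),
    )
    assert req.as_dict()["method"] == "POST"  # enum fields serialize via .value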
Only required if the provider is 'ai21labs'.""" - + amazon_bedrock_config: Optional[AmazonBedrockConfig] = None """Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.""" - + anthropic_config: Optional[AnthropicConfig] = None """Anthropic Config. Only required if the provider is 'anthropic'.""" - + cohere_config: Optional[CohereConfig] = None """Cohere Config. Only required if the provider is 'cohere'.""" - + custom_provider_config: Optional[CustomProviderConfig] = None """Custom Provider Config. Only required if the provider is 'custom'.""" - + databricks_model_serving_config: Optional[DatabricksModelServingConfig] = None """Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.""" - + google_cloud_vertex_ai_config: Optional[GoogleCloudVertexAiConfig] = None """Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.""" - + openai_config: Optional[OpenAiConfig] = None """OpenAI Config. Only required if the provider is 'openai'.""" - + palm_config: Optional[PaLmConfig] = None """PaLM Config. Only required if the provider is 'palm'.""" - + def as_dict(self) -> dict: """Serializes the ExternalModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai21labs_config: - body["ai21labs_config"] = self.ai21labs_config.as_dict() - if self.amazon_bedrock_config: - body["amazon_bedrock_config"] = self.amazon_bedrock_config.as_dict() - if self.anthropic_config: - body["anthropic_config"] = self.anthropic_config.as_dict() - if self.cohere_config: - body["cohere_config"] = self.cohere_config.as_dict() - if self.custom_provider_config: - body["custom_provider_config"] = self.custom_provider_config.as_dict() - if self.databricks_model_serving_config: - body["databricks_model_serving_config"] = self.databricks_model_serving_config.as_dict() - if self.google_cloud_vertex_ai_config: - body["google_cloud_vertex_ai_config"] = self.google_cloud_vertex_ai_config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.openai_config: - body["openai_config"] = self.openai_config.as_dict() - if self.palm_config: - body["palm_config"] = self.palm_config.as_dict() - if self.provider is not None: - body["provider"] = self.provider.value - if self.task is not None: - body["task"] = self.task + if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config.as_dict() + if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config.as_dict() + if self.anthropic_config: body['anthropic_config'] = self.anthropic_config.as_dict() + if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict() + if self.custom_provider_config: body['custom_provider_config'] = self.custom_provider_config.as_dict() + if self.databricks_model_serving_config: body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict() + if self.google_cloud_vertex_ai_config: body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config.as_dict() + if self.name is not None: body['name'] = self.name + if self.openai_config: body['openai_config'] = self.openai_config.as_dict() + if self.palm_config: body['palm_config'] = self.palm_config.as_dict() + if self.provider is not None: body['provider'] = self.provider.value + if self.task is not None: body['task'] = self.task return body def as_shallow_dict(self) -> dict: """Serializes the ExternalModel into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.ai21labs_config: - body["ai21labs_config"] = self.ai21labs_config - if self.amazon_bedrock_config: - body["amazon_bedrock_config"] = self.amazon_bedrock_config - if self.anthropic_config: - body["anthropic_config"] = self.anthropic_config - if self.cohere_config: - body["cohere_config"] = self.cohere_config - if self.custom_provider_config: - body["custom_provider_config"] = self.custom_provider_config - if self.databricks_model_serving_config: - body["databricks_model_serving_config"] = self.databricks_model_serving_config - if self.google_cloud_vertex_ai_config: - body["google_cloud_vertex_ai_config"] = self.google_cloud_vertex_ai_config - if self.name is not None: - body["name"] = self.name - if self.openai_config: - body["openai_config"] = self.openai_config - if self.palm_config: - body["palm_config"] = self.palm_config - if self.provider is not None: - body["provider"] = self.provider - if self.task is not None: - body["task"] = self.task + if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config + if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config + if self.anthropic_config: body['anthropic_config'] = self.anthropic_config + if self.cohere_config: body['cohere_config'] = self.cohere_config + if self.custom_provider_config: body['custom_provider_config'] = self.custom_provider_config + if self.databricks_model_serving_config: body['databricks_model_serving_config'] = self.databricks_model_serving_config + if self.google_cloud_vertex_ai_config: body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config + if self.name is not None: body['name'] = self.name + if self.openai_config: body['openai_config'] = self.openai_config + if self.palm_config: body['palm_config'] = self.palm_config + if self.provider is not None: body['provider'] = self.provider + if self.task is not None: body['task'] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalModel: """Deserializes the ExternalModel from a dictionary.""" - return cls( - ai21labs_config=_from_dict(d, "ai21labs_config", Ai21LabsConfig), - amazon_bedrock_config=_from_dict(d, "amazon_bedrock_config", AmazonBedrockConfig), - anthropic_config=_from_dict(d, "anthropic_config", AnthropicConfig), - cohere_config=_from_dict(d, "cohere_config", CohereConfig), - custom_provider_config=_from_dict(d, "custom_provider_config", CustomProviderConfig), - databricks_model_serving_config=_from_dict( - d, "databricks_model_serving_config", DatabricksModelServingConfig - ), - google_cloud_vertex_ai_config=_from_dict(d, "google_cloud_vertex_ai_config", GoogleCloudVertexAiConfig), - name=d.get("name", None), - openai_config=_from_dict(d, "openai_config", OpenAiConfig), - palm_config=_from_dict(d, "palm_config", PaLmConfig), - provider=_enum(d, "provider", ExternalModelProvider), - task=d.get("task", None), - ) - + return cls(ai21labs_config=_from_dict(d, 'ai21labs_config', Ai21LabsConfig), amazon_bedrock_config=_from_dict(d, 'amazon_bedrock_config', AmazonBedrockConfig), anthropic_config=_from_dict(d, 'anthropic_config', AnthropicConfig), cohere_config=_from_dict(d, 'cohere_config', CohereConfig), custom_provider_config=_from_dict(d, 'custom_provider_config', CustomProviderConfig), databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config', DatabricksModelServingConfig), google_cloud_vertex_ai_config=_from_dict(d, 'google_cloud_vertex_ai_config', GoogleCloudVertexAiConfig), name=d.get('name', None), openai_config=_from_dict(d, 
'openai_config', OpenAiConfig), palm_config=_from_dict(d, 'palm_config', PaLmConfig), provider=_enum(d, 'provider', ExternalModelProvider), task=d.get('task', None)) + -class ExternalModelProvider(Enum): - AI21LABS = "ai21labs" - AMAZON_BEDROCK = "amazon-bedrock" - ANTHROPIC = "anthropic" - COHERE = "cohere" - CUSTOM = "custom" - DATABRICKS_MODEL_SERVING = "databricks-model-serving" - GOOGLE_CLOUD_VERTEX_AI = "google-cloud-vertex-ai" - OPENAI = "openai" - PALM = "palm" +class ExternalModelProvider(Enum): + + + AI21LABS = 'ai21labs' + AMAZON_BEDROCK = 'amazon-bedrock' + ANTHROPIC = 'anthropic' + COHERE = 'cohere' + CUSTOM = 'custom' + DATABRICKS_MODEL_SERVING = 'databricks-model-serving' + GOOGLE_CLOUD_VERTEX_AI = 'google-cloud-vertex-ai' + OPENAI = 'openai' + PALM = 'palm' @dataclass class ExternalModelUsageElement: completion_tokens: Optional[int] = None """The number of tokens in the chat/completions response.""" - + prompt_tokens: Optional[int] = None """The number of tokens in the prompt.""" - + total_tokens: Optional[int] = None """The total number of tokens in the prompt and response.""" - + def as_dict(self) -> dict: """Serializes the ExternalModelUsageElement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.completion_tokens is not None: - body["completion_tokens"] = self.completion_tokens - if self.prompt_tokens is not None: - body["prompt_tokens"] = self.prompt_tokens - if self.total_tokens is not None: - body["total_tokens"] = self.total_tokens + if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens + if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens + if self.total_tokens is not None: body['total_tokens'] = self.total_tokens return body def as_shallow_dict(self) -> dict: """Serializes the ExternalModelUsageElement into a shallow dictionary of its immediate attributes.""" body = {} - if self.completion_tokens is not None: - body["completion_tokens"] = self.completion_tokens - if self.prompt_tokens is not None: - body["prompt_tokens"] = self.prompt_tokens - if self.total_tokens is not None: - body["total_tokens"] = self.total_tokens + if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens + if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens + if self.total_tokens is not None: body['total_tokens'] = self.total_tokens return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalModelUsageElement: """Deserializes the ExternalModelUsageElement from a dictionary.""" - return cls( - completion_tokens=d.get("completion_tokens", None), - prompt_tokens=d.get("prompt_tokens", None), - total_tokens=d.get("total_tokens", None), - ) + return cls(completion_tokens=d.get('completion_tokens', None), prompt_tokens=d.get('prompt_tokens', None), total_tokens=d.get('total_tokens', None)) + + @dataclass @@ -1845,137 +1550,138 @@ class FallbackConfig: other served entities in the same endpoint, following the order of served entity list, until a successful response is returned. 
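Tying the external-model pieces together, a sketch of an ExternalModel definition: the provider enum value must correspond to the config block that is populated (here OpenAI; OpenAiConfig is defined later in this file). The model name, task string, and secret reference are assumptions for illustration only.

    ext = ExternalModel(
        provider=ExternalModelProvider.OPENAI,
        name="gpt-4o",  # hypothetical external model name
        task="llm/v1/chat",  # assumed task type
        openai_config=OpenAiConfig(
            openai_api_key="{{secrets/my_scope/openai_key}}",  # hypothetical secret reference
        ),
    )
    assert ext.as_dict()["provider"] == "openai"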
If all attempts fail, return the last response with the error code.""" - + def as_dict(self) -> dict: """Serializes the FallbackConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.enabled is not None: body['enabled'] = self.enabled return body def as_shallow_dict(self) -> dict: """Serializes the FallbackConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled + if self.enabled is not None: body['enabled'] = self.enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FallbackConfig: """Deserializes the FallbackConfig from a dictionary.""" - return cls(enabled=d.get("enabled", None)) + return cls(enabled=d.get('enabled', None)) + + @dataclass class FoundationModel: """All fields are not sensitive as they are hard-coded in the system and made available to customers.""" - + description: Optional[str] = None - + display_name: Optional[str] = None - + docs: Optional[str] = None - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the FoundationModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.docs is not None: - body["docs"] = self.docs - if self.name is not None: - body["name"] = self.name + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.docs is not None: body['docs'] = self.docs + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the FoundationModel into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.docs is not None: - body["docs"] = self.docs - if self.name is not None: - body["name"] = self.name + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.docs is not None: body['docs'] = self.docs + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FoundationModel: """Deserializes the FoundationModel from a dictionary.""" - return cls( - description=d.get("description", None), - display_name=d.get("display_name", None), - docs=d.get("docs", None), - name=d.get("name", None), - ) + return cls(description=d.get('description', None), display_name=d.get('display_name', None), docs=d.get('docs', None), name=d.get('name', None)) + + + + + @dataclass class GetOpenApiResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the GetOpenApiResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the GetOpenApiResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
GetOpenApiResponse: """Deserializes the GetOpenApiResponse from a dictionary.""" - return cls(contents=d.get("contents", None)) + return cls(contents=d.get('contents', None)) + + + + + @dataclass class GetServingEndpointPermissionLevelsResponse: permission_levels: Optional[List[ServingEndpointPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetServingEndpointPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetServingEndpointPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetServingEndpointPermissionLevelsResponse: """Deserializes the GetServingEndpointPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", ServingEndpointPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', ServingEndpointPermissionsDescription)) + + + + + + + + @dataclass class GoogleCloudVertexAiConfig: project_id: str """This is the Google Cloud project id that the service account is associated with.""" - + region: str """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more details. Some models are only available in specific regions. [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations""" - + private_key: Optional[str] = None """The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys]. @@ -1984,7 +1690,7 @@ class GoogleCloudVertexAiConfig: [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" - + private_key_plaintext: Optional[str] = None """The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys]. 
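As a minimal illustration of the fallback switch described above, FallbackConfig reduces to a single enabled flag and round-trips losslessly through its serializers:

    fb = FallbackConfig(enabled=True)
    assert fb.as_dict() == {"enabled": True}
    assert FallbackConfig.from_dict({"enabled": False}).enabled is False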
If you @@ -1993,246 +1699,206 @@ class GoogleCloudVertexAiConfig: [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" - + def as_dict(self) -> dict: """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.private_key is not None: - body["private_key"] = self.private_key - if self.private_key_plaintext is not None: - body["private_key_plaintext"] = self.private_key_plaintext - if self.project_id is not None: - body["project_id"] = self.project_id - if self.region is not None: - body["region"] = self.region + if self.private_key is not None: body['private_key'] = self.private_key + if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext + if self.project_id is not None: body['project_id'] = self.project_id + if self.region is not None: body['region'] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the GoogleCloudVertexAiConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.private_key is not None: - body["private_key"] = self.private_key - if self.private_key_plaintext is not None: - body["private_key_plaintext"] = self.private_key_plaintext - if self.project_id is not None: - body["project_id"] = self.project_id - if self.region is not None: - body["region"] = self.region + if self.private_key is not None: body['private_key'] = self.private_key + if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext + if self.project_id is not None: body['project_id'] = self.project_id + if self.region is not None: body['region'] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GoogleCloudVertexAiConfig: """Deserializes the GoogleCloudVertexAiConfig from a dictionary.""" - return cls( - private_key=d.get("private_key", None), - private_key_plaintext=d.get("private_key_plaintext", None), - project_id=d.get("project_id", None), - region=d.get("region", None), - ) + return cls(private_key=d.get('private_key', None), private_key_plaintext=d.get('private_key_plaintext', None), project_id=d.get('project_id', None), region=d.get('region', None)) + + @dataclass class HttpRequestResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the HttpRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the HttpRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: - body["contents"] = self.contents + if self.contents: body['contents'] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> HttpRequestResponse: """Deserializes the HttpRequestResponse from a dictionary.""" - return cls(contents=d.get("contents", None)) + return cls(contents=d.get('contents', None)) + + @dataclass class ListEndpointsResponse: endpoints: Optional[List[ServingEndpoint]] = None """The list of endpoints.""" - + def as_dict(self) -> dict: """Serializes the ListEndpointsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoints: - body["endpoints"] = [v.as_dict() for v in self.endpoints] + if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints] 
return body def as_shallow_dict(self) -> dict: """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoints: - body["endpoints"] = self.endpoints + if self.endpoints: body['endpoints'] = self.endpoints return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListEndpointsResponse: """Deserializes the ListEndpointsResponse from a dictionary.""" - return cls(endpoints=_repeated_dict(d, "endpoints", ServingEndpoint)) + return cls(endpoints=_repeated_dict(d, 'endpoints', ServingEndpoint)) + + + + + @dataclass class ModelDataPlaneInfo: """A representation of all DataPlaneInfo for operations that can be done on a model through Data Plane APIs.""" - + query_info: Optional[DataPlaneInfo] = None """Information required to query DataPlane API 'query' endpoint.""" - + def as_dict(self) -> dict: """Serializes the ModelDataPlaneInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.query_info: - body["query_info"] = self.query_info.as_dict() + if self.query_info: body['query_info'] = self.query_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ModelDataPlaneInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.query_info: - body["query_info"] = self.query_info + if self.query_info: body['query_info'] = self.query_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelDataPlaneInfo: """Deserializes the ModelDataPlaneInfo from a dictionary.""" - return cls(query_info=_from_dict(d, "query_info", DataPlaneInfo)) + return cls(query_info=_from_dict(d, 'query_info', DataPlaneInfo)) + + @dataclass class OpenAiConfig: """Configs needed to create an OpenAI model route.""" - + microsoft_entra_client_id: Optional[str] = None """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.""" - + microsoft_entra_client_secret: Optional[str] = None """The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication. If you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`. You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.""" - + microsoft_entra_client_secret_plaintext: Optional[str] = None """The client secret used for Microsoft Entra ID authentication provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`. You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.""" - + microsoft_entra_tenant_id: Optional[str] = None """This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.""" - + openai_api_base: Optional[str] = None """This is a field to provide a customized base URL for the OpenAI API. For Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service provided by Azure. For other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.""" - + openai_api_key: Optional[str] = None """The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`.
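For the Vertex AI route completed above, a hedged construction sketch; the project id, region, and secret reference are placeholders, and unset optional fields are omitted from the request body.

    vertex = GoogleCloudVertexAiConfig(
        project_id="my-gcp-project",  # hypothetical project
        region="us-central1",  # must be a supported Vertex AI region
        private_key="{{secrets/my_scope/vertex_sa_key}}",  # hypothetical secret reference
    )
    assert "private_key_plaintext" not in vertex.as_dict()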
You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" - + openai_api_key_plaintext: Optional[str] = None """The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" - + openai_api_type: Optional[str] = None """This is an optional field to specify the type of OpenAI API to use. For Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security access validation protocol. For access token validation, use azure. For authentication using Azure Active Directory (Azure AD), use azuread.""" - + openai_api_version: Optional[str] = None """This is an optional field to specify the OpenAI API version. For Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to utilize, specified by a date.""" - + openai_deployment_name: Optional[str] = None """This field is only required for Azure OpenAI and is the name of the deployment resource for the Azure OpenAI service.""" - + openai_organization: Optional[str] = None """This is an optional field to specify the organization in OpenAI or Azure OpenAI.""" - + def as_dict(self) -> dict: """Serializes the OpenAiConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.microsoft_entra_client_id is not None: - body["microsoft_entra_client_id"] = self.microsoft_entra_client_id - if self.microsoft_entra_client_secret is not None: - body["microsoft_entra_client_secret"] = self.microsoft_entra_client_secret - if self.microsoft_entra_client_secret_plaintext is not None: - body["microsoft_entra_client_secret_plaintext"] = self.microsoft_entra_client_secret_plaintext - if self.microsoft_entra_tenant_id is not None: - body["microsoft_entra_tenant_id"] = self.microsoft_entra_tenant_id - if self.openai_api_base is not None: - body["openai_api_base"] = self.openai_api_base - if self.openai_api_key is not None: - body["openai_api_key"] = self.openai_api_key - if self.openai_api_key_plaintext is not None: - body["openai_api_key_plaintext"] = self.openai_api_key_plaintext - if self.openai_api_type is not None: - body["openai_api_type"] = self.openai_api_type - if self.openai_api_version is not None: - body["openai_api_version"] = self.openai_api_version - if self.openai_deployment_name is not None: - body["openai_deployment_name"] = self.openai_deployment_name - if self.openai_organization is not None: - body["openai_organization"] = self.openai_organization + if self.microsoft_entra_client_id is not None: body['microsoft_entra_client_id'] = self.microsoft_entra_client_id + if self.microsoft_entra_client_secret is not None: body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret + if self.microsoft_entra_client_secret_plaintext is not None: body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext + if self.microsoft_entra_tenant_id is not None: body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id + if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base + if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key + if self.openai_api_key_plaintext is not None: body['openai_api_key_plaintext'] = self.openai_api_key_plaintext + if self.openai_api_type is not None:
body['openai_api_type'] = self.openai_api_type + if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version + if self.openai_deployment_name is not None: body['openai_deployment_name'] = self.openai_deployment_name + if self.openai_organization is not None: body['openai_organization'] = self.openai_organization return body def as_shallow_dict(self) -> dict: """Serializes the OpenAiConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.microsoft_entra_client_id is not None: - body["microsoft_entra_client_id"] = self.microsoft_entra_client_id - if self.microsoft_entra_client_secret is not None: - body["microsoft_entra_client_secret"] = self.microsoft_entra_client_secret - if self.microsoft_entra_client_secret_plaintext is not None: - body["microsoft_entra_client_secret_plaintext"] = self.microsoft_entra_client_secret_plaintext - if self.microsoft_entra_tenant_id is not None: - body["microsoft_entra_tenant_id"] = self.microsoft_entra_tenant_id - if self.openai_api_base is not None: - body["openai_api_base"] = self.openai_api_base - if self.openai_api_key is not None: - body["openai_api_key"] = self.openai_api_key - if self.openai_api_key_plaintext is not None: - body["openai_api_key_plaintext"] = self.openai_api_key_plaintext - if self.openai_api_type is not None: - body["openai_api_type"] = self.openai_api_type - if self.openai_api_version is not None: - body["openai_api_version"] = self.openai_api_version - if self.openai_deployment_name is not None: - body["openai_deployment_name"] = self.openai_deployment_name - if self.openai_organization is not None: - body["openai_organization"] = self.openai_organization + if self.microsoft_entra_client_id is not None: body['microsoft_entra_client_id'] = self.microsoft_entra_client_id + if self.microsoft_entra_client_secret is not None: body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret + if self.microsoft_entra_client_secret_plaintext is not None: body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext + if self.microsoft_entra_tenant_id is not None: body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id + if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base + if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key + if self.openai_api_key_plaintext is not None: body['openai_api_key_plaintext'] = self.openai_api_key_plaintext + if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type + if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version + if self.openai_deployment_name is not None: body['openai_deployment_name'] = self.openai_deployment_name + if self.openai_organization is not None: body['openai_organization'] = self.openai_organization return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OpenAiConfig: """Deserializes the OpenAiConfig from a dictionary.""" - return cls( - microsoft_entra_client_id=d.get("microsoft_entra_client_id", None), - microsoft_entra_client_secret=d.get("microsoft_entra_client_secret", None), - microsoft_entra_client_secret_plaintext=d.get("microsoft_entra_client_secret_plaintext", None), - microsoft_entra_tenant_id=d.get("microsoft_entra_tenant_id", None), - openai_api_base=d.get("openai_api_base", None), - openai_api_key=d.get("openai_api_key", None), - openai_api_key_plaintext=d.get("openai_api_key_plaintext", None), - 
openai_api_type=d.get("openai_api_type", None), - openai_api_version=d.get("openai_api_version", None), - openai_deployment_name=d.get("openai_deployment_name", None), - openai_organization=d.get("openai_organization", None), - ) + return cls(microsoft_entra_client_id=d.get('microsoft_entra_client_id', None), microsoft_entra_client_secret=d.get('microsoft_entra_client_secret', None), microsoft_entra_client_secret_plaintext=d.get('microsoft_entra_client_secret_plaintext', None), microsoft_entra_tenant_id=d.get('microsoft_entra_tenant_id', None), openai_api_base=d.get('openai_api_base', None), openai_api_key=d.get('openai_api_key', None), openai_api_key_plaintext=d.get('openai_api_key_plaintext', None), openai_api_type=d.get('openai_api_type', None), openai_api_version=d.get('openai_api_version', None), openai_deployment_name=d.get('openai_deployment_name', None), openai_organization=d.get('openai_organization', None)) + + @dataclass @@ -2241,149 +1907,128 @@ class PaLmConfig: """The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.""" - + palm_api_key_plaintext: Optional[str] = None """The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the PaLmConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.palm_api_key is not None: - body["palm_api_key"] = self.palm_api_key - if self.palm_api_key_plaintext is not None: - body["palm_api_key_plaintext"] = self.palm_api_key_plaintext + if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key + if self.palm_api_key_plaintext is not None: body['palm_api_key_plaintext'] = self.palm_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the PaLmConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.palm_api_key is not None: - body["palm_api_key"] = self.palm_api_key - if self.palm_api_key_plaintext is not None: - body["palm_api_key_plaintext"] = self.palm_api_key_plaintext + if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key + if self.palm_api_key_plaintext is not None: body['palm_api_key_plaintext'] = self.palm_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PaLmConfig: """Deserializes the PaLmConfig from a dictionary.""" - return cls( - palm_api_key=d.get("palm_api_key", None), palm_api_key_plaintext=d.get("palm_api_key_plaintext", None) - ) + return cls(palm_api_key=d.get('palm_api_key', None), palm_api_key_plaintext=d.get('palm_api_key_plaintext', None)) + + @dataclass class PatchServingEndpointTags: add_tags: Optional[List[EndpointTag]] = None """List of endpoint tags to add""" - + delete_tags: Optional[List[str]] = None """List of tag keys to delete""" - + name: Optional[str] = None """The name of the serving endpoint whose tags to patch.
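To make the Azure-specific fields above concrete, a sketch of an Azure OpenAI configuration; every value is a placeholder, and the secret reference follows the Databricks {{secrets/scope/key}} convention.

    azure_cfg = OpenAiConfig(
        openai_api_type="azure",  # access-token validation, per the docstring above
        openai_api_base="https://my-resource.openai.azure.com/",  # hypothetical Azure endpoint
        openai_api_version="2024-02-01",  # assumed service version date
        openai_deployment_name="my-gpt4-deployment",  # hypothetical deployment resource
        openai_api_key="{{secrets/my_scope/azure_openai_key}}",  # hypothetical secret reference
    )
    assert OpenAiConfig.from_dict(azure_cfg.as_dict()).openai_api_type == "azure"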
This field is required.""" - + def as_dict(self) -> dict: """Serializes the PatchServingEndpointTags into a dictionary suitable for use as a JSON request body.""" body = {} - if self.add_tags: - body["add_tags"] = [v.as_dict() for v in self.add_tags] - if self.delete_tags: - body["delete_tags"] = [v for v in self.delete_tags] - if self.name is not None: - body["name"] = self.name + if self.add_tags: body['add_tags'] = [v.as_dict() for v in self.add_tags] + if self.delete_tags: body['delete_tags'] = [v for v in self.delete_tags] + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes.""" body = {} - if self.add_tags: - body["add_tags"] = self.add_tags - if self.delete_tags: - body["delete_tags"] = self.delete_tags - if self.name is not None: - body["name"] = self.name + if self.add_tags: body['add_tags'] = self.add_tags + if self.delete_tags: body['delete_tags'] = self.delete_tags + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PatchServingEndpointTags: """Deserializes the PatchServingEndpointTags from a dictionary.""" - return cls( - add_tags=_repeated_dict(d, "add_tags", EndpointTag), - delete_tags=d.get("delete_tags", None), - name=d.get("name", None), - ) + return cls(add_tags=_repeated_dict(d, 'add_tags', EndpointTag), delete_tags=d.get('delete_tags', None), name=d.get('name', None)) + + @dataclass class PayloadTable: name: Optional[str] = None - + status: Optional[str] = None - + status_message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the PayloadTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.status is not None: - body["status"] = self.status - if self.status_message is not None: - body["status_message"] = self.status_message + if self.name is not None: body['name'] = self.name + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message return body def as_shallow_dict(self) -> dict: """Serializes the PayloadTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.status is not None: - body["status"] = self.status - if self.status_message is not None: - body["status_message"] = self.status_message + if self.name is not None: body['name'] = self.name + if self.status is not None: body['status'] = self.status + if self.status_message is not None: body['status_message'] = self.status_message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PayloadTable: """Deserializes the PayloadTable from a dictionary.""" - return cls(name=d.get("name", None), status=d.get("status", None), status_message=d.get("status_message", None)) + return cls(name=d.get('name', None), status=d.get('status', None), status_message=d.get('status_message', None)) + + @dataclass class PtEndpointCoreConfig: served_entities: Optional[List[PtServedModel]] = None """The list of served entities under the serving endpoint config.""" - + traffic_config: Optional[TrafficConfig] = None - + def as_dict(self) -> dict: """Serializes the PtEndpointCoreConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.served_entities: - body["served_entities"] = [v.as_dict() for v in self.served_entities] - 
if self.traffic_config: - body["traffic_config"] = self.traffic_config.as_dict() + if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] + if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PtEndpointCoreConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.served_entities: - body["served_entities"] = self.served_entities - if self.traffic_config: - body["traffic_config"] = self.traffic_config + if self.served_entities: body['served_entities'] = self.served_entities + if self.traffic_config: body['traffic_config'] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PtEndpointCoreConfig: """Deserializes the PtEndpointCoreConfig from a dictionary.""" - return cls( - served_entities=_repeated_dict(d, "served_entities", PtServedModel), - traffic_config=_from_dict(d, "traffic_config", TrafficConfig), - ) + return cls(served_entities=_repeated_dict(d, 'served_entities', PtServedModel), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) + + @dataclass @@ -2393,53 +2038,42 @@ class PtServedModel: a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.""" - + provisioned_model_units: int """The number of model units to be provisioned.""" - + entity_version: Optional[str] = None - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' 
and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + def as_dict(self) -> dict: """Serializes the PtServedModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units return body def as_shallow_dict(self) -> dict: """Serializes the PtServedModel into a shallow dictionary of its immediate attributes.""" body = {} - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PtServedModel: """Deserializes the PtServedModel from a dictionary.""" - return cls( - entity_name=d.get("entity_name", None), - entity_version=d.get("entity_version", None), - name=d.get("name", None), - provisioned_model_units=d.get("provisioned_model_units", None), - ) + return cls(entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None)) + + @dataclass @@ -2447,70 +2081,53 @@ class PutAiGatewayRequest: fallback_config: Optional[FallbackConfig] = None """Configuration for traffic fallback which auto fallbacks to other served entities if the request to a served entity fails with certain error codes, to increase availability.""" - + guardrails: Optional[AiGatewayGuardrails] = None """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.""" - + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None """Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.""" - + name: Optional[str] = None """The name of the serving endpoint whose AI Gateway is being updated. This field is required.""" - + rate_limits: Optional[List[AiGatewayRateLimit]] = None """Configuration for rate limits which can be set to limit endpoint traffic.""" - + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None """Configuration to enable usage tracking using system tables. 
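# A hedged sketch of a provisioned-throughput config built from the two
# dataclasses above; the UC model name and unit count are placeholders.
from databricks.sdk.service.serving import PtEndpointCoreConfig, PtServedModel

pt_config = PtEndpointCoreConfig(
    served_entities=[
        PtServedModel(
            entity_name="main.default.my_model",  # catalog_name.schema_name.model_name form
            entity_version="1",
            provisioned_model_units=100,
        )
    ]
)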
These tables allow you to monitor operational usage on endpoints and their associated costs.""" - + def as_dict(self) -> dict: """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config.as_dict() - if self.guardrails: - body["guardrails"] = self.guardrails.as_dict() - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config.as_dict() + if self.fallback_config: body['fallback_config'] = self.fallback_config.as_dict() + if self.guardrails: body['guardrails'] = self.guardrails.as_dict() + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict() + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config - if self.guardrails: - body["guardrails"] = self.guardrails - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config + if self.fallback_config: body['fallback_config'] = self.fallback_config + if self.guardrails: body['guardrails'] = self.guardrails + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayRequest: """Deserializes the PutAiGatewayRequest from a dictionary.""" - return cls( - fallback_config=_from_dict(d, "fallback_config", FallbackConfig), - guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), - inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig), - name=d.get("name", None), - rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), - usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), - ) + return cls(fallback_config=_from_dict(d, 'fallback_config', FallbackConfig), guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails), inference_table_config=_from_dict(d, 'inference_table_config', AiGatewayInferenceTableConfig), name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit), usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig)) + + @dataclass @@ -2518,266 +2135,209 @@ class PutAiGatewayResponse: fallback_config: Optional[FallbackConfig] = None """Configuration for traffic fallback which auto fallbacks to other served entities if the request to a served entity fails with certain error codes, to 
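# Illustrative sketch of a PutAiGatewayRequest body combining per-user rate
# limits with usage tracking. The AiGateway* classes are defined earlier in
# this module; the endpoint name and limit values are placeholders.
from databricks.sdk.service.serving import (
    AiGatewayRateLimit,
    AiGatewayRateLimitKey,
    AiGatewayRateLimitRenewalPeriod,
    AiGatewayUsageTrackingConfig,
    PutAiGatewayRequest,
)

gateway_req = PutAiGatewayRequest(
    name="my-endpoint",
    rate_limits=[
        AiGatewayRateLimit(
            calls=100,
            renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
            key=AiGatewayRateLimitKey.USER,
        )
    ],
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
)
json_body = gateway_req.as_dict()  # ready to use as the JSON request body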
increase availability.""" - + guardrails: Optional[AiGatewayGuardrails] = None """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.""" - + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None """Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.""" - + rate_limits: Optional[List[AiGatewayRateLimit]] = None """Configuration for rate limits which can be set to limit endpoint traffic.""" - + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None """Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.""" - + def as_dict(self) -> dict: """Serializes the PutAiGatewayResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config.as_dict() - if self.guardrails: - body["guardrails"] = self.guardrails.as_dict() - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config.as_dict() - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config.as_dict() + if self.fallback_config: body['fallback_config'] = self.fallback_config.as_dict() + if self.guardrails: body['guardrails'] = self.guardrails.as_dict() + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict() + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PutAiGatewayResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config - if self.guardrails: - body["guardrails"] = self.guardrails - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config + if self.fallback_config: body['fallback_config'] = self.fallback_config + if self.guardrails: body['guardrails'] = self.guardrails + if self.inference_table_config: body['inference_table_config'] = self.inference_table_config + if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayResponse: """Deserializes the PutAiGatewayResponse from a dictionary.""" - return cls( - fallback_config=_from_dict(d, "fallback_config", FallbackConfig), - guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), - inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig), - rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), - usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), - ) + return cls(fallback_config=_from_dict(d, 'fallback_config', FallbackConfig), guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails), inference_table_config=_from_dict(d, 'inference_table_config', 
AiGatewayInferenceTableConfig), rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit), usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig)) + + @dataclass class PutRequest: name: Optional[str] = None """The name of the serving endpoint whose rate limits are being updated. This field is required.""" - + rate_limits: Optional[List[RateLimit]] = None """The list of endpoint rate limits.""" - + def as_dict(self) -> dict: """Serializes the PutRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] return body def as_shallow_dict(self) -> dict: """Serializes the PutRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits + if self.name is not None: body['name'] = self.name + if self.rate_limits: body['rate_limits'] = self.rate_limits return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutRequest: """Deserializes the PutRequest from a dictionary.""" - return cls(name=d.get("name", None), rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) + return cls(name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit)) + + @dataclass class PutResponse: rate_limits: Optional[List[RateLimit]] = None """The list of endpoint rate limits.""" - + def as_dict(self) -> dict: """Serializes the PutResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] + if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] return body def as_shallow_dict(self) -> dict: """Serializes the PutResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.rate_limits: - body["rate_limits"] = self.rate_limits + if self.rate_limits: body['rate_limits'] = self.rate_limits return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutResponse: """Deserializes the PutResponse from a dictionary.""" - return cls(rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) + return cls(rate_limits=_repeated_dict(d, 'rate_limits', RateLimit)) + + @dataclass class QueryEndpointInput: dataframe_records: Optional[List[Any]] = None """Pandas Dataframe input in the records orientation.""" - + dataframe_split: Optional[DataframeSplitInput] = None """Pandas Dataframe input in the split orientation.""" - - extra_params: Optional[Dict[str, str]] = None + + extra_params: Optional[Dict[str,str]] = None """The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & foundation model__ serving endpoints. 
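# Sketch of the rate-limit PUT body built from PutRequest plus the RateLimit
# dataclass defined further down; values are placeholders. Note that as_dict()
# serializes enum members through .value.
from databricks.sdk.service.serving import (
    PutRequest,
    RateLimit,
    RateLimitKey,
    RateLimitRenewalPeriod,
)

put_req = PutRequest(
    name="my-endpoint",
    rate_limits=[
        RateLimit(
            calls=60,
            renewal_period=RateLimitRenewalPeriod.MINUTE,
            key=RateLimitKey.ENDPOINT,
        )
    ],
)
assert put_req.as_dict()["rate_limits"][0]["renewal_period"] == "minute"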
This is a map of strings and should only be used with other external/foundation model query fields.""" - + input: Optional[Any] = None """The input string (or array of strings) field used ONLY for __embeddings external & foundation model__ serving endpoints and is the only field (along with extra_params if needed) used by embeddings queries.""" - + inputs: Optional[Any] = None """Tensor-based input in columnar format.""" - + instances: Optional[List[Any]] = None """Tensor-based input in row format.""" - + max_tokens: Optional[int] = None """The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is an integer and should only be used with other chat/completions query fields.""" - + messages: Optional[List[ChatMessage]] = None """The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a map of strings and should only be used with other chat query fields.""" - + n: Optional[int] = None """The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be used with other chat/completions query fields.""" - + name: Optional[str] = None """The name of the serving endpoint. This field is required.""" - + prompt: Optional[Any] = None """The prompt string (or array of strings) field used ONLY for __completions external & foundation model__ serving endpoints and should only be used with other completions query fields.""" - + stop: Optional[List[str]] = None """The stop sequences field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a list of strings and should only be used with other chat/completions query fields.""" - + stream: Optional[bool] = None """The stream field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a boolean defaulting to false and should only be used with other chat/completions query fields.""" - + temperature: Optional[float] = None """The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. 
This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields.""" - + def as_dict(self) -> dict: """Serializes the QueryEndpointInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataframe_records: - body["dataframe_records"] = [v for v in self.dataframe_records] - if self.dataframe_split: - body["dataframe_split"] = self.dataframe_split.as_dict() - if self.extra_params: - body["extra_params"] = self.extra_params - if self.input: - body["input"] = self.input - if self.inputs: - body["inputs"] = self.inputs - if self.instances: - body["instances"] = [v for v in self.instances] - if self.max_tokens is not None: - body["max_tokens"] = self.max_tokens - if self.messages: - body["messages"] = [v.as_dict() for v in self.messages] - if self.n is not None: - body["n"] = self.n - if self.name is not None: - body["name"] = self.name - if self.prompt: - body["prompt"] = self.prompt - if self.stop: - body["stop"] = [v for v in self.stop] - if self.stream is not None: - body["stream"] = self.stream - if self.temperature is not None: - body["temperature"] = self.temperature + if self.dataframe_records: body['dataframe_records'] = [v for v in self.dataframe_records] + if self.dataframe_split: body['dataframe_split'] = self.dataframe_split.as_dict() + if self.extra_params: body['extra_params'] = self.extra_params + if self.input: body['input'] = self.input + if self.inputs: body['inputs'] = self.inputs + if self.instances: body['instances'] = [v for v in self.instances] + if self.max_tokens is not None: body['max_tokens'] = self.max_tokens + if self.messages: body['messages'] = [v.as_dict() for v in self.messages] + if self.n is not None: body['n'] = self.n + if self.name is not None: body['name'] = self.name + if self.prompt: body['prompt'] = self.prompt + if self.stop: body['stop'] = [v for v in self.stop] + if self.stream is not None: body['stream'] = self.stream + if self.temperature is not None: body['temperature'] = self.temperature return body def as_shallow_dict(self) -> dict: """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataframe_records: - body["dataframe_records"] = self.dataframe_records - if self.dataframe_split: - body["dataframe_split"] = self.dataframe_split - if self.extra_params: - body["extra_params"] = self.extra_params - if self.input: - body["input"] = self.input - if self.inputs: - body["inputs"] = self.inputs - if self.instances: - body["instances"] = self.instances - if self.max_tokens is not None: - body["max_tokens"] = self.max_tokens - if self.messages: - body["messages"] = self.messages - if self.n is not None: - body["n"] = self.n - if self.name is not None: - body["name"] = self.name - if self.prompt: - body["prompt"] = self.prompt - if self.stop: - body["stop"] = self.stop - if self.stream is not None: - body["stream"] = self.stream - if self.temperature is not None: - body["temperature"] = self.temperature + if self.dataframe_records: body['dataframe_records'] = self.dataframe_records + if self.dataframe_split: body['dataframe_split'] = self.dataframe_split + if self.extra_params: body['extra_params'] = self.extra_params + if self.input: body['input'] = self.input + if self.inputs: body['inputs'] = self.inputs + if self.instances: body['instances'] = self.instances + if self.max_tokens is not None: body['max_tokens'] = self.max_tokens + if self.messages: body['messages'] = self.messages + if self.n is not 
None: body['n'] = self.n + if self.name is not None: body['name'] = self.name + if self.prompt: body['prompt'] = self.prompt + if self.stop: body['stop'] = self.stop + if self.stream is not None: body['stream'] = self.stream + if self.temperature is not None: body['temperature'] = self.temperature return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryEndpointInput: """Deserializes the QueryEndpointInput from a dictionary.""" - return cls( - dataframe_records=d.get("dataframe_records", None), - dataframe_split=_from_dict(d, "dataframe_split", DataframeSplitInput), - extra_params=d.get("extra_params", None), - input=d.get("input", None), - inputs=d.get("inputs", None), - instances=d.get("instances", None), - max_tokens=d.get("max_tokens", None), - messages=_repeated_dict(d, "messages", ChatMessage), - n=d.get("n", None), - name=d.get("name", None), - prompt=d.get("prompt", None), - stop=d.get("stop", None), - stream=d.get("stream", None), - temperature=d.get("temperature", None), - ) + return cls(dataframe_records=d.get('dataframe_records', None), dataframe_split=_from_dict(d, 'dataframe_split', DataframeSplitInput), extra_params=d.get('extra_params', None), input=d.get('input', None), inputs=d.get('inputs', None), instances=d.get('instances', None), max_tokens=d.get('max_tokens', None), messages=_repeated_dict(d, 'messages', ChatMessage), n=d.get('n', None), name=d.get('name', None), prompt=d.get('prompt', None), stop=d.get('stop', None), stream=d.get('stream', None), temperature=d.get('temperature', None)) + + @dataclass @@ -2785,197 +2345,158 @@ class QueryEndpointResponse: choices: Optional[List[V1ResponseChoiceElement]] = None """The list of choices returned by the __chat or completions external/foundation model__ serving endpoint.""" - + created: Optional[int] = None """The timestamp in seconds when the query was created in Unix time returned by a __completions or chat external/foundation model__ serving endpoint.""" - + data: Optional[List[EmbeddingsV1ResponseEmbeddingElement]] = None """The list of the embeddings returned by the __embeddings external/foundation model__ serving endpoint.""" - + id: Optional[str] = None """The ID of the query that may be returned by a __completions or chat external/foundation model__ serving endpoint.""" - + model: Optional[str] = None """The name of the __external/foundation model__ used for querying. This is the name of the model that was specified in the endpoint config.""" - + object: Optional[QueryEndpointResponseObject] = None """The type of object returned by the __external/foundation model__ serving endpoint, one of [text_completion, chat.completion, list (of embeddings)].""" - + predictions: Optional[List[Any]] = None """The predictions returned by the serving endpoint.""" - + served_model_name: Optional[str] = None """The name of the served model that served the request. This is useful when there are multiple models behind the same endpoint with traffic split.""" - + usage: Optional[ExternalModelUsageElement] = None """The usage object that may be returned by the __external/foundation model__ serving endpoint. 
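# Illustrative chat-style QueryEndpointInput: per the field docs above, chat
# queries pair `messages` with chat/completions-only fields such as max_tokens
# and temperature. Endpoint name and message content are placeholders.
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole, QueryEndpointInput

query = QueryEndpointInput(
    name="my-chat-endpoint",
    messages=[ChatMessage(role=ChatMessageRole.USER, content="Summarize our Q3 results.")],
    max_tokens=256,
    temperature=0.2,
)
query_body = query.as_dict()  # only the fields that were set appear in the body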
This contains information about the number of tokens used in the prompt and response.""" - + def as_dict(self) -> dict: """Serializes the QueryEndpointResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.choices: - body["choices"] = [v.as_dict() for v in self.choices] - if self.created is not None: - body["created"] = self.created - if self.data: - body["data"] = [v.as_dict() for v in self.data] - if self.id is not None: - body["id"] = self.id - if self.model is not None: - body["model"] = self.model - if self.object is not None: - body["object"] = self.object.value - if self.predictions: - body["predictions"] = [v for v in self.predictions] - if self.served_model_name is not None: - body["served-model-name"] = self.served_model_name - if self.usage: - body["usage"] = self.usage.as_dict() + if self.choices: body['choices'] = [v.as_dict() for v in self.choices] + if self.created is not None: body['created'] = self.created + if self.data: body['data'] = [v.as_dict() for v in self.data] + if self.id is not None: body['id'] = self.id + if self.model is not None: body['model'] = self.model + if self.object is not None: body['object'] = self.object.value + if self.predictions: body['predictions'] = [v for v in self.predictions] + if self.served_model_name is not None: body['served-model-name'] = self.served_model_name + if self.usage: body['usage'] = self.usage.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the QueryEndpointResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.choices: - body["choices"] = self.choices - if self.created is not None: - body["created"] = self.created - if self.data: - body["data"] = self.data - if self.id is not None: - body["id"] = self.id - if self.model is not None: - body["model"] = self.model - if self.object is not None: - body["object"] = self.object - if self.predictions: - body["predictions"] = self.predictions - if self.served_model_name is not None: - body["served-model-name"] = self.served_model_name - if self.usage: - body["usage"] = self.usage + if self.choices: body['choices'] = self.choices + if self.created is not None: body['created'] = self.created + if self.data: body['data'] = self.data + if self.id is not None: body['id'] = self.id + if self.model is not None: body['model'] = self.model + if self.object is not None: body['object'] = self.object + if self.predictions: body['predictions'] = self.predictions + if self.served_model_name is not None: body['served-model-name'] = self.served_model_name + if self.usage: body['usage'] = self.usage return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryEndpointResponse: """Deserializes the QueryEndpointResponse from a dictionary.""" - return cls( - choices=_repeated_dict(d, "choices", V1ResponseChoiceElement), - created=d.get("created", None), - data=_repeated_dict(d, "data", EmbeddingsV1ResponseEmbeddingElement), - id=d.get("id", None), - model=d.get("model", None), - object=_enum(d, "object", QueryEndpointResponseObject), - predictions=d.get("predictions", None), - served_model_name=d.get("served-model-name", None), - usage=_from_dict(d, "usage", ExternalModelUsageElement), - ) + return cls(choices=_repeated_dict(d, 'choices', V1ResponseChoiceElement), created=d.get('created', None), data=_repeated_dict(d, 'data', EmbeddingsV1ResponseEmbeddingElement), id=d.get('id', None), model=d.get('model', None), object=_enum(d, 'object', QueryEndpointResponseObject), predictions=d.get('predictions', 
None), served_model_name=d.get('served-model-name', None), usage=_from_dict(d, 'usage', ExternalModelUsageElement)) + + class QueryEndpointResponseObject(Enum): """The type of object returned by the __external/foundation model__ serving endpoint, one of [text_completion, chat.completion, list (of embeddings)].""" - - CHAT_COMPLETION = "chat.completion" - LIST = "list" - TEXT_COMPLETION = "text_completion" - + + CHAT_COMPLETION = 'chat.completion' + LIST = 'list' + TEXT_COMPLETION = 'text_completion' @dataclass class RateLimit: calls: int """Used to specify how many calls are allowed for a key within the renewal_period.""" - + renewal_period: RateLimitRenewalPeriod """Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.""" - + key: Optional[RateLimitKey] = None """Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.""" - + def as_dict(self) -> dict: """Serializes the RateLimit into a dictionary suitable for use as a JSON request body.""" body = {} - if self.calls is not None: - body["calls"] = self.calls - if self.key is not None: - body["key"] = self.key.value - if self.renewal_period is not None: - body["renewal_period"] = self.renewal_period.value + if self.calls is not None: body['calls'] = self.calls + if self.key is not None: body['key'] = self.key.value + if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value return body def as_shallow_dict(self) -> dict: """Serializes the RateLimit into a shallow dictionary of its immediate attributes.""" body = {} - if self.calls is not None: - body["calls"] = self.calls - if self.key is not None: - body["key"] = self.key - if self.renewal_period is not None: - body["renewal_period"] = self.renewal_period + if self.calls is not None: body['calls'] = self.calls + if self.key is not None: body['key'] = self.key + if self.renewal_period is not None: body['renewal_period'] = self.renewal_period return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RateLimit: """Deserializes the RateLimit from a dictionary.""" - return cls( - calls=d.get("calls", None), - key=_enum(d, "key", RateLimitKey), - renewal_period=_enum(d, "renewal_period", RateLimitRenewalPeriod), - ) - + return cls(calls=d.get('calls', None), key=_enum(d, 'key', RateLimitKey), renewal_period=_enum(d, 'renewal_period', RateLimitRenewalPeriod)) + -class RateLimitKey(Enum): - ENDPOINT = "endpoint" - USER = "user" +class RateLimitKey(Enum): + + + ENDPOINT = 'endpoint' + USER = 'user' class RateLimitRenewalPeriod(Enum): - - MINUTE = "minute" - + + + MINUTE = 'minute' @dataclass class Route: served_model_name: str """The name of the served model this route configures traffic for.""" - + traffic_percentage: int """The percentage of endpoint traffic to send to this route. 
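# Sketch of deserializing a fabricated chat response payload. Note the
# asymmetric wire key 'served-model-name' that QueryEndpointResponse.from_dict
# above maps onto the served_model_name attribute, and the enum-valued
# `object` field:
from databricks.sdk.service.serving import QueryEndpointResponse, QueryEndpointResponseObject

payload = {
    "id": "chatcmpl-123",
    "object": "chat.completion",
    "served-model-name": "my_model-3",
    "usage": {"prompt_tokens": 12, "completion_tokens": 34, "total_tokens": 46},
}
resp = QueryEndpointResponse.from_dict(payload)
assert resp.served_model_name == "my_model-3"
assert resp.object is QueryEndpointResponseObject.CHAT_COMPLETION
assert resp.usage.total_tokens == 46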
It must be an integer between 0 and 100 inclusive.""" - + def as_dict(self) -> dict: """Serializes the Route into a dictionary suitable for use as a JSON request body.""" body = {} - if self.served_model_name is not None: - body["served_model_name"] = self.served_model_name - if self.traffic_percentage is not None: - body["traffic_percentage"] = self.traffic_percentage + if self.served_model_name is not None: body['served_model_name'] = self.served_model_name + if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage return body def as_shallow_dict(self) -> dict: """Serializes the Route into a shallow dictionary of its immediate attributes.""" body = {} - if self.served_model_name is not None: - body["served_model_name"] = self.served_model_name - if self.traffic_percentage is not None: - body["traffic_percentage"] = self.traffic_percentage + if self.served_model_name is not None: body['served_model_name'] = self.served_model_name + if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Route: """Deserializes the Route from a dictionary.""" - return cls( - served_model_name=d.get("served_model_name", None), traffic_percentage=d.get("traffic_percentage", None) - ) + return cls(served_model_name=d.get('served_model_name', None), traffic_percentage=d.get('traffic_percentage', None)) + + @dataclass @@ -2985,15 +2506,15 @@ class ServedEntityInput: a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.""" - + entity_version: Optional[str] = None - - environment_vars: Optional[Dict[str, str]] = None + + environment_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + external_model: Optional[ExternalModel] = None """The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with @@ -3001,28 +2522,36 @@ class ServedEntityInput: existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + + max_provisioned_concurrency: Optional[int] = None + """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if + workload_size is specified.""" + max_provisioned_throughput: Optional[int] = None """The maximum tokens per second that the endpoint can scale up to.""" - + + min_provisioned_concurrency: Optional[int] = None + """The minimum provisioned concurrency that the endpoint can scale down to. 
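# Sketch: splitting endpoint traffic across two served models with the Route
# dataclass above. TrafficConfig (defined elsewhere in this module) wraps the
# routes; percentages are 0-100 integers and should sum to 100.
from databricks.sdk.service.serving import Route, TrafficConfig

traffic = TrafficConfig(
    routes=[
        Route(served_model_name="my_model-2", traffic_percentage=90),
        Route(served_model_name="my_model-3", traffic_percentage=10),
    ]
)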
Do not use if + workload_size is specified.""" + min_provisioned_throughput: Optional[int] = None """The minimum tokens per second that the endpoint can scale down to.""" - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can @@ -3030,8 +2559,8 @@ class ServedEntityInput: "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size - is 0.""" - + is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -3039,104 +2568,73 @@ class ServedEntityInput: available [GPU types]. 
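# The hunk above introduces min/max_provisioned_concurrency on
# ServedEntityInput. A hedged sketch of a config using the new bounds; per the
# docstrings they are mutually exclusive with workload_size, so it is omitted.
from databricks.sdk.service.serving import ServedEntityInput

entity = ServedEntityInput(
    entity_name="main.default.my_model",
    entity_version="2",
    scale_to_zero_enabled=True,
    min_provisioned_concurrency=0,
    max_provisioned_concurrency=8,
)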
[GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedEntityInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.external_model: - body["external_model"] = self.external_model.as_dict() - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_provisioned_throughput is not None: - body["max_provisioned_throughput"] = self.max_provisioned_throughput - if self.min_provisioned_throughput is not None: - body["min_provisioned_throughput"] = self.min_provisioned_throughput - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if self.workload_type is not None: - body["workload_type"] = self.workload_type.value + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.external_model: body['external_model'] = self.external_model.as_dict() + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ServedEntityInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.external_model: - body["external_model"] = self.external_model - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_provisioned_throughput is not None: - body["max_provisioned_throughput"] = self.max_provisioned_throughput - if self.min_provisioned_throughput is not None: - body["min_provisioned_throughput"] = self.min_provisioned_throughput - if self.name is not None: - 
body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if self.workload_type is not None: - body["workload_type"] = self.workload_type + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.external_model: body['external_model'] = self.external_model + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedEntityInput: """Deserializes the ServedEntityInput from a dictionary.""" - return cls( - entity_name=d.get("entity_name", None), - entity_version=d.get("entity_version", None), - environment_vars=d.get("environment_vars", None), - external_model=_from_dict(d, "external_model", ExternalModel), - instance_profile_arn=d.get("instance_profile_arn", None), - max_provisioned_throughput=d.get("max_provisioned_throughput", None), - min_provisioned_throughput=d.get("min_provisioned_throughput", None), - name=d.get("name", None), - provisioned_model_units=d.get("provisioned_model_units", None), - scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), - workload_size=d.get("workload_size", None), - workload_type=_enum(d, "workload_type", ServingModelWorkloadType), - ) + return cls(entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), environment_vars=d.get('environment_vars', None), external_model=_from_dict(d, 'external_model', ExternalModel), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), max_provisioned_throughput=d.get('max_provisioned_throughput', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), min_provisioned_throughput=d.get('min_provisioned_throughput', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) + + @dataclass class ServedEntityOutput: creation_timestamp: Optional[int] = None - + creator: Optional[str] = None - + entity_name: Optional[str] = None """The name of the 
entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.""" - + entity_version: Optional[str] = None - - environment_vars: Optional[Dict[str, str]] = None + + environment_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + external_model: Optional[ExternalModel] = None """The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with @@ -3144,34 +2642,42 @@ class ServedEntityOutput: existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.""" - + foundation_model: Optional[FoundationModel] = None """All fields are not sensitive as they are hard-coded in the system and made available to customers.""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + + max_provisioned_concurrency: Optional[int] = None + """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if + workload_size is specified.""" + max_provisioned_throughput: Optional[int] = None """The maximum tokens per second that the endpoint can scale up to.""" - + + min_provisioned_concurrency: Optional[int] = None + """The minimum provisioned concurrency that the endpoint can scale down to. Do not use if + workload_size is specified.""" + min_provisioned_throughput: Optional[int] = None """The minimum tokens per second that the endpoint can scale down to.""" - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" - + state: Optional[ServedModelState] = None - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can @@ -3179,8 +2685,8 @@ class ServedEntityOutput: "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size - is 0.""" - + is 0. 
Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -3188,193 +2694,144 @@ class ServedEntityOutput: available [GPU types]. [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedEntityOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.external_model: - body["external_model"] = self.external_model.as_dict() - if self.foundation_model: - body["foundation_model"] = self.foundation_model.as_dict() - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_provisioned_throughput is not None: - body["max_provisioned_throughput"] = self.max_provisioned_throughput - if self.min_provisioned_throughput is not None: - body["min_provisioned_throughput"] = self.min_provisioned_throughput - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.state: - body["state"] = self.state.as_dict() - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if self.workload_type is not None: - body["workload_type"] = self.workload_type.value + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.external_model: body['external_model'] = self.external_model.as_dict() + if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict() + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.state: body['state'] = 
self.state.as_dict() + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ServedEntityOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.external_model: - body["external_model"] = self.external_model - if self.foundation_model: - body["foundation_model"] = self.foundation_model - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_provisioned_throughput is not None: - body["max_provisioned_throughput"] = self.max_provisioned_throughput - if self.min_provisioned_throughput is not None: - body["min_provisioned_throughput"] = self.min_provisioned_throughput - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.state: - body["state"] = self.state - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if self.workload_type is not None: - body["workload_type"] = self.workload_type + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.external_model: body['external_model'] = self.external_model + if self.foundation_model: body['foundation_model'] = self.foundation_model + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.state: body['state'] = self.state + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedEntityOutput: """Deserializes the ServedEntityOutput from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - 
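# The two serializers above differ only in depth: as_dict() recurses into
# nested dataclasses and enums, while as_shallow_dict() keeps the attribute
# objects as-is. A small sketch with a placeholder state:
from databricks.sdk.service.serving import ServedEntityOutput, ServedModelState

out = ServedEntityOutput(name="my-entity", state=ServedModelState())
assert isinstance(out.as_dict().get("state", {}), dict)              # recursed
assert isinstance(out.as_shallow_dict()["state"], ServedModelState)  # not recursed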
creator=d.get("creator", None), - entity_name=d.get("entity_name", None), - entity_version=d.get("entity_version", None), - environment_vars=d.get("environment_vars", None), - external_model=_from_dict(d, "external_model", ExternalModel), - foundation_model=_from_dict(d, "foundation_model", FoundationModel), - instance_profile_arn=d.get("instance_profile_arn", None), - max_provisioned_throughput=d.get("max_provisioned_throughput", None), - min_provisioned_throughput=d.get("min_provisioned_throughput", None), - name=d.get("name", None), - provisioned_model_units=d.get("provisioned_model_units", None), - scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), - state=_from_dict(d, "state", ServedModelState), - workload_size=d.get("workload_size", None), - workload_type=_enum(d, "workload_type", ServingModelWorkloadType), - ) + return cls(creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), environment_vars=d.get('environment_vars', None), external_model=_from_dict(d, 'external_model', ExternalModel), foundation_model=_from_dict(d, 'foundation_model', FoundationModel), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), max_provisioned_throughput=d.get('max_provisioned_throughput', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), min_provisioned_throughput=d.get('min_provisioned_throughput', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), state=_from_dict(d, 'state', ServedModelState), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) + + @dataclass class ServedEntitySpec: entity_name: Optional[str] = None - + entity_version: Optional[str] = None - + external_model: Optional[ExternalModel] = None - + foundation_model: Optional[FoundationModel] = None """All fields are not sensitive as they are hard-coded in the system and made available to customers.""" - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServedEntitySpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.external_model: - body["external_model"] = self.external_model.as_dict() - if self.foundation_model: - body["foundation_model"] = self.foundation_model.as_dict() - if self.name is not None: - body["name"] = self.name + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.external_model: body['external_model'] = self.external_model.as_dict() + if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict() + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the ServedEntitySpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.entity_name is not None: - body["entity_name"] = self.entity_name - if self.entity_version is not None: - body["entity_version"] = self.entity_version - if self.external_model: - body["external_model"] = self.external_model - if self.foundation_model: - 
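# Hedged sketch of inspecting a deserialized ServedEntityOutput; assumes
# ServedModelState exposes an optional `deployment` enum field, as defined
# elsewhere in this module.
from databricks.sdk.service.serving import ServedEntityOutput

def describe_entity(entity: ServedEntityOutput) -> str:
    state = entity.state.deployment.value if entity.state and entity.state.deployment else "unknown"
    return f"{entity.name} (units={entity.provisioned_model_units}): {state}"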
body["foundation_model"] = self.foundation_model - if self.name is not None: - body["name"] = self.name + if self.entity_name is not None: body['entity_name'] = self.entity_name + if self.entity_version is not None: body['entity_version'] = self.entity_version + if self.external_model: body['external_model'] = self.external_model + if self.foundation_model: body['foundation_model'] = self.foundation_model + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedEntitySpec: """Deserializes the ServedEntitySpec from a dictionary.""" - return cls( - entity_name=d.get("entity_name", None), - entity_version=d.get("entity_version", None), - external_model=_from_dict(d, "external_model", ExternalModel), - foundation_model=_from_dict(d, "foundation_model", FoundationModel), - name=d.get("name", None), - ) + return cls(entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), external_model=_from_dict(d, 'external_model', ExternalModel), foundation_model=_from_dict(d, 'foundation_model', FoundationModel), name=d.get('name', None)) + + @dataclass class ServedModelInput: scale_to_zero_enabled: bool """Whether the compute resources for the served entity should scale down to zero.""" - + model_name: str - + model_version: str - - environment_vars: Optional[Dict[str, str]] = None + + environment_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + + max_provisioned_concurrency: Optional[int] = None + """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if + workload_size is specified.""" + max_provisioned_throughput: Optional[int] = None """The maximum tokens per second that the endpoint can scale up to.""" - + + min_provisioned_concurrency: Optional[int] = None + """The minimum provisioned concurrency that the endpoint can scale down to. Do not use if + workload_size is specified.""" + min_provisioned_throughput: Optional[int] = None """The minimum tokens per second that the endpoint can scale down to.""" - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can @@ -3382,8 +2839,8 @@ class ServedModelInput: "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. 
If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size - is 0.""" - + is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" + workload_type: Optional[ServedModelInputWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -3391,122 +2848,101 @@ class ServedModelInput: available [GPU types]. [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedModelInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_provisioned_throughput is not None: - body["max_provisioned_throughput"] = self.max_provisioned_throughput - if self.min_provisioned_throughput is not None: - body["min_provisioned_throughput"] = self.min_provisioned_throughput - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version is not None: - body["model_version"] = self.model_version - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if self.workload_type is not None: - body["workload_type"] = self.workload_type.value + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ServedModelInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_provisioned_throughput is not None: - body["max_provisioned_throughput"] = self.max_provisioned_throughput - if 
self.min_provisioned_throughput is not None: - body["min_provisioned_throughput"] = self.min_provisioned_throughput - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version is not None: - body["model_version"] = self.model_version - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if self.workload_type is not None: - body["workload_type"] = self.workload_type + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelInput: """Deserializes the ServedModelInput from a dictionary.""" - return cls( - environment_vars=d.get("environment_vars", None), - instance_profile_arn=d.get("instance_profile_arn", None), - max_provisioned_throughput=d.get("max_provisioned_throughput", None), - min_provisioned_throughput=d.get("min_provisioned_throughput", None), - model_name=d.get("model_name", None), - model_version=d.get("model_version", None), - name=d.get("name", None), - provisioned_model_units=d.get("provisioned_model_units", None), - scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), - workload_size=d.get("workload_size", None), - workload_type=_enum(d, "workload_type", ServedModelInputWorkloadType), - ) + return cls(environment_vars=d.get('environment_vars', None), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), max_provisioned_throughput=d.get('max_provisioned_throughput', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), min_provisioned_throughput=d.get('min_provisioned_throughput', None), model_name=d.get('model_name', None), model_version=d.get('model_version', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServedModelInputWorkloadType)) + + class ServedModelInputWorkloadType(Enum): """Please keep this in sync with 
with workload types in InferenceEndpointEntities.scala""" - - CPU = "CPU" - GPU_LARGE = "GPU_LARGE" - GPU_MEDIUM = "GPU_MEDIUM" - GPU_SMALL = "GPU_SMALL" - MULTIGPU_MEDIUM = "MULTIGPU_MEDIUM" - + + CPU = 'CPU' + GPU_LARGE = 'GPU_LARGE' + GPU_MEDIUM = 'GPU_MEDIUM' + GPU_SMALL = 'GPU_SMALL' + MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM' @dataclass class ServedModelOutput: creation_timestamp: Optional[int] = None - + creator: Optional[str] = None - - environment_vars: Optional[Dict[str, str]] = None + + environment_vars: Optional[Dict[str,str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + + max_provisioned_concurrency: Optional[int] = None + """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if + workload_size is specified.""" + + min_provisioned_concurrency: Optional[int] = None + """The minimum provisioned concurrency that the endpoint can scale down to. Do not use if + workload_size is specified.""" + model_name: Optional[str] = None - + model_version: Optional[str] = None - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" - + state: Optional[ServedModelState] = None - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can @@ -3514,8 +2950,8 @@ class ServedModelOutput: "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size - is 0.""" - + is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -3523,189 +2959,147 @@ class ServedModelOutput: available [GPU types]. 
[GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedModelOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version is not None: - body["model_version"] = self.model_version - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.state: - body["state"] = self.state.as_dict() - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if self.workload_type is not None: - body["workload_type"] = self.workload_type.value + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.state: body['state'] = self.state.as_dict() + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ServedModelOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.environment_vars: - body["environment_vars"] = self.environment_vars - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version is not None: - body["model_version"] = self.model_version - if self.name is not None: - body["name"] = self.name - if self.provisioned_model_units is not None: - body["provisioned_model_units"] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: - body["scale_to_zero_enabled"] = self.scale_to_zero_enabled - if self.state: - body["state"] = self.state - if self.workload_size is not None: - body["workload_size"] = self.workload_size - if 
self.workload_type is not None: - body["workload_type"] = self.workload_type + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.environment_vars: body['environment_vars'] = self.environment_vars + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency + if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name + if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled + if self.state: body['state'] = self.state + if self.workload_size is not None: body['workload_size'] = self.workload_size + if self.workload_type is not None: body['workload_type'] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelOutput: """Deserializes the ServedModelOutput from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - creator=d.get("creator", None), - environment_vars=d.get("environment_vars", None), - instance_profile_arn=d.get("instance_profile_arn", None), - model_name=d.get("model_name", None), - model_version=d.get("model_version", None), - name=d.get("name", None), - provisioned_model_units=d.get("provisioned_model_units", None), - scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), - state=_from_dict(d, "state", ServedModelState), - workload_size=d.get("workload_size", None), - workload_type=_enum(d, "workload_type", ServingModelWorkloadType), - ) + return cls(creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), environment_vars=d.get('environment_vars', None), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), model_name=d.get('model_name', None), model_version=d.get('model_version', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), state=_from_dict(d, 'state', ServedModelState), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) + + @dataclass class ServedModelSpec: model_name: Optional[str] = None """Only one of model_name and entity_name should be populated""" - + model_version: Optional[str] = None """Only one of model_version and entity_version should be populated""" - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServedModelSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version is not None: - body["model_version"] = self.model_version - if self.name is not None: - body["name"] = self.name + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = 
self.model_version + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the ServedModelSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_name is not None: - body["model_name"] = self.model_name - if self.model_version is not None: - body["model_version"] = self.model_version - if self.name is not None: - body["name"] = self.name + if self.model_name is not None: body['model_name'] = self.model_name + if self.model_version is not None: body['model_version'] = self.model_version + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelSpec: """Deserializes the ServedModelSpec from a dictionary.""" - return cls( - model_name=d.get("model_name", None), model_version=d.get("model_version", None), name=d.get("name", None) - ) + return cls(model_name=d.get('model_name', None), model_version=d.get('model_version', None), name=d.get('name', None)) + + @dataclass class ServedModelState: deployment: Optional[ServedModelStateDeployment] = None - + deployment_state_message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServedModelState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.deployment is not None: - body["deployment"] = self.deployment.value - if self.deployment_state_message is not None: - body["deployment_state_message"] = self.deployment_state_message + if self.deployment is not None: body['deployment'] = self.deployment.value + if self.deployment_state_message is not None: body['deployment_state_message'] = self.deployment_state_message return body def as_shallow_dict(self) -> dict: """Serializes the ServedModelState into a shallow dictionary of its immediate attributes.""" body = {} - if self.deployment is not None: - body["deployment"] = self.deployment - if self.deployment_state_message is not None: - body["deployment_state_message"] = self.deployment_state_message + if self.deployment is not None: body['deployment'] = self.deployment + if self.deployment_state_message is not None: body['deployment_state_message'] = self.deployment_state_message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelState: """Deserializes the ServedModelState from a dictionary.""" - return cls( - deployment=_enum(d, "deployment", ServedModelStateDeployment), - deployment_state_message=d.get("deployment_state_message", None), - ) - + return cls(deployment=_enum(d, 'deployment', ServedModelStateDeployment), deployment_state_message=d.get('deployment_state_message', None)) + -class ServedModelStateDeployment(Enum): - ABORTED = "DEPLOYMENT_ABORTED" - CREATING = "DEPLOYMENT_CREATING" - FAILED = "DEPLOYMENT_FAILED" - READY = "DEPLOYMENT_READY" - RECOVERING = "DEPLOYMENT_RECOVERING" +class ServedModelStateDeployment(Enum): + + + ABORTED = 'DEPLOYMENT_ABORTED' + CREATING = 'DEPLOYMENT_CREATING' + FAILED = 'DEPLOYMENT_FAILED' + READY = 'DEPLOYMENT_READY' + RECOVERING = 'DEPLOYMENT_RECOVERING' @dataclass class ServerLogsResponse: logs: str """The most recent log lines of the model server processing invocation requests.""" - + def as_dict(self) -> dict: """Serializes the ServerLogsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.logs is not None: - body["logs"] = self.logs + if self.logs is not None: body['logs'] = self.logs return body def as_shallow_dict(self) -> dict: """Serializes the ServerLogsResponse into a shallow 
dictionary of its immediate attributes.""" body = {} - if self.logs is not None: - body["logs"] = self.logs + if self.logs is not None: body['logs'] = self.logs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServerLogsResponse: """Deserializes the ServerLogsResponse from a dictionary.""" - return cls(logs=d.get("logs", None)) + return cls(logs=d.get('logs', None)) + + @dataclass @@ -3714,217 +3108,160 @@ class ServingEndpoint: """The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.""" - + budget_policy_id: Optional[str] = None """The budget policy associated with the endpoint.""" - + config: Optional[EndpointCoreConfigSummary] = None """The config that is currently being served by the endpoint.""" - + creation_timestamp: Optional[int] = None """The timestamp when the endpoint was created in Unix time.""" - + creator: Optional[str] = None """The email of the user who created the serving endpoint.""" - + id: Optional[str] = None """System-generated ID of the endpoint, included to be used by the Permissions API.""" - + last_updated_timestamp: Optional[int] = None """The timestamp when the endpoint was last updated by a user in Unix time.""" - + name: Optional[str] = None """The name of the serving endpoint.""" - + state: Optional[EndpointState] = None """Information corresponding to the state of the serving endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags attached to the serving endpoint.""" - + task: Optional[str] = None """The task type of the serving endpoint.""" - + def as_dict(self) -> dict: """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.state: - body["state"] = self.state.as_dict() - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.task is not None: - body["task"] = self.task + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config.as_dict() + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.state: body['state'] = self.state.as_dict() + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.task is not None: body['task'] = self.task return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if 
self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.state: - body["state"] = self.state - if self.tags: - body["tags"] = self.tags - if self.task is not None: - body["task"] = self.task + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.state: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.task is not None: body['task'] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpoint: """Deserializes the ServingEndpoint from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", EndpointCoreConfigSummary), - creation_timestamp=d.get("creation_timestamp", None), - creator=d.get("creator", None), - id=d.get("id", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - name=d.get("name", None), - state=_from_dict(d, "state", EndpointState), - tags=_repeated_dict(d, "tags", EndpointTag), - task=d.get("task", None), - ) + return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', EndpointCoreConfigSummary), creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), state=_from_dict(d, 'state', EndpointState), tags=_repeated_dict(d, 'tags', EndpointTag), task=d.get('task', None)) + + @dataclass class ServingEndpointAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[ServingEndpointPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if 
self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointAccessControlRequest: """Deserializes the ServingEndpointAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class ServingEndpointAccessControlResponse: all_permissions: Optional[List[ServingEndpointPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is 
not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointAccessControlResponse: """Deserializes the ServingEndpointAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", ServingEndpointPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', ServingEndpointPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass @@ -3933,442 +3270,353 @@ class ServingEndpointDetailed: """The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.""" - + budget_policy_id: Optional[str] = None """The budget policy associated with the endpoint.""" - + config: Optional[EndpointCoreConfigOutput] = None """The config that is currently being served by the endpoint.""" - + creation_timestamp: Optional[int] = None """The timestamp when the endpoint was created in Unix time.""" - + creator: Optional[str] = None """The email of the user who created the serving endpoint.""" - + data_plane_info: Optional[ModelDataPlaneInfo] = None """Information required to query DataPlane APIs.""" - + endpoint_url: Optional[str] = None """Endpoint invocation url if route optimization is enabled for endpoint""" - + id: Optional[str] = None """System-generated ID of the endpoint. 
This is used to refer to the endpoint in the Permissions API""" - + last_updated_timestamp: Optional[int] = None """The timestamp when the endpoint was last updated by a user in Unix time.""" - + name: Optional[str] = None """The name of the serving endpoint.""" - + pending_config: Optional[EndpointPendingConfig] = None """The config that the endpoint is attempting to update to.""" - + permission_level: Optional[ServingEndpointDetailedPermissionLevel] = None """The permission level of the principal making the request.""" - + route_optimized: Optional[bool] = None """Boolean representing if route optimization has been enabled for the endpoint""" - + state: Optional[EndpointState] = None """Information corresponding to the state of the serving endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags attached to the serving endpoint.""" - + task: Optional[str] = None """The task type of the serving endpoint.""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointDetailed into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.data_plane_info: - body["data_plane_info"] = self.data_plane_info.as_dict() - if self.endpoint_url is not None: - body["endpoint_url"] = self.endpoint_url - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.pending_config: - body["pending_config"] = self.pending_config.as_dict() - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.state: - body["state"] = self.state.as_dict() - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.task is not None: - body["task"] = self.task + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config.as_dict() + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.data_plane_info: body['data_plane_info'] = self.data_plane_info.as_dict() + if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.pending_config: body['pending_config'] = self.pending_config.as_dict() + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.route_optimized is not None: body['route_optimized'] = self.route_optimized + if self.state: body['state'] = self.state.as_dict() + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.task is not None: body['task'] = self.task return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointDetailed into a 
shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.data_plane_info: - body["data_plane_info"] = self.data_plane_info - if self.endpoint_url is not None: - body["endpoint_url"] = self.endpoint_url - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.name is not None: - body["name"] = self.name - if self.pending_config: - body["pending_config"] = self.pending_config - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.state: - body["state"] = self.state - if self.tags: - body["tags"] = self.tags - if self.task is not None: - body["task"] = self.task + if self.ai_gateway: body['ai_gateway'] = self.ai_gateway + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.config: body['config'] = self.config + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.data_plane_info: body['data_plane_info'] = self.data_plane_info + if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.name is not None: body['name'] = self.name + if self.pending_config: body['pending_config'] = self.pending_config + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.route_optimized is not None: body['route_optimized'] = self.route_optimized + if self.state: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.task is not None: body['task'] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointDetailed: """Deserializes the ServingEndpointDetailed from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", EndpointCoreConfigOutput), - creation_timestamp=d.get("creation_timestamp", None), - creator=d.get("creator", None), - data_plane_info=_from_dict(d, "data_plane_info", ModelDataPlaneInfo), - endpoint_url=d.get("endpoint_url", None), - id=d.get("id", None), - last_updated_timestamp=d.get("last_updated_timestamp", None), - name=d.get("name", None), - pending_config=_from_dict(d, "pending_config", EndpointPendingConfig), - permission_level=_enum(d, "permission_level", ServingEndpointDetailedPermissionLevel), - route_optimized=d.get("route_optimized", None), - state=_from_dict(d, "state", EndpointState), - tags=_repeated_dict(d, "tags", EndpointTag), - task=d.get("task", None), - ) - + return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', EndpointCoreConfigOutput), creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), 
data_plane_info=_from_dict(d, 'data_plane_info', ModelDataPlaneInfo), endpoint_url=d.get('endpoint_url', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), pending_config=_from_dict(d, 'pending_config', EndpointPendingConfig), permission_level=_enum(d, 'permission_level', ServingEndpointDetailedPermissionLevel), route_optimized=d.get('route_optimized', None), state=_from_dict(d, 'state', EndpointState), tags=_repeated_dict(d, 'tags', EndpointTag), task=d.get('task', None)) + -class ServingEndpointDetailedPermissionLevel(Enum): - CAN_MANAGE = "CAN_MANAGE" - CAN_QUERY = "CAN_QUERY" - CAN_VIEW = "CAN_VIEW" +class ServingEndpointDetailedPermissionLevel(Enum): + + + CAN_MANAGE = 'CAN_MANAGE' + CAN_QUERY = 'CAN_QUERY' + CAN_VIEW = 'CAN_VIEW' @dataclass class ServingEndpointPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[ServingEndpointPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermission: """Deserializes the ServingEndpointPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel)) + + class ServingEndpointPermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = "CAN_MANAGE" - CAN_QUERY = "CAN_QUERY" - CAN_VIEW = "CAN_VIEW" - + + CAN_MANAGE = 'CAN_MANAGE' + CAN_QUERY = 'CAN_QUERY' + CAN_VIEW = 'CAN_VIEW' @dataclass class ServingEndpointPermissions: access_control_list: Optional[List[ServingEndpointAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServingEndpointPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - 
body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissions: """Deserializes the ServingEndpointPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', ServingEndpointAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class ServingEndpointPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[ServingEndpointPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissionsDescription: """Deserializes the ServingEndpointPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel)) + + @dataclass class ServingEndpointPermissionsRequest: access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None - + serving_endpoint_id: Optional[str] = None """The serving endpoint for which to get or manage permissions.""" - + def 
as_dict(self) -> dict: """Serializes the ServingEndpointPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.serving_endpoint_id is not None: - body["serving_endpoint_id"] = self.serving_endpoint_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.serving_endpoint_id is not None: - body["serving_endpoint_id"] = self.serving_endpoint_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissionsRequest: """Deserializes the ServingEndpointPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlRequest), - serving_endpoint_id=d.get("serving_endpoint_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', ServingEndpointAccessControlRequest), serving_endpoint_id=d.get('serving_endpoint_id', None)) + + class ServingModelWorkloadType(Enum): """Please keep this in sync with with workload types in InferenceEndpointEntities.scala""" - - CPU = "CPU" - GPU_LARGE = "GPU_LARGE" - GPU_MEDIUM = "GPU_MEDIUM" - GPU_SMALL = "GPU_SMALL" - MULTIGPU_MEDIUM = "MULTIGPU_MEDIUM" - + + CPU = 'CPU' + GPU_LARGE = 'GPU_LARGE' + GPU_MEDIUM = 'GPU_MEDIUM' + GPU_SMALL = 'GPU_SMALL' + MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM' @dataclass class TrafficConfig: routes: Optional[List[Route]] = None """The list of routes that define traffic to each served entity.""" - + def as_dict(self) -> dict: """Serializes the TrafficConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.routes: - body["routes"] = [v.as_dict() for v in self.routes] + if self.routes: body['routes'] = [v.as_dict() for v in self.routes] return body def as_shallow_dict(self) -> dict: """Serializes the TrafficConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.routes: - body["routes"] = self.routes + if self.routes: body['routes'] = self.routes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TrafficConfig: """Deserializes the TrafficConfig from a dictionary.""" - return cls(routes=_repeated_dict(d, "routes", Route)) + return cls(routes=_repeated_dict(d, 'routes', Route)) + + @dataclass class UpdateProvisionedThroughputEndpointConfigRequest: config: PtEndpointCoreConfig - + name: Optional[str] = None """The name of the pt endpoint to update. 
This field is required.""" - + def as_dict(self) -> dict: """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name + if self.config: body['config'] = self.config.as_dict() + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name + if self.config: body['config'] = self.config + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProvisionedThroughputEndpointConfigRequest: """Deserializes the UpdateProvisionedThroughputEndpointConfigRequest from a dictionary.""" - return cls(config=_from_dict(d, "config", PtEndpointCoreConfig), name=d.get("name", None)) + return cls(config=_from_dict(d, 'config', PtEndpointCoreConfig), name=d.get('name', None)) + + @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None """The finish reason returned by the endpoint.""" - + index: Optional[int] = None """The index of the choice in the __chat or completions__ response.""" - + logprobs: Optional[int] = None """The logprobs returned only by the __completions__ endpoint.""" - + message: Optional[ChatMessage] = None """The message response from the __chat__ endpoint.""" - + text: Optional[str] = None """The text response from the __completions__ endpoint.""" - + def as_dict(self) -> dict: """Serializes the V1ResponseChoiceElement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.finish_reason is not None: - body["finishReason"] = self.finish_reason - if self.index is not None: - body["index"] = self.index - if self.logprobs is not None: - body["logprobs"] = self.logprobs - if self.message: - body["message"] = self.message.as_dict() - if self.text is not None: - body["text"] = self.text + if self.finish_reason is not None: body['finishReason'] = self.finish_reason + if self.index is not None: body['index'] = self.index + if self.logprobs is not None: body['logprobs'] = self.logprobs + if self.message: body['message'] = self.message.as_dict() + if self.text is not None: body['text'] = self.text return body def as_shallow_dict(self) -> dict: """Serializes the V1ResponseChoiceElement into a shallow dictionary of its immediate attributes.""" body = {} - if self.finish_reason is not None: - body["finishReason"] = self.finish_reason - if self.index is not None: - body["index"] = self.index - if self.logprobs is not None: - body["logprobs"] = self.logprobs - if self.message: - body["message"] = self.message - if self.text is not None: - body["text"] = self.text + if self.finish_reason is not None: body['finishReason'] = self.finish_reason + if self.index is not None: body['index'] = self.index + if self.logprobs is not None: body['logprobs'] = self.logprobs + if self.message: body['message'] = self.message + if self.text is not None: body['text'] = self.text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> V1ResponseChoiceElement: """Deserializes the V1ResponseChoiceElement from a dictionary.""" - return cls( - finish_reason=d.get("finishReason", None), - index=d.get("index", None), - logprobs=d.get("logprobs", 
None), - message=_from_dict(d, "message", ChatMessage), - text=d.get("text", None), - ) + return cls(finish_reason=d.get('finishReason', None), index=d.get('index', None), logprobs=d.get('logprobs', None), message=_from_dict(d, 'message', ChatMessage), text=d.get('text', None)) + + + + class ServingEndpointsAPI: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints. - + You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog. Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means the endpoints and associated compute resources are fully managed by Databricks and will not appear in your @@ -4377,80 +3625,79 @@ class ServingEndpointsAPI: configure traffic settings to define how requests should be routed to your served entities behind an endpoint. Additionally, you can configure the scale of resources that should be applied to each served entity.""" - + def __init__(self, api_client): self._api = api_client + - def wait_get_serving_endpoint_not_updating( - self, - name: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[ServingEndpointDetailed], None]] = None, - ) -> ServingEndpointDetailed: - deadline = time.time() + timeout.total_seconds() - target_states = (EndpointStateConfigUpdate.NOT_UPDATING,) - failure_states = ( - EndpointStateConfigUpdate.UPDATE_FAILED, - EndpointStateConfigUpdate.UPDATE_CANCELED, - ) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.state.config_update - status_message = f"current status: {status}" - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach NOT_UPDATING, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse: - """Get build logs for a served model. + - Retrieves the build logs associated with the provided served model. + + def wait_get_serving_endpoint_not_updating(self, name: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[ServingEndpointDetailed], None]] = None) -> ServingEndpointDetailed: + deadline = time.time() + timeout.total_seconds() + target_states = (EndpointStateConfigUpdate.NOT_UPDATING, ) + failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, EndpointStateConfigUpdate.UPDATE_CANCELED, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.state.config_update + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach NOT_UPDATING, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + def build_logs(self + , name: str, served_model_name: str + ) -> BuildLogsResponse: + """Get build logs for a served model. + + Retrieves the build logs associated with the provided served model. + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. :param served_model_name: str The name of the served model that build logs will be retrieved for. This field is required. - + :returns: :class:`BuildLogsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/build-logs", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/build-logs' + + , headers=headers + ) return BuildLogsResponse.from_dict(res) - def create( - self, - name: str, - *, - ai_gateway: Optional[AiGatewayConfig] = None, - budget_policy_id: Optional[str] = None, - config: Optional[EndpointCoreConfigInput] = None, - rate_limits: Optional[List[RateLimit]] = None, - route_optimized: Optional[bool] = None, - tags: Optional[List[EndpointTag]] = None, - ) -> Wait[ServingEndpointDetailed]: - """Create a new serving endpoint. + + + + def create(self + , name: str + , * + , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, config: Optional[EndpointCoreConfigInput] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]: + """Create a new serving endpoint. + :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -4469,71 +3716,46 @@ def create( Enable route optimization for the serving endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. 
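Example (an illustrative sketch only; the endpoint name and the Unity Catalog model below are hypothetical placeholders):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import EndpointCoreConfigInput, ServedEntityInput

w = WorkspaceClient()
# create() returns a Wait; create_and_wait() blocks until the endpoint
# reaches the NOT_UPDATING state or the update fails.
endpoint = w.serving_endpoints.create_and_wait(
    name="my-endpoint",  # hypothetical endpoint name
    config=EndpointCoreConfigInput(
        served_entities=[
            ServedEntityInput(
                entity_name="main.default.my_model",  # hypothetical UC model
                entity_version="1",
                workload_size="Small",
                scale_to_zero_enabled=True,
            )
        ]
    ),
)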
""" body = {} - if ai_gateway is not None: - body["ai_gateway"] = ai_gateway.as_dict() - if budget_policy_id is not None: - body["budget_policy_id"] = budget_policy_id - if config is not None: - body["config"] = config.as_dict() - if name is not None: - body["name"] = name - if rate_limits is not None: - body["rate_limits"] = [v.as_dict() for v in rate_limits] - if route_optimized is not None: - body["route_optimized"] = route_optimized - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.0/serving-endpoints", body=body, headers=headers) - return Wait( - self.wait_get_serving_endpoint_not_updating, - response=ServingEndpointDetailed.from_dict(op_response), - name=op_response["name"], - ) - - def create_and_wait( - self, - name: str, - *, - ai_gateway: Optional[AiGatewayConfig] = None, - budget_policy_id: Optional[str] = None, - config: Optional[EndpointCoreConfigInput] = None, - rate_limits: Optional[List[RateLimit]] = None, - route_optimized: Optional[bool] = None, - tags: Optional[List[EndpointTag]] = None, - timeout=timedelta(minutes=20), - ) -> ServingEndpointDetailed: - return self.create( - ai_gateway=ai_gateway, - budget_policy_id=budget_policy_id, - config=config, - name=name, - rate_limits=rate_limits, - route_optimized=route_optimized, - tags=tags, - ).result(timeout=timeout) - - def create_provisioned_throughput_endpoint( - self, - name: str, - config: PtEndpointCoreConfig, - *, - ai_gateway: Optional[AiGatewayConfig] = None, - budget_policy_id: Optional[str] = None, - tags: Optional[List[EndpointTag]] = None, - ) -> Wait[ServingEndpointDetailed]: - """Create a new PT serving endpoint. + if ai_gateway is not None: body['ai_gateway'] = ai_gateway.as_dict() + if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id + if config is not None: body['config'] = config.as_dict() + if name is not None: body['name'] = name + if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] + if route_optimized is not None: body['route_optimized'] = route_optimized + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.0/serving-endpoints', body=body + + , headers=headers + ) + return Wait(self.wait_get_serving_endpoint_not_updating + , response = ServingEndpointDetailed.from_dict(op_response) + , name=op_response['name']) + + + def create_and_wait(self + , name: str + , * + , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, config: Optional[EndpointCoreConfigInput] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None, + timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: + return self.create(ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, name=name, rate_limits=rate_limits, route_optimized=route_optimized, tags=tags).result(timeout=timeout) + + + + def create_provisioned_throughput_endpoint(self + , name: str, config: PtEndpointCoreConfig + , * + , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]: + """Create a new PT serving endpoint. + :param name: str The name of the serving endpoint. 
This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -4545,167 +3767,194 @@ def create_provisioned_throughput_endpoint( The budget policy associated with the endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. """ body = {} - if ai_gateway is not None: - body["ai_gateway"] = ai_gateway.as_dict() - if budget_policy_id is not None: - body["budget_policy_id"] = budget_policy_id - if config is not None: - body["config"] = config.as_dict() - if name is not None: - body["name"] = name - if tags is not None: - body["tags"] = [v.as_dict() for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.0/serving-endpoints/pt", body=body, headers=headers) - return Wait( - self.wait_get_serving_endpoint_not_updating, - response=ServingEndpointDetailed.from_dict(op_response), - name=op_response["name"], - ) - - def create_provisioned_throughput_endpoint_and_wait( - self, - name: str, - config: PtEndpointCoreConfig, - *, - ai_gateway: Optional[AiGatewayConfig] = None, - budget_policy_id: Optional[str] = None, - tags: Optional[List[EndpointTag]] = None, - timeout=timedelta(minutes=20), - ) -> ServingEndpointDetailed: - return self.create_provisioned_throughput_endpoint( - ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, name=name, tags=tags - ).result(timeout=timeout) - - def delete(self, name: str): - """Delete a serving endpoint. - - :param name: str + if ai_gateway is not None: body['ai_gateway'] = ai_gateway.as_dict() + if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id + if config is not None: body['config'] = config.as_dict() + if name is not None: body['name'] = name + if tags is not None: body['tags'] = [v.as_dict() for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.0/serving-endpoints/pt', body=body + + , headers=headers + ) + return Wait(self.wait_get_serving_endpoint_not_updating + , response = ServingEndpointDetailed.from_dict(op_response) + , name=op_response['name']) + + def create_provisioned_throughput_endpoint_and_wait(self + , name: str, config: PtEndpointCoreConfig + , * + , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, tags: Optional[List[EndpointTag]] = None, + timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: + return self.create_provisioned_throughput_endpoint(ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, name=name, tags=tags).result(timeout=timeout) + + + + def delete(self + , name: str + ): + """Delete a serving endpoint. + + :param name: str + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/serving-endpoints/{name}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/serving-endpoints/{name}", headers=headers) + + + - def export_metrics(self, name: str) -> ExportMetricsResponse: + def export_metrics(self + , name: str + ) -> ExportMetricsResponse: """Get metrics of a serving endpoint. - + Retrieves the metrics associated with the provided serving endpoint in either Prometheus or OpenMetrics exposition format. 
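For example, a minimal sketch (the endpoint name is hypothetical, and it assumes the raw payload is exposed via the response's contents stream):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
metrics = w.serving_endpoints.export_metrics(name="my-endpoint")  # hypothetical name
# contents wraps the raw Prometheus/OpenMetrics text returned by the API
print(metrics.contents.read().decode("utf-8"))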
- + :param name: str The name of the serving endpoint to retrieve metrics for. This field is required. - + :returns: :class:`ExportMetricsResponse` """ - - headers = { - "Accept": "text/plain", - } - - res = self._api.do("GET", f"/api/2.0/serving-endpoints/{name}/metrics", headers=headers, raw=True) + + headers = {'Accept': 'text/plain',} + + res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/metrics' + + , headers=headers + , raw=True) return ExportMetricsResponse.from_dict(res) - def get(self, name: str) -> ServingEndpointDetailed: - """Get a single serving endpoint. + + + + def get(self + , name: str + ) -> ServingEndpointDetailed: + """Get a single serving endpoint. + Retrieves the details for a single serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. - + :returns: :class:`ServingEndpointDetailed` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/serving-endpoints/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}' + + , headers=headers + ) return ServingEndpointDetailed.from_dict(res) - def get_open_api(self, name: str) -> GetOpenApiResponse: - """Get the schema for a serving endpoint. + + + + def get_open_api(self + , name: str + ) -> GetOpenApiResponse: + """Get the schema for a serving endpoint. + Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for the supported paths, input and output formats, and data types. - + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. - + :returns: :class:`GetOpenApiResponse` """ - - headers = { - "Accept": "text/plain", - } - - res = self._api.do("GET", f"/api/2.0/serving-endpoints/{name}/openapi", headers=headers, raw=True) + + headers = {'Accept': 'text/plain',} + + res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/openapi' + + , headers=headers + , raw=True) return GetOpenApiResponse.from_dict(res) - def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse: - """Get serving endpoint permission levels. + + + + def get_permission_levels(self + , serving_endpoint_id: str + ) -> GetServingEndpointPermissionLevelsResponse: + """Get serving endpoint permission levels. + Gets the permission levels that a user can have on an object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. - + :returns: :class:`GetServingEndpointPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}/permissionLevels", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}/permissionLevels' + + , headers=headers + ) return GetServingEndpointPermissionLevelsResponse.from_dict(res) - def get_permissions(self, serving_endpoint_id: str) -> ServingEndpointPermissions: - """Get serving endpoint permissions. + + + + def get_permissions(self + , serving_endpoint_id: str + ) -> ServingEndpointPermissions: + """Get serving endpoint permissions. + Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions.
- + :returns: :class:`ServingEndpointPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}' + + , headers=headers + ) return ServingEndpointPermissions.from_dict(res) - def http_request( - self, - connection_name: str, - method: ExternalFunctionRequestHttpMethod, - path: str, - *, - headers: Optional[str] = None, - json: Optional[str] = None, - params: Optional[str] = None, - ) -> HttpRequestResponse: - """Make external service calls using the credentials stored in a UC Connection. + + + + def http_request(self + , connection_name: str, method: ExternalFunctionRequestHttpMethod, path: str + , * + , headers: Optional[str] = None, json: Optional[str] = None, params: Optional[str] = None) -> HttpRequestResponse: + """Make external service calls using the credentials stored in a UC Connection. + :param connection_name: str The connection name to use. This is required to identify the external connection. :param method: :class:`ExternalFunctionRequestHttpMethod` @@ -4719,133 +3968,144 @@ def http_request( The JSON payload to send in the request body. :param params: str (optional) Query parameters for the request.
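Example (an illustrative sketch; the connection name, path, and payload are hypothetical):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()
resp = w.serving_endpoints.http_request(
    connection_name="my_connection",  # hypothetical UC connection
    method=ExternalFunctionRequestHttpMethod.POST,
    path="/v1/chat",  # hypothetical path on the external service
    json='{"message": "hello"}',
    headers='{"Content-Type": "application/json"}',
)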
- + :returns: :class:`ServerLogsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/logs", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/logs' + + , headers=headers + ) return ServerLogsResponse.from_dict(res) - def patch( - self, name: str, *, add_tags: Optional[List[EndpointTag]] = None, delete_tags: Optional[List[str]] = None - ) -> EndpointTags: - """Update tags of a serving endpoint. + + + + def patch(self + , name: str + , * + , add_tags: Optional[List[EndpointTag]] = None, delete_tags: Optional[List[str]] = None) -> EndpointTags: + """Update tags of a serving endpoint. + Used to batch add and delete tags from a serving endpoint with a single API call. - + :param name: str The name of the serving endpoint whose tags to patch. This field is required. :param add_tags: List[:class:`EndpointTag`] (optional) List of endpoint tags to add :param delete_tags: List[str] (optional) List of tag keys to delete - + :returns: :class:`EndpointTags` """ body = {} - if add_tags is not None: - body["add_tags"] = [v.as_dict() for v in add_tags] - if delete_tags is not None: - body["delete_tags"] = [v for v in delete_tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/serving-endpoints/{name}/tags", body=body, headers=headers) + if add_tags is not None: body['add_tags'] = [v.as_dict() for v in add_tags] + if delete_tags is not None: body['delete_tags'] = [v for v in delete_tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/serving-endpoints/{name}/tags', body=body + + , headers=headers + ) return EndpointTags.from_dict(res) - def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse: - """Update rate limits of a serving endpoint. + + + + def put(self + , name: str + , * + , rate_limits: Optional[List[RateLimit]] = None) -> PutResponse: + """Update rate limits of a serving endpoint. + Deprecated: Please use AI Gateway to manage rate limits instead. - + :param name: str The name of the serving endpoint whose rate limits are being updated. This field is required. :param rate_limits: List[:class:`RateLimit`] (optional) The list of endpoint rate limits.
- + :returns: :class:`PutResponse` """ body = {} - if rate_limits is not None: - body["rate_limits"] = [v.as_dict() for v in rate_limits] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/serving-endpoints/{name}/rate-limits", body=body, headers=headers) + if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/serving-endpoints/{name}/rate-limits', body=body + + , headers=headers + ) return PutResponse.from_dict(res) - def put_ai_gateway( - self, - name: str, - *, - fallback_config: Optional[FallbackConfig] = None, - guardrails: Optional[AiGatewayGuardrails] = None, - inference_table_config: Optional[AiGatewayInferenceTableConfig] = None, - rate_limits: Optional[List[AiGatewayRateLimit]] = None, - usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None, - ) -> PutAiGatewayResponse: - """Update AI Gateway of a serving endpoint. + + + + def put_ai_gateway(self + , name: str + , * + , fallback_config: Optional[FallbackConfig] = None, guardrails: Optional[AiGatewayGuardrails] = None, inference_table_config: Optional[AiGatewayInferenceTableConfig] = None, rate_limits: Optional[List[AiGatewayRateLimit]] = None, usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse: + """Update AI Gateway of a serving endpoint. + Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables. - + :param name: str The name of the serving endpoint whose AI Gateway is being updated. This field is required. :param fallback_config: :class:`FallbackConfig` (optional) @@ -4861,48 +4121,33 @@ def put_ai_gateway( :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional) Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs. 
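Example (an illustrative sketch; the endpoint name and limit values are hypothetical):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (AiGatewayRateLimit,
                                            AiGatewayRateLimitKey,
                                            AiGatewayRateLimitRenewalPeriod,
                                            AiGatewayUsageTrackingConfig)

w = WorkspaceClient()
w.serving_endpoints.put_ai_gateway(
    name="my-endpoint",  # hypothetical endpoint name
    # allow each user 100 calls per minute against this endpoint
    rate_limits=[AiGatewayRateLimit(
        calls=100,
        key=AiGatewayRateLimitKey.USER,
        renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
    )],
    # record usage in system tables for cost monitoring
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
)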
- + :returns: :class:`PutAiGatewayResponse` """ body = {} - if fallback_config is not None: - body["fallback_config"] = fallback_config.as_dict() - if guardrails is not None: - body["guardrails"] = guardrails.as_dict() - if inference_table_config is not None: - body["inference_table_config"] = inference_table_config.as_dict() - if rate_limits is not None: - body["rate_limits"] = [v.as_dict() for v in rate_limits] - if usage_tracking_config is not None: - body["usage_tracking_config"] = usage_tracking_config.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/serving-endpoints/{name}/ai-gateway", body=body, headers=headers) + if fallback_config is not None: body['fallback_config'] = fallback_config.as_dict() + if guardrails is not None: body['guardrails'] = guardrails.as_dict() + if inference_table_config is not None: body['inference_table_config'] = inference_table_config.as_dict() + if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] + if usage_tracking_config is not None: body['usage_tracking_config'] = usage_tracking_config.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/serving-endpoints/{name}/ai-gateway', body=body + + , headers=headers + ) return PutAiGatewayResponse.from_dict(res) - def query( - self, - name: str, - *, - dataframe_records: Optional[List[Any]] = None, - dataframe_split: Optional[DataframeSplitInput] = None, - extra_params: Optional[Dict[str, str]] = None, - input: Optional[Any] = None, - inputs: Optional[Any] = None, - instances: Optional[List[Any]] = None, - max_tokens: Optional[int] = None, - messages: Optional[List[ChatMessage]] = None, - n: Optional[int] = None, - prompt: Optional[Any] = None, - stop: Optional[List[str]] = None, - stream: Optional[bool] = None, - temperature: Optional[float] = None, - ) -> QueryEndpointResponse: - """Query a serving endpoint. + + + + def query(self + , name: str + , * + , dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str,str]] = None, input: Optional[Any] = None, inputs: Optional[Any] = None, instances: Optional[List[Any]] = None, max_tokens: Optional[int] = None, messages: Optional[List[ChatMessage]] = None, n: Optional[int] = None, prompt: Optional[Any] = None, stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None) -> QueryEndpointResponse: + """Query a serving endpoint. + :param name: str The name of the serving endpoint. This field is required. :param dataframe_records: List[Any] (optional) @@ -4946,97 +4191,74 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. 
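Example of a chat-style query (an illustrative sketch; the endpoint name is hypothetical, and completions or custom-model endpoints would use prompt/inputs instead of messages):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

w = WorkspaceClient()
response = w.serving_endpoints.query(
    name="my-chat-endpoint",  # hypothetical endpoint name
    messages=[ChatMessage(role=ChatMessageRole.USER, content="Hello!")],
    max_tokens=128,
)
print(response.choices[0].message.content)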
- + :returns: :class:`QueryEndpointResponse` """ body = {} - if dataframe_records is not None: - body["dataframe_records"] = [v for v in dataframe_records] - if dataframe_split is not None: - body["dataframe_split"] = dataframe_split.as_dict() - if extra_params is not None: - body["extra_params"] = extra_params - if input is not None: - body["input"] = input - if inputs is not None: - body["inputs"] = inputs - if instances is not None: - body["instances"] = [v for v in instances] - if max_tokens is not None: - body["max_tokens"] = max_tokens - if messages is not None: - body["messages"] = [v.as_dict() for v in messages] - if n is not None: - body["n"] = n - if prompt is not None: - body["prompt"] = prompt - if stop is not None: - body["stop"] = [v for v in stop] - if stream is not None: - body["stream"] = stream - if temperature is not None: - body["temperature"] = temperature - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - response_headers = [ - "served-model-name", - ] - res = self._api.do( - "POST", - f"/serving-endpoints/{name}/invocations", - body=body, - headers=headers, - response_headers=response_headers, - ) + if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records] + if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict() + if extra_params is not None: body['extra_params'] = extra_params + if input is not None: body['input'] = input + if inputs is not None: body['inputs'] = inputs + if instances is not None: body['instances'] = [v for v in instances] + if max_tokens is not None: body['max_tokens'] = max_tokens + if messages is not None: body['messages'] = [v.as_dict() for v in messages] + if n is not None: body['n'] = n + if prompt is not None: body['prompt'] = prompt + if stop is not None: body['stop'] = [v for v in stop] + if stream is not None: body['stream'] = stream + if temperature is not None: body['temperature'] = temperature + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + response_headers = ['served-model-name',] + res = self._api.do('POST',f'/serving-endpoints/{name}/invocations', body=body + + , headers=headers + , response_headers=response_headers) return QueryEndpointResponse.from_dict(res) - def set_permissions( - self, - serving_endpoint_id: str, - *, - access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None, - ) -> ServingEndpointPermissions: - """Set serving endpoint permissions. + + + + def set_permissions(self + , serving_endpoint_id: str + , * + , access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None) -> ServingEndpointPermissions: + """Set serving endpoint permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. 
:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}', body=body + + , headers=headers + ) return ServingEndpointPermissions.from_dict(res) - def update_config( - self, - name: str, - *, - auto_capture_config: Optional[AutoCaptureConfigInput] = None, - served_entities: Optional[List[ServedEntityInput]] = None, - served_models: Optional[List[ServedModelInput]] = None, - traffic_config: Optional[TrafficConfig] = None, - ) -> Wait[ServingEndpointDetailed]: - """Update config of a serving endpoint. + + + + def update_config(self + , name: str + , * + , auto_capture_config: Optional[AutoCaptureConfigInput] = None, served_entities: Optional[List[ServedEntityInput]] = None, served_models: Optional[List[ServedModelInput]] = None, traffic_config: Optional[TrafficConfig] = None) -> Wait[ServingEndpointDetailed]: + """Update config of a serving endpoint. + Updates any combination of the serving endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. An endpoint that already has an update in progress cannot be updated until the current update completes or fails. - + :param name: str The name of the serving endpoint to update. This field is required. :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional) @@ -5051,168 +4273,147 @@ def update_config( config. :param traffic_config: :class:`TrafficConfig` (optional) The traffic configuration associated with the serving endpoint config. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details.
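Example (an illustrative sketch; the endpoint and served model names are hypothetical):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import Route, TrafficConfig

w = WorkspaceClient()
# shifts traffic between two already-served models and blocks until the
# endpoint returns to the NOT_UPDATING state
endpoint = w.serving_endpoints.update_config_and_wait(
    name="my-endpoint",  # hypothetical endpoint name
    traffic_config=TrafficConfig(routes=[
        Route(served_model_name="model-a", traffic_percentage=90),
        Route(served_model_name="model-b", traffic_percentage=10),
    ]),
)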
""" body = {} - if auto_capture_config is not None: - body["auto_capture_config"] = auto_capture_config.as_dict() - if served_entities is not None: - body["served_entities"] = [v.as_dict() for v in served_entities] - if served_models is not None: - body["served_models"] = [v.as_dict() for v in served_models] - if traffic_config is not None: - body["traffic_config"] = traffic_config.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("PUT", f"/api/2.0/serving-endpoints/{name}/config", body=body, headers=headers) - return Wait( - self.wait_get_serving_endpoint_not_updating, - response=ServingEndpointDetailed.from_dict(op_response), - name=op_response["name"], - ) - - def update_config_and_wait( - self, - name: str, - *, - auto_capture_config: Optional[AutoCaptureConfigInput] = None, - served_entities: Optional[List[ServedEntityInput]] = None, - served_models: Optional[List[ServedModelInput]] = None, - traffic_config: Optional[TrafficConfig] = None, - timeout=timedelta(minutes=20), - ) -> ServingEndpointDetailed: - return self.update_config( - auto_capture_config=auto_capture_config, - name=name, - served_entities=served_entities, - served_models=served_models, - traffic_config=traffic_config, - ).result(timeout=timeout) - - def update_permissions( - self, - serving_endpoint_id: str, - *, - access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None, - ) -> ServingEndpointPermissions: - """Update serving endpoint permissions. + if auto_capture_config is not None: body['auto_capture_config'] = auto_capture_config.as_dict() + if served_entities is not None: body['served_entities'] = [v.as_dict() for v in served_entities] + if served_models is not None: body['served_models'] = [v.as_dict() for v in served_models] + if traffic_config is not None: body['traffic_config'] = traffic_config.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('PUT',f'/api/2.0/serving-endpoints/{name}/config', body=body + + , headers=headers + ) + return Wait(self.wait_get_serving_endpoint_not_updating + , response = ServingEndpointDetailed.from_dict(op_response) + , name=op_response['name']) + + def update_config_and_wait(self + , name: str + , * + , auto_capture_config: Optional[AutoCaptureConfigInput] = None, served_entities: Optional[List[ServedEntityInput]] = None, served_models: Optional[List[ServedModelInput]] = None, traffic_config: Optional[TrafficConfig] = None, + timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: + return self.update_config(auto_capture_config=auto_capture_config, name=name, served_entities=served_entities, served_models=served_models, traffic_config=traffic_config).result(timeout=timeout) + + + + + def update_permissions(self + , serving_endpoint_id: str + , * + , access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None) -> ServingEndpointPermissions: + """Update serving endpoint permissions. + Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. 
:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}', body=body + + , headers=headers + ) return ServingEndpointPermissions.from_dict(res) - def update_provisioned_throughput_endpoint_config( - self, name: str, config: PtEndpointCoreConfig - ) -> Wait[ServingEndpointDetailed]: - """Update config of a PT serving endpoint. + + + + def update_provisioned_throughput_endpoint_config(self + , name: str, config: PtEndpointCoreConfig + ) -> Wait[ServingEndpointDetailed]: + """Update config of a PT serving endpoint. + Updates any combination of the pt endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. Updates are instantaneous, and the endpoint should be updated instantly. - + :param name: str The name of the pt endpoint to update. This field is required. :param config: :class:`PtEndpointCoreConfig` - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. """ body = {} - if config is not None: - body["config"] = config.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("PUT", f"/api/2.0/serving-endpoints/pt/{name}/config", body=body, headers=headers) - return Wait( - self.wait_get_serving_endpoint_not_updating, - response=ServingEndpointDetailed.from_dict(op_response), - name=op_response["name"], - ) - - def update_provisioned_throughput_endpoint_config_and_wait( - self, name: str, config: PtEndpointCoreConfig, timeout=timedelta(minutes=20) - ) -> ServingEndpointDetailed: - return self.update_provisioned_throughput_endpoint_config(config=config, name=name).result(timeout=timeout) - + if config is not None: body['config'] = config.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('PUT',f'/api/2.0/serving-endpoints/pt/{name}/config', body=body + + , headers=headers + ) + return Wait(self.wait_get_serving_endpoint_not_updating + , response = ServingEndpointDetailed.from_dict(op_response) + , name=op_response['name']) + + def update_provisioned_throughput_endpoint_config_and_wait(self + , name: str, config: PtEndpointCoreConfig + , + timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: + return self.update_provisioned_throughput_endpoint_config(config=config, name=name).result(timeout=timeout) + + class ServingEndpointsDataPlaneAPI: """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" - + def __init__(self, api_client, control_plane_service, dpts): self._api = api_client self._lock = threading.Lock() self._control_plane_service = control_plane_service self._dpts = dpts self._data_plane_details = {} + + + - def
_data_plane_info_query(self, name: str) -> DataPlaneInfo: - key = "query" + "/".join( - [ - str(name), - ] - ) + + + + + def _data_plane_info_query (self + , name: str + ) -> DataPlaneInfo: + key = "query" + "/".join([ + str(name), + ]) with self._lock: if key in self._data_plane_details: return self._data_plane_details[key] response = self._control_plane_service.get( - name=name, - ) + name = name, + ) if response.data_plane_info is None: raise Exception("Resource does not support direct Data Plane access") result = response.data_plane_info.query_info with self._lock: self._data_plane_details[key] = result return result + - def query( - self, - name: str, - *, - dataframe_records: Optional[List[Any]] = None, - dataframe_split: Optional[DataframeSplitInput] = None, - extra_params: Optional[Dict[str, str]] = None, - input: Optional[Any] = None, - inputs: Optional[Any] = None, - instances: Optional[List[Any]] = None, - max_tokens: Optional[int] = None, - messages: Optional[List[ChatMessage]] = None, - n: Optional[int] = None, - prompt: Optional[Any] = None, - stop: Optional[List[str]] = None, - stream: Optional[bool] = None, - temperature: Optional[float] = None, - ) -> QueryEndpointResponse: + def query(self + , name: str + , * + , dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str,str]] = None, input: Optional[Any] = None, inputs: Optional[Any] = None, instances: Optional[List[Any]] = None, max_tokens: Optional[int] = None, messages: Optional[List[ChatMessage]] = None, n: Optional[int] = None, prompt: Optional[Any] = None, stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None) -> QueryEndpointResponse: """Query a serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. :param dataframe_records: List[Any] (optional) @@ -5256,59 +4457,39 @@ def query( The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. 
- + :returns: :class:`QueryEndpointResponse` """ body = {} - if dataframe_records is not None: - body["dataframe_records"] = [v for v in dataframe_records] - if dataframe_split is not None: - body["dataframe_split"] = dataframe_split.as_dict() - if extra_params is not None: - body["extra_params"] = extra_params - if input is not None: - body["input"] = input - if inputs is not None: - body["inputs"] = inputs - if instances is not None: - body["instances"] = [v for v in instances] - if max_tokens is not None: - body["max_tokens"] = max_tokens - if messages is not None: - body["messages"] = [v.as_dict() for v in messages] - if n is not None: - body["n"] = n - if prompt is not None: - body["prompt"] = prompt - if stop is not None: - body["stop"] = [v for v in stop] - if stream is not None: - body["stream"] = stream - if temperature is not None: - body["temperature"] = temperature + if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records] + if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict() + if extra_params is not None: body['extra_params'] = extra_params + if input is not None: body['input'] = input + if inputs is not None: body['inputs'] = inputs + if instances is not None: body['instances'] = [v for v in instances] + if max_tokens is not None: body['max_tokens'] = max_tokens + if messages is not None: body['messages'] = [v.as_dict() for v in messages] + if n is not None: body['n'] = n + if prompt is not None: body['prompt'] = prompt + if stop is not None: body['stop'] = [v for v in stop] + if stream is not None: body['stream'] = stream + if temperature is not None: body['temperature'] = temperature data_plane_info = self._data_plane_info_query( - name=name, - ) + name = name, + ) token = self._dpts.token(data_plane_info.endpoint_url, data_plane_info.authorization_details) def auth(r: requests.PreparedRequest) -> requests.PreparedRequest: authorization = f"{token.token_type} {token.access_token}" r.headers["Authorization"] = authorization return r - - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - response_headers = [ - "served-model-name", - ] - res = self._api.do( - "POST", - url=data_plane_info.endpoint_url, - body=body, - headers=headers, - response_headers=response_headers, - auth=auth, - ) + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + response_headers = ['served-model-name',] + res = self._api.do('POST',url=data_plane_info.endpoint_url, body=body + + , headers=headers + , response_headers=response_headers,auth=auth) return QueryEndpointResponse.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index 70b3bc0a6..f3d019dcf 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -1,24 +1,30 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
from __future__ import annotations - -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AccountIpAccessEnable: acct_ip_acl_enable: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -26,125 +32,107 @@ class AccountIpAccessEnable: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AccountIpAccessEnable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.acct_ip_acl_enable: - body["acct_ip_acl_enable"] = self.acct_ip_acl_enable.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AccountIpAccessEnable into a shallow dictionary of its immediate attributes.""" body = {} - if self.acct_ip_acl_enable: - body["acct_ip_acl_enable"] = self.acct_ip_acl_enable - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountIpAccessEnable: """Deserializes the AccountIpAccessEnable from a dictionary.""" - return cls( - acct_ip_acl_enable=_from_dict(d, "acct_ip_acl_enable", BooleanMessage), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(acct_ip_acl_enable=_from_dict(d, 'acct_ip_acl_enable', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class AccountNetworkPolicy: account_id: Optional[str] = None """The associated account ID for this Network Policy object.""" - + egress: 
Optional[NetworkPolicyEgress] = None """The network policies applying for egress traffic.""" - + network_policy_id: Optional[str] = None """The unique identifier for the network policy.""" - + def as_dict(self) -> dict: """Serializes the AccountNetworkPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.egress: - body["egress"] = self.egress.as_dict() - if self.network_policy_id is not None: - body["network_policy_id"] = self.network_policy_id + if self.account_id is not None: body['account_id'] = self.account_id + if self.egress: body['egress'] = self.egress.as_dict() + if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountNetworkPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.egress: - body["egress"] = self.egress - if self.network_policy_id is not None: - body["network_policy_id"] = self.network_policy_id + if self.account_id is not None: body['account_id'] = self.account_id + if self.egress: body['egress'] = self.egress + if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountNetworkPolicy: """Deserializes the AccountNetworkPolicy from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - egress=_from_dict(d, "egress", NetworkPolicyEgress), - network_policy_id=d.get("network_policy_id", None), - ) + return cls(account_id=d.get('account_id', None), egress=_from_dict(d, 'egress', NetworkPolicyEgress), network_policy_id=d.get('network_policy_id', None)) + + @dataclass class AibiDashboardEmbeddingAccessPolicy: access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_policy_type is not None: - body["access_policy_type"] = self.access_policy_type.value + if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type.value return body def as_shallow_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_policy_type is not None: - body["access_policy_type"] = self.access_policy_type + if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingAccessPolicy: """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary.""" - return cls( - access_policy_type=_enum(d, "access_policy_type", AibiDashboardEmbeddingAccessPolicyAccessPolicyType) - ) - + return cls(access_policy_type=_enum(d, 'access_policy_type', AibiDashboardEmbeddingAccessPolicyAccessPolicyType)) + -class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum): - ALLOW_ALL_DOMAINS = "ALLOW_ALL_DOMAINS" - ALLOW_APPROVED_DOMAINS = "ALLOW_APPROVED_DOMAINS" - DENY_ALL_DOMAINS = "DENY_ALL_DOMAINS" +class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum): + + + ALLOW_ALL_DOMAINS = 'ALLOW_ALL_DOMAINS' + ALLOW_APPROVED_DOMAINS = 'ALLOW_APPROVED_DOMAINS' + DENY_ALL_DOMAINS = 'DENY_ALL_DOMAINS' @dataclass class 
AibiDashboardEmbeddingAccessPolicySetting: aibi_dashboard_embedding_access_policy: AibiDashboardEmbeddingAccessPolicy - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -152,75 +140,65 @@ class AibiDashboardEmbeddingAccessPolicySetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aibi_dashboard_embedding_access_policy: - body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.aibi_dashboard_embedding_access_policy: body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.aibi_dashboard_embedding_access_policy: - body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.aibi_dashboard_embedding_access_policy: body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingAccessPolicySetting: """Deserializes the AibiDashboardEmbeddingAccessPolicySetting from a dictionary.""" - return cls( - aibi_dashboard_embedding_access_policy=_from_dict( - d, "aibi_dashboard_embedding_access_policy", AibiDashboardEmbeddingAccessPolicy - ), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(aibi_dashboard_embedding_access_policy=_from_dict(d, 'aibi_dashboard_embedding_access_policy', AibiDashboardEmbeddingAccessPolicy), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class AibiDashboardEmbeddingApprovedDomains: approved_domains: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body.""" body = {} - if self.approved_domains: - body["approved_domains"] = [v for v in self.approved_domains] + if self.approved_domains: body['approved_domains'] = [v for v in self.approved_domains] return body def as_shallow_dict(self) -> 
dict: """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes.""" body = {} - if self.approved_domains: - body["approved_domains"] = self.approved_domains + if self.approved_domains: body['approved_domains'] = self.approved_domains return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingApprovedDomains: """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary.""" - return cls(approved_domains=d.get("approved_domains", None)) + return cls(approved_domains=d.get('approved_domains', None)) + + @dataclass class AibiDashboardEmbeddingApprovedDomainsSetting: aibi_dashboard_embedding_approved_domains: AibiDashboardEmbeddingApprovedDomains - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -228,51 +206,41 @@ class AibiDashboardEmbeddingApprovedDomainsSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aibi_dashboard_embedding_approved_domains: - body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.aibi_dashboard_embedding_approved_domains: body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.aibi_dashboard_embedding_approved_domains: - body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.aibi_dashboard_embedding_approved_domains: body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingApprovedDomainsSetting: """Deserializes the AibiDashboardEmbeddingApprovedDomainsSetting from a dictionary.""" - return cls( - aibi_dashboard_embedding_approved_domains=_from_dict( - d, "aibi_dashboard_embedding_approved_domains", AibiDashboardEmbeddingApprovedDomains - ), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return 
cls(aibi_dashboard_embedding_approved_domains=_from_dict(d, 'aibi_dashboard_embedding_approved_domains', AibiDashboardEmbeddingApprovedDomains), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class AutomaticClusterUpdateSetting: automatic_cluster_update_workspace: ClusterAutoRestartMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -280,128 +248,104 @@ class AutomaticClusterUpdateSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AutomaticClusterUpdateSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.automatic_cluster_update_workspace: - body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.automatic_cluster_update_workspace: body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AutomaticClusterUpdateSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.automatic_cluster_update_workspace: - body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.automatic_cluster_update_workspace: body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutomaticClusterUpdateSetting: """Deserializes the AutomaticClusterUpdateSetting from a dictionary.""" - return cls( - automatic_cluster_update_workspace=_from_dict( - d, "automatic_cluster_update_workspace", ClusterAutoRestartMessage - ), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(automatic_cluster_update_workspace=_from_dict(d, 'automatic_cluster_update_workspace', ClusterAutoRestartMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class BooleanMessage: value: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the BooleanMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: - body["value"] = self.value + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the BooleanMessage into a shallow 
dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None:
-            body["value"] = self.value
+        if self.value is not None: body['value'] = self.value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> BooleanMessage:
         """Deserializes the BooleanMessage from a dictionary."""
-        return cls(value=d.get("value", None))
+        return cls(value=d.get('value', None))
+
+
 
 
 @dataclass
 class ClusterAutoRestartMessage:
     can_toggle: Optional[bool] = None
-
+
     enabled: Optional[bool] = None
-
+
     enablement_details: Optional[ClusterAutoRestartMessageEnablementDetails] = None
     """Contains information about the enablement status (e.g. whether the enterprise tier is
     enabled). This is only additional information that MUST NOT be used to decide whether the
     setting is enabled or not. This is intended to be used only for purposes like showing an error
     message to the customer with the additional details. For example, using these details we can
     check why exactly the feature is disabled for this customer."""
-
+
     maintenance_window: Optional[ClusterAutoRestartMessageMaintenanceWindow] = None
-
+
     restart_even_if_no_updates_available: Optional[bool] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_toggle is not None:
-            body["can_toggle"] = self.can_toggle
-        if self.enabled is not None:
-            body["enabled"] = self.enabled
-        if self.enablement_details:
-            body["enablement_details"] = self.enablement_details.as_dict()
-        if self.maintenance_window:
-            body["maintenance_window"] = self.maintenance_window.as_dict()
-        if self.restart_even_if_no_updates_available is not None:
-            body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available
+        if self.can_toggle is not None: body['can_toggle'] = self.can_toggle
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enablement_details: body['enablement_details'] = self.enablement_details.as_dict()
+        if self.maintenance_window: body['maintenance_window'] = self.maintenance_window.as_dict()
+        if self.restart_even_if_no_updates_available is not None: body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.can_toggle is not None:
-            body["can_toggle"] = self.can_toggle
-        if self.enabled is not None:
-            body["enabled"] = self.enabled
-        if self.enablement_details:
-            body["enablement_details"] = self.enablement_details
-        if self.maintenance_window:
-            body["maintenance_window"] = self.maintenance_window
-        if self.restart_even_if_no_updates_available is not None:
-            body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available
+        if self.can_toggle is not None: body['can_toggle'] = self.can_toggle
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enablement_details: body['enablement_details'] = self.enablement_details
+        if self.maintenance_window: body['maintenance_window'] = self.maintenance_window
+        if self.restart_even_if_no_updates_available is not None: body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessage:
         """Deserializes the ClusterAutoRestartMessage from a dictionary."""
-        return cls(
-            can_toggle=d.get("can_toggle", None),
-            enabled=d.get("enabled", None),
-            enablement_details=_from_dict(d, "enablement_details", ClusterAutoRestartMessageEnablementDetails),
-            maintenance_window=_from_dict(d, "maintenance_window", ClusterAutoRestartMessageMaintenanceWindow),
-            restart_even_if_no_updates_available=d.get("restart_even_if_no_updates_available", None),
-        )
+        return cls(can_toggle=d.get('can_toggle', None), enabled=d.get('enabled', None), enablement_details=_from_dict(d, 'enablement_details', ClusterAutoRestartMessageEnablementDetails), maintenance_window=_from_dict(d, 'maintenance_window', ClusterAutoRestartMessageMaintenanceWindow), restart_even_if_no_updates_available=d.get('restart_even_if_no_updates_available', None))
+
+
 
 
 @dataclass
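Annotation, not part of the patch: the hunks in this file only reformat these dataclasses, behavior is unchanged. A minimal usage sketch of the maintenance-window types above, assuming the module imports as databricks.sdk.service.settings (the file this patch rewrites):

    from databricks.sdk.service.settings import (
        ClusterAutoRestartMessage,
        ClusterAutoRestartMessageMaintenanceWindow,
        ClusterAutoRestartMessageMaintenanceWindowDayOfWeek,
        ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule,
        ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency,
        ClusterAutoRestartMessageMaintenanceWindowWindowStartTime,
    )

    # Restart every Sunday at 02:00, even when no updates are available.
    msg = ClusterAutoRestartMessage(
        enabled=True,
        restart_even_if_no_updates_available=True,
        maintenance_window=ClusterAutoRestartMessageMaintenanceWindow(
            week_day_based_schedule=ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule(
                day_of_week=ClusterAutoRestartMessageMaintenanceWindowDayOfWeek.SUNDAY,
                frequency=ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency.EVERY_WEEK,
                window_start_time=ClusterAutoRestartMessageMaintenanceWindowWindowStartTime(hours=2, minutes=0),
            )
        ),
    )
    # from_dict(as_dict(...)) is the identity on populated fields.
    assert ClusterAutoRestartMessage.from_dict(msg.as_dict()).maintenance_window is not None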
enabled=d.get("enabled", None), - enablement_details=_from_dict(d, "enablement_details", ClusterAutoRestartMessageEnablementDetails), - maintenance_window=_from_dict(d, "maintenance_window", ClusterAutoRestartMessageMaintenanceWindow), - restart_even_if_no_updates_available=d.get("restart_even_if_no_updates_available", None), - ) + return cls(can_toggle=d.get('can_toggle', None), enabled=d.get('enabled', None), enablement_details=_from_dict(d, 'enablement_details', ClusterAutoRestartMessageEnablementDetails), maintenance_window=_from_dict(d, 'maintenance_window', ClusterAutoRestartMessageMaintenanceWindow), restart_even_if_no_updates_available=d.get('restart_even_if_no_updates_available', None)) + + @dataclass @@ -411,212 +355,183 @@ class ClusterAutoRestartMessageEnablementDetails: is enabled or not. This is intended to use only for purposes like showing an error message to the customer with the additional details. For example, using these details we can check why exactly the feature is disabled for this customer.""" - + forced_for_compliance_mode: Optional[bool] = None """The feature is force enabled if compliance mode is active""" - + unavailable_for_disabled_entitlement: Optional[bool] = None """The feature is unavailable if the corresponding entitlement disabled (see getShieldEntitlementEnable)""" - + unavailable_for_non_enterprise_tier: Optional[bool] = None """The feature is unavailable if the customer doesn't have enterprise tier""" - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageEnablementDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.forced_for_compliance_mode is not None: - body["forced_for_compliance_mode"] = self.forced_for_compliance_mode - if self.unavailable_for_disabled_entitlement is not None: - body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement - if self.unavailable_for_non_enterprise_tier is not None: - body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier + if self.forced_for_compliance_mode is not None: body['forced_for_compliance_mode'] = self.forced_for_compliance_mode + if self.unavailable_for_disabled_entitlement is not None: body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement + if self.unavailable_for_non_enterprise_tier is not None: body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.forced_for_compliance_mode is not None: - body["forced_for_compliance_mode"] = self.forced_for_compliance_mode - if self.unavailable_for_disabled_entitlement is not None: - body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement - if self.unavailable_for_non_enterprise_tier is not None: - body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier + if self.forced_for_compliance_mode is not None: body['forced_for_compliance_mode'] = self.forced_for_compliance_mode + if self.unavailable_for_disabled_entitlement is not None: body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement + if self.unavailable_for_non_enterprise_tier is not None: body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
ClusterAutoRestartMessageEnablementDetails: """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary.""" - return cls( - forced_for_compliance_mode=d.get("forced_for_compliance_mode", None), - unavailable_for_disabled_entitlement=d.get("unavailable_for_disabled_entitlement", None), - unavailable_for_non_enterprise_tier=d.get("unavailable_for_non_enterprise_tier", None), - ) + return cls(forced_for_compliance_mode=d.get('forced_for_compliance_mode', None), unavailable_for_disabled_entitlement=d.get('unavailable_for_disabled_entitlement', None), unavailable_for_non_enterprise_tier=d.get('unavailable_for_non_enterprise_tier', None)) + + @dataclass class ClusterAutoRestartMessageMaintenanceWindow: week_day_based_schedule: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule] = None - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a dictionary suitable for use as a JSON request body.""" body = {} - if self.week_day_based_schedule: - body["week_day_based_schedule"] = self.week_day_based_schedule.as_dict() + if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes.""" body = {} - if self.week_day_based_schedule: - body["week_day_based_schedule"] = self.week_day_based_schedule + if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindow: """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary.""" - return cls( - week_day_based_schedule=_from_dict( - d, "week_day_based_schedule", ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule - ) - ) - + return cls(week_day_based_schedule=_from_dict(d, 'week_day_based_schedule', ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule)) + -class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum): - FRIDAY = "FRIDAY" - MONDAY = "MONDAY" - SATURDAY = "SATURDAY" - SUNDAY = "SUNDAY" - THURSDAY = "THURSDAY" - TUESDAY = "TUESDAY" - WEDNESDAY = "WEDNESDAY" +class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum): + + + FRIDAY = 'FRIDAY' + MONDAY = 'MONDAY' + SATURDAY = 'SATURDAY' + SUNDAY = 'SUNDAY' + THURSDAY = 'THURSDAY' + TUESDAY = 'TUESDAY' + WEDNESDAY = 'WEDNESDAY' @dataclass class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: day_of_week: Optional[ClusterAutoRestartMessageMaintenanceWindowDayOfWeek] = None - + frequency: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency] = None - + window_start_time: Optional[ClusterAutoRestartMessageMaintenanceWindowWindowStartTime] = None - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.day_of_week is not None: - body["day_of_week"] = self.day_of_week.value - if self.frequency is not None: - body["frequency"] = self.frequency.value - if self.window_start_time: - body["window_start_time"] = self.window_start_time.as_dict() + if self.day_of_week is not None: body['day_of_week'] = self.day_of_week.value + if self.frequency is not None: body['frequency'] = self.frequency.value + if self.window_start_time: body['window_start_time'] = 
self.window_start_time.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.day_of_week is not None: - body["day_of_week"] = self.day_of_week - if self.frequency is not None: - body["frequency"] = self.frequency - if self.window_start_time: - body["window_start_time"] = self.window_start_time + if self.day_of_week is not None: body['day_of_week'] = self.day_of_week + if self.frequency is not None: body['frequency'] = self.frequency + if self.window_start_time: body['window_start_time'] = self.window_start_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary.""" - return cls( - day_of_week=_enum(d, "day_of_week", ClusterAutoRestartMessageMaintenanceWindowDayOfWeek), - frequency=_enum(d, "frequency", ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency), - window_start_time=_from_dict( - d, "window_start_time", ClusterAutoRestartMessageMaintenanceWindowWindowStartTime - ), - ) - + return cls(day_of_week=_enum(d, 'day_of_week', ClusterAutoRestartMessageMaintenanceWindowDayOfWeek), frequency=_enum(d, 'frequency', ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency), window_start_time=_from_dict(d, 'window_start_time', ClusterAutoRestartMessageMaintenanceWindowWindowStartTime)) + -class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum): - EVERY_WEEK = "EVERY_WEEK" - FIRST_AND_THIRD_OF_MONTH = "FIRST_AND_THIRD_OF_MONTH" - FIRST_OF_MONTH = "FIRST_OF_MONTH" - FOURTH_OF_MONTH = "FOURTH_OF_MONTH" - SECOND_AND_FOURTH_OF_MONTH = "SECOND_AND_FOURTH_OF_MONTH" - SECOND_OF_MONTH = "SECOND_OF_MONTH" - THIRD_OF_MONTH = "THIRD_OF_MONTH" +class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum): + + + EVERY_WEEK = 'EVERY_WEEK' + FIRST_AND_THIRD_OF_MONTH = 'FIRST_AND_THIRD_OF_MONTH' + FIRST_OF_MONTH = 'FIRST_OF_MONTH' + FOURTH_OF_MONTH = 'FOURTH_OF_MONTH' + SECOND_AND_FOURTH_OF_MONTH = 'SECOND_AND_FOURTH_OF_MONTH' + SECOND_OF_MONTH = 'SECOND_OF_MONTH' + THIRD_OF_MONTH = 'THIRD_OF_MONTH' @dataclass class ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: hours: Optional[int] = None - + minutes: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a dictionary suitable for use as a JSON request body.""" body = {} - if self.hours is not None: - body["hours"] = self.hours - if self.minutes is not None: - body["minutes"] = self.minutes + if self.hours is not None: body['hours'] = self.hours + if self.minutes is not None: body['minutes'] = self.minutes return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes.""" body = {} - if self.hours is not None: - body["hours"] = self.hours - if self.minutes is not None: - body["minutes"] = self.minutes + if self.hours is not None: body['hours'] = self.hours + if self.minutes is not None: body['minutes'] = self.minutes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary.""" - return cls(hours=d.get("hours", None), 
minutes=d.get("minutes", None)) + return cls(hours=d.get('hours', None), minutes=d.get('minutes', None)) + + @dataclass class ComplianceSecurityProfile: """SHIELD feature: CSP""" - + compliance_standards: Optional[List[ComplianceStandard]] = None """Set by customers when they request Compliance Security Profile (CSP)""" - + is_enabled: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.compliance_standards: - body["compliance_standards"] = [v.value for v in self.compliance_standards] - if self.is_enabled is not None: - body["is_enabled"] = self.is_enabled + if self.compliance_standards: body['compliance_standards'] = [v.value for v in self.compliance_standards] + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body def as_shallow_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.compliance_standards: - body["compliance_standards"] = self.compliance_standards - if self.is_enabled is not None: - body["is_enabled"] = self.is_enabled + if self.compliance_standards: body['compliance_standards'] = self.compliance_standards + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" - return cls( - compliance_standards=_repeated_enum(d, "compliance_standards", ComplianceStandard), - is_enabled=d.get("is_enabled", None), - ) + return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard), is_enabled=d.get('is_enabled', None)) + + @dataclass class ComplianceSecurityProfileSetting: compliance_security_profile_workspace: ComplianceSecurityProfile """SHIELD feature: CSP""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -624,502 +539,461 @@ class ComplianceSecurityProfileSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance
     per workspace."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfileSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.compliance_security_profile_workspace:
-            body["compliance_security_profile_workspace"] = self.compliance_security_profile_workspace.as_dict()
-        if self.etag is not None:
-            body["etag"] = self.etag
-        if self.setting_name is not None:
-            body["setting_name"] = self.setting_name
+        if self.compliance_security_profile_workspace: body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the ComplianceSecurityProfileSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.compliance_security_profile_workspace:
-            body["compliance_security_profile_workspace"] = self.compliance_security_profile_workspace
-        if self.etag is not None:
-            body["etag"] = self.etag
-        if self.setting_name is not None:
-            body["setting_name"] = self.setting_name
+        if self.compliance_security_profile_workspace: body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfileSetting:
         """Deserializes the ComplianceSecurityProfileSetting from a dictionary."""
-        return cls(
-            compliance_security_profile_workspace=_from_dict(
-                d, "compliance_security_profile_workspace", ComplianceSecurityProfile
-            ),
-            etag=d.get("etag", None),
-            setting_name=d.get("setting_name", None),
-        )
+        return cls(compliance_security_profile_workspace=_from_dict(d, 'compliance_security_profile_workspace', ComplianceSecurityProfile), etag=d.get('etag', None), setting_name=d.get('setting_name', None))
+
+
 
 
 class ComplianceStandard(Enum):
     """Compliance standard for SHIELD customers"""
-
-    CANADA_PROTECTED_B = "CANADA_PROTECTED_B"
-    CYBER_ESSENTIAL_PLUS = "CYBER_ESSENTIAL_PLUS"
-    FEDRAMP_HIGH = "FEDRAMP_HIGH"
-    FEDRAMP_IL5 = "FEDRAMP_IL5"
-    FEDRAMP_MODERATE = "FEDRAMP_MODERATE"
-    HIPAA = "HIPAA"
-    HITRUST = "HITRUST"
-    IRAP_PROTECTED = "IRAP_PROTECTED"
-    ISMAP = "ISMAP"
-    ITAR_EAR = "ITAR_EAR"
-    K_FSI = "K_FSI"
-    NONE = "NONE"
-    PCI_DSS = "PCI_DSS"
-
+
+    CANADA_PROTECTED_B = 'CANADA_PROTECTED_B'
+    CYBER_ESSENTIAL_PLUS = 'CYBER_ESSENTIAL_PLUS'
+    FEDRAMP_HIGH = 'FEDRAMP_HIGH'
+    FEDRAMP_IL5 = 'FEDRAMP_IL5'
+    FEDRAMP_MODERATE = 'FEDRAMP_MODERATE'
+    HIPAA = 'HIPAA'
+    HITRUST = 'HITRUST'
+    IRAP_PROTECTED = 'IRAP_PROTECTED'
+    ISMAP = 'ISMAP'
+    ITAR_EAR = 'ITAR_EAR'
+    K_FSI = 'K_FSI'
+    NONE = 'NONE'
+    PCI_DSS = 'PCI_DSS'
 
 @dataclass
 class Config:
     email: Optional[EmailConfig] = None
-
+
     generic_webhook: Optional[GenericWebhookConfig] = None
-
+
     microsoft_teams: Optional[MicrosoftTeamsConfig] = None
-
+
     pagerduty: Optional[PagerdutyConfig] = None
-
+
     slack: Optional[SlackConfig] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the Config into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.email:
-            body["email"] = self.email.as_dict()
-        if self.generic_webhook:
-            body["generic_webhook"] = self.generic_webhook.as_dict()
-        if self.microsoft_teams:
-            body["microsoft_teams"] = 
self.microsoft_teams.as_dict() - if self.pagerduty: - body["pagerduty"] = self.pagerduty.as_dict() - if self.slack: - body["slack"] = self.slack.as_dict() + if self.email: body['email'] = self.email.as_dict() + if self.generic_webhook: body['generic_webhook'] = self.generic_webhook.as_dict() + if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams.as_dict() + if self.pagerduty: body['pagerduty'] = self.pagerduty.as_dict() + if self.slack: body['slack'] = self.slack.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Config into a shallow dictionary of its immediate attributes.""" body = {} - if self.email: - body["email"] = self.email - if self.generic_webhook: - body["generic_webhook"] = self.generic_webhook - if self.microsoft_teams: - body["microsoft_teams"] = self.microsoft_teams - if self.pagerduty: - body["pagerduty"] = self.pagerduty - if self.slack: - body["slack"] = self.slack + if self.email: body['email'] = self.email + if self.generic_webhook: body['generic_webhook'] = self.generic_webhook + if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams + if self.pagerduty: body['pagerduty'] = self.pagerduty + if self.slack: body['slack'] = self.slack return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Config: """Deserializes the Config from a dictionary.""" - return cls( - email=_from_dict(d, "email", EmailConfig), - generic_webhook=_from_dict(d, "generic_webhook", GenericWebhookConfig), - microsoft_teams=_from_dict(d, "microsoft_teams", MicrosoftTeamsConfig), - pagerduty=_from_dict(d, "pagerduty", PagerdutyConfig), - slack=_from_dict(d, "slack", SlackConfig), - ) + return cls(email=_from_dict(d, 'email', EmailConfig), generic_webhook=_from_dict(d, 'generic_webhook', GenericWebhookConfig), microsoft_teams=_from_dict(d, 'microsoft_teams', MicrosoftTeamsConfig), pagerduty=_from_dict(d, 'pagerduty', PagerdutyConfig), slack=_from_dict(d, 'slack', SlackConfig)) + + @dataclass class CreateIpAccessList: """Details required to configure a block list or allow list.""" - + label: str """Label for the IP access list. This **cannot** be empty.""" - + list_type: ListType """Type of IP access list. Valid values are as follows and are case-sensitive: * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" - + ip_addresses: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the CreateIpAccessList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value + if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type + if self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateIpAccessList: """Deserializes the CreateIpAccessList from a dictionary.""" - return cls( - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) + return cls(ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_type=_enum(d, 'list_type', ListType)) + + @dataclass class CreateIpAccessListResponse: """An IP access list was successfully created.""" - + ip_access_list: Optional[IpAccessListInfo] = None """Definition of an IP Access list""" - + def as_dict(self) -> dict: """Serializes the CreateIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateIpAccessListResponse: """Deserializes the CreateIpAccessListResponse from a dictionary.""" - return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo)) + return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) + + + + + @dataclass class CreateNetworkConnectivityConfiguration: """Properties of the new network connectivity configuration.""" - + name: str """The name of the network connectivity configuration. The name can contain alphanumeric characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the regular expression ^[0-9a-zA-Z-_]{3,30}$""" - + region: str """The region for the network connectivity configuration. 
Only workspaces in the same region can be attached to the network connectivity configuration.""" - + def as_dict(self) -> dict: """Serializes the CreateNetworkConnectivityConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region + if self.name is not None: body['name'] = self.name + if self.region is not None: body['region'] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the CreateNetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region + if self.name is not None: body['name'] = self.name + if self.region is not None: body['region'] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkConnectivityConfiguration: """Deserializes the CreateNetworkConnectivityConfiguration from a dictionary.""" - return cls(name=d.get("name", None), region=d.get("region", None)) + return cls(name=d.get('name', None), region=d.get('region', None)) + + + + + @dataclass class CreateNotificationDestinationRequest: config: Optional[Config] = None """The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + def as_dict(self) -> dict: """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.display_name is not None: - body["display_name"] = self.display_name + if self.config: body['config'] = self.config.as_dict() + if self.display_name is not None: body['display_name'] = self.display_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: - body["config"] = self.config - if self.display_name is not None: - body["display_name"] = self.display_name + if self.config: body['config'] = self.config + if self.display_name is not None: body['display_name'] = self.display_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateNotificationDestinationRequest: """Deserializes the CreateNotificationDestinationRequest from a dictionary.""" - return cls(config=_from_dict(d, "config", Config), display_name=d.get("display_name", None)) + return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None)) + + @dataclass class CreateOboTokenRequest: """Configuration details for creating on-behalf tokens.""" - + application_id: str """Application ID of the service principal.""" - + comment: Optional[str] = None """Comment that describes the purpose of the token.""" - + lifetime_seconds: Optional[int] = None """The number of seconds before the token expires.""" - + def as_dict(self) -> dict: """Serializes the CreateOboTokenRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.application_id is not None: - body["application_id"] = self.application_id - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds + if self.application_id is not None: body['application_id'] = 
self.application_id + if self.comment is not None: body['comment'] = self.comment + if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds return body def as_shallow_dict(self) -> dict: """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.application_id is not None: - body["application_id"] = self.application_id - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds + if self.application_id is not None: body['application_id'] = self.application_id + if self.comment is not None: body['comment'] = self.comment + if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateOboTokenRequest: """Deserializes the CreateOboTokenRequest from a dictionary.""" - return cls( - application_id=d.get("application_id", None), - comment=d.get("comment", None), - lifetime_seconds=d.get("lifetime_seconds", None), - ) + return cls(application_id=d.get('application_id', None), comment=d.get('comment', None), lifetime_seconds=d.get('lifetime_seconds', None)) + + @dataclass class CreateOboTokenResponse: """An on-behalf token was successfully created for the service principal.""" - + token_info: Optional[TokenInfo] = None - + token_value: Optional[str] = None """Value of the token.""" - + def as_dict(self) -> dict: """Serializes the CreateOboTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_info: - body["token_info"] = self.token_info.as_dict() - if self.token_value is not None: - body["token_value"] = self.token_value + if self.token_info: body['token_info'] = self.token_info.as_dict() + if self.token_value is not None: body['token_value'] = self.token_value return body def as_shallow_dict(self) -> dict: """Serializes the CreateOboTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_info: - body["token_info"] = self.token_info - if self.token_value is not None: - body["token_value"] = self.token_value + if self.token_info: body['token_info'] = self.token_info + if self.token_value is not None: body['token_value'] = self.token_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateOboTokenResponse: """Deserializes the CreateOboTokenResponse from a dictionary.""" - return cls(token_info=_from_dict(d, "token_info", TokenInfo), token_value=d.get("token_value", None)) + return cls(token_info=_from_dict(d, 'token_info', TokenInfo), token_value=d.get('token_value', None)) + + @dataclass class CreatePrivateEndpointRule: """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization.""" - - resource_id: str - """The Azure resource ID of the target resource.""" - + domain_names: Optional[List[str]] = None - """Only used by private endpoints to customer-managed resources. + """Only used by private endpoints to customer-managed private endpoint services. Domain names of target private link service. When updating this field, the full list of target domain_names must be specified.""" - + + endpoint_service: Optional[str] = None + """The full target AWS endpoint service name that connects to the destination resources of the + private endpoint.""" + group_id: Optional[str] = None - """Only used by private endpoints to Azure first-party services. 
Enum: blob | dfs | sqlServer |
-    mysqlServer
+    """Not used by customer-managed private endpoint services.
 
     The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
     storage (root DBFS), you need two endpoints, one for blob and one for dfs."""
-
+
+    resource_id: Optional[str] = None
+    """The Azure resource ID of the target resource."""
+
+    resource_names: Optional[List[str]] = None
+    """Only used by private endpoints towards the AWS S3 service.
+
+    The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names
+    must be in the same region as the NCC/endpoint service. Updating this field replaces the entire
+    list, so provide the full list of desired resource_names."""
+
     def as_dict(self) -> dict:
         """Serializes the CreatePrivateEndpointRule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.domain_names:
-            body["domain_names"] = [v for v in self.domain_names]
-        if self.group_id is not None:
-            body["group_id"] = self.group_id
-        if self.resource_id is not None:
-            body["resource_id"] = self.resource_id
+        if self.domain_names: body['domain_names'] = [v for v in self.domain_names]
+        if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service
+        if self.group_id is not None: body['group_id'] = self.group_id
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.resource_names: body['resource_names'] = [v for v in self.resource_names]
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreatePrivateEndpointRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.domain_names:
-            body["domain_names"] = self.domain_names
-        if self.group_id is not None:
-            body["group_id"] = self.group_id
-        if self.resource_id is not None:
-            body["resource_id"] = self.resource_id
+        if self.domain_names: body['domain_names'] = self.domain_names
+        if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service
+        if self.group_id is not None: body['group_id'] = self.group_id
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.resource_names: body['resource_names'] = self.resource_names
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreatePrivateEndpointRule:
         """Deserializes the CreatePrivateEndpointRule from a dictionary."""
-        return cls(
-            domain_names=d.get("domain_names", None),
-            group_id=d.get("group_id", None),
-            resource_id=d.get("resource_id", None),
-        )
+        return cls(domain_names=d.get('domain_names', None), endpoint_service=d.get('endpoint_service', None), group_id=d.get('group_id', None), resource_id=d.get('resource_id', None), resource_names=d.get('resource_names', None))
+
+
+
+
 
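Annotation, not part of the patch: this hunk generalizes CreatePrivateEndpointRule from Azure-only fields (resource_id, group_id) to AWS-style rules (endpoint_service, resource_names). A sketch of both flavors under the same import assumption as the earlier note; the service name, bucket names, and resource ID below are hypothetical placeholders:

    from databricks.sdk.service.settings import CreatePrivateEndpointRule

    # AWS flavor: reach specific S3 buckets through the NCC's VPC endpoint.
    s3_rule = CreatePrivateEndpointRule(
        endpoint_service="com.amazonaws.us-west-2.s3",  # hypothetical endpoint service name
        resource_names=["my-bucket-1", "my-bucket-2"],  # hypothetical bucket names
    )

    # Azure flavor: the pre-existing fields still apply.
    azure_rule = CreatePrivateEndpointRule(
        resource_id="/subscriptions/.../storageAccounts/mystorage",  # hypothetical resource ID
        group_id="blob",
    )
    assert "resource_names" in s3_rule.as_dict() and "group_id" in azure_rule.as_dict()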
 @dataclass
 class CreateTokenRequest:
     comment: Optional[str] = None
     """Optional description to attach to the token."""
-
+
     lifetime_seconds: Optional[int] = None
     """The lifetime of the token, in seconds. If the lifetime is not specified, this token remains
     valid indefinitely."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateTokenRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.lifetime_seconds is not None:
-            body["lifetime_seconds"] = self.lifetime_seconds
+        if self.comment is not None: body['comment'] = self.comment
+        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.lifetime_seconds is not None:
-            body["lifetime_seconds"] = self.lifetime_seconds
+        if self.comment is not None: body['comment'] = self.comment
+        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateTokenRequest:
         """Deserializes the CreateTokenRequest from a dictionary."""
-        return cls(comment=d.get("comment", None), lifetime_seconds=d.get("lifetime_seconds", None))
+        return cls(comment=d.get('comment', None), lifetime_seconds=d.get('lifetime_seconds', None))
+
+
 
 
 @dataclass
 class CreateTokenResponse:
     token_info: Optional[PublicTokenInfo] = None
     """The information for the new token."""
-
+
     token_value: Optional[str] = None
     """The value of the new token."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateTokenResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_info:
-            body["token_info"] = self.token_info.as_dict()
-        if self.token_value is not None:
-            body["token_value"] = self.token_value
+        if self.token_info: body['token_info'] = self.token_info.as_dict()
+        if self.token_value is not None: body['token_value'] = self.token_value
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTokenResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_info:
-            body["token_info"] = self.token_info
-        if self.token_value is not None:
-            body["token_value"] = self.token_value
+        if self.token_info: body['token_info'] = self.token_info
+        if self.token_value is not None: body['token_value'] = self.token_value
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateTokenResponse:
         """Deserializes the CreateTokenResponse from a dictionary."""
-        return cls(token_info=_from_dict(d, "token_info", PublicTokenInfo), token_value=d.get("token_value", None))
+        return cls(token_info=_from_dict(d, 'token_info', PublicTokenInfo), token_value=d.get('token_value', None))
+
+
 
 
 @dataclass
 class CspEnablementAccount:
     """Account level policy for CSP"""
-
+
     compliance_standards: Optional[List[ComplianceStandard]] = None
     """Set by customers when they request Compliance Security Profile (CSP). Invariants are enforced
     in Settings policy."""
-
+
     is_enforced: Optional[bool] = None
     """Enforced = it cannot be overridden at workspace level."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CspEnablementAccount into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.compliance_standards:
-            body["compliance_standards"] = [v.value for v in self.compliance_standards]
-        if self.is_enforced is not None:
-            body["is_enforced"] = self.is_enforced
+        if self.compliance_standards: body['compliance_standards'] = [v.value for v in self.compliance_standards]
+        if 
self.is_enforced is not None: body['is_enforced'] = self.is_enforced return body def as_shallow_dict(self) -> dict: """Serializes the CspEnablementAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.compliance_standards: - body["compliance_standards"] = self.compliance_standards - if self.is_enforced is not None: - body["is_enforced"] = self.is_enforced + if self.compliance_standards: body['compliance_standards'] = self.compliance_standards + if self.is_enforced is not None: body['is_enforced'] = self.is_enforced return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CspEnablementAccount: """Deserializes the CspEnablementAccount from a dictionary.""" - return cls( - compliance_standards=_repeated_enum(d, "compliance_standards", ComplianceStandard), - is_enforced=d.get("is_enforced", None), - ) + return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard), is_enforced=d.get('is_enforced', None)) + + @dataclass class CspEnablementAccountSetting: csp_enablement_account: CspEnablementAccount """Account level policy for CSP""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1127,107 +1001,255 @@ class CspEnablementAccountSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the CspEnablementAccountSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.csp_enablement_account: - body["csp_enablement_account"] = self.csp_enablement_account.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the CspEnablementAccountSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.csp_enablement_account: - body["csp_enablement_account"] = self.csp_enablement_account - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CspEnablementAccountSetting: """Deserializes the CspEnablementAccountSetting from a dictionary.""" - return cls( - csp_enablement_account=_from_dict(d, "csp_enablement_account", CspEnablementAccount), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) - + return cls(csp_enablement_account=_from_dict(d, 'csp_enablement_account', CspEnablementAccount), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + -@dataclass -class DefaultNamespaceSetting: - """This represents the setting configuration for the default namespace in the Databricks workspace. - Setting the default catalog for the workspace determines the catalog that is used when queries - do not reference a fully qualified 3 level name. For example, if the default catalog is set to - 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object - 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a - restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only - applies when using Unity Catalog-enabled compute.""" - namespace: StringMessage - etag: Optional[str] = None - """etag used for versioning. The response is at least as fresh as the eTag provided. This is used - for optimistic concurrency control as a way to help prevent simultaneous writes of a setting - overwriting each other. It is strongly suggested that systems make use of the etag in the read - -> update pattern to perform setting updates in order to avoid race conditions. That is, get an +@dataclass +class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule: + """Properties of the new private endpoint rule. Note that for private endpoints towards a VPC + endpoint service behind a customer-managed NLB, you must approve the endpoint in AWS console + after initialization.""" + + account_id: Optional[str] = None + """Databricks account ID. 
You can find your account ID from the Accounts Console.""" + + connection_state: Optional[CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState] = None + """The current status of this private endpoint. The private endpoint rules are effective only if + the connection state is ESTABLISHED. Remember that you must approve new endpoints on your + resources in the AWS console before they take effect. The possible values are: - PENDING: The + endpoint has been created and pending approval. - ESTABLISHED: The endpoint has been approved + and is ready to use in your serverless compute resources. - REJECTED: Connection was rejected by + the private link resource owner. - DISCONNECTED: Connection was removed by the private link + resource owner, the private endpoint becomes informative and should be deleted for clean-up. - + EXPIRED: If the endpoint is created but not approved in 14 days, it is EXPIRED.""" + + creation_time: Optional[int] = None + """Time in epoch milliseconds when this object was created.""" + + deactivated: Optional[bool] = None + """Whether this private endpoint is deactivated.""" + + deactivated_at: Optional[int] = None + """Time in epoch milliseconds when this object was deactivated.""" + + domain_names: Optional[List[str]] = None + """Only used by private endpoints towards a VPC endpoint service for customer-managed VPC endpoint + service. + + The target AWS resource FQDNs accessible via the VPC endpoint service. When updating this field, + we perform full update on this field. Please ensure a full list of desired domain_names is + provided.""" + + enabled: Optional[bool] = None + """Only used by private endpoints towards an AWS S3 service. + + Update this field to activate/deactivate this private endpoint to allow egress access from + serverless compute resources.""" + + endpoint_service: Optional[str] = None + """The full target AWS endpoint service name that connects to the destination resources of the + private endpoint.""" + + network_connectivity_config_id: Optional[str] = None + """The ID of a network connectivity configuration, which is the parent resource of this private + endpoint rule object.""" + + resource_names: Optional[List[str]] = None + """Only used by private endpoints towards AWS S3 service. + + The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names + must be in the same region as the NCC/endpoint service. When updating this field, we perform + full update on this field. Please ensure a full list of desired resource_names is provided.""" + + rule_id: Optional[str] = None + """The ID of a private endpoint rule.""" + + updated_time: Optional[int] = None + """Time in epoch milliseconds when this object was updated.""" + + vpc_endpoint_id: Optional[str] = None + """The AWS VPC endpoint ID. 
You can use this ID to identify VPC endpoint created by Databricks.""" + + def as_dict(self) -> dict: + """Serializes the CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.connection_state is not None: body['connection_state'] = self.connection_state.value + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.domain_names: body['domain_names'] = [v for v in self.domain_names] + if self.enabled is not None: body['enabled'] = self.enabled + if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_names: body['resource_names'] = [v for v in self.resource_names] + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time + if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.connection_state is not None: body['connection_state'] = self.connection_state + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.domain_names: body['domain_names'] = self.domain_names + if self.enabled is not None: body['enabled'] = self.enabled + if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_names: body['resource_names'] = self.resource_names + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time + if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule: + """Deserializes the CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule from a dictionary.""" + return cls(account_id=d.get('account_id', None), connection_state=_enum(d, 'connection_state', CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState), creation_time=d.get('creation_time', None), deactivated=d.get('deactivated', None), deactivated_at=d.get('deactivated_at', None), domain_names=d.get('domain_names', None), enabled=d.get('enabled', None), endpoint_service=d.get('endpoint_service', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), resource_names=d.get('resource_names', None), rule_id=d.get('rule_id', None), updated_time=d.get('updated_time', None), vpc_endpoint_id=d.get('vpc_endpoint_id', None)) + + + + +class 
CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState(Enum): + + + DISCONNECTED = 'DISCONNECTED' + ESTABLISHED = 'ESTABLISHED' + EXPIRED = 'EXPIRED' + PENDING = 'PENDING' + REJECTED = 'REJECTED' + +@dataclass +class DashboardEmailSubscriptions: + boolean_val: BooleanMessage + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" + + def as_dict(self) -> dict: + """Serializes the DashboardEmailSubscriptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DashboardEmailSubscriptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DashboardEmailSubscriptions: + """Deserializes the DashboardEmailSubscriptions from a dictionary.""" + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + + +@dataclass +class DefaultNamespaceSetting: + """This represents the setting configuration for the default namespace in the Databricks workspace. + Setting the default catalog for the workspace determines the catalog that is used when queries + do not reference a fully qualified 3 level name. For example, if the default catalog is set to + 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object + 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a + restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only + applies when using Unity Catalog-enabled compute.""" + + namespace: StringMessage + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. 
This field is populated in the response, but it will not be + respected even if it's set in the request body. The setting name in the path parameter will be + respected instead. Setting name is required to be 'default' if the setting only has one instance + per workspace.""" + def as_dict(self) -> dict: """Serializes the DefaultNamespaceSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.namespace: - body["namespace"] = self.namespace.as_dict() - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.etag is not None: body['etag'] = self.etag + if self.namespace: body['namespace'] = self.namespace.as_dict() + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DefaultNamespaceSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.namespace: - body["namespace"] = self.namespace - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.etag is not None: body['etag'] = self.etag + if self.namespace: body['namespace'] = self.namespace + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DefaultNamespaceSetting: """Deserializes the DefaultNamespaceSetting from a dictionary.""" - return cls( - etag=d.get("etag", None), - namespace=_from_dict(d, "namespace", StringMessage), - setting_name=d.get("setting_name", None), - ) + return cls(etag=d.get('etag', None), namespace=_from_dict(d, 'namespace', StringMessage), setting_name=d.get('setting_name', None)) + + + + + @dataclass class DeleteAccountIpAccessEnableResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1235,31 +1257,37 @@ class DeleteAccountIpAccessEnableResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteAccountIpAccessEnableResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAccountIpAccessEnableResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAccountIpAccessEnableResponse: """Deserializes the DeleteAccountIpAccessEnableResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + + + + @dataclass class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. 
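
For readers tracing the etag contract described in these docstrings, here is a minimal sketch of the read -> update pattern, assuming a configured `WorkspaceClient` named `w` and an illustrative catalog name (`retail_prod`); the `w.settings.default_namespace` accessor and its `get`/`update` signatures follow the SDK's settings API docs:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.settings import DefaultNamespaceSetting, StringMessage

    w = WorkspaceClient()

    # Read first: the GET response carries the etag of the current version.
    current = w.settings.default_namespace.get()

    # Write back with that etag so a concurrent update is detected instead of
    # being silently overwritten (optimistic concurrency control).
    w.settings.default_namespace.update(
        allow_missing=True,
        setting=DefaultNamespaceSetting(
            namespace=StringMessage(value="retail_prod"),  # illustrative value
            etag=current.etag,
        ),
        field_mask="namespace.value",
    )
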
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1267,31 +1295,34 @@ class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: """Deserializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + @dataclass class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1299,31 +1330,34 @@ class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: """Deserializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + @dataclass -class DeleteDefaultNamespaceSettingResponse: +class DeleteDashboardEmailSubscriptionsResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1331,31 +1365,69 @@ class DeleteDefaultNamespaceSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" + + def as_dict(self) -> dict: + """Serializes the DeleteDashboardEmailSubscriptionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDashboardEmailSubscriptionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteDashboardEmailSubscriptionsResponse: + """Deserializes the DeleteDashboardEmailSubscriptionsResponse from a dictionary.""" + return cls(etag=d.get('etag', None)) + + + + + + +@dataclass +class DeleteDefaultNamespaceSettingResponse: + """The etag is returned.""" + + etag: str + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + an etag from a GET request, and pass it with the DELETE request to identify the rule set version + you are deleting.""" + def as_dict(self) -> dict: """Serializes the DeleteDefaultNamespaceSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDefaultNamespaceSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDefaultNamespaceSettingResponse: """Deserializes the DeleteDefaultNamespaceSettingResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + @dataclass class DeleteDisableLegacyAccessResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1363,31 +1435,34 @@ class DeleteDisableLegacyAccessResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
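
The `Delete*Response` dataclasses above follow the same discipline for the read -> delete pattern; a short sketch, again assuming a configured `w` (the returned object's `etag` reflects the post-delete version):

    # Fetch the current version, then pass its etag with the DELETE so the
    # request fails rather than clobbering a concurrent writer's change.
    current = w.settings.default_namespace.get()
    resp = w.settings.default_namespace.delete(etag=current.etag)
    new_etag = resp.etag  # e.g. DeleteDefaultNamespaceSettingResponse.etag
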
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDisableLegacyAccessResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDisableLegacyAccessResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDisableLegacyAccessResponse: """Deserializes the DeleteDisableLegacyAccessResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + @dataclass class DeleteDisableLegacyDbfsResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1395,31 +1470,34 @@ class DeleteDisableLegacyDbfsResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDisableLegacyDbfsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDisableLegacyDbfsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDisableLegacyDbfsResponse: """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + @dataclass class DeleteDisableLegacyFeaturesResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1427,31 +1505,37 @@ class DeleteDisableLegacyFeaturesResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDisableLegacyFeaturesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDisableLegacyFeaturesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDisableLegacyFeaturesResponse: """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + + + + @dataclass class DeleteLlmProxyPartnerPoweredWorkspaceResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1459,25 +1543,28 @@ class DeleteLlmProxyPartnerPoweredWorkspaceResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteLlmProxyPartnerPoweredWorkspaceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteLlmProxyPartnerPoweredWorkspaceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse: """Deserializes the DeleteLlmProxyPartnerPoweredWorkspaceResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + @dataclass @@ -1496,6 +1583,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteNetworkConnectivityConfigurationResponse: """Deserializes the DeleteNetworkConnectivityConfigurationResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -1514,12 +1606,20 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteNetworkPolicyRpcResponse: """Deserializes the DeleteNetworkPolicyRpcResponse from a dictionary.""" return cls() + + + + + + + + @dataclass class DeletePersonalComputeSettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1527,25 +1627,28 @@ class DeletePersonalComputeSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeletePersonalComputeSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeletePersonalComputeSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeletePersonalComputeSettingResponse: """Deserializes the DeletePersonalComputeSettingResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + + + + + @dataclass @@ -1564,12 +1667,17 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + @dataclass class DeleteRestrictWorkspaceAdminsSettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1577,90 +1685,120 @@ class DeleteRestrictWorkspaceAdminsSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag + if self.etag is not None: body['etag'] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRestrictWorkspaceAdminsSettingResponse: """Deserializes the DeleteRestrictWorkspaceAdminsSettingResponse from a dictionary.""" - return cls(etag=d.get("etag", None)) + return cls(etag=d.get('etag', None)) + -class DestinationType(Enum): - EMAIL = "EMAIL" - MICROSOFT_TEAMS = "MICROSOFT_TEAMS" - PAGERDUTY = "PAGERDUTY" - SLACK = "SLACK" - WEBHOOK = "WEBHOOK" -@dataclass -class DisableLegacyAccess: - disable_legacy_access: BooleanMessage - etag: Optional[str] = None +@dataclass +class DeleteSqlResultsDownloadResponse: + """The etag is returned.""" + + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read - -> update pattern to perform setting updates in order to avoid race conditions. That is, get an - etag from a GET request, and pass it with the PATCH request to identify the setting version you - are updating.""" + -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get + an etag from a GET request, and pass it with the DELETE request to identify the rule set version + you are deleting.""" + + def as_dict(self) -> dict: + """Serializes the DeleteSqlResultsDownloadResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body - setting_name: Optional[str] = None - """Name of the corresponding setting. This field is populated in the response, but it will not be + def as_shallow_dict(self) -> dict: + """Serializes the DeleteSqlResultsDownloadResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: body['etag'] = self.etag + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteSqlResultsDownloadResponse: + """Deserializes the DeleteSqlResultsDownloadResponse from a dictionary.""" + return cls(etag=d.get('etag', None)) + + + + + + + +class DestinationType(Enum): + + + EMAIL = 'EMAIL' + MICROSOFT_TEAMS = 'MICROSOFT_TEAMS' + PAGERDUTY = 'PAGERDUTY' + SLACK = 'SLACK' + WEBHOOK = 'WEBHOOK' + +@dataclass +class DisableLegacyAccess: + disable_legacy_access: BooleanMessage + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DisableLegacyAccess into a dictionary suitable for use as a JSON request body.""" body = {} - if self.disable_legacy_access: - body["disable_legacy_access"] = self.disable_legacy_access.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DisableLegacyAccess into a shallow dictionary of its immediate attributes.""" body = {} - if self.disable_legacy_access: - body["disable_legacy_access"] = self.disable_legacy_access - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DisableLegacyAccess: """Deserializes the DisableLegacyAccess from a dictionary.""" - return cls( - disable_legacy_access=_from_dict(d, "disable_legacy_access", BooleanMessage), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(disable_legacy_access=_from_dict(d, 'disable_legacy_access', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class DisableLegacyDbfs: disable_legacy_dbfs: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1668,49 +1806,41 @@ class DisableLegacyDbfs: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DisableLegacyDbfs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.disable_legacy_dbfs: - body["disable_legacy_dbfs"] = self.disable_legacy_dbfs.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DisableLegacyDbfs into a shallow dictionary of its immediate attributes.""" body = {} - if self.disable_legacy_dbfs: - body["disable_legacy_dbfs"] = self.disable_legacy_dbfs - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DisableLegacyDbfs: """Deserializes the DisableLegacyDbfs from a dictionary.""" - return cls( - disable_legacy_dbfs=_from_dict(d, "disable_legacy_dbfs", BooleanMessage), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(disable_legacy_dbfs=_from_dict(d, 'disable_legacy_dbfs', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class DisableLegacyFeatures: disable_legacy_features: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1718,43 +1848,35 @@ class DisableLegacyFeatures: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DisableLegacyFeatures into a dictionary suitable for use as a JSON request body.""" body = {} - if self.disable_legacy_features: - body["disable_legacy_features"] = self.disable_legacy_features.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DisableLegacyFeatures into a shallow dictionary of its immediate attributes.""" body = {} - if self.disable_legacy_features: - body["disable_legacy_features"] = self.disable_legacy_features - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DisableLegacyFeatures: """Deserializes the DisableLegacyFeatures from a dictionary.""" - return cls( - disable_legacy_features=_from_dict(d, "disable_legacy_features", BooleanMessage), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(disable_legacy_features=_from_dict(d, 'disable_legacy_features', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass @@ -1762,85 +1884,70 @@ class EgressNetworkPolicy: """The network policies applying for egress traffic. This message is used by the UI/REST API. 
We translate this message to the format expected by the dataplane in Lakehouse Network Manager (for the format expected by the dataplane, see networkconfig.textproto).""" - + internet_access: Optional[EgressNetworkPolicyInternetAccessPolicy] = None """The access policy enforced for egress traffic to the internet.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.internet_access: - body["internet_access"] = self.internet_access.as_dict() + if self.internet_access: body['internet_access'] = self.internet_access.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.internet_access: - body["internet_access"] = self.internet_access + if self.internet_access: body['internet_access'] = self.internet_access return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicy: """Deserializes the EgressNetworkPolicy from a dictionary.""" - return cls(internet_access=_from_dict(d, "internet_access", EgressNetworkPolicyInternetAccessPolicy)) + return cls(internet_access=_from_dict(d, 'internet_access', EgressNetworkPolicyInternetAccessPolicy)) + + @dataclass class EgressNetworkPolicyInternetAccessPolicy: allowed_internet_destinations: Optional[List[EgressNetworkPolicyInternetAccessPolicyInternetDestination]] = None - + allowed_storage_destinations: Optional[List[EgressNetworkPolicyInternetAccessPolicyStorageDestination]] = None - + log_only_mode: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyMode] = None """Optional. If not specified, assume the policy is enforced for all workloads.""" - + restriction_mode: Optional[EgressNetworkPolicyInternetAccessPolicyRestrictionMode] = None """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can only access explicitly allowed internet and storage destinations, as well as UC connections and external locations. 
PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via private link.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_internet_destinations: - body["allowed_internet_destinations"] = [v.as_dict() for v in self.allowed_internet_destinations] - if self.allowed_storage_destinations: - body["allowed_storage_destinations"] = [v.as_dict() for v in self.allowed_storage_destinations] - if self.log_only_mode: - body["log_only_mode"] = self.log_only_mode.as_dict() - if self.restriction_mode is not None: - body["restriction_mode"] = self.restriction_mode.value + if self.allowed_internet_destinations: body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations] + if self.allowed_storage_destinations: body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations] + if self.log_only_mode: body['log_only_mode'] = self.log_only_mode.as_dict() + if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_internet_destinations: - body["allowed_internet_destinations"] = self.allowed_internet_destinations - if self.allowed_storage_destinations: - body["allowed_storage_destinations"] = self.allowed_storage_destinations - if self.log_only_mode: - body["log_only_mode"] = self.log_only_mode - if self.restriction_mode is not None: - body["restriction_mode"] = self.restriction_mode + if self.allowed_internet_destinations: body['allowed_internet_destinations'] = self.allowed_internet_destinations + if self.allowed_storage_destinations: body['allowed_storage_destinations'] = self.allowed_storage_destinations + if self.log_only_mode: body['log_only_mode'] = self.log_only_mode + if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicy: """Deserializes the EgressNetworkPolicyInternetAccessPolicy from a dictionary.""" - return cls( - allowed_internet_destinations=_repeated_dict( - d, "allowed_internet_destinations", EgressNetworkPolicyInternetAccessPolicyInternetDestination - ), - allowed_storage_destinations=_repeated_dict( - d, "allowed_storage_destinations", EgressNetworkPolicyInternetAccessPolicyStorageDestination - ), - log_only_mode=_from_dict(d, "log_only_mode", EgressNetworkPolicyInternetAccessPolicyLogOnlyMode), - restriction_mode=_enum(d, "restriction_mode", EgressNetworkPolicyInternetAccessPolicyRestrictionMode), - ) + return cls(allowed_internet_destinations=_repeated_dict(d, 'allowed_internet_destinations', EgressNetworkPolicyInternetAccessPolicyInternetDestination), allowed_storage_destinations=_repeated_dict(d, 'allowed_storage_destinations', EgressNetworkPolicyInternetAccessPolicyStorageDestination), log_only_mode=_from_dict(d, 'log_only_mode', EgressNetworkPolicyInternetAccessPolicyLogOnlyMode), restriction_mode=_enum(d, 'restriction_mode', EgressNetworkPolicyInternetAccessPolicyRestrictionMode)) + + @dataclass @@ -1848,53 +1955,39 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestination: """Users can specify accessible internet destinations when outbound access is restricted. 
We only support domain name (FQDN) destinations for the time being, though going forwards we want to support host names and IP addresses.""" - + destination: Optional[str] = None - - protocol: Optional[ - EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol - ] = None + + protocol: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol] = None """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be set to TCP by default and hidden from the user. In the future, users may be able to select HTTP filtering (i.e. SNI based filtering, filtering by FQDN).""" - + type: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType] = None - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: - body["destination"] = self.destination - if self.protocol is not None: - body["protocol"] = self.protocol.value - if self.type is not None: - body["type"] = self.type.value + if self.destination is not None: body['destination'] = self.destination + if self.protocol is not None: body['protocol'] = self.protocol.value + if self.type is not None: body['type'] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: - body["destination"] = self.destination - if self.protocol is not None: - body["protocol"] = self.protocol - if self.type is not None: - body["type"] = self.type + if self.destination is not None: body['destination'] = self.destination + if self.protocol is not None: body['protocol'] = self.protocol + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicyInternetDestination: """Deserializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination from a dictionary.""" - return cls( - destination=d.get("destination", None), - protocol=_enum( - d, - "protocol", - EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol, - ), - type=_enum(d, "type", EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType), - ) + return cls(destination=d.get('destination', None), protocol=_enum(d, 'protocol', EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol), type=_enum(d, 'type', EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType)) + + class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol(Enum): @@ -1902,62 +1995,53 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinat filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be set to TCP by default and hidden from the user. In the future, users may be able to select HTTP filtering (i.e. 
SNI based filtering, filtering by FQDN).""" - - TCP = "TCP" - + + TCP = 'TCP' class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType(Enum): - - FQDN = "FQDN" - + + + FQDN = 'FQDN' @dataclass class EgressNetworkPolicyInternetAccessPolicyLogOnlyMode: log_only_mode_type: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType] = None - + workloads: Optional[List[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType]] = None - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_only_mode_type is not None: - body["log_only_mode_type"] = self.log_only_mode_type.value - if self.workloads: - body["workloads"] = [v.value for v in self.workloads] + if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type.value + if self.workloads: body['workloads'] = [v.value for v in self.workloads] return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_only_mode_type is not None: - body["log_only_mode_type"] = self.log_only_mode_type - if self.workloads: - body["workloads"] = self.workloads + if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type + if self.workloads: body['workloads'] = self.workloads return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicyLogOnlyMode: """Deserializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode from a dictionary.""" - return cls( - log_only_mode_type=_enum( - d, "log_only_mode_type", EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType - ), - workloads=_repeated_enum(d, "workloads", EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType), - ) - + return cls(log_only_mode_type=_enum(d, 'log_only_mode_type', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType), workloads=_repeated_enum(d, 'workloads', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType)) + -class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType(Enum): - ALL_SERVICES = "ALL_SERVICES" - SELECTED_SERVICES = "SELECTED_SERVICES" +class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType(Enum): + + + ALL_SERVICES = 'ALL_SERVICES' + SELECTED_SERVICES = 'SELECTED_SERVICES' class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType(Enum): """The values should match the list of workloads used in networkconfig.proto""" - - DBSQL = "DBSQL" - ML_SERVING = "ML_SERVING" - + + DBSQL = 'DBSQL' + ML_SERVING = 'ML_SERVING' class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum): """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: @@ -1965,154 +2049,113 @@ class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum): only access explicitly allowed internet and storage destinations, as well as UC connections and external locations. 
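
Putting the pieces defined above together, a hedged sketch of a restricted internet-access policy that allows a single FQDN and logs (rather than blocks) DBSQL traffic; the destination value is illustrative:

    policy = EgressNetworkPolicy(
        internet_access=EgressNetworkPolicyInternetAccessPolicy(
            restriction_mode=EgressNetworkPolicyInternetAccessPolicyRestrictionMode.RESTRICTED_ACCESS,
            allowed_internet_destinations=[
                EgressNetworkPolicyInternetAccessPolicyInternetDestination(
                    destination="pypi.org",  # FQDN-only for now, per the docstring
                    type=EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType.FQDN,
                )
            ],
            log_only_mode=EgressNetworkPolicyInternetAccessPolicyLogOnlyMode(
                log_only_mode_type=EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType.SELECTED_SERVICES,
                workloads=[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType.DBSQL],
            ),
        )
    )
    body = policy.as_dict()  # nested dataclasses and enums serialize recursively
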
PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via private link.""" - - FULL_ACCESS = "FULL_ACCESS" - PRIVATE_ACCESS_ONLY = "PRIVATE_ACCESS_ONLY" - RESTRICTED_ACCESS = "RESTRICTED_ACCESS" - + + FULL_ACCESS = 'FULL_ACCESS' + PRIVATE_ACCESS_ONLY = 'PRIVATE_ACCESS_ONLY' + RESTRICTED_ACCESS = 'RESTRICTED_ACCESS' @dataclass class EgressNetworkPolicyInternetAccessPolicyStorageDestination: """Users can specify accessible storage destinations.""" - + allowed_paths: Optional[List[str]] = None - + azure_container: Optional[str] = None - + azure_dns_zone: Optional[str] = None - + azure_storage_account: Optional[str] = None - + azure_storage_service: Optional[str] = None - + bucket_name: Optional[str] = None - + region: Optional[str] = None - + type: Optional[EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType] = None - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_paths: - body["allowed_paths"] = [v for v in self.allowed_paths] - if self.azure_container is not None: - body["azure_container"] = self.azure_container - if self.azure_dns_zone is not None: - body["azure_dns_zone"] = self.azure_dns_zone - if self.azure_storage_account is not None: - body["azure_storage_account"] = self.azure_storage_account - if self.azure_storage_service is not None: - body["azure_storage_service"] = self.azure_storage_service - if self.bucket_name is not None: - body["bucket_name"] = self.bucket_name - if self.region is not None: - body["region"] = self.region - if self.type is not None: - body["type"] = self.type.value + if self.allowed_paths: body['allowed_paths'] = [v for v in self.allowed_paths] + if self.azure_container is not None: body['azure_container'] = self.azure_container + if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone + if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account + if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name + if self.region is not None: body['region'] = self.region + if self.type is not None: body['type'] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_paths: - body["allowed_paths"] = self.allowed_paths - if self.azure_container is not None: - body["azure_container"] = self.azure_container - if self.azure_dns_zone is not None: - body["azure_dns_zone"] = self.azure_dns_zone - if self.azure_storage_account is not None: - body["azure_storage_account"] = self.azure_storage_account - if self.azure_storage_service is not None: - body["azure_storage_service"] = self.azure_storage_service - if self.bucket_name is not None: - body["bucket_name"] = self.bucket_name - if self.region is not None: - body["region"] = self.region - if self.type is not None: - body["type"] = self.type + if self.allowed_paths: body['allowed_paths'] = self.allowed_paths + if self.azure_container is not None: body['azure_container'] = self.azure_container + if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone + if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account + if 
self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name + if self.region is not None: body['region'] = self.region + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicyStorageDestination: """Deserializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination from a dictionary.""" - return cls( - allowed_paths=d.get("allowed_paths", None), - azure_container=d.get("azure_container", None), - azure_dns_zone=d.get("azure_dns_zone", None), - azure_storage_account=d.get("azure_storage_account", None), - azure_storage_service=d.get("azure_storage_service", None), - bucket_name=d.get("bucket_name", None), - region=d.get("region", None), - type=_enum(d, "type", EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType), - ) - + return cls(allowed_paths=d.get('allowed_paths', None), azure_container=d.get('azure_container', None), azure_dns_zone=d.get('azure_dns_zone', None), azure_storage_account=d.get('azure_storage_account', None), azure_storage_service=d.get('azure_storage_service', None), bucket_name=d.get('bucket_name', None), region=d.get('region', None), type=_enum(d, 'type', EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType)) + -class EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType(Enum): - AWS_S3 = "AWS_S3" - AZURE_STORAGE = "AZURE_STORAGE" - CLOUDFLARE_R2 = "CLOUDFLARE_R2" - GOOGLE_CLOUD_STORAGE = "GOOGLE_CLOUD_STORAGE" +class EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType(Enum): + + + AWS_S3 = 'AWS_S3' + AZURE_STORAGE = 'AZURE_STORAGE' + CLOUDFLARE_R2 = 'CLOUDFLARE_R2' + GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE' @dataclass class EgressNetworkPolicyNetworkAccessPolicy: restriction_mode: EgressNetworkPolicyNetworkAccessPolicyRestrictionMode """The restriction mode that controls how serverless workloads can access the internet.""" - + allowed_internet_destinations: Optional[List[EgressNetworkPolicyNetworkAccessPolicyInternetDestination]] = None """List of internet destinations that serverless workloads are allowed to access when in RESTRICTED_ACCESS mode.""" - + allowed_storage_destinations: Optional[List[EgressNetworkPolicyNetworkAccessPolicyStorageDestination]] = None """List of storage destinations that serverless workloads are allowed to access when in RESTRICTED_ACCESS mode.""" - + policy_enforcement: Optional[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement] = None """Optional. 
When policy_enforcement is not provided, we default to ENFORCE_MODE_ALL_SERVICES""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_internet_destinations: - body["allowed_internet_destinations"] = [v.as_dict() for v in self.allowed_internet_destinations] - if self.allowed_storage_destinations: - body["allowed_storage_destinations"] = [v.as_dict() for v in self.allowed_storage_destinations] - if self.policy_enforcement: - body["policy_enforcement"] = self.policy_enforcement.as_dict() - if self.restriction_mode is not None: - body["restriction_mode"] = self.restriction_mode.value + if self.allowed_internet_destinations: body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations] + if self.allowed_storage_destinations: body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations] + if self.policy_enforcement: body['policy_enforcement'] = self.policy_enforcement.as_dict() + if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_internet_destinations: - body["allowed_internet_destinations"] = self.allowed_internet_destinations - if self.allowed_storage_destinations: - body["allowed_storage_destinations"] = self.allowed_storage_destinations - if self.policy_enforcement: - body["policy_enforcement"] = self.policy_enforcement - if self.restriction_mode is not None: - body["restriction_mode"] = self.restriction_mode + if self.allowed_internet_destinations: body['allowed_internet_destinations'] = self.allowed_internet_destinations + if self.allowed_storage_destinations: body['allowed_storage_destinations'] = self.allowed_storage_destinations + if self.policy_enforcement: body['policy_enforcement'] = self.policy_enforcement + if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicy: """Deserializes the EgressNetworkPolicyNetworkAccessPolicy from a dictionary.""" - return cls( - allowed_internet_destinations=_repeated_dict( - d, "allowed_internet_destinations", EgressNetworkPolicyNetworkAccessPolicyInternetDestination - ), - allowed_storage_destinations=_repeated_dict( - d, "allowed_storage_destinations", EgressNetworkPolicyNetworkAccessPolicyStorageDestination - ), - policy_enforcement=_from_dict( - d, "policy_enforcement", EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement - ), - restriction_mode=_enum(d, "restriction_mode", EgressNetworkPolicyNetworkAccessPolicyRestrictionMode), - ) + return cls(allowed_internet_destinations=_repeated_dict(d, 'allowed_internet_destinations', EgressNetworkPolicyNetworkAccessPolicyInternetDestination), allowed_storage_destinations=_repeated_dict(d, 'allowed_storage_destinations', EgressNetworkPolicyNetworkAccessPolicyStorageDestination), policy_enforcement=_from_dict(d, 'policy_enforcement', EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement), restriction_mode=_enum(d, 'restriction_mode', EgressNetworkPolicyNetworkAccessPolicyRestrictionMode)) + + @dataclass @@ -2120,224 +2163,177 @@ class EgressNetworkPolicyNetworkAccessPolicyInternetDestination: """Users can specify accessible internet destinations 
when outbound access is restricted. We only support DNS_NAME (FQDN format) destinations for the time being. Going forward we may extend support to host names and IP addresses.""" - + destination: Optional[str] = None """The internet destination to which access will be allowed. Format dependent on the destination type.""" - - internet_destination_type: Optional[ - EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType - ] = None + + internet_destination_type: Optional[EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType] = None """The type of internet destination. Currently only DNS_NAME is supported.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: - body["destination"] = self.destination - if self.internet_destination_type is not None: - body["internet_destination_type"] = self.internet_destination_type.value + if self.destination is not None: body['destination'] = self.destination + if self.internet_destination_type is not None: body['internet_destination_type'] = self.internet_destination_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: - body["destination"] = self.destination - if self.internet_destination_type is not None: - body["internet_destination_type"] = self.internet_destination_type + if self.destination is not None: body['destination'] = self.destination + if self.internet_destination_type is not None: body['internet_destination_type'] = self.internet_destination_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicyInternetDestination: """Deserializes the EgressNetworkPolicyNetworkAccessPolicyInternetDestination from a dictionary.""" - return cls( - destination=d.get("destination", None), - internet_destination_type=_enum( - d, - "internet_destination_type", - EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType, - ), - ) - + return cls(destination=d.get('destination', None), internet_destination_type=_enum(d, 'internet_destination_type', EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType)) + -class EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType(Enum): - DNS_NAME = "DNS_NAME" +class EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType(Enum): + + + DNS_NAME = 'DNS_NAME' @dataclass class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement: - dry_run_mode_product_filter: Optional[ - List[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter] - ] = None + dry_run_mode_product_filter: Optional[List[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter]] = None """When empty, it means dry run for all products. When non-empty, it means dry run for specific products and for the other products, they will run in enforced mode.""" - + enforcement_mode: Optional[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode] = None """The mode of policy enforcement. ENFORCED blocks traffic that violates policy, while DRY_RUN only logs violations without blocking. 
When not specified, defaults to ENFORCED.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dry_run_mode_product_filter: - body["dry_run_mode_product_filter"] = [v.value for v in self.dry_run_mode_product_filter] - if self.enforcement_mode is not None: - body["enforcement_mode"] = self.enforcement_mode.value + if self.dry_run_mode_product_filter: body['dry_run_mode_product_filter'] = [v.value for v in self.dry_run_mode_product_filter] + if self.enforcement_mode is not None: body['enforcement_mode'] = self.enforcement_mode.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement into a shallow dictionary of its immediate attributes.""" body = {} - if self.dry_run_mode_product_filter: - body["dry_run_mode_product_filter"] = self.dry_run_mode_product_filter - if self.enforcement_mode is not None: - body["enforcement_mode"] = self.enforcement_mode + if self.dry_run_mode_product_filter: body['dry_run_mode_product_filter'] = self.dry_run_mode_product_filter + if self.enforcement_mode is not None: body['enforcement_mode'] = self.enforcement_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement: """Deserializes the EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement from a dictionary.""" - return cls( - dry_run_mode_product_filter=_repeated_enum( - d, - "dry_run_mode_product_filter", - EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter, - ), - enforcement_mode=_enum( - d, "enforcement_mode", EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode - ), - ) + return cls(dry_run_mode_product_filter=_repeated_enum(d, 'dry_run_mode_product_filter', EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter), enforcement_mode=_enum(d, 'enforcement_mode', EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode)) + + class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter(Enum): """The values should match the list of workloads used in networkconfig.proto""" - - DBSQL = "DBSQL" - ML_SERVING = "ML_SERVING" - + + DBSQL = 'DBSQL' + ML_SERVING = 'ML_SERVING' class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode(Enum): - - DRY_RUN = "DRY_RUN" - ENFORCED = "ENFORCED" - + + + DRY_RUN = 'DRY_RUN' + ENFORCED = 'ENFORCED' class EgressNetworkPolicyNetworkAccessPolicyRestrictionMode(Enum): """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: Databricks can access Internet. No blocking rules will apply. 
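
The newer `EgressNetworkPolicyNetworkAccessPolicy` shape composes the same way; a sketch (illustrative bucket name) restricting egress to one S3 bucket while DBSQL runs in dry-run mode, following the field docstrings above:

    net_policy = EgressNetworkPolicyNetworkAccessPolicy(
        restriction_mode=EgressNetworkPolicyNetworkAccessPolicyRestrictionMode.RESTRICTED_ACCESS,
        allowed_storage_destinations=[
            EgressNetworkPolicyNetworkAccessPolicyStorageDestination(
                bucket_name="my-data-bucket",  # illustrative
                region="us-west-2",
                storage_destination_type=EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType.AWS_S3,
            )
        ],
        policy_enforcement=EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement(
            enforcement_mode=EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode.DRY_RUN,
            # Non-empty filter: listed products run dry-run, per the docstring
            # the remaining products run in enforced mode.
            dry_run_mode_product_filter=[
                EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter.DBSQL
            ],
        ),
    )
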
RESTRICTED_ACCESS: Databricks can only access explicitly allowed internet and storage destinations, as well as UC connections and external locations.""" - - FULL_ACCESS = "FULL_ACCESS" - RESTRICTED_ACCESS = "RESTRICTED_ACCESS" - + + FULL_ACCESS = 'FULL_ACCESS' + RESTRICTED_ACCESS = 'RESTRICTED_ACCESS' @dataclass class EgressNetworkPolicyNetworkAccessPolicyStorageDestination: """Users can specify accessible storage destinations.""" - + azure_storage_account: Optional[str] = None """The Azure storage account name.""" - + azure_storage_service: Optional[str] = None """The Azure storage service type (blob, dfs, etc.).""" - + bucket_name: Optional[str] = None - + region: Optional[str] = None """The region of the S3 bucket.""" - - storage_destination_type: Optional[ - EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType - ] = None + + storage_destination_type: Optional[EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType] = None """The type of storage destination.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.azure_storage_account is not None: - body["azure_storage_account"] = self.azure_storage_account - if self.azure_storage_service is not None: - body["azure_storage_service"] = self.azure_storage_service - if self.bucket_name is not None: - body["bucket_name"] = self.bucket_name - if self.region is not None: - body["region"] = self.region - if self.storage_destination_type is not None: - body["storage_destination_type"] = self.storage_destination_type.value + if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account + if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name + if self.region is not None: body['region'] = self.region + if self.storage_destination_type is not None: body['storage_destination_type'] = self.storage_destination_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.azure_storage_account is not None: - body["azure_storage_account"] = self.azure_storage_account - if self.azure_storage_service is not None: - body["azure_storage_service"] = self.azure_storage_service - if self.bucket_name is not None: - body["bucket_name"] = self.bucket_name - if self.region is not None: - body["region"] = self.region - if self.storage_destination_type is not None: - body["storage_destination_type"] = self.storage_destination_type + if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account + if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service + if self.bucket_name is not None: body['bucket_name'] = self.bucket_name + if self.region is not None: body['region'] = self.region + if self.storage_destination_type is not None: body['storage_destination_type'] = self.storage_destination_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicyStorageDestination: """Deserializes the EgressNetworkPolicyNetworkAccessPolicyStorageDestination from a dictionary.""" - return cls( - azure_storage_account=d.get("azure_storage_account", None), - 
azure_storage_service=d.get("azure_storage_service", None), - bucket_name=d.get("bucket_name", None), - region=d.get("region", None), - storage_destination_type=_enum( - d, - "storage_destination_type", - EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType, - ), - ) - + return cls(azure_storage_account=d.get('azure_storage_account', None), azure_storage_service=d.get('azure_storage_service', None), bucket_name=d.get('bucket_name', None), region=d.get('region', None), storage_destination_type=_enum(d, 'storage_destination_type', EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType)) + -class EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType(Enum): - AWS_S3 = "AWS_S3" - AZURE_STORAGE = "AZURE_STORAGE" - GOOGLE_CLOUD_STORAGE = "GOOGLE_CLOUD_STORAGE" +class EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType(Enum): + + + AWS_S3 = 'AWS_S3' + AZURE_STORAGE = 'AZURE_STORAGE' + GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE' class EgressResourceType(Enum): """The target resources that are supported by Network Connectivity Config. Note: some egress types can support general types that are not defined in EgressResourceType. E.g.: Azure private endpoint supports private link enabled Azure services.""" - - AZURE_BLOB_STORAGE = "AZURE_BLOB_STORAGE" - + + AZURE_BLOB_STORAGE = 'AZURE_BLOB_STORAGE' @dataclass class EmailConfig: addresses: Optional[List[str]] = None """Email addresses to notify.""" - + def as_dict(self) -> dict: """Serializes the EmailConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.addresses: - body["addresses"] = [v for v in self.addresses] + if self.addresses: body['addresses'] = [v for v in self.addresses] return body def as_shallow_dict(self) -> dict: """Serializes the EmailConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.addresses: - body["addresses"] = self.addresses + if self.addresses: body['addresses'] = self.addresses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmailConfig: """Deserializes the EmailConfig from a dictionary.""" - return cls(addresses=d.get("addresses", None)) + return cls(addresses=d.get('addresses', None)) + + @dataclass @@ -2356,141 +2352,137 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Empty: """Deserializes the Empty from a dictionary.""" return cls() + + @dataclass class EnableExportNotebook: boolean_val: Optional[BooleanMessage] = None - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
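Editor's note: for orientation, here is a minimal sketch of the as_dict()/from_dict() round-trip these generated dataclasses implement, using the storage-destination type above. It assumes a databricks-sdk build that contains this patch; the bucket name and region are illustrative.

from databricks.sdk.service.settings import (
    EgressNetworkPolicyNetworkAccessPolicyStorageDestination,
    EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType,
)

StorageDestinationType = EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType

# Build a destination; unset Optional fields stay None and are omitted by as_dict().
dest = EgressNetworkPolicyNetworkAccessPolicyStorageDestination(
    bucket_name="my-bucket",  # illustrative values
    region="us-west-2",
    storage_destination_type=StorageDestinationType.AWS_S3,
)
body = dest.as_dict()
# Enum members serialize to their string values, matching the JSON request body shape.
assert body == {
    "bucket_name": "my-bucket",
    "region": "us-west-2",
    "storage_destination_type": "AWS_S3",
}
# from_dict() maps the string back onto the enum, so the round-trip is lossless.
assert EgressNetworkPolicyNetworkAccessPolicyStorageDestination.from_dict(body) == dest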
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnableExportNotebook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val.as_dict() - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableExportNotebook into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableExportNotebook: """Deserializes the EnableExportNotebook from a dictionary.""" - return cls(boolean_val=_from_dict(d, "boolean_val", BooleanMessage), setting_name=d.get("setting_name", None)) + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), setting_name=d.get('setting_name', None)) + + @dataclass class EnableNotebookTableClipboard: boolean_val: Optional[BooleanMessage] = None - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnableNotebookTableClipboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val.as_dict() - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableNotebookTableClipboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableNotebookTableClipboard: """Deserializes the EnableNotebookTableClipboard from a dictionary.""" - return cls(boolean_val=_from_dict(d, "boolean_val", BooleanMessage), setting_name=d.get("setting_name", None)) + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), setting_name=d.get('setting_name', None)) + + @dataclass class EnableResultsDownloading: boolean_val: Optional[BooleanMessage] = None - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
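Editor's note: one behavioral difference worth keeping in mind with these generated classes is that as_dict() recursively serializes nested messages while as_shallow_dict() keeps them as dataclass instances. A small sketch against EnableExportNotebook above, assuming BooleanMessage carries a single `value` field as defined earlier in this module:

from databricks.sdk.service.settings import BooleanMessage, EnableExportNotebook

setting = EnableExportNotebook(boolean_val=BooleanMessage(value=True), setting_name="default")
# Deep serialization: the nested message becomes a plain dict.
assert setting.as_dict()["boolean_val"] == {"value": True}
# Shallow serialization: the nested message is passed through untouched.
assert setting.as_shallow_dict()["boolean_val"] is setting.boolean_val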
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnableResultsDownloading into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val.as_dict() - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableResultsDownloading into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableResultsDownloading: """Deserializes the EnableResultsDownloading from a dictionary.""" - return cls(boolean_val=_from_dict(d, "boolean_val", BooleanMessage), setting_name=d.get("setting_name", None)) + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), setting_name=d.get('setting_name', None)) + + @dataclass class EnhancedSecurityMonitoring: """SHIELD feature: ESM""" - + is_enabled: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoring into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_enabled is not None: - body["is_enabled"] = self.is_enabled + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body def as_shallow_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoring into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_enabled is not None: - body["is_enabled"] = self.is_enabled + if self.is_enabled is not None: body['is_enabled'] = self.is_enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnhancedSecurityMonitoring: """Deserializes the EnhancedSecurityMonitoring from a dictionary.""" - return cls(is_enabled=d.get("is_enabled", None)) + return cls(is_enabled=d.get('is_enabled', None)) + + @dataclass class EnhancedSecurityMonitoringSetting: enhanced_security_monitoring_workspace: EnhancedSecurityMonitoring """SHIELD feature: ESM""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -2498,78 +2490,68 @@ class EnhancedSecurityMonitoringSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoringSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enhanced_security_monitoring_workspace: - body["enhanced_security_monitoring_workspace"] = self.enhanced_security_monitoring_workspace.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.enhanced_security_monitoring_workspace: body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoringSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.enhanced_security_monitoring_workspace: - body["enhanced_security_monitoring_workspace"] = self.enhanced_security_monitoring_workspace - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.enhanced_security_monitoring_workspace: body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnhancedSecurityMonitoringSetting: """Deserializes the EnhancedSecurityMonitoringSetting from a dictionary.""" - return cls( - enhanced_security_monitoring_workspace=_from_dict( - d, "enhanced_security_monitoring_workspace", EnhancedSecurityMonitoring - ), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(enhanced_security_monitoring_workspace=_from_dict(d, 'enhanced_security_monitoring_workspace', EnhancedSecurityMonitoring), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class EsmEnablementAccount: """Account level policy for ESM""" - + is_enforced: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the EsmEnablementAccount into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_enforced is not None: - body["is_enforced"] = self.is_enforced + if self.is_enforced is not None: body['is_enforced'] = self.is_enforced return body def as_shallow_dict(self) -> dict: """Serializes the EsmEnablementAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_enforced is not None: - body["is_enforced"] = self.is_enforced + if self.is_enforced is not None: body['is_enforced'] = self.is_enforced return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EsmEnablementAccount: """Deserializes the EsmEnablementAccount from a dictionary.""" - return cls(is_enforced=d.get("is_enforced", None)) + return cls(is_enforced=d.get('is_enforced', None)) + + @dataclass class EsmEnablementAccountSetting: esm_enablement_account: EsmEnablementAccount """Account level policy for ESM""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -2577,754 +2559,749 @@ class EsmEnablementAccountSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EsmEnablementAccountSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.esm_enablement_account: - body["esm_enablement_account"] = self.esm_enablement_account.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EsmEnablementAccountSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.esm_enablement_account: - body["esm_enablement_account"] = self.esm_enablement_account - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EsmEnablementAccountSetting: """Deserializes the EsmEnablementAccountSetting from a dictionary.""" - return cls( - esm_enablement_account=_from_dict(d, "esm_enablement_account", EsmEnablementAccount), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(esm_enablement_account=_from_dict(d, 'esm_enablement_account', EsmEnablementAccount), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class ExchangeToken: """The exchange token is the result of the token exchange with the IdP""" - + credential: Optional[str] = None """The requested token.""" - + credential_eol_time: Optional[int] = None """The end-of-life timestamp of the token. 
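Editor's note: the etag docstrings above describe a read-modify-write loop: fetch the setting (and its etag) with a GET, then send the etag back on the PATCH so concurrent writers are detected. Below is a sketch of that flow against the enhanced-security-monitoring setting, assuming the usual generated settings-API shape (get()/update() with allow_missing and field_mask); treat the method names and field mask as illustrative, not authoritative.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import (
    EnhancedSecurityMonitoring,
    EnhancedSecurityMonitoringSetting,
)

w = WorkspaceClient()
current = w.settings.enhanced_security_monitoring.get()  # the response carries a fresh etag
updated = w.settings.enhanced_security_monitoring.update(
    allow_missing=True,
    setting=EnhancedSecurityMonitoringSetting(
        enhanced_security_monitoring_workspace=EnhancedSecurityMonitoring(is_enabled=True),
        etag=current.etag,  # passing the etag back guards against concurrent writes
    ),
    field_mask="enhanced_security_monitoring_workspace.is_enabled",
)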
The value is in milliseconds since the Unix epoch.""" - + owner_id: Optional[int] = None """User ID of the user that owns this token.""" - + scopes: Optional[List[str]] = None """The scopes of access granted in the token.""" - + token_type: Optional[TokenType] = None """The type of this exchange token""" - + def as_dict(self) -> dict: """Serializes the ExchangeToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential is not None: - body["credential"] = self.credential - if self.credential_eol_time is not None: - body["credentialEolTime"] = self.credential_eol_time - if self.owner_id is not None: - body["ownerId"] = self.owner_id - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_type is not None: - body["tokenType"] = self.token_type.value + if self.credential is not None: body['credential'] = self.credential + if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time + if self.owner_id is not None: body['ownerId'] = self.owner_id + if self.scopes: body['scopes'] = [v for v in self.scopes] + if self.token_type is not None: body['tokenType'] = self.token_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ExchangeToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential is not None: - body["credential"] = self.credential - if self.credential_eol_time is not None: - body["credentialEolTime"] = self.credential_eol_time - if self.owner_id is not None: - body["ownerId"] = self.owner_id - if self.scopes: - body["scopes"] = self.scopes - if self.token_type is not None: - body["tokenType"] = self.token_type + if self.credential is not None: body['credential'] = self.credential + if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time + if self.owner_id is not None: body['ownerId'] = self.owner_id + if self.scopes: body['scopes'] = self.scopes + if self.token_type is not None: body['tokenType'] = self.token_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeToken: """Deserializes the ExchangeToken from a dictionary.""" - return cls( - credential=d.get("credential", None), - credential_eol_time=d.get("credentialEolTime", None), - owner_id=d.get("ownerId", None), - scopes=d.get("scopes", None), - token_type=_enum(d, "tokenType", TokenType), - ) + return cls(credential=d.get('credential', None), credential_eol_time=d.get('credentialEolTime', None), owner_id=d.get('ownerId', None), scopes=d.get('scopes', None), token_type=_enum(d, 'tokenType', TokenType)) + + @dataclass class ExchangeTokenRequest: """Exchange a token with the IdP""" - + partition_id: PartitionId """The partition of Credentials store""" - + token_type: List[TokenType] """A list of token types being requested""" - + scopes: List[str] """Array of scopes for the token request.""" - + def as_dict(self) -> dict: """Serializes the ExchangeTokenRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.partition_id: - body["partitionId"] = self.partition_id.as_dict() - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_type: - body["tokenType"] = [v.value for v in self.token_type] + if self.partition_id: body['partitionId'] = self.partition_id.as_dict() + if self.scopes: body['scopes'] = [v for v in self.scopes] + if self.token_type: body['tokenType'] = [v.value for v in self.token_type] return body def as_shallow_dict(self) -> dict: """Serializes the 
ExchangeTokenRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.partition_id: - body["partitionId"] = self.partition_id - if self.scopes: - body["scopes"] = self.scopes - if self.token_type: - body["tokenType"] = self.token_type + if self.partition_id: body['partitionId'] = self.partition_id + if self.scopes: body['scopes'] = self.scopes + if self.token_type: body['tokenType'] = self.token_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeTokenRequest: """Deserializes the ExchangeTokenRequest from a dictionary.""" - return cls( - partition_id=_from_dict(d, "partitionId", PartitionId), - scopes=d.get("scopes", None), - token_type=_repeated_enum(d, "tokenType", TokenType), - ) + return cls(partition_id=_from_dict(d, 'partitionId', PartitionId), scopes=d.get('scopes', None), token_type=_repeated_enum(d, 'tokenType', TokenType)) + + @dataclass class ExchangeTokenResponse: """Exchanged tokens were successfully returned.""" - + values: Optional[List[ExchangeToken]] = None - + def as_dict(self) -> dict: """Serializes the ExchangeTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.values: - body["values"] = [v.as_dict() for v in self.values] + if self.values: body['values'] = [v.as_dict() for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the ExchangeTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.values: - body["values"] = self.values + if self.values: body['values'] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeTokenResponse: """Deserializes the ExchangeTokenResponse from a dictionary.""" - return cls(values=_repeated_dict(d, "values", ExchangeToken)) + return cls(values=_repeated_dict(d, 'values', ExchangeToken)) + + @dataclass class FetchIpAccessListResponse: """An IP access list was successfully returned.""" - + ip_access_list: Optional[IpAccessListInfo] = None """Definition of an IP Access list""" - + def as_dict(self) -> dict: """Serializes the FetchIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the FetchIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FetchIpAccessListResponse: """Deserializes the FetchIpAccessListResponse from a dictionary.""" - return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo)) + return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) + + @dataclass class GenericWebhookConfig: password: Optional[str] = None """[Input-Only][Optional] Password for webhook.""" - + password_set: Optional[bool] = None """[Output-Only] Whether password is set.""" - + url: Optional[str] = None """[Input-Only] URL for webhook.""" - + url_set: Optional[bool] = None """[Output-Only] Whether URL is set.""" - + username: Optional[str] = None """[Input-Only][Optional] Username for webhook.""" - + username_set: Optional[bool] = None """[Output-Only] Whether username is set.""" - + def as_dict(self) -> dict: """Serializes
the GenericWebhookConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.password is not None: - body["password"] = self.password - if self.password_set is not None: - body["password_set"] = self.password_set - if self.url is not None: - body["url"] = self.url - if self.url_set is not None: - body["url_set"] = self.url_set - if self.username is not None: - body["username"] = self.username - if self.username_set is not None: - body["username_set"] = self.username_set + if self.password is not None: body['password'] = self.password + if self.password_set is not None: body['password_set'] = self.password_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + if self.username is not None: body['username'] = self.username + if self.username_set is not None: body['username_set'] = self.username_set return body def as_shallow_dict(self) -> dict: """Serializes the GenericWebhookConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.password is not None: - body["password"] = self.password - if self.password_set is not None: - body["password_set"] = self.password_set - if self.url is not None: - body["url"] = self.url - if self.url_set is not None: - body["url_set"] = self.url_set - if self.username is not None: - body["username"] = self.username - if self.username_set is not None: - body["username_set"] = self.username_set + if self.password is not None: body['password'] = self.password + if self.password_set is not None: body['password_set'] = self.password_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + if self.username is not None: body['username'] = self.username + if self.username_set is not None: body['username_set'] = self.username_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenericWebhookConfig: """Deserializes the GenericWebhookConfig from a dictionary.""" - return cls( - password=d.get("password", None), - password_set=d.get("password_set", None), - url=d.get("url", None), - url_set=d.get("url_set", None), - username=d.get("username", None), - username_set=d.get("username_set", None), - ) + return cls(password=d.get('password', None), password_set=d.get('password_set', None), url=d.get('url', None), url_set=d.get('url_set', None), username=d.get('username', None), username_set=d.get('username_set', None)) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @dataclass class GetIpAccessListResponse: ip_access_list: Optional[IpAccessListInfo] = None """Definition of an IP Access list""" - + def as_dict(self) -> dict: """Serializes the GetIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetIpAccessListResponse: """Deserializes the GetIpAccessListResponse from a dictionary.""" - return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo)) + return 
cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) + + @dataclass class GetIpAccessListsResponse: """IP access lists were successfully returned.""" - + ip_access_lists: Optional[List[IpAccessListInfo]] = None - + def as_dict(self) -> dict: """Serializes the GetIpAccessListsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_lists: - body["ip_access_lists"] = [v.as_dict() for v in self.ip_access_lists] + if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] return body def as_shallow_dict(self) -> dict: """Serializes the GetIpAccessListsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_lists: - body["ip_access_lists"] = self.ip_access_lists + if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetIpAccessListsResponse: """Deserializes the GetIpAccessListsResponse from a dictionary.""" - return cls(ip_access_lists=_repeated_dict(d, "ip_access_lists", IpAccessListInfo)) + return cls(ip_access_lists=_repeated_dict(d, 'ip_access_lists', IpAccessListInfo)) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @dataclass class GetTokenPermissionLevelsResponse: permission_levels: Optional[List[TokenPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetTokenPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetTokenPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetTokenPermissionLevelsResponse: """Deserializes the GetTokenPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", TokenPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', TokenPermissionsDescription)) + + @dataclass class GetTokenResponse: """Token with specified Token ID was successfully returned.""" - + token_info: Optional[TokenInfo] = None - + def as_dict(self) -> dict: """Serializes the GetTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_info: - body["token_info"] = self.token_info.as_dict() + if self.token_info: body['token_info'] = self.token_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_info: - body["token_info"] = self.token_info + if self.token_info: body['token_info'] = self.token_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetTokenResponse: """Deserializes the GetTokenResponse from a dictionary.""" - return cls(token_info=_from_dict(d, "token_info", TokenInfo)) + return cls(token_info=_from_dict(d, 'token_info', TokenInfo)) + + + + + @dataclass class IpAccessListInfo: """Definition of an IP Access list""" - + 
address_count: Optional[int] = None """Total number of IP or CIDR values.""" - + created_at: Optional[int] = None """Creation timestamp in milliseconds.""" - + created_by: Optional[int] = None """User ID of the user who created this list.""" - + enabled: Optional[bool] = None """Specifies whether this IP access list is enabled.""" - + ip_addresses: Optional[List[str]] = None - + label: Optional[str] = None """Label for the IP access list. This **cannot** be empty.""" - + list_id: Optional[str] = None """Universally unique identifier (UUID) of the IP access list.""" - + list_type: Optional[ListType] = None """Type of IP access list. Valid values are as follows and are case-sensitive: * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list.""" - + updated_at: Optional[int] = None """Update timestamp in milliseconds.""" - + updated_by: Optional[int] = None """User ID of the user who updated this list.""" - + def as_dict(self) -> dict: """Serializes the IpAccessListInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.address_count is not None: - body["address_count"] = self.address_count - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_id is not None: - body["list_id"] = self.list_id - if self.list_type is not None: - body["list_type"] = self.list_type.value - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.address_count is not None: body['address_count'] = self.address_count + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] + if self.label is not None: body['label'] = self.label + if self.list_id is not None: body['list_id'] = self.list_id + if self.list_type is not None: body['list_type'] = self.list_type.value + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the IpAccessListInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.address_count is not None: - body["address_count"] = self.address_count - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_id is not None: - body["list_id"] = self.list_id - if self.list_type is not None: - body["list_type"] = self.list_type - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.address_count is not None: body['address_count'] = self.address_count + if self.created_at is not 
None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_id is not None: body['list_id'] = self.list_id + if self.list_type is not None: body['list_type'] = self.list_type + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> IpAccessListInfo: """Deserializes the IpAccessListInfo from a dictionary.""" - return cls( - address_count=d.get("address_count", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - enabled=d.get("enabled", None), - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_id=d.get("list_id", None), - list_type=_enum(d, "list_type", ListType), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(address_count=d.get('address_count', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), enabled=d.get('enabled', None), ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_id=d.get('list_id', None), list_type=_enum(d, 'list_type', ListType), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + @dataclass class ListIpAccessListResponse: """IP access lists were successfully returned.""" - + ip_access_lists: Optional[List[IpAccessListInfo]] = None - + def as_dict(self) -> dict: """Serializes the ListIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_lists: - body["ip_access_lists"] = [v.as_dict() for v in self.ip_access_lists] + if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] return body def as_shallow_dict(self) -> dict: """Serializes the ListIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_lists: - body["ip_access_lists"] = self.ip_access_lists + if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListIpAccessListResponse: """Deserializes the ListIpAccessListResponse from a dictionary.""" - return cls(ip_access_lists=_repeated_dict(d, "ip_access_lists", IpAccessListInfo)) - - -@dataclass -class ListNccAzurePrivateEndpointRulesResponse: - """The private endpoint rule list was successfully retrieved.""" - - items: Optional[List[NccAzurePrivateEndpointRule]] = None + return cls(ip_access_lists=_repeated_dict(d, 'ip_access_lists', IpAccessListInfo)) + - next_page_token: Optional[str] = None - """A token that can be used to get the next page of results. 
If null, there are no more results to - show.""" - def as_dict(self) -> dict: - """Serializes the ListNccAzurePrivateEndpointRulesResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.items: - body["items"] = [v.as_dict() for v in self.items] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - def as_shallow_dict(self) -> dict: - """Serializes the ListNccAzurePrivateEndpointRulesResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.items: - body["items"] = self.items - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListNccAzurePrivateEndpointRulesResponse: - """Deserializes the ListNccAzurePrivateEndpointRulesResponse from a dictionary.""" - return cls( - items=_repeated_dict(d, "items", NccAzurePrivateEndpointRule), - next_page_token=d.get("next_page_token", None), - ) @dataclass class ListNetworkConnectivityConfigurationsResponse: """The network connectivity configuration list was successfully retrieved.""" - + items: Optional[List[NetworkConnectivityConfiguration]] = None - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. If null, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the ListNetworkConnectivityConfigurationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items: - body["items"] = [v.as_dict() for v in self.items] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.items: body['items'] = [v.as_dict() for v in self.items] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListNetworkConnectivityConfigurationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items: - body["items"] = self.items - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.items: body['items'] = self.items + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNetworkConnectivityConfigurationsResponse: """Deserializes the ListNetworkConnectivityConfigurationsResponse from a dictionary.""" - return cls( - items=_repeated_dict(d, "items", NetworkConnectivityConfiguration), - next_page_token=d.get("next_page_token", None), - ) + return cls(items=_repeated_dict(d, 'items', NetworkConnectivityConfiguration), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListNetworkPoliciesResponse: items: Optional[List[AccountNetworkPolicy]] = None """List of network policies.""" - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If null, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the ListNetworkPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items: - body["items"] = [v.as_dict() for v in self.items] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.items: body['items'] = [v.as_dict() for v in self.items] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListNetworkPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items: - body["items"] = self.items - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.items: body['items'] = self.items + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNetworkPoliciesResponse: """Deserializes the ListNetworkPoliciesResponse from a dictionary.""" - return cls( - items=_repeated_dict(d, "items", AccountNetworkPolicy), next_page_token=d.get("next_page_token", None) - ) + return cls(items=_repeated_dict(d, 'items', AccountNetworkPolicy), next_page_token=d.get('next_page_token', None)) + + + + + @dataclass class ListNotificationDestinationsResponse: next_page_token: Optional[str] = None """Page token for the next page of results.""" - + results: Optional[List[ListNotificationDestinationsResult]] = None - + def as_dict(self) -> dict: """Serializes the ListNotificationDestinationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = [v.as_dict() for v in self.results] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ListNotificationDestinationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = self.results + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNotificationDestinationsResponse: """Deserializes the ListNotificationDestinationsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - results=_repeated_dict(d, "results", ListNotificationDestinationsResult), - ) + return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', ListNotificationDestinationsResult)) + + @dataclass class ListNotificationDestinationsResult: destination_type: Optional[DestinationType] = None """[Output-only] The type of the notification destination.
The type can not be changed once set.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + id: Optional[str] = None """UUID identifying notification destination.""" - + def as_dict(self) -> dict: """Serializes the ListNotificationDestinationsResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_type is not None: - body["destination_type"] = self.destination_type.value - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id + if self.destination_type is not None: body['destination_type'] = self.destination_type.value + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the ListNotificationDestinationsResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_type is not None: - body["destination_type"] = self.destination_type - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id + if self.destination_type is not None: body['destination_type'] = self.destination_type + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNotificationDestinationsResult: """Deserializes the ListNotificationDestinationsResult from a dictionary.""" - return cls( - destination_type=_enum(d, "destination_type", DestinationType), - display_name=d.get("display_name", None), - id=d.get("id", None), - ) + return cls(destination_type=_enum(d, 'destination_type', DestinationType), display_name=d.get('display_name', None), id=d.get('id', None)) + + + + + + + +@dataclass +class ListPrivateEndpointRulesResponse: + """The private endpoint rule list was successfully retrieved.""" + + items: Optional[List[NccPrivateEndpointRule]] = None + + next_page_token: Optional[str] = None + """A token that can be used to get the next page of results. 
If null, there are no more results to + show.""" + + def as_dict(self) -> dict: + """Serializes the ListPrivateEndpointRulesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.items: body['items'] = [v.as_dict() for v in self.items] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ListPrivateEndpointRulesResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.items: body['items'] = self.items + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListPrivateEndpointRulesResponse: + """Deserializes the ListPrivateEndpointRulesResponse from a dictionary.""" + return cls(items=_repeated_dict(d, 'items', NccPrivateEndpointRule), next_page_token=d.get('next_page_token', None)) + + @dataclass class ListPublicTokensResponse: token_infos: Optional[List[PublicTokenInfo]] = None """The information for each token.""" - + def as_dict(self) -> dict: """Serializes the ListPublicTokensResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_infos: - body["token_infos"] = [v.as_dict() for v in self.token_infos] + if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] return body def as_shallow_dict(self) -> dict: """Serializes the ListPublicTokensResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_infos: - body["token_infos"] = self.token_infos + if self.token_infos: body['token_infos'] = self.token_infos return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListPublicTokensResponse: """Deserializes the ListPublicTokensResponse from a dictionary.""" - return cls(token_infos=_repeated_dict(d, "token_infos", PublicTokenInfo)) + return cls(token_infos=_repeated_dict(d, 'token_infos', PublicTokenInfo)) + + + + + @dataclass class ListTokensResponse: """Tokens were successfully returned.""" - + token_infos: Optional[List[TokenInfo]] = None """Token metadata of each user-created token in the workspace""" - + def as_dict(self) -> dict: """Serializes the ListTokensResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_infos: - body["token_infos"] = [v.as_dict() for v in self.token_infos] + if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] return body def as_shallow_dict(self) -> dict: """Serializes the ListTokensResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_infos: - body["token_infos"] = self.token_infos + if self.token_infos: body['token_infos'] = self.token_infos return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListTokensResponse: """Deserializes the ListTokensResponse from a dictionary.""" - return cls(token_infos=_repeated_dict(d, "token_infos", TokenInfo)) + return cls(token_infos=_repeated_dict(d, 'token_infos', TokenInfo)) + + class ListType(Enum): """Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. 
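Editor's note: the new ListPrivateEndpointRulesResponse above follows the SDK's usual cursor pagination: call the endpoint repeatedly, feeding next_page_token back in, until the token comes back empty. A sketch of that loop, where fetch_page stands in for whatever client method returns this response type (the generated client normally hides this loop behind an iterator):

from typing import Callable, Iterator, Optional

from databricks.sdk.service.settings import (
    ListPrivateEndpointRulesResponse,
    NccPrivateEndpointRule,
)

def iter_rules(
    fetch_page: Callable[[Optional[str]], ListPrivateEndpointRulesResponse],
) -> Iterator[NccPrivateEndpointRule]:
    page_token: Optional[str] = None
    while True:
        page = fetch_page(page_token)
        yield from page.items or []     # items may be omitted on an empty page
        page_token = page.next_page_token
        if not page_token:              # a null/empty token means no more results
            return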
IP addresses in the block list are excluded even if they are included in an allow list.""" - - ALLOW = "ALLOW" - BLOCK = "BLOCK" - + + ALLOW = 'ALLOW' + BLOCK = 'BLOCK' @dataclass class LlmProxyPartnerPoweredAccount: boolean_val: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -3332,49 +3309,41 @@ class LlmProxyPartnerPoweredAccount: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredAccount into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredAccount: """Deserializes the LlmProxyPartnerPoweredAccount from a dictionary.""" - return cls( - boolean_val=_from_dict(d, "boolean_val", BooleanMessage), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class LlmProxyPartnerPoweredEnforce: boolean_val: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -3382,49 +3351,41 @@ class LlmProxyPartnerPoweredEnforce: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
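Editor's note: to make the ALLOW/BLOCK semantics above concrete, a short sketch of deserializing an IP access list payload; from_dict() maps the case-sensitive string onto the ListType enum via _enum(). The label and CIDR values are illustrative.

from databricks.sdk.service.settings import IpAccessListInfo, ListType

payload = {
    "label": "office",                     # illustrative
    "list_type": "ALLOW",                  # case-sensitive: ALLOW or BLOCK
    "ip_addresses": ["192.168.100.0/22"],
    "enabled": True,
}
acl = IpAccessListInfo.from_dict(payload)
assert acl.list_type is ListType.ALLOW
assert acl.as_dict()["list_type"] == "ALLOW"  # the enum re-serializes to its string value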
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredEnforce into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredEnforce into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredEnforce: """Deserializes the LlmProxyPartnerPoweredEnforce from a dictionary.""" - return cls( - boolean_val=_from_dict(d, "boolean_val", BooleanMessage), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class LlmProxyPartnerPoweredWorkspace: boolean_val: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -3432,111 +3393,101 @@ class LlmProxyPartnerPoweredWorkspace: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredWorkspace into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val.as_dict() - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredWorkspace into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: - body["boolean_val"] = self.boolean_val - if self.etag is not None: - body["etag"] = self.etag - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredWorkspace: """Deserializes the LlmProxyPartnerPoweredWorkspace from a dictionary.""" - return cls( - boolean_val=_from_dict(d, "boolean_val", BooleanMessage), - etag=d.get("etag", None), - setting_name=d.get("setting_name", None), - ) + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class MicrosoftTeamsConfig: url: Optional[str] = None """[Input-Only] URL for Microsoft Teams.""" - + url_set: Optional[bool] = None """[Output-Only] Whether URL is set.""" - + def as_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.url is not None: - body["url"] = self.url - if self.url_set is not None: - body["url_set"] = self.url_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set return body def as_shallow_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.url is not None: - body["url"] = self.url - if self.url_set is not None: - body["url_set"] = self.url_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MicrosoftTeamsConfig: """Deserializes the MicrosoftTeamsConfig from a dictionary.""" - return cls(url=d.get("url", None), url_set=d.get("url_set", None)) + return cls(url=d.get('url', None), url_set=d.get('url_set', None)) + + @dataclass class NccAwsStableIpRule: """The stable AWS IP CIDR blocks. 
You can use these to configure the firewall of your resources to allow traffic from your Databricks workspace.""" - + cidr_blocks: Optional[List[str]] = None """The list of stable IP CIDR blocks from which Databricks network traffic originates when accessing your resources.""" - + def as_dict(self) -> dict: """Serializes the NccAwsStableIpRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cidr_blocks: - body["cidr_blocks"] = [v for v in self.cidr_blocks] + if self.cidr_blocks: body['cidr_blocks'] = [v for v in self.cidr_blocks] return body def as_shallow_dict(self) -> dict: """Serializes the NccAwsStableIpRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.cidr_blocks: - body["cidr_blocks"] = self.cidr_blocks + if self.cidr_blocks: body['cidr_blocks'] = self.cidr_blocks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccAwsStableIpRule: """Deserializes the NccAwsStableIpRule from a dictionary.""" - return cls(cidr_blocks=d.get("cidr_blocks", None)) + return cls(cidr_blocks=d.get('cidr_blocks', None)) + + @dataclass class NccAzurePrivateEndpointRule: """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization.""" - + connection_state: Optional[NccAzurePrivateEndpointRuleConnectionState] = None """The current status of this private endpoint. The private endpoint rules are effective only if the connection state is ESTABLISHED. Remember that you must approve new endpoints on your @@ -3547,172 +3498,131 @@ class NccAzurePrivateEndpointRule: link resource owner. - DISCONNECTED: Connection was removed by the private link resource owner, the private endpoint becomes informative and should be deleted for clean-up. - EXPIRED: If the endpoint was created but not approved in 14 days, it will be EXPIRED.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when this object was created.""" - + deactivated: Optional[bool] = None """Whether this private endpoint is deactivated.""" - + deactivated_at: Optional[int] = None """Time in epoch milliseconds when this object was deactivated.""" - + domain_names: Optional[List[str]] = None - """Only used by private endpoints to customer-managed resources. + """Not used by customer-managed private endpoint services. Domain names of target private link service. When updating this field, the full list of target domain_names must be specified.""" - + endpoint_name: Optional[str] = None """The name of the Azure private endpoint resource.""" - + group_id: Optional[str] = None - """Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer | - mysqlServer + """Only used by private endpoints to Azure first-party services. The sub-resource type (group ID) of the target resource. 
Note that to connect to workspace root storage (root DBFS), you need two endpoints, one for blob and one for dfs.""" - + network_connectivity_config_id: Optional[str] = None """The ID of a network connectivity configuration, which is the parent resource of this private endpoint rule object.""" - + resource_id: Optional[str] = None """The Azure resource ID of the target resource.""" - + rule_id: Optional[str] = None """The ID of a private endpoint rule.""" - + updated_time: Optional[int] = None """Time in epoch milliseconds when this object was updated.""" - + def as_dict(self) -> dict: """Serializes the NccAzurePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_state is not None: - body["connection_state"] = self.connection_state.value - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.deactivated is not None: - body["deactivated"] = self.deactivated - if self.deactivated_at is not None: - body["deactivated_at"] = self.deactivated_at - if self.domain_names: - body["domain_names"] = [v for v in self.domain_names] - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.group_id is not None: - body["group_id"] = self.group_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.resource_id is not None: - body["resource_id"] = self.resource_id - if self.rule_id is not None: - body["rule_id"] = self.rule_id - if self.updated_time is not None: - body["updated_time"] = self.updated_time + if self.connection_state is not None: body['connection_state'] = self.connection_state.value + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.domain_names: body['domain_names'] = [v for v in self.domain_names] + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.group_id is not None: body['group_id'] = self.group_id + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time return body def as_shallow_dict(self) -> dict: """Serializes the NccAzurePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_state is not None: - body["connection_state"] = self.connection_state - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.deactivated is not None: - body["deactivated"] = self.deactivated - if self.deactivated_at is not None: - body["deactivated_at"] = self.deactivated_at - if self.domain_names: - body["domain_names"] = self.domain_names - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.group_id is not None: - body["group_id"] = self.group_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.resource_id is not None: - body["resource_id"] = self.resource_id - if self.rule_id is not None: - body["rule_id"] = self.rule_id - if self.updated_time is not None: - 
body["updated_time"] = self.updated_time + if self.connection_state is not None: body['connection_state'] = self.connection_state + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.domain_names: body['domain_names'] = self.domain_names + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.group_id is not None: body['group_id'] = self.group_id + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccAzurePrivateEndpointRule: """Deserializes the NccAzurePrivateEndpointRule from a dictionary.""" - return cls( - connection_state=_enum(d, "connection_state", NccAzurePrivateEndpointRuleConnectionState), - creation_time=d.get("creation_time", None), - deactivated=d.get("deactivated", None), - deactivated_at=d.get("deactivated_at", None), - domain_names=d.get("domain_names", None), - endpoint_name=d.get("endpoint_name", None), - group_id=d.get("group_id", None), - network_connectivity_config_id=d.get("network_connectivity_config_id", None), - resource_id=d.get("resource_id", None), - rule_id=d.get("rule_id", None), - updated_time=d.get("updated_time", None), - ) - + return cls(connection_state=_enum(d, 'connection_state', NccAzurePrivateEndpointRuleConnectionState), creation_time=d.get('creation_time', None), deactivated=d.get('deactivated', None), deactivated_at=d.get('deactivated_at', None), domain_names=d.get('domain_names', None), endpoint_name=d.get('endpoint_name', None), group_id=d.get('group_id', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), resource_id=d.get('resource_id', None), rule_id=d.get('rule_id', None), updated_time=d.get('updated_time', None)) + -class NccAzurePrivateEndpointRuleConnectionState(Enum): - DISCONNECTED = "DISCONNECTED" - ESTABLISHED = "ESTABLISHED" - EXPIRED = "EXPIRED" - INIT = "INIT" - PENDING = "PENDING" - REJECTED = "REJECTED" +class NccAzurePrivateEndpointRuleConnectionState(Enum): + + + DISCONNECTED = 'DISCONNECTED' + ESTABLISHED = 'ESTABLISHED' + EXPIRED = 'EXPIRED' + INIT = 'INIT' + PENDING = 'PENDING' + REJECTED = 'REJECTED' @dataclass class NccAzureServiceEndpointRule: """The stable Azure service endpoints. 
You can configure the firewall of your Azure resources to allow traffic from your Databricks serverless compute resources.""" - + subnets: Optional[List[str]] = None """The list of subnets from which Databricks network traffic originates when accessing your Azure resources.""" - + target_region: Optional[str] = None """The Azure region in which this service endpoint rule applies.""" - + target_services: Optional[List[EgressResourceType]] = None """The Azure services to which this service endpoint rule applies.""" - + def as_dict(self) -> dict: """Serializes the NccAzureServiceEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.subnets: - body["subnets"] = [v for v in self.subnets] - if self.target_region is not None: - body["target_region"] = self.target_region - if self.target_services: - body["target_services"] = [v.value for v in self.target_services] + if self.subnets: body['subnets'] = [v for v in self.subnets] + if self.target_region is not None: body['target_region'] = self.target_region + if self.target_services: body['target_services'] = [v.value for v in self.target_services] return body def as_shallow_dict(self) -> dict: """Serializes the NccAzureServiceEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.subnets: - body["subnets"] = self.subnets - if self.target_region is not None: - body["target_region"] = self.target_region - if self.target_services: - body["target_services"] = self.target_services + if self.subnets: body['subnets'] = self.subnets + if self.target_region is not None: body['target_region'] = self.target_region + if self.target_services: body['target_services'] = self.target_services return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccAzureServiceEndpointRule: """Deserializes the NccAzureServiceEndpointRule from a dictionary.""" - return cls( - subnets=d.get("subnets", None), - target_region=d.get("target_region", None), - target_services=_repeated_enum(d, "target_services", EgressResourceType), - ) + return cls(subnets=d.get('subnets', None), target_region=d.get('target_region', None), target_services=_repeated_enum(d, 'target_services', EgressResourceType)) + + @dataclass @@ -3721,184 +3631,291 @@ class NccEgressConfig: """The network connectivity rules that are applied by default without resource specific configurations. You can find the stable network information of your serverless compute resources here.""" - + target_rules: Optional[NccEgressTargetRules] = None """The network connectivity rules that are configured for each destination.
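# Sketch: the same dict helpers cover the output-mostly default rules, e.g. for
# inspecting what a GET returned. Subnets and region below are illustrative only.
from databricks.sdk.service.settings import NccAzureServiceEndpointRule

rule = NccAzureServiceEndpointRule(subnets=["10.0.1.0/24", "10.0.2.0/24"],
                                   target_region="eastus2")
assert rule.as_dict() == {"subnets": ["10.0.1.0/24", "10.0.2.0/24"],
                          "target_region": "eastus2"}   # unset fields are omitted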
These rules override default rules.""" - + def as_dict(self) -> dict: """Serializes the NccEgressConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_rules: - body["default_rules"] = self.default_rules.as_dict() - if self.target_rules: - body["target_rules"] = self.target_rules.as_dict() + if self.default_rules: body['default_rules'] = self.default_rules.as_dict() + if self.target_rules: body['target_rules'] = self.target_rules.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the NccEgressConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_rules: - body["default_rules"] = self.default_rules - if self.target_rules: - body["target_rules"] = self.target_rules + if self.default_rules: body['default_rules'] = self.default_rules + if self.target_rules: body['target_rules'] = self.target_rules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccEgressConfig: """Deserializes the NccEgressConfig from a dictionary.""" - return cls( - default_rules=_from_dict(d, "default_rules", NccEgressDefaultRules), - target_rules=_from_dict(d, "target_rules", NccEgressTargetRules), - ) + return cls(default_rules=_from_dict(d, 'default_rules', NccEgressDefaultRules), target_rules=_from_dict(d, 'target_rules', NccEgressTargetRules)) + + @dataclass class NccEgressDefaultRules: """Default rules don't have specific targets.""" - + aws_stable_ip_rule: Optional[NccAwsStableIpRule] = None """The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to allow traffic from your Databricks workspace.""" - + azure_service_endpoint_rule: Optional[NccAzureServiceEndpointRule] = None """The stable Azure service endpoints. You can configure the firewall of your Azure resources to allow traffic from your Databricks serverless compute resources.""" - + def as_dict(self) -> dict: """Serializes the NccEgressDefaultRules into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_stable_ip_rule: - body["aws_stable_ip_rule"] = self.aws_stable_ip_rule.as_dict() - if self.azure_service_endpoint_rule: - body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule.as_dict() + if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule.as_dict() + if self.azure_service_endpoint_rule: body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the NccEgressDefaultRules into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_stable_ip_rule: - body["aws_stable_ip_rule"] = self.aws_stable_ip_rule - if self.azure_service_endpoint_rule: - body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule + if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule + if self.azure_service_endpoint_rule: body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccEgressDefaultRules: """Deserializes the NccEgressDefaultRules from a dictionary.""" - return cls( - aws_stable_ip_rule=_from_dict(d, "aws_stable_ip_rule", NccAwsStableIpRule), - azure_service_endpoint_rule=_from_dict(d, "azure_service_endpoint_rule", NccAzureServiceEndpointRule), - ) + return cls(aws_stable_ip_rule=_from_dict(d, 'aws_stable_ip_rule', NccAwsStableIpRule), azure_service_endpoint_rule=_from_dict(d, 'azure_service_endpoint_rule', 
NccAzureServiceEndpointRule)) + + @dataclass class NccEgressTargetRules: """Target rule controls the egress rules that are dedicated to specific resources.""" - + + aws_private_endpoint_rules: Optional[List[CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule]] = None + """AWS private endpoint rule controls the AWS private endpoint based egress rules.""" + azure_private_endpoint_rules: Optional[List[NccAzurePrivateEndpointRule]] = None - + def as_dict(self) -> dict: """Serializes the NccEgressTargetRules into a dictionary suitable for use as a JSON request body.""" body = {} - if self.azure_private_endpoint_rules: - body["azure_private_endpoint_rules"] = [v.as_dict() for v in self.azure_private_endpoint_rules] + if self.aws_private_endpoint_rules: body['aws_private_endpoint_rules'] = [v.as_dict() for v in self.aws_private_endpoint_rules] + if self.azure_private_endpoint_rules: body['azure_private_endpoint_rules'] = [v.as_dict() for v in self.azure_private_endpoint_rules] return body def as_shallow_dict(self) -> dict: """Serializes the NccEgressTargetRules into a shallow dictionary of its immediate attributes.""" body = {} - if self.azure_private_endpoint_rules: - body["azure_private_endpoint_rules"] = self.azure_private_endpoint_rules + if self.aws_private_endpoint_rules: body['aws_private_endpoint_rules'] = self.aws_private_endpoint_rules + if self.azure_private_endpoint_rules: body['azure_private_endpoint_rules'] = self.azure_private_endpoint_rules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccEgressTargetRules: """Deserializes the NccEgressTargetRules from a dictionary.""" - return cls( - azure_private_endpoint_rules=_repeated_dict(d, "azure_private_endpoint_rules", NccAzurePrivateEndpointRule) - ) + return cls(aws_private_endpoint_rules=_repeated_dict(d, 'aws_private_endpoint_rules', CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule), azure_private_endpoint_rules=_repeated_dict(d, 'azure_private_endpoint_rules', NccAzurePrivateEndpointRule)) + + + + +@dataclass +class NccPrivateEndpointRule: + """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure + portal after initialization.""" + + account_id: Optional[str] = None + """Databricks account ID. You can find your account ID from the Accounts Console.""" + + connection_state: Optional[NccPrivateEndpointRulePrivateLinkConnectionState] = None + """The current status of this private endpoint. The private endpoint rules are effective only if + the connection state is ESTABLISHED. Remember that you must approve new endpoints on your + resources in the Cloud console before they take effect. The possible values are: - PENDING: The + endpoint has been created and pending approval. - ESTABLISHED: The endpoint has been approved + and is ready to use in your serverless compute resources. - REJECTED: Connection was rejected by + the private link resource owner. - DISCONNECTED: Connection was removed by the private link + resource owner, the private endpoint becomes informative and should be deleted for clean-up. 
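# Sketch of the as_dict vs. as_shallow_dict contract on nested types: as_dict
# recurses into child dataclasses, as_shallow_dict leaves them as objects.
# Assumes the module layout of this patch; the endpoint name is made up.
from databricks.sdk.service.settings import (
    NccAzurePrivateEndpointRule, NccEgressTargetRules)

rules = NccEgressTargetRules(
    azure_private_endpoint_rules=[NccAzurePrivateEndpointRule(endpoint_name="pe-1")])
assert rules.as_dict() == {"azure_private_endpoint_rules": [{"endpoint_name": "pe-1"}]}
assert isinstance(rules.as_shallow_dict()["azure_private_endpoint_rules"][0],
                  NccAzurePrivateEndpointRule)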
- + EXPIRED: If the endpoint was created but not approved in 14 days, it will be EXPIRED.""" + + creation_time: Optional[int] = None + """Time in epoch milliseconds when this object was created.""" + + deactivated: Optional[bool] = None + """Whether this private endpoint is deactivated.""" + + deactivated_at: Optional[int] = None + """Time in epoch milliseconds when this object was deactivated.""" + + domain_names: Optional[List[str]] = None + """Only used by private endpoints to customer-managed private endpoint services. + + Domain names of target private link service. When updating this field, the full list of target + domain_names must be specified.""" + + enabled: Optional[bool] = None + """Only used by private endpoints towards an AWS S3 service. + + Update this field to activate/deactivate this private endpoint to allow egress access from + serverless compute resources.""" + + endpoint_name: Optional[str] = None + """The name of the Azure private endpoint resource.""" + + endpoint_service: Optional[str] = None + """The full target AWS endpoint service name that connects to the destination resources of the + private endpoint.""" + + group_id: Optional[str] = None + """Not used by customer-managed private endpoint services. + + The sub-resource type (group ID) of the target resource. Note that to connect to workspace root + storage (root DBFS), you need two endpoints, one for blob and one for dfs.""" + + network_connectivity_config_id: Optional[str] = None + """The ID of a network connectivity configuration, which is the parent resource of this private + endpoint rule object.""" + + resource_id: Optional[str] = None + """The Azure resource ID of the target resource.""" + + resource_names: Optional[List[str]] = None + """Only used by private endpoints towards the AWS S3 service. + + The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names + must be in the same region as the NCC/endpoint service. When updating this field, we perform a + full update on this field. Please ensure a full list of desired resource_names is provided.""" + + rule_id: Optional[str] = None + """The ID of a private endpoint rule.""" + + updated_time: Optional[int] = None + """Time in epoch milliseconds when this object was updated.""" + + vpc_endpoint_id: Optional[str] = None + """The AWS VPC endpoint ID.
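# Sketch: the AWS-flavored fields (enabled, endpoint_service, resource_names,
# vpc_endpoint_id) ride in the same rule object as the Azure ones. The payload
# below is hypothetical; the endpoint service name follows AWS's
# com.amazonaws.<region>.s3 convention.
from databricks.sdk.service.settings import NccPrivateEndpointRule

rule = NccPrivateEndpointRule.from_dict({
    "connection_state": "ESTABLISHED",
    "enabled": True,
    "endpoint_service": "com.amazonaws.us-east-1.s3",
    "resource_names": ["my-bucket"],
})
assert rule.enabled and rule.resource_names == ["my-bucket"]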
You can use this ID to identify the VPC endpoint created by Databricks.""" + + def as_dict(self) -> dict: + """Serializes the NccPrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.connection_state is not None: body['connection_state'] = self.connection_state.value + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.domain_names: body['domain_names'] = [v for v in self.domain_names] + if self.enabled is not None: body['enabled'] = self.enabled + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service + if self.group_id is not None: body['group_id'] = self.group_id + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.resource_names: body['resource_names'] = [v for v in self.resource_names] + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time + if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the NccPrivateEndpointRule into a shallow dictionary of its immediate attributes.""" + body = {} + if self.account_id is not None: body['account_id'] = self.account_id + if self.connection_state is not None: body['connection_state'] = self.connection_state + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.deactivated is not None: body['deactivated'] = self.deactivated + if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at + if self.domain_names: body['domain_names'] = self.domain_names + if self.enabled is not None: body['enabled'] = self.enabled + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service + if self.group_id is not None: body['group_id'] = self.group_id + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.resource_names: body['resource_names'] = self.resource_names + if self.rule_id is not None: body['rule_id'] = self.rule_id + if self.updated_time is not None: body['updated_time'] = self.updated_time + if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> NccPrivateEndpointRule: + """Deserializes the NccPrivateEndpointRule from a dictionary.""" + return cls(account_id=d.get('account_id', None), connection_state=_enum(d, 'connection_state', NccPrivateEndpointRulePrivateLinkConnectionState), creation_time=d.get('creation_time', None), deactivated=d.get('deactivated', None), deactivated_at=d.get('deactivated_at', None), domain_names=d.get('domain_names', None), enabled=d.get('enabled', None), endpoint_name=d.get('endpoint_name', None), endpoint_service=d.get('endpoint_service', None), 
group_id=d.get('group_id', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), resource_id=d.get('resource_id', None), resource_names=d.get('resource_names', None), rule_id=d.get('rule_id', None), updated_time=d.get('updated_time', None), vpc_endpoint_id=d.get('vpc_endpoint_id', None)) + + +class NccPrivateEndpointRulePrivateLinkConnectionState(Enum): + + + DISCONNECTED = 'DISCONNECTED' + ESTABLISHED = 'ESTABLISHED' + EXPIRED = 'EXPIRED' + PENDING = 'PENDING' + REJECTED = 'REJECTED' + @dataclass class NetworkConnectivityConfiguration: """Properties of the new network connectivity configuration.""" - + account_id: Optional[str] = None - """The Databricks account ID that hosts the credential.""" - + """Your Databricks account ID. You can find your account ID in your Databricks accounts console.""" + creation_time: Optional[int] = None """Time in epoch milliseconds when this object was created.""" - + egress_config: Optional[NccEgressConfig] = None """The network connectivity rules that apply to network traffic from your serverless compute resources.""" - + name: Optional[str] = None """The name of the network connectivity configuration. The name can contain alphanumeric characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the regular expression ^[0-9a-zA-Z-_]{3,30}$""" - + network_connectivity_config_id: Optional[str] = None """Databricks network connectivity configuration ID.""" - + region: Optional[str] = None """The region for the network connectivity configuration. Only workspaces in the same region can be attached to the network connectivity configuration.""" - + updated_time: Optional[int] = None """Time in epoch milliseconds when this object was updated.""" - + def as_dict(self) -> dict: """Serializes the NetworkConnectivityConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.egress_config: - body["egress_config"] = self.egress_config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.region is not None: - body["region"] = self.region - if self.updated_time is not None: - body["updated_time"] = self.updated_time + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.egress_config: body['egress_config'] = self.egress_config.as_dict() + if self.name is not None: body['name'] = self.name + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.region is not None: body['region'] = self.region + if self.updated_time is not None: body['updated_time'] = self.updated_time return body def as_shallow_dict(self) -> dict: """Serializes the NetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: - body["account_id"] = self.account_id - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.egress_config: - body["egress_config"] = self.egress_config - if self.name is not None: - body["name"] = self.name - if self.network_connectivity_config_id is not None: - 
body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.region is not None: - body["region"] = self.region - if self.updated_time is not None: - body["updated_time"] = self.updated_time + if self.account_id is not None: body['account_id'] = self.account_id + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.egress_config: body['egress_config'] = self.egress_config + if self.name is not None: body['name'] = self.name + if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id + if self.region is not None: body['region'] = self.region + if self.updated_time is not None: body['updated_time'] = self.updated_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NetworkConnectivityConfiguration: """Deserializes the NetworkConnectivityConfiguration from a dictionary.""" - return cls( - account_id=d.get("account_id", None), - creation_time=d.get("creation_time", None), - egress_config=_from_dict(d, "egress_config", NccEgressConfig), - name=d.get("name", None), - network_connectivity_config_id=d.get("network_connectivity_config_id", None), - region=d.get("region", None), - updated_time=d.get("updated_time", None), - ) + return cls(account_id=d.get('account_id', None), creation_time=d.get('creation_time', None), egress_config=_from_dict(d, 'egress_config', NccEgressConfig), name=d.get('name', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), region=d.get('region', None), updated_time=d.get('updated_time', None)) + + @dataclass @@ -3908,28 +3925,28 @@ class NetworkPolicyEgress: the format expected by the dataplane, see networkconfig.textproto). This policy should be consistent with [[com.databricks.api.proto.settingspolicy.EgressNetworkPolicy]]. Details see API-design: https://docs.google.com/document/d/1DKWO_FpZMCY4cF2O62LpwII1lx8gsnDGG-qgE3t3TOA/""" - + network_access: Optional[EgressNetworkPolicyNetworkAccessPolicy] = None """The access policy enforced for egress traffic to the internet.""" - + def as_dict(self) -> dict: """Serializes the NetworkPolicyEgress into a dictionary suitable for use as a JSON request body.""" body = {} - if self.network_access: - body["network_access"] = self.network_access.as_dict() + if self.network_access: body['network_access'] = self.network_access.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the NetworkPolicyEgress into a shallow dictionary of its immediate attributes.""" body = {} - if self.network_access: - body["network_access"] = self.network_access + if self.network_access: body['network_access'] = self.network_access return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NetworkPolicyEgress: """Deserializes the NetworkPolicyEgress from a dictionary.""" - return cls(network_access=_from_dict(d, "network_access", EgressNetworkPolicyNetworkAccessPolicy)) + return cls(network_access=_from_dict(d, 'network_access', EgressNetworkPolicyNetworkAccessPolicy)) + + @dataclass @@ -3937,112 +3954,97 @@ class NotificationDestination: config: Optional[Config] = None """The configuration for the notification destination. Will be exactly one of the nested configs. Only returns for users with workspace admin permissions.""" - + destination_type: Optional[DestinationType] = None """[Output-only] The type of the notification destination. 
The type can not be changed once set.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + id: Optional[str] = None """UUID identifying notification destination.""" - + def as_dict(self) -> dict: """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.destination_type is not None: - body["destination_type"] = self.destination_type.value - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id + if self.config: body['config'] = self.config.as_dict() + if self.destination_type is not None: body['destination_type'] = self.destination_type.value + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: - body["config"] = self.config - if self.destination_type is not None: - body["destination_type"] = self.destination_type - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id + if self.config: body['config'] = self.config + if self.destination_type is not None: body['destination_type'] = self.destination_type + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NotificationDestination: """Deserializes the NotificationDestination from a dictionary.""" - return cls( - config=_from_dict(d, "config", Config), - destination_type=_enum(d, "destination_type", DestinationType), - display_name=d.get("display_name", None), - id=d.get("id", None), - ) + return cls(config=_from_dict(d, 'config', Config), destination_type=_enum(d, 'destination_type', DestinationType), display_name=d.get('display_name', None), id=d.get('id', None)) + + @dataclass class PagerdutyConfig: integration_key: Optional[str] = None """[Input-Only] Integration key for PagerDuty.""" - + integration_key_set: Optional[bool] = None """[Output-Only] Whether integration key is set.""" - + def as_dict(self) -> dict: """Serializes the PagerdutyConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.integration_key is not None: - body["integration_key"] = self.integration_key - if self.integration_key_set is not None: - body["integration_key_set"] = self.integration_key_set + if self.integration_key is not None: body['integration_key'] = self.integration_key + if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set return body def as_shallow_dict(self) -> dict: """Serializes the PagerdutyConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.integration_key is not None: - body["integration_key"] = self.integration_key - if self.integration_key_set is not None: - body["integration_key_set"] = self.integration_key_set + if self.integration_key is not None: body['integration_key'] = self.integration_key + if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PagerdutyConfig: """Deserializes the PagerdutyConfig from a dictionary.""" - return cls( - 
integration_key=d.get("integration_key", None), integration_key_set=d.get("integration_key_set", None) - ) + return cls(integration_key=d.get('integration_key', None), integration_key_set=d.get('integration_key_set', None)) + + @dataclass class PartitionId: """Partition by workspace or account""" - + workspace_id: Optional[int] = None """The ID of the workspace.""" - + def as_dict(self) -> dict: """Serializes the PartitionId into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspace_id is not None: - body["workspaceId"] = self.workspace_id + if self.workspace_id is not None: body['workspaceId'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the PartitionId into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspace_id is not None: - body["workspaceId"] = self.workspace_id + if self.workspace_id is not None: body['workspaceId'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PartitionId: """Deserializes the PartitionId from a dictionary.""" - return cls(workspace_id=d.get("workspaceId", None)) + return cls(workspace_id=d.get('workspaceId', None)) + + @dataclass @@ -4053,25 +4055,25 @@ class PersonalComputeMessage: Personal Compute default policy to individual workspaces and requires a workspace’s users or groups to be added to the ACLs of that workspace’s Personal Compute default policy before they will be able to create compute resources through that policy.""" - + def as_dict(self) -> dict: """Serializes the PersonalComputeMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: - body["value"] = self.value.value + if self.value is not None: body['value'] = self.value.value return body def as_shallow_dict(self) -> dict: """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.value is not None: - body["value"] = self.value + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PersonalComputeMessage: """Deserializes the PersonalComputeMessage from a dictionary.""" - return cls(value=_enum(d, "value", PersonalComputeMessageEnum)) + return cls(value=_enum(d, 'value', PersonalComputeMessageEnum)) + + class PersonalComputeMessageEnum(Enum): @@ -4080,15 +4082,14 @@ class PersonalComputeMessageEnum(Enum): Personal Compute default policy to individual workspaces and requires a workspace’s users or groups to be added to the ACLs of that workspace’s Personal Compute default policy before they will be able to create compute resources through that policy.""" - - DELEGATE = "DELEGATE" - ON = "ON" - + + DELEGATE = 'DELEGATE' + ON = 'ON' @dataclass class PersonalComputeSetting: personal_compute: PersonalComputeMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -4096,157 +4097,124 @@ class PersonalComputeSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. 
The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the PersonalComputeSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.personal_compute: - body["personal_compute"] = self.personal_compute.as_dict() - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.etag is not None: body['etag'] = self.etag + if self.personal_compute: body['personal_compute'] = self.personal_compute.as_dict() + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the PersonalComputeSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.personal_compute: - body["personal_compute"] = self.personal_compute - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.etag is not None: body['etag'] = self.etag + if self.personal_compute: body['personal_compute'] = self.personal_compute + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PersonalComputeSetting: """Deserializes the PersonalComputeSetting from a dictionary.""" - return cls( - etag=d.get("etag", None), - personal_compute=_from_dict(d, "personal_compute", PersonalComputeMessage), - setting_name=d.get("setting_name", None), - ) + return cls(etag=d.get('etag', None), personal_compute=_from_dict(d, 'personal_compute', PersonalComputeMessage), setting_name=d.get('setting_name', None)) + + @dataclass class PublicTokenInfo: comment: Optional[str] = None """Comment the token was created with, if applicable.""" - + creation_time: Optional[int] = None """Server time (in epoch milliseconds) when the token was created.""" - + expiry_time: Optional[int] = None """Server time (in epoch milliseconds) when the token will expire, or -1 if not applicable.""" - + token_id: Optional[str] = None """The ID of this token.""" - + def as_dict(self) -> dict: """Serializes the PublicTokenInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.expiry_time is not None: - body["expiry_time"] = self.expiry_time - if self.token_id is not None: - body["token_id"] = self.token_id + if self.comment is not None: body['comment'] = self.comment + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.expiry_time is not None: body['expiry_time'] = self.expiry_time + if self.token_id is not None: body['token_id'] = self.token_id return body def as_shallow_dict(self) -> dict: """Serializes the PublicTokenInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.expiry_time is not None: - body["expiry_time"] = self.expiry_time - if self.token_id is not None: - body["token_id"] = self.token_id + if self.comment is not None: body['comment'] = self.comment + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.expiry_time is not None: 
body['expiry_time'] = self.expiry_time + if self.token_id is not None: body['token_id'] = self.token_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PublicTokenInfo: """Deserializes the PublicTokenInfo from a dictionary.""" - return cls( - comment=d.get("comment", None), - creation_time=d.get("creation_time", None), - expiry_time=d.get("expiry_time", None), - token_id=d.get("token_id", None), - ) + return cls(comment=d.get('comment', None), creation_time=d.get('creation_time', None), expiry_time=d.get('expiry_time', None), token_id=d.get('token_id', None)) + + @dataclass class ReplaceIpAccessList: """Details required to replace an IP access list.""" - + label: str """Label for the IP access list. This **cannot** be empty.""" - + list_type: ListType """Type of IP access list. Valid values are as follows and are case-sensitive: * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list.""" - + enabled: bool """Specifies whether this IP access list is enabled.""" - + ip_access_list_id: Optional[str] = None """The ID for the corresponding IP access list""" - + ip_addresses: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the ReplaceIpAccessList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id + if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ReplaceIpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id + if self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ReplaceIpAccessList: """Deserializes the ReplaceIpAccessList from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - ip_access_list_id=d.get("ip_access_list_id", None), - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) + return cls(enabled=d.get('enabled', None), ip_access_list_id=d.get('ip_access_list_id', None), ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_type=_enum(d, 
'list_type', ListType)) + + @dataclass @@ -4265,42 +4233,44 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse: """Deserializes the ReplaceResponse from a dictionary.""" return cls() + + @dataclass class RestrictWorkspaceAdminsMessage: status: RestrictWorkspaceAdminsMessageStatus - + def as_dict(self) -> dict: """Serializes the RestrictWorkspaceAdminsMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.status is not None: - body["status"] = self.status.value + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.status is not None: - body["status"] = self.status + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsMessage: """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary.""" - return cls(status=_enum(d, "status", RestrictWorkspaceAdminsMessageStatus)) - + return cls(status=_enum(d, 'status', RestrictWorkspaceAdminsMessageStatus)) + -class RestrictWorkspaceAdminsMessageStatus(Enum): - ALLOW_ALL = "ALLOW_ALL" - RESTRICT_TOKENS_AND_JOB_RUN_AS = "RESTRICT_TOKENS_AND_JOB_RUN_AS" +class RestrictWorkspaceAdminsMessageStatus(Enum): + + + ALLOW_ALL = 'ALLOW_ALL' + RESTRICT_TOKENS_AND_JOB_RUN_AS = 'RESTRICT_TOKENS_AND_JOB_RUN_AS' @dataclass class RestrictWorkspaceAdminsSetting: restrict_workspace_admins: RestrictWorkspaceAdminsMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -4308,68 +4278,60 @@ class RestrictWorkspaceAdminsSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
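# Sketch: the status enum serializes to its bare string value, matching the REST
# wire format, and from_dict restores the member. Assumes this patch's layout.
from databricks.sdk.service.settings import (
    RestrictWorkspaceAdminsMessage, RestrictWorkspaceAdminsMessageStatus)

msg = RestrictWorkspaceAdminsMessage(
    status=RestrictWorkspaceAdminsMessageStatus.RESTRICT_TOKENS_AND_JOB_RUN_AS)
assert msg.as_dict() == {"status": "RESTRICT_TOKENS_AND_JOB_RUN_AS"}
assert RestrictWorkspaceAdminsMessage.from_dict(msg.as_dict()).status is msg.status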
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the RestrictWorkspaceAdminsSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.restrict_workspace_admins: - body["restrict_workspace_admins"] = self.restrict_workspace_admins.as_dict() - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.etag is not None: body['etag'] = self.etag + if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins.as_dict() + if self.setting_name is not None: body['setting_name'] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the RestrictWorkspaceAdminsSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: - body["etag"] = self.etag - if self.restrict_workspace_admins: - body["restrict_workspace_admins"] = self.restrict_workspace_admins - if self.setting_name is not None: - body["setting_name"] = self.setting_name + if self.etag is not None: body['etag'] = self.etag + if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins + if self.setting_name is not None: body['setting_name'] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsSetting: """Deserializes the RestrictWorkspaceAdminsSetting from a dictionary.""" - return cls( - etag=d.get("etag", None), - restrict_workspace_admins=_from_dict(d, "restrict_workspace_admins", RestrictWorkspaceAdminsMessage), - setting_name=d.get("setting_name", None), - ) + return cls(etag=d.get('etag', None), restrict_workspace_admins=_from_dict(d, 'restrict_workspace_admins', RestrictWorkspaceAdminsMessage), setting_name=d.get('setting_name', None)) + + @dataclass class RevokeTokenRequest: token_id: str """The ID of the token to be revoked.""" - + def as_dict(self) -> dict: """Serializes the RevokeTokenRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_id is not None: - body["token_id"] = self.token_id + if self.token_id is not None: body['token_id'] = self.token_id return body def as_shallow_dict(self) -> dict: """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_id is not None: - body["token_id"] = self.token_id + if self.token_id is not None: body['token_id'] = self.token_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenRequest: """Deserializes the RevokeTokenRequest from a dictionary.""" - return cls(token_id=d.get("token_id", None)) + return cls(token_id=d.get('token_id', None)) + + @dataclass @@ -4388,6 +4350,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenResponse: """Deserializes the RevokeTokenResponse from a dictionary.""" return cls() + + @dataclass @@ -4406,431 +4370,398 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse: """Deserializes the SetStatusResponse from a dictionary.""" return cls() + + @dataclass class SlackConfig: url: Optional[str] = None """[Input-Only] URL for Slack destination.""" - + url_set: Optional[bool] = None """[Output-Only] Whether URL is set.""" - + def as_dict(self) -> dict: """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body.""" body = {} - 
if self.url is not None: - body["url"] = self.url - if self.url_set is not None: - body["url_set"] = self.url_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set return body def as_shallow_dict(self) -> dict: """Serializes the SlackConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.url is not None: - body["url"] = self.url - if self.url_set is not None: - body["url_set"] = self.url_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SlackConfig: """Deserializes the SlackConfig from a dictionary.""" - return cls(url=d.get("url", None), url_set=d.get("url_set", None)) + return cls(url=d.get('url', None), url_set=d.get('url_set', None)) + + + + +@dataclass +class SqlResultsDownload: + boolean_val: BooleanMessage + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. This field is populated in the response, but it will not be + respected even if it's set in the request body. The setting name in the path parameter will be + respected instead. Setting name is required to be 'default' if the setting only has one instance + per workspace.""" + + def as_dict(self) -> dict: + """Serializes the SqlResultsDownload into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SqlResultsDownload into a shallow dictionary of its immediate attributes.""" + body = {} + if self.boolean_val: body['boolean_val'] = self.boolean_val + if self.etag is not None: body['etag'] = self.etag + if self.setting_name is not None: body['setting_name'] = self.setting_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SqlResultsDownload: + """Deserializes the SqlResultsDownload from a dictionary.""" + return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) + + @dataclass class StringMessage: value: Optional[str] = None """Represents a generic string value.""" - + def as_dict(self) -> dict: """Serializes the StringMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: - body["value"] = self.value + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the StringMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.value is not None: - body["value"] = self.value + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StringMessage: """Deserializes the 
StringMessage from a dictionary.""" - return cls(value=d.get("value", None)) + return cls(value=d.get('value', None)) + + @dataclass class TokenAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[TokenPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the TokenAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the TokenAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenAccessControlRequest: """Deserializes the TokenAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", TokenPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', TokenPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class TokenAccessControlResponse: all_permissions: Optional[List[TokenPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the TokenAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = 
self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the TokenAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenAccessControlResponse: """Deserializes the TokenAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", TokenPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', TokenPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class TokenInfo: comment: Optional[str] = None """Comment that describes the purpose of the token, specified by the token creator.""" - + created_by_id: Optional[int] = None """User ID of the user that created the token.""" - + created_by_username: Optional[str] = None """Username of the user that created the token.""" - + creation_time: Optional[int] = None """Timestamp when the token was created.""" - + expiry_time: Optional[int] = None """Timestamp when the token expires.""" - + last_used_day: Optional[int] = None """Approximate timestamp for the day the token was last used. 
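# Sketch: TokenInfo is read-only metadata, so the usual direction is from_dict on
# an API response. Field names map one-to-one; the timestamps below are invented
# epoch-millisecond values (last_used_day is only day-accurate).
from databricks.sdk.service.settings import TokenInfo

info = TokenInfo.from_dict({
    "token_id": "abc123",
    "comment": "CI token",
    "creation_time": 1717000000000,
    "last_used_day": 1717027200000,
})
assert info.token_id == "abc123" and info.comment == "CI token"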
Accurate up to 1 day.""" - + owner_id: Optional[int] = None """User ID of the user that owns the token.""" - + token_id: Optional[str] = None """ID of the token.""" - + workspace_id: Optional[int] = None """If applicable, the ID of the workspace that the token was created in.""" - + def as_dict(self) -> dict: """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_by_id is not None: - body["created_by_id"] = self.created_by_id - if self.created_by_username is not None: - body["created_by_username"] = self.created_by_username - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.expiry_time is not None: - body["expiry_time"] = self.expiry_time - if self.last_used_day is not None: - body["last_used_day"] = self.last_used_day - if self.owner_id is not None: - body["owner_id"] = self.owner_id - if self.token_id is not None: - body["token_id"] = self.token_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.comment is not None: body['comment'] = self.comment + if self.created_by_id is not None: body['created_by_id'] = self.created_by_id + if self.created_by_username is not None: body['created_by_username'] = self.created_by_username + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.expiry_time is not None: body['expiry_time'] = self.expiry_time + if self.last_used_day is not None: body['last_used_day'] = self.last_used_day + if self.owner_id is not None: body['owner_id'] = self.owner_id + if self.token_id is not None: body['token_id'] = self.token_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the TokenInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_by_id is not None: - body["created_by_id"] = self.created_by_id - if self.created_by_username is not None: - body["created_by_username"] = self.created_by_username - if self.creation_time is not None: - body["creation_time"] = self.creation_time - if self.expiry_time is not None: - body["expiry_time"] = self.expiry_time - if self.last_used_day is not None: - body["last_used_day"] = self.last_used_day - if self.owner_id is not None: - body["owner_id"] = self.owner_id - if self.token_id is not None: - body["token_id"] = self.token_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.comment is not None: body['comment'] = self.comment + if self.created_by_id is not None: body['created_by_id'] = self.created_by_id + if self.created_by_username is not None: body['created_by_username'] = self.created_by_username + if self.creation_time is not None: body['creation_time'] = self.creation_time + if self.expiry_time is not None: body['expiry_time'] = self.expiry_time + if self.last_used_day is not None: body['last_used_day'] = self.last_used_day + if self.owner_id is not None: body['owner_id'] = self.owner_id + if self.token_id is not None: body['token_id'] = self.token_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenInfo: """Deserializes the TokenInfo from a dictionary.""" - return cls( - comment=d.get("comment", None), - created_by_id=d.get("created_by_id", None), - 
created_by_username=d.get("created_by_username", None), - creation_time=d.get("creation_time", None), - expiry_time=d.get("expiry_time", None), - last_used_day=d.get("last_used_day", None), - owner_id=d.get("owner_id", None), - token_id=d.get("token_id", None), - workspace_id=d.get("workspace_id", None), - ) + return cls(comment=d.get('comment', None), created_by_id=d.get('created_by_id', None), created_by_username=d.get('created_by_username', None), creation_time=d.get('creation_time', None), expiry_time=d.get('expiry_time', None), last_used_day=d.get('last_used_day', None), owner_id=d.get('owner_id', None), token_id=d.get('token_id', None), workspace_id=d.get('workspace_id', None)) + + @dataclass class TokenPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[TokenPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the TokenPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the TokenPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenPermission: """Deserializes the TokenPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", TokenPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', TokenPermissionLevel)) + + class TokenPermissionLevel(Enum): """Permission level""" - - CAN_USE = "CAN_USE" - + + CAN_USE = 'CAN_USE' @dataclass class TokenPermissions: access_control_list: Optional[List[TokenAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the TokenPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if 
self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the TokenPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenPermissions: """Deserializes the TokenPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', TokenAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class TokenPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[TokenPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the TokenPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the TokenPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsDescription: """Deserializes the TokenPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), permission_level=_enum(d, "permission_level", TokenPermissionLevel) - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', TokenPermissionLevel)) + + @dataclass class TokenPermissionsRequest: access_control_list: Optional[List[TokenAccessControlRequest]] = None - + def as_dict(self) -> dict: """Serializes the TokenPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] return body def as_shallow_dict(self) -> dict: """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list + 
if self.access_control_list: body['access_control_list'] = self.access_control_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsRequest: """Deserializes the TokenPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlRequest)) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', TokenAccessControlRequest)) + + class TokenType(Enum): """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported.""" - - ARCLIGHT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_AZURE_EXCHANGE_TOKEN" - ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY" - ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN" - ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = ( - "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY" - ) - AZURE_ACTIVE_DIRECTORY_TOKEN = "AZURE_ACTIVE_DIRECTORY_TOKEN" - + + ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN' + ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY' + ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN' + ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY' + AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN' @dataclass class UpdateAccountIpAccessEnableRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: AccountIpAccessEnable - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -4841,48 +4772,40 @@ class UpdateAccountIpAccessEnableRequest: A field mask of `*` indicates full replacement. 
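    For example (an illustrative sketch, not part of the generated code; the
    field path below is an assumption about the setting's shape), an update
    that only toggles the enabled flag would name that field explicitly:

        update = UpdateAccountIpAccessEnableRequest(
            allow_missing=True,
            setting=current,  # an AccountIpAccessEnable fetched earlier (assumed)
            field_mask="acct_ip_acl_enable.enabled",  # hypothetical field path
        )
        body = update.as_dict()  # the nested setting is serialized via as_dict()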
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountIpAccessEnableRequest: """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AccountIpAccessEnable), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AccountIpAccessEnable)) + + @dataclass class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: AibiDashboardEmbeddingAccessPolicySetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -4893,48 +4816,40 @@ class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AibiDashboardEmbeddingAccessPolicySetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AibiDashboardEmbeddingAccessPolicySetting)) + + @dataclass class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: AibiDashboardEmbeddingApprovedDomainsSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -4945,48 +4860,40 @@ class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AibiDashboardEmbeddingApprovedDomainsSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AibiDashboardEmbeddingApprovedDomainsSetting)) + + @dataclass class UpdateAutomaticClusterUpdateSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: AutomaticClusterUpdateSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -4997,48 +4904,40 @@ class UpdateAutomaticClusterUpdateSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAutomaticClusterUpdateSettingRequest: """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AutomaticClusterUpdateSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AutomaticClusterUpdateSetting)) + + @dataclass class UpdateComplianceSecurityProfileSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: ComplianceSecurityProfileSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5049,48 +4948,40 @@ class UpdateComplianceSecurityProfileSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateComplianceSecurityProfileSettingRequest: """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", ComplianceSecurityProfileSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', ComplianceSecurityProfileSetting)) + + @dataclass class UpdateCspEnablementAccountSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: CspEnablementAccountSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5101,46 +4992,82 @@ class UpdateCspEnablementAccountSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCspEnablementAccountSettingRequest: """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", CspEnablementAccountSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', CspEnablementAccountSetting)) + + @dataclass -class UpdateDefaultNamespaceSettingRequest: +class UpdateDashboardEmailSubscriptionsRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" + + setting: DashboardEmailSubscriptions + + field_mask: str + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. 
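+    Example (an illustrative sketch, not part of the generated code; `subs` is
+    an assumed, previously constructed DashboardEmailSubscriptions value):
+
+        req = UpdateDashboardEmailSubscriptionsRequest(
+            allow_missing=True,
+            setting=subs,
+            field_mask="enabled",  # hypothetical field path
+        )
+        # from_dict(as_dict(...)) round-trips the request
+        assert UpdateDashboardEmailSubscriptionsRequest.from_dict(req.as_dict()).field_mask == req.field_mask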
It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" + + def as_dict(self) -> dict: + """Serializes the UpdateDashboardEmailSubscriptionsRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateDashboardEmailSubscriptionsRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateDashboardEmailSubscriptionsRequest: + """Deserializes the UpdateDashboardEmailSubscriptionsRequest from a dictionary.""" + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DashboardEmailSubscriptions)) + + + +@dataclass +class UpdateDefaultNamespaceSettingRequest: + """Details required to update a setting.""" + + allow_missing: bool + """This should always be set to true for Settings API. Added for AIP compliance.""" + setting: DefaultNamespaceSetting """This represents the setting configuration for the default namespace in the Databricks workspace. Setting the default catalog for the workspace determines the catalog that is used when queries @@ -5149,7 +5076,7 @@ class UpdateDefaultNamespaceSettingRequest: 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute.""" - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5160,48 +5087,40 @@ class UpdateDefaultNamespaceSettingRequest: A field mask of `*` indicates full replacement. 
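    For instance (illustrative only; the constructor arguments below are
    assumptions about DefaultNamespaceSetting's fields), pointing the default
    namespace at the 'retail_prod' catalog mentioned above might look like:

        req = UpdateDefaultNamespaceSettingRequest(
            allow_missing=True,
            setting=DefaultNamespaceSetting(namespace=StringMessage(value="retail_prod")),
            field_mask="namespace.value",  # explicit mask instead of '*'
        )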
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateDefaultNamespaceSettingRequest: """Deserializes the UpdateDefaultNamespaceSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DefaultNamespaceSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DefaultNamespaceSetting)) + + @dataclass class UpdateDisableLegacyAccessRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: DisableLegacyAccess - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5212,48 +5131,40 @@ class UpdateDisableLegacyAccessRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyAccessRequest: """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyAccess), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DisableLegacyAccess)) + + @dataclass class UpdateDisableLegacyDbfsRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: DisableLegacyDbfs - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5264,48 +5175,40 @@ class UpdateDisableLegacyDbfsRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyDbfsRequest: """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyDbfs), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DisableLegacyDbfs)) + + @dataclass class UpdateDisableLegacyFeaturesRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: DisableLegacyFeatures - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5316,48 +5219,40 @@ class UpdateDisableLegacyFeaturesRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyFeaturesRequest: """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyFeatures), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DisableLegacyFeatures)) + + @dataclass class UpdateEnableExportNotebookRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EnableExportNotebook - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5368,48 +5263,40 @@ class UpdateEnableExportNotebookRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnableExportNotebookRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnableExportNotebookRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableExportNotebookRequest: """Deserializes the UpdateEnableExportNotebookRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableExportNotebook), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnableExportNotebook)) + + @dataclass class UpdateEnableNotebookTableClipboardRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EnableNotebookTableClipboard - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5420,48 +5307,40 @@ class UpdateEnableNotebookTableClipboardRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnableNotebookTableClipboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnableNotebookTableClipboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableNotebookTableClipboardRequest: """Deserializes the UpdateEnableNotebookTableClipboardRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableNotebookTableClipboard), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnableNotebookTableClipboard)) + + @dataclass class UpdateEnableResultsDownloadingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EnableResultsDownloading - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5472,48 +5351,40 @@ class UpdateEnableResultsDownloadingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnableResultsDownloadingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnableResultsDownloadingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableResultsDownloadingRequest: """Deserializes the UpdateEnableResultsDownloadingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableResultsDownloading), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnableResultsDownloading)) + + @dataclass class UpdateEnhancedSecurityMonitoringSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EnhancedSecurityMonitoringSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5524,48 +5395,40 @@ class UpdateEnhancedSecurityMonitoringSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnhancedSecurityMonitoringSettingRequest: """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnhancedSecurityMonitoringSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnhancedSecurityMonitoringSetting)) + + @dataclass class UpdateEsmEnablementAccountSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EsmEnablementAccountSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5576,111 +5439,89 @@ class UpdateEsmEnablementAccountSettingRequest: A field mask of `*` indicates full replacement. 
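    The two serializers defined below differ in how they treat the nested
    setting; a quick sketch (`my_setting` is an assumed EsmEnablementAccountSetting):

        req = UpdateEsmEnablementAccountSettingRequest(
            allow_missing=True, setting=my_setting, field_mask="*")
        req.as_dict()["setting"]          # a plain dict, via setting.as_dict()
        req.as_shallow_dict()["setting"]  # the EsmEnablementAccountSetting object itself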
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEsmEnablementAccountSettingRequest: """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EsmEnablementAccountSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EsmEnablementAccountSetting)) + + @dataclass class UpdateIpAccessList: """Details required to update an IP access list.""" - + enabled: Optional[bool] = None """Specifies whether this IP access list is enabled.""" - + ip_access_list_id: Optional[str] = None """The ID for the corresponding IP access list""" - + ip_addresses: Optional[List[str]] = None - + label: Optional[str] = None """Label for the IP access list. This **cannot** be empty.""" - + list_type: Optional[ListType] = None """Type of IP access list. Valid values are as follows and are case-sensitive: * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" - + def as_dict(self) -> dict: """Serializes the UpdateIpAccessList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id + if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type + if self.enabled is not None: body['enabled'] = self.enabled + if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id + if self.ip_addresses: body['ip_addresses'] = self.ip_addresses + if self.label is not None: body['label'] = self.label + if self.list_type is not None: body['list_type'] = self.list_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateIpAccessList: """Deserializes the UpdateIpAccessList from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - ip_access_list_id=d.get("ip_access_list_id", None), - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) + return cls(enabled=d.get('enabled', None), ip_access_list_id=d.get('ip_access_list_id', None), ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_type=_enum(d, 'list_type', ListType)) + + @dataclass class UpdateLlmProxyPartnerPoweredAccountRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: LlmProxyPartnerPoweredAccount - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5691,48 +5532,40 @@ class UpdateLlmProxyPartnerPoweredAccountRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredAccountRequest: """Deserializes the UpdateLlmProxyPartnerPoweredAccountRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredAccount), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', LlmProxyPartnerPoweredAccount)) + + @dataclass class UpdateLlmProxyPartnerPoweredEnforceRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: LlmProxyPartnerPoweredEnforce - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5743,48 +5576,40 @@ class UpdateLlmProxyPartnerPoweredEnforceRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredEnforceRequest: """Deserializes the UpdateLlmProxyPartnerPoweredEnforceRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredEnforce), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', LlmProxyPartnerPoweredEnforce)) + + @dataclass class UpdateLlmProxyPartnerPoweredWorkspaceRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: LlmProxyPartnerPoweredWorkspace - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5795,89 +5620,81 @@ class UpdateLlmProxyPartnerPoweredWorkspaceRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredWorkspaceRequest: """Deserializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredWorkspace), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', LlmProxyPartnerPoweredWorkspace)) + + + + + + + + @dataclass class UpdateNotificationDestinationRequest: config: Optional[Config] = None """The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + id: Optional[str] = None """UUID identifying notification destination.""" - + def as_dict(self) -> dict: """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id + if self.config: body['config'] = self.config.as_dict() + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: - body["config"] = self.config - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id + if self.config: body['config'] = self.config + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateNotificationDestinationRequest: """Deserializes the UpdateNotificationDestinationRequest from a dictionary.""" - return cls( - config=_from_dict(d, "config", Config), display_name=d.get("display_name", None), id=d.get("id", None) - ) + return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None), id=d.get('id', None)) + + @dataclass class UpdatePersonalComputeSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: PersonalComputeSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5888,68 +5705,77 @@ class UpdatePersonalComputeSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalComputeSettingRequest: """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", PersonalComputeSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', PersonalComputeSetting)) + + @dataclass class UpdatePrivateEndpointRule: """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization.""" - + domain_names: Optional[List[str]] = None - """Only used by private endpoints to customer-managed resources. + """Only used by private endpoints to customer-managed private endpoint services. Domain names of target private link service. When updating this field, the full list of target domain_names must be specified.""" - + + enabled: Optional[bool] = None + """Only used by private endpoints towards an AWS S3 service. + + Update this field to activate/deactivate this private endpoint to allow egress access from + serverless compute resources.""" + + resource_names: Optional[List[str]] = None + """Only used by private endpoints towards AWS S3 service. + + The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names + must be in the same region as the NCC/endpoint service. When updating this field, we perform + full update on this field. 
Please ensure a full list of desired resource_names is provided.""" + def as_dict(self) -> dict: """Serializes the UpdatePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.domain_names: - body["domain_names"] = [v for v in self.domain_names] + if self.domain_names: body['domain_names'] = [v for v in self.domain_names] + if self.enabled is not None: body['enabled'] = self.enabled + if self.resource_names: body['resource_names'] = [v for v in self.resource_names] return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.domain_names: - body["domain_names"] = self.domain_names + if self.domain_names: body['domain_names'] = self.domain_names + if self.enabled is not None: body['enabled'] = self.enabled + if self.resource_names: body['resource_names'] = self.resource_names return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: """Deserializes the UpdatePrivateEndpointRule from a dictionary.""" - return cls(domain_names=d.get("domain_names", None)) + return cls(domain_names=d.get('domain_names', None), enabled=d.get('enabled', None), resource_names=d.get('resource_names', None)) + + @dataclass @@ -5968,17 +5794,19 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() + + @dataclass class UpdateRestrictWorkspaceAdminsSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: RestrictWorkspaceAdminsSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5989,40 +5817,79 @@ class UpdateRestrictWorkspaceAdminsSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRestrictWorkspaceAdminsSettingRequest: """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", RestrictWorkspaceAdminsSetting), - ) + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', RestrictWorkspaceAdminsSetting)) + + + + +@dataclass +class UpdateSqlResultsDownloadRequest: + """Details required to update a setting.""" + + allow_missing: bool + """This should always be set to true for Settings API. Added for AIP compliance.""" + + setting: SqlResultsDownload + + field_mask: str + """The field mask must be a single string, with multiple fields separated by commas (no spaces). + The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields + (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, + as only the entire collection field can be specified. Field names must exactly match the + resource field names. + + A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the + API changes in the future.""" + + def as_dict(self) -> dict: + """Serializes the UpdateSqlResultsDownloadRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateSqlResultsDownloadRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.allow_missing is not None: body['allow_missing'] = self.allow_missing + if self.field_mask is not None: body['field_mask'] = self.field_mask + if self.setting: body['setting'] = self.setting + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateSqlResultsDownloadRequest: + """Deserializes the UpdateSqlResultsDownloadRequest from a dictionary.""" + return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', SqlResultsDownload)) + -WorkspaceConf = Dict[str, str] + + + + +WorkspaceConf = Dict[str,str] @dataclass @@ -6031,170 +5898,191 @@ class WorkspaceNetworkOption: """The network policy ID to apply to the workspace. This controls the network access rules for all serverless compute resources in the workspace. Each workspace can only be linked to one policy at a time. If no policy is explicitly assigned, the workspace will use 'default-policy'.""" - + workspace_id: Optional[int] = None """The workspace ID.""" - + def as_dict(self) -> dict: """Serializes the WorkspaceNetworkOption into a dictionary suitable for use as a JSON request body.""" body = {} - if self.network_policy_id is not None: - body["network_policy_id"] = self.network_policy_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceNetworkOption into a shallow dictionary of its immediate attributes.""" body = {} - if self.network_policy_id is not None: - body["network_policy_id"] = self.network_policy_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceNetworkOption: """Deserializes the WorkspaceNetworkOption from a dictionary.""" - return cls(network_policy_id=d.get("network_policy_id", None), workspace_id=d.get("workspace_id", None)) + return cls(network_policy_id=d.get('network_policy_id', None), workspace_id=d.get('workspace_id', None)) + + + + class AccountIpAccessListsAPI: """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console. - + Account IP Access Lists affect web application access and REST API access to the account console and account APIs. If the feature is disabled for the account, all access is allowed for this account. There is support for allow lists (inclusion) and block lists (exclusion). 
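A minimal illustrative sketch (not part of the SDK or this patch) of the connection-evaluation order described in the next paragraphs: block lists are checked first, then allow lists, and an empty set of allow lists means all remaining IPs are allowed. Only the standard-library ipaddress module is used; the helper name is hypothetical.

    from typing import List
    import ipaddress

    def connection_allowed(ip: str, block_cidrs: List[str], allow_cidrs: List[str]) -> bool:
        addr = ipaddress.ip_address(ip)
        # 1. Any block-list match rejects the connection outright.
        if any(addr in ipaddress.ip_network(c) for c in block_cidrs):
            return False
        # 2. With no allow lists configured, every remaining IP is allowed.
        if not allow_cidrs:
            return True
        # 3. Otherwise the IP must match at least one allow-list entry.
        return any(addr in ipaddress.ip_network(c) for c in allow_cidrs)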
- + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the account, the connection is allowed only if the IP address matches an allow list. If there are no allow lists for the account, all IP addresses are allowed. - + For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, label: str, list_type: ListType, *, ip_addresses: Optional[List[str]] = None - ) -> CreateIpAccessListResponse: - """Create access list. + - Creates an IP access list for the account. + + + + + def create(self + , label: str, list_type: ListType + , * + , ip_addresses: Optional[List[str]] = None) -> CreateIpAccessListResponse: + """Create access list. + + Creates an IP access list for the account. + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` """ body = {} - if ip_addresses is not None: - body["ip_addresses"] = [v for v in ip_addresses] - if label is not None: - body["label"] = label - if list_type is not None: - body["list_type"] = list_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists", body=body, headers=headers - ) + if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] + if label is not None: body['label'] = label + if list_type is not None: body['list_type'] = list_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists', body=body + + , headers=headers + ) return CreateIpAccessListResponse.from_dict(res) - def delete(self, ip_access_list_id: str): - """Delete access list. + + + + def delete(self + , ip_access_list_id: str + ): + """Delete access list. + Deletes an IP access list, specified by its list ID. 
- + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}' + + , headers=headers + ) + - self._api.do( - "DELETE", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", headers=headers - ) + + + - def get(self, ip_access_list_id: str) -> GetIpAccessListResponse: + def get(self + , ip_access_list_id: str + ) -> GetIpAccessListResponse: """Get IP access list. - + Gets an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`GetIpAccessListResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}' + + , headers=headers + ) return GetIpAccessListResponse.from_dict(res) + + + + def list(self) -> Iterator[IpAccessListInfo]: """Get access lists. - + Gets all IP access lists for the specified account. - + :returns: Iterator over :class:`IpAccessListInfo` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists' + , headers=headers + ) parsed = GetIpAccessListsResponse.from_dict(json).ip_access_lists return parsed if parsed is not None else [] + - def replace( - self, - ip_access_list_id: str, - label: str, - list_type: ListType, - enabled: bool, - *, - ip_addresses: Optional[List[str]] = None, - ): - """Replace access list. + + + + def replace(self + , ip_access_list_id: str, label: str, list_type: ListType, enabled: bool + , * + , ip_addresses: Optional[List[str]] = None): + """Replace access list. + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -6202,67 +6090,59 @@ def replace( `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. 
:param ip_addresses: List[str] (optional) - - + + """ body = {} - if enabled is not None: - body["enabled"] = enabled - if ip_addresses is not None: - body["ip_addresses"] = [v for v in ip_addresses] - if label is not None: - body["label"] = label - if list_type is not None: - body["list_type"] = list_type.value - headers = { - "Content-Type": "application/json", - } - - self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", - body=body, - headers=headers, - ) - - def update( - self, - ip_access_list_id: str, - *, - enabled: Optional[bool] = None, - ip_addresses: Optional[List[str]] = None, - label: Optional[str] = None, - list_type: Optional[ListType] = None, - ): - """Update access list. + if enabled is not None: body['enabled'] = enabled + if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] + if label is not None: body['label'] = label + if list_type is not None: body['list_type'] = list_type.value + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}', body=body + + , headers=headers + ) + - Updates an existing IP access list, specified by its ID. + + + + def update(self + , ip_access_list_id: str + , * + , enabled: Optional[bool] = None, ip_addresses: Optional[List[str]] = None, label: Optional[str] = None, list_type: Optional[ListType] = None): + """Update access list. + + Updates an existing IP access list, specified by its ID. + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -6272,39 +6152,33 @@ def update( Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. 
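A hedged usage sketch of the create/update flow documented above. It assumes an authenticated AccountClient that exposes this service as a.ip_access_lists (as the generated account docs suggest) and that the create response carries the new list under ip_access_list.list_id; the label and CIDR values are placeholders.

    from databricks.sdk import AccountClient
    from databricks.sdk.service.settings import ListType

    a = AccountClient()
    created = a.ip_access_lists.create(
        label="corp-vpn", list_type=ListType.ALLOW, ip_addresses=["10.0.0.0/16"]
    )
    # update() has PATCH semantics and only touches the fields passed;
    # replace() (PUT) overwrites the entire list definition.
    a.ip_access_lists.update(
        ip_access_list_id=created.ip_access_list.list_id, enabled=False
    )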
- - + + """ body = {} - if enabled is not None: - body["enabled"] = enabled - if ip_addresses is not None: - body["ip_addresses"] = [v for v in ip_addresses] - if label is not None: - body["label"] = label - if list_type is not None: - body["list_type"] = list_type.value - headers = { - "Content-Type": "application/json", - } - - self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", - body=body, - headers=headers, - ) - + if enabled is not None: body['enabled'] = enabled + if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] + if label is not None: body['label'] = label + if list_type is not None: body['list_type'] = list_type.value + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}', body=body + + , headers=headers + ) + + + class AccountSettingsAPI: """Accounts Settings API allows users to manage settings at the account level.""" - + def __init__(self, api_client): self._api = api_client - + self._csp_enablement_account = CspEnablementAccountAPI(self._api) self._disable_legacy_features = DisableLegacyFeaturesAPI(self._api) self._enable_ip_access_lists = EnableIpAccessListsAPI(self._api) @@ -6313,114 +6187,133 @@ def __init__(self, api_client): self._llm_proxy_partner_powered_enforce = LlmProxyPartnerPoweredEnforceAPI(self._api) self._personal_compute = PersonalComputeAPI(self._api) + @property def csp_enablement_account(self) -> CspEnablementAccountAPI: """The compliance security profile settings at the account level control whether to enable it for new workspaces.""" return self._csp_enablement_account - + @property def disable_legacy_features(self) -> DisableLegacyFeaturesAPI: """Disable legacy features for new Databricks workspaces.""" return self._disable_legacy_features - + @property def enable_ip_access_lists(self) -> EnableIpAccessListsAPI: """Controls the enforcement of IP access lists for accessing the account console.""" return self._enable_ip_access_lists - + @property def esm_enablement_account(self) -> EsmEnablementAccountAPI: """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces.""" return self._esm_enablement_account - + @property def llm_proxy_partner_powered_account(self) -> LlmProxyPartnerPoweredAccountAPI: """Determines if partner powered models are enabled or not for a specific account.""" return self._llm_proxy_partner_powered_account - + @property def llm_proxy_partner_powered_enforce(self) -> LlmProxyPartnerPoweredEnforceAPI: """Determines if the account-level partner-powered setting value is enforced upon the workspace-level partner-powered setting.""" return self._llm_proxy_partner_powered_enforce - + @property def personal_compute(self) -> PersonalComputeAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources.""" return self._personal_compute + + + class AibiDashboardEmbeddingAccessPolicyAPI: """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: - """Delete the AI/BI dashboard embedding access policy. 
+ - Delete the AI/BI dashboard embedding access policy, reverting back to the default. + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: + """Delete the AI/BI dashboard embedding access policy. + + Delete the AI/BI dashboard embedding access policy, reverting back to the default. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", - "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', query=query + + , headers=headers + ) return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting: - """Retrieve the AI/BI dashboard embedding access policy. + + + + def get(self + + , * + , etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting: + """Retrieve the AI/BI dashboard embedding access policy. + Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', query=query + + , headers=headers + ) return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res) - def update( - self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str - ) -> AibiDashboardEmbeddingAccessPolicySetting: - """Update the AI/BI dashboard embedding access policy. 
+ + + + def update(self + , allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str + ) -> AibiDashboardEmbeddingAccessPolicySetting: + """Update the AI/BI dashboard embedding access policy. + Updates the AI/BI dashboard embedding access policy at the workspace level. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting` @@ -6430,107 +6323,115 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', body=body + + , headers=headers + ) return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res) - + + class AibiDashboardEmbeddingApprovedDomainsAPI: """Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: - """Delete AI/BI dashboard embedding approved domains. + + + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: + """Delete AI/BI dashboard embedding approved domains. + Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default empty list. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- + :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", - "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', query=query + + , headers=headers + ) return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting: - """Retrieve the list of domains approved to host embedded AI/BI dashboards. + + + + def get(self + + , * + , etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting: + """Retrieve the list of domains approved to host embedded AI/BI dashboards. + Retrieves the list of domains approved to host embedded AI/BI dashboards. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', query=query + + , headers=headers + ) return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res) - def update( - self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str - ) -> AibiDashboardEmbeddingApprovedDomainsSetting: - """Update the list of domains approved to host embedded AI/BI dashboards. + + + + def update(self + , allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str + ) -> AibiDashboardEmbeddingApprovedDomainsSetting: + """Update the list of domains approved to host embedded AI/BI dashboards. + Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` @@ -6540,78 +6441,84 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', body=body + + , headers=headers + ) return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res) - + + class AutomaticClusterUpdateAPI: """Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned off.""" - + def __init__(self, api_client): self._api = api_client + - def get(self, *, etag: Optional[str] = None) -> AutomaticClusterUpdateSetting: - """Get the automatic cluster update setting. + - Gets the automatic cluster update setting. + + + + + def get(self + + , * + , etag: Optional[str] = None) -> AutomaticClusterUpdateSetting: + """Get the automatic cluster update setting. + + Gets the automatic cluster update setting. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AutomaticClusterUpdateSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/automatic_cluster_update/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/automatic_cluster_update/names/default', query=query + + , headers=headers + ) return AutomaticClusterUpdateSetting.from_dict(res) - def update( - self, allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str - ) -> AutomaticClusterUpdateSetting: - """Update the automatic cluster update setting. + + + + def update(self + , allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str + ) -> AutomaticClusterUpdateSetting: + """Update the automatic cluster update setting. + Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. 
:param setting: :class:`AutomaticClusterUpdateSetting` @@ -6621,77 +6528,86 @@ class AutomaticClusterUpdateAPI: `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AutomaticClusterUpdateSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/automatic_cluster_update/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/automatic_cluster_update/names/default', body=body + + , headers=headers + ) return AutomaticClusterUpdateSetting.from_dict(res) - + + class ComplianceSecurityProfileAPI: """Controls whether to enable the compliance security profile for the current workspace. Enabling it on a workspace is permanent. By default, it is turned off. - + This setting can NOT be disabled once it is enabled.""" - + def __init__(self, api_client): self._api = api_client + - def get(self, *, etag: Optional[str] = None) -> ComplianceSecurityProfileSetting: - """Get the compliance security profile setting. + - Gets the compliance security profile setting. + + + + + def get(self + + , * + , etag: Optional[str] = None) -> ComplianceSecurityProfileSetting: + """Get the compliance security profile setting. + + Gets the compliance security profile setting. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`ComplianceSecurityProfileSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default', query=query + + , headers=headers + ) return ComplianceSecurityProfileSetting.from_dict(res) - def update( - self, allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str - ) -> ComplianceSecurityProfileSetting: - """Update the compliance security profile setting. 
+ + + + def update(self + , allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str + ) -> ComplianceSecurityProfileSetting: + """Update the compliance security profile setting. + Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`ComplianceSecurityProfileSetting` @@ -6701,119 +6617,131 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`ComplianceSecurityProfileSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default', body=body + + , headers=headers + ) return ComplianceSecurityProfileSetting.from_dict(res) - + + class CredentialsManagerAPI: """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens.""" - + def __init__(self, api_client): self._api = api_client + - def exchange_token( - self, partition_id: PartitionId, token_type: List[TokenType], scopes: List[str] - ) -> ExchangeTokenResponse: - """Exchange token. + + + + + + + def exchange_token(self + , partition_id: PartitionId, token_type: List[TokenType], scopes: List[str] + ) -> ExchangeTokenResponse: + """Exchange token. + Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to determine token permissions. - + :param partition_id: :class:`PartitionId` The partition of Credentials store :param token_type: List[:class:`TokenType`] A list of token types being requested :param scopes: List[str] Array of scopes for the token request. 
- + :returns: :class:`ExchangeTokenResponse` """ body = {} - if partition_id is not None: - body["partitionId"] = partition_id.as_dict() - if scopes is not None: - body["scopes"] = [v for v in scopes] - if token_type is not None: - body["tokenType"] = [v.value for v in token_type] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/credentials-manager/exchange-tokens/token", body=body, headers=headers) + if partition_id is not None: body['partitionId'] = partition_id.as_dict() + if scopes is not None: body['scopes'] = [v for v in scopes] + if token_type is not None: body['tokenType'] = [v.value for v in token_type] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/credentials-manager/exchange-tokens/token', body=body + + , headers=headers + ) return ExchangeTokenResponse.from_dict(res) - + + class CspEnablementAccountAPI: """The compliance security profile settings at the account level control whether to enable it for new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable the compliance security profile individually for each workspace. - + This setting can be disabled so that new workspaces do not have the compliance security profile enabled by default.""" - + def __init__(self, api_client): self._api = api_client + - def get(self, *, etag: Optional[str] = None) -> CspEnablementAccountSetting: - """Get the compliance security profile setting for new workspaces. + - Gets the compliance security profile setting for new workspaces. + + + + + def get(self + + , * + , etag: Optional[str] = None) -> CspEnablementAccountSetting: + """Get the compliance security profile setting for new workspaces. + + Gets the compliance security profile setting for new workspaces. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`CspEnablementAccountSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default', query=query + + , headers=headers + ) return CspEnablementAccountSetting.from_dict(res) - def update( - self, allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str - ) -> CspEnablementAccountSetting: - """Update the compliance security profile setting for new workspaces. + + + + def update(self + , allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str + ) -> CspEnablementAccountSetting: + """Update the compliance security profile setting for new workspaces. 
+ Updates the value of the compliance security profile setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`CspEnablementAccountSetting` @@ -6823,116 +6751,246 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`CspEnablementAccountSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default', body=body + + , headers=headers + ) return CspEnablementAccountSetting.from_dict(res) - -class DefaultNamespaceAPI: - """The default namespace setting API allows users to configure the default namespace for a Databricks - workspace. - - Through this API, users can retrieve, set, or modify the default namespace used when queries do not - reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the - default catalog, then a query 'SELECT * FROM myTable' would reference the object - 'retail_prod.default.myTable' (the schema 'default' is always assumed). - - This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default - namespace only applies when using Unity Catalog-enabled compute.""" - + + +class DashboardEmailSubscriptionsAPI: + """Controls whether schedules or workload tasks for refreshing AI/BI Dashboards in the workspace can send + subscription emails containing PDFs and/or images of the dashboard. By default, this setting is enabled + (set to `true`)""" + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteDefaultNamespaceSettingResponse: - """Delete the default namespace setting. + - Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` - requests (as a query parameter). The etag can be retrieved by making a `GET` request before the - `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the - request must be retried by using the fresh etag in the 409 response. + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteDashboardEmailSubscriptionsResponse: + """Delete the Dashboard Email Subscriptions setting. + + Reverts the Dashboard Email Subscriptions setting to its default value. 
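A hedged sketch of the read-modify-write flow for the new Dashboard Email Subscriptions setting introduced by this class, anticipating the update() documented below. The w.settings.dashboard_email_subscriptions accessor and the boolean_val.value attribute path are assumptions here, mirroring the other boolean settings in this module.

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    setting = w.settings.dashboard_email_subscriptions.get()
    setting.boolean_val.value = False  # assumed field, mirroring other boolean settings
    w.settings.dashboard_email_subscriptions.update(
        allow_missing=True,
        setting=setting,
        field_mask="boolean_val.value",  # explicit mask rather than "*"
    )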
+ :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - - :returns: :class:`DeleteDefaultNamespaceSettingResponse` + + :returns: :class:`DeleteDashboardEmailSubscriptionsResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", "/api/2.0/settings/types/default_namespace_ws/names/default", query=query, headers=headers - ) - return DeleteDefaultNamespaceSettingResponse.from_dict(res) - - def get(self, *, etag: Optional[str] = None) -> DefaultNamespaceSetting: - """Get the default namespace setting. + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/dashboard_email_subscriptions/names/default', query=query + + , headers=headers + ) + return DeleteDashboardEmailSubscriptionsResponse.from_dict(res) - Gets the default namespace setting. + + + + def get(self + + , * + , etag: Optional[str] = None) -> DashboardEmailSubscriptions: + """Get the Dashboard Email Subscriptions setting. + + Gets the Dashboard Email Subscriptions setting. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - - :returns: :class:`DefaultNamespaceSetting` + + :returns: :class:`DashboardEmailSubscriptions` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/default_namespace_ws/names/default", query=query, headers=headers - ) - return DefaultNamespaceSetting.from_dict(res) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/dashboard_email_subscriptions/names/default', query=query + + , headers=headers + ) + return DashboardEmailSubscriptions.from_dict(res) - def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting: - """Update the default namespace setting. + + + + + def update(self + , allow_missing: bool, setting: DashboardEmailSubscriptions, field_mask: str + ) -> DashboardEmailSubscriptions: + """Update the Dashboard Email Subscriptions setting. + + Updates the Dashboard Email Subscriptions setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`DashboardEmailSubscriptions` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). 
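# Hedged sketch of an update with an explicit field mask, following the guidance
# below to avoid the `*` wildcard. The accessor name and the `boolean_val` field
# on DashboardEmailSubscriptions are assumptions, not shown in this hunk.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()
w.settings.dashboard_email_subscriptions.update(
    allow_missing=True,
    setting=settings.DashboardEmailSubscriptions(
        boolean_val=settings.BooleanMessage(value=False)),
    field_mask="boolean_val.value",  # list the exact field, not `*`
)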
The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`DashboardEmailSubscriptions` + """ + body = {} + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/dashboard_email_subscriptions/names/default', body=body + + , headers=headers + ) + return DashboardEmailSubscriptions.from_dict(res) + + + +class DefaultNamespaceAPI: + """The default namespace setting API allows users to configure the default namespace for a Databricks + workspace. + + Through this API, users can retrieve, set, or modify the default namespace used when queries do not + reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the + default catalog, then a query 'SELECT * FROM myTable' would reference the object + 'retail_prod.default.myTable' (the schema 'default' is always assumed). + + This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default + namespace only applies when using Unity Catalog-enabled compute.""" + + def __init__(self, api_client): + self._api = api_client + + + + + + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteDefaultNamespaceSettingResponse: + """Delete the default namespace setting. + + Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` + requests (as a query parameter). The etag can be retrieved by making a `GET` request before the + `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the + request must be retried by using the fresh etag in the 409 response. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteDefaultNamespaceSettingResponse` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/default_namespace_ws/names/default', query=query + + , headers=headers + ) + return DeleteDefaultNamespaceSettingResponse.from_dict(res) + + + + + + def get(self + + , * + , etag: Optional[str] = None) -> DefaultNamespaceSetting: + """Get the default namespace setting. + + Gets the default namespace setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. 
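# Sketch of the 409 retry flow described for `delete` above: when a concurrent
# write invalidates our etag, re-read to get a fresh one and retry.
# `ResourceConflict` is the SDK's 409 error type; the single-retry shape here is
# illustrative only.
from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import ResourceConflict

w = WorkspaceClient()
etag = w.settings.default_namespace.get().etag
try:
    w.settings.default_namespace.delete(etag=etag)
except ResourceConflict:
    # our etag went stale; fetch a fresh one and retry once
    fresh = w.settings.default_namespace.get().etag
    w.settings.default_namespace.delete(etag=fresh)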
This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DefaultNamespaceSetting` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/default_namespace_ws/names/default', query=query + + , headers=headers + ) + return DefaultNamespaceSetting.from_dict(res) + + + + + def update(self + , allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str + ) -> DefaultNamespaceSetting: + """Update the default namespace setting. + Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the etag is present in the error response, which should be set in the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DefaultNamespaceSetting` @@ -6949,100 +7007,116 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, field_ma `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DefaultNamespaceSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/default_namespace_ws/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/default_namespace_ws/names/default', body=body + + , headers=headers + ) return DefaultNamespaceSetting.from_dict(res) - + + class DisableLegacyAccessAPI: """'Disabling legacy access' has the following impacts: - + 1. Disables direct access to Hive Metastores from the workspace. However, you can still access a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external location access from the workspace. 3. 
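# Hedged sketch of the PATCH flow for the default namespace setting: the fresh
# etag travels inside the setting payload, per the `update` docstring above.
# 'retail_prod' mirrors the example in the class docstring; `StringMessage` is
# how the SDK wraps the namespace value.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()
current = w.settings.default_namespace.get()
w.settings.default_namespace.update(
    allow_missing=True,
    setting=settings.DefaultNamespaceSetting(
        etag=current.etag,
        namespace=settings.StringMessage(value="retail_prod")),
    field_mask="namespace.value",
)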
Disables Databricks Runtime versions prior to 13.3LTS.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse: - """Delete Legacy Access Disablement Status. + - Deletes legacy access disablement status. + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse: + """Delete Legacy Access Disablement Status. + + Deletes legacy access disablement status. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyAccessResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", "/api/2.0/settings/types/disable_legacy_access/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/disable_legacy_access/names/default', query=query + + , headers=headers + ) return DeleteDisableLegacyAccessResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> DisableLegacyAccess: - """Retrieve Legacy Access Disablement Status. + + + + def get(self + + , * + , etag: Optional[str] = None) -> DisableLegacyAccess: + """Retrieve Legacy Access Disablement Status. + Retrieves legacy access disablement Status. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyAccess` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/disable_legacy_access/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/disable_legacy_access/names/default', query=query + + , headers=headers + ) return DisableLegacyAccess.from_dict(res) - def update(self, allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess: - """Update Legacy Access Disablement Status. + + + + def update(self + , allow_missing: bool, setting: DisableLegacyAccess, field_mask: str + ) -> DisableLegacyAccess: + """Update Legacy Access Disablement Status. + Updates legacy access disablement status. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. 
:param setting: :class:`DisableLegacyAccess` @@ -7052,103 +7126,119 @@ def update(self, allow_missing: bool, setting: DisableLegacyAccess, field_mask: `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyAccess` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/disable_legacy_access/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/disable_legacy_access/names/default', body=body + + , headers=headers + ) return DisableLegacyAccess.from_dict(res) - + + class DisableLegacyDbfsAPI: """Disabling legacy DBFS has the following implications: - + 1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). 2. Disables Databricks Runtime versions prior to 13.3LTS. - + When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a manual restart of all-purpose compute clusters and SQL warehouses.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse: - """Delete the disable legacy DBFS setting. + - Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse: + """Delete the disable legacy DBFS setting. + + Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
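# Hedged sketch of flipping the legacy-access toggle. The accessor name
# `w.settings.disable_legacy_access` and the `disable_legacy_access`
# BooleanMessage field are assumptions inferred from the class shown above.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()
w.settings.disable_legacy_access.update(
    allow_missing=True,
    setting=settings.DisableLegacyAccess(
        disable_legacy_access=settings.BooleanMessage(value=True)),
    field_mask="disable_legacy_access.value",
)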
- + :returns: :class:`DeleteDisableLegacyDbfsResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", "/api/2.0/settings/types/disable_legacy_dbfs/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/disable_legacy_dbfs/names/default', query=query + + , headers=headers + ) return DeleteDisableLegacyDbfsResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs: - """Get the disable legacy DBFS setting. + + + + def get(self + + , * + , etag: Optional[str] = None) -> DisableLegacyDbfs: + """Get the disable legacy DBFS setting. + Gets the disable legacy DBFS setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyDbfs` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/disable_legacy_dbfs/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/disable_legacy_dbfs/names/default', query=query + + , headers=headers + ) return DisableLegacyDbfs.from_dict(res) - def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs: - """Update the disable legacy DBFS setting. + + + + def update(self + , allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str + ) -> DisableLegacyDbfs: + """Update the disable legacy DBFS setting. + Updates the disable legacy DBFS setting for the workspace. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyDbfs` @@ -7158,106 +7248,116 @@ def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: st `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`DisableLegacyDbfs` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/disable_legacy_dbfs/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/disable_legacy_dbfs/names/default', body=body + + , headers=headers + ) return DisableLegacyDbfs.from_dict(res) - + + class DisableLegacyFeaturesAPI: """Disable legacy features for new Databricks workspaces. - + For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions prior to 13.3LTS.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse: - """Delete the disable legacy features setting. + - Deletes the disable legacy features setting. + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse: + """Delete the disable legacy features setting. + + Deletes the disable legacy features setting. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyFeaturesResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default', query=query + + , headers=headers + ) return DeleteDisableLegacyFeaturesResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> DisableLegacyFeatures: - """Get the disable legacy features setting. + + + + def get(self + + , * + , etag: Optional[str] = None) -> DisableLegacyFeatures: + """Get the disable legacy features setting. + Gets the value of the disable legacy features setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. 
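# Account-level settings (note the /accounts/{account_id}/... path above) go
# through AccountClient rather than WorkspaceClient; the accessor name
# `a.settings.disable_legacy_features` is an assumption.
from databricks.sdk import AccountClient

a = AccountClient()
setting = a.settings.disable_legacy_features.get()  # read the account-wide value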
It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyFeatures` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default', query=query + + , headers=headers + ) return DisableLegacyFeatures.from_dict(res) - def update(self, allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures: - """Update the disable legacy features setting. + + + + def update(self + , allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str + ) -> DisableLegacyFeatures: + """Update the disable legacy features setting. + Updates the value of the disable legacy features setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyFeatures` @@ -7267,64 +7367,69 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures, field_mask `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyFeatures` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default', body=body + + , headers=headers + ) return DisableLegacyFeatures.from_dict(res) - + + class EnableExportNotebookAPI: """Controls whether users can export notebooks and files from the Workspace UI. By default, this setting is enabled.""" - + def __init__(self, api_client): self._api = api_client + + + + + + + + def get_enable_export_notebook(self) -> EnableExportNotebook: """Get the Notebook and File exporting setting. - + Gets the Notebook and File exporting setting. 
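# Hedged sketch: reading the notebook/file export toggle. Unlike the etag-based
# settings above, this reader takes no arguments; the accessor name
# `w.settings.enable_export_notebook` is an assumption.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
current = w.settings.enable_export_notebook.get_enable_export_notebook()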
- + :returns: :class:`EnableExportNotebook` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/settings/types/enable-export-notebook/names/default", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/enable-export-notebook/names/default' + , headers=headers + ) return EnableExportNotebook.from_dict(res) - def patch_enable_export_notebook( - self, allow_missing: bool, setting: EnableExportNotebook, field_mask: str - ) -> EnableExportNotebook: - """Update the Notebook and File exporting setting. + + + + def patch_enable_export_notebook(self + , allow_missing: bool, setting: EnableExportNotebook, field_mask: str + ) -> EnableExportNotebook: + """Update the Notebook and File exporting setting. + Updates the Notebook and File exporting setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableExportNotebook` @@ -7334,103 +7439,113 @@ def patch_enable_export_notebook( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnableExportNotebook` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/enable-export-notebook/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/enable-export-notebook/names/default', body=body + + , headers=headers + ) return EnableExportNotebook.from_dict(res) - + + class EnableIpAccessListsAPI: """Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or disable restricted access based on IP addresses.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse: - """Delete the account IP access toggle setting. + - Reverts the value of the account IP access toggle setting to default (ON) + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse: + """Delete the account IP access toggle setting. + + Reverts the value of the account IP access toggle setting to default (ON) + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. 
It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAccountIpAccessEnableResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default", - query=query, - headers=headers, - ) - return DeleteAccountIpAccessEnableResponse.from_dict(res) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', query=query + + , headers=headers + ) + return DeleteAccountIpAccessEnableResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable: - """Get the account IP access toggle setting. + + + + def get(self + + , * + , etag: Optional[str] = None) -> AccountIpAccessEnable: + """Get the account IP access toggle setting. + Gets the value of the account IP access toggle setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AccountIpAccessEnable` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', query=query + + , headers=headers + ) return AccountIpAccessEnable.from_dict(res) - def update(self, allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable: - """Update the account IP access toggle setting. + + + + def update(self + , allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str + ) -> AccountIpAccessEnable: + """Update the account IP access toggle setting. + Updates the value of the account IP access toggle setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AccountIpAccessEnable` @@ -7440,66 +7555,69 @@ def update(self, allow_missing: bool, setting: AccountIpAccessEnable, field_mask `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
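# Hedged sketch of enabling IP access list enforcement for the account console.
# The accessor name and the `acct_ip_acl_enable` field are assumptions inferred
# from the endpoint path (`acct_ip_acl_enable`) shown above.
from databricks.sdk import AccountClient
from databricks.sdk.service import settings

a = AccountClient()
a.settings.enable_ip_access_lists.update(
    allow_missing=True,
    setting=settings.AccountIpAccessEnable(
        acct_ip_acl_enable=settings.BooleanMessage(value=True)),
    field_mask="acct_ip_acl_enable.value",
)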
- + :returns: :class:`AccountIpAccessEnable` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', body=body + + , headers=headers + ) return AccountIpAccessEnable.from_dict(res) - + + class EnableNotebookTableClipboardAPI: """Controls whether users can copy tabular data to the clipboard via the UI. By default, this setting is enabled.""" - + def __init__(self, api_client): self._api = api_client + + + + + + + + def get_enable_notebook_table_clipboard(self) -> EnableNotebookTableClipboard: """Get the Results Table Clipboard features setting. - + Gets the Results Table Clipboard features setting. - + :returns: :class:`EnableNotebookTableClipboard` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/enable-notebook-table-clipboard/names/default", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/enable-notebook-table-clipboard/names/default' + , headers=headers + ) return EnableNotebookTableClipboard.from_dict(res) - def patch_enable_notebook_table_clipboard( - self, allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str - ) -> EnableNotebookTableClipboard: - """Update the Results Table Clipboard features setting. + + + + def patch_enable_notebook_table_clipboard(self + , allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str + ) -> EnableNotebookTableClipboard: + """Update the Results Table Clipboard features setting. + Updates the Results Table Clipboard features setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableNotebookTableClipboard` @@ -7509,60 +7627,68 @@ def patch_enable_notebook_table_clipboard( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
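# Hedged sketch of the clipboard toggle update. Per the docstring above, this
# model is eventually consistent, so an immediate read-back may be stale.
# The accessor name and `boolean_val` field are assumptions.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()
w.settings.enable_notebook_table_clipboard.patch_enable_notebook_table_clipboard(
    allow_missing=True,
    setting=settings.EnableNotebookTableClipboard(
        boolean_val=settings.BooleanMessage(value=False)),
    field_mask="boolean_val.value",
)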
- + :returns: :class:`EnableNotebookTableClipboard` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/enable-notebook-table-clipboard/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/enable-notebook-table-clipboard/names/default', body=body + + , headers=headers + ) return EnableNotebookTableClipboard.from_dict(res) - + + class EnableResultsDownloadingAPI: """Controls whether users can download notebook results. By default, this setting is enabled.""" - + def __init__(self, api_client): self._api = api_client + + + + + + + + def get_enable_results_downloading(self) -> EnableResultsDownloading: """Get the Notebook results download setting. - + Gets the Notebook results download setting. - + :returns: :class:`EnableResultsDownloading` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/settings/types/enable-results-downloading/names/default", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/enable-results-downloading/names/default' + , headers=headers + ) return EnableResultsDownloading.from_dict(res) - def patch_enable_results_downloading( - self, allow_missing: bool, setting: EnableResultsDownloading, field_mask: str - ) -> EnableResultsDownloading: - """Update the Notebook results download setting. + + + + def patch_enable_results_downloading(self + , allow_missing: bool, setting: EnableResultsDownloading, field_mask: str + ) -> EnableResultsDownloading: + """Update the Notebook results download setting. + Updates the Notebook results download setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableResultsDownloading` @@ -7572,79 +7698,88 @@ def patch_enable_results_downloading( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
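# Because these patch_* endpoints are eventually consistent, a confirmation read
# may lag behind the write; this illustrative loop polls briefly until the value
# converges. The `boolean_val` field is an assumption.
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for _ in range(5):
    setting = w.settings.enable_results_downloading.get_enable_results_downloading()
    if setting.boolean_val and setting.boolean_val.value is False:
        break  # the write has propagated
    time.sleep(2)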
- + :returns: :class:`EnableResultsDownloading` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/enable-results-downloading/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/enable-results-downloading/names/default', body=body + + , headers=headers + ) return EnableResultsDownloading.from_dict(res) - + + class EnhancedSecurityMonitoringAPI: """Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the compliance security profile is enabled, this is automatically enabled. - + If the compliance security profile is disabled, you can enable or disable this setting and it is not permanent.""" - + def __init__(self, api_client): self._api = api_client + - def get(self, *, etag: Optional[str] = None) -> EnhancedSecurityMonitoringSetting: - """Get the enhanced security monitoring setting. + - Gets the enhanced security monitoring setting. + + + + + def get(self + + , * + , etag: Optional[str] = None) -> EnhancedSecurityMonitoringSetting: + """Get the enhanced security monitoring setting. + + Gets the enhanced security monitoring setting. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`EnhancedSecurityMonitoringSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default', query=query + + , headers=headers + ) return EnhancedSecurityMonitoringSetting.from_dict(res) - def update( - self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str - ) -> EnhancedSecurityMonitoringSetting: - """Update the enhanced security monitoring setting. + + + + def update(self + , allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str + ) -> EnhancedSecurityMonitoringSetting: + """Update the enhanced security monitoring setting. + Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). 
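# Hedged sketch of that flow: carry the fresh etag inside the setting payload on
# PATCH. The nested `enhanced_security_monitoring_workspace.is_enabled` field is
# an assumption; only the setting class name appears in this hunk.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()
current = w.settings.enhanced_security_monitoring.get()
w.settings.enhanced_security_monitoring.update(
    allow_missing=True,
    setting=settings.EnhancedSecurityMonitoringSetting(
        etag=current.etag,
        enhanced_security_monitoring_workspace=settings.EnhancedSecurityMonitoring(
            is_enabled=True)),
    field_mask="enhanced_security_monitoring_workspace.is_enabled",
)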
The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnhancedSecurityMonitoringSetting` @@ -7654,76 +7789,82 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnhancedSecurityMonitoringSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default', body=body + + , headers=headers + ) return EnhancedSecurityMonitoringSetting.from_dict(res) - + + class EsmEnablementAccountAPI: """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable enhanced security monitoring individually for each workspace.""" - + def __init__(self, api_client): self._api = api_client + - def get(self, *, etag: Optional[str] = None) -> EsmEnablementAccountSetting: - """Get the enhanced security monitoring setting for new workspaces. + - Gets the enhanced security monitoring setting for new workspaces. + + + + + def get(self + + , * + , etag: Optional[str] = None) -> EsmEnablementAccountSetting: + """Get the enhanced security monitoring setting for new workspaces. + + Gets the enhanced security monitoring setting for new workspaces. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- + :returns: :class:`EsmEnablementAccountSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default', query=query + + , headers=headers + ) return EsmEnablementAccountSetting.from_dict(res) - def update( - self, allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str - ) -> EsmEnablementAccountSetting: - """Update the enhanced security monitoring setting for new workspaces. + + + + def update(self + , allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str + ) -> EsmEnablementAccountSetting: + """Update the enhanced security monitoring setting for new workspaces. + Updates the value of the enhanced security monitoring setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EsmEnablementAccountSetting` @@ -7733,164 +7874,184 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EsmEnablementAccountSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default', body=body + + , headers=headers + ) return EsmEnablementAccountSetting.from_dict(res) - + + class IpAccessListsAPI: """IP Access List enables admins to configure IP access lists. - + IP access lists affect web application access and REST API access to this workspace only. If the feature is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists (inclusion) and block lists (exclusion). - + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the workspace, the connection is allowed only if the IP address matches an allow list. 
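# Hedged sketch of the allow/block semantics described here: create an allow
# list for an office CIDR range (one CIDR counts as a single value toward the
# 1000-value quota). `w.ip_access_lists` is this service's accessor in the SDK;
# the list has no effect until the feature is enabled via
# :method:workspaceconf/setStatus.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()
created = w.ip_access_lists.create(
    label="office",
    list_type=settings.ListType.ALLOW,
    ip_addresses=["203.0.113.0/24"],
)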
If there are no allow lists for the workspace, all IP addresses are allowed. - + For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the IP access list feature, it can take a few minutes for changes to take effect.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, label: str, list_type: ListType, *, ip_addresses: Optional[List[str]] = None - ) -> CreateIpAccessListResponse: - """Create access list. + - Creates an IP access list for this workspace. + + + + + def create(self + , label: str, list_type: ListType + , * + , ip_addresses: Optional[List[str]] = None) -> CreateIpAccessListResponse: + """Create access list. + + Creates an IP access list for this workspace. + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` """ body = {} - if ip_addresses is not None: - body["ip_addresses"] = [v for v in ip_addresses] - if label is not None: - body["label"] = label - if list_type is not None: - body["list_type"] = list_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/ip-access-lists", body=body, headers=headers) + if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] + if label is not None: body['label'] = label + if list_type is not None: body['list_type'] = list_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/ip-access-lists', body=body + + , headers=headers + ) return CreateIpAccessListResponse.from_dict(res) - def delete(self, ip_access_list_id: str): - """Delete access list. + + + + def delete(self + , ip_access_list_id: str + ): + """Delete access list. + Deletes an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/ip-access-lists/{ip_access_list_id}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/ip-access-lists/{ip_access_list_id}", headers=headers) + + + - def get(self, ip_access_list_id: str) -> FetchIpAccessListResponse: + def get(self + , ip_access_list_id: str + ) -> FetchIpAccessListResponse: """Get access list. 
- + Gets an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`FetchIpAccessListResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/ip-access-lists/{ip_access_list_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/ip-access-lists/{ip_access_list_id}' + + , headers=headers + ) return FetchIpAccessListResponse.from_dict(res) + + + + def list(self) -> Iterator[IpAccessListInfo]: """Get access lists. - + Gets all IP access lists for the specified workspace. - + :returns: Iterator over :class:`IpAccessListInfo` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/ip-access-lists", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/ip-access-lists' + , headers=headers + ) parsed = ListIpAccessListResponse.from_dict(json).ip_access_lists return parsed if parsed is not None else [] + - def replace( - self, - ip_access_list_id: str, - label: str, - list_type: ListType, - enabled: bool, - *, - ip_addresses: Optional[List[str]] = None, - ): - """Replace access list. + + + + def replace(self + , ip_access_list_id: str, label: str, list_type: ListType, enabled: bool + , * + , ip_addresses: Optional[List[str]] = None): + """Replace access list. + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -7899,63 +8060,60 @@ def replace( returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) - - + + """ body = {} - if enabled is not None: - body["enabled"] = enabled - if ip_addresses is not None: - body["ip_addresses"] = [v for v in ip_addresses] - if label is not None: - body["label"] = label - if list_type is not None: - body["list_type"] = list_type.value - headers = { - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/ip-access-lists/{ip_access_list_id}", body=body, headers=headers) - - def update( - self, - ip_access_list_id: str, - *, - enabled: Optional[bool] = None, - ip_addresses: Optional[List[str]] = None, - label: Optional[str] = None, - list_type: Optional[ListType] = None, - ): - """Update access list. 
+ if enabled is not None: body['enabled'] = enabled + if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] + if label is not None: body['label'] = label + if list_type is not None: body['list_type'] = list_type.value + headers = {'Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body + + , headers=headers + ) + - Updates an existing IP access list, specified by its ID. + + + + def update(self + , ip_access_list_id: str + , * + , enabled: Optional[bool] = None, ip_addresses: Optional[List[str]] = None, label: Optional[str] = None, list_type: Optional[ListType] = None): + """Update access list. + + Updates an existing IP access list, specified by its ID. + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -7965,71 +8123,80 @@ def update( Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. - - + + """ body = {} - if enabled is not None: - body["enabled"] = enabled - if ip_addresses is not None: - body["ip_addresses"] = [v for v in ip_addresses] - if label is not None: - body["label"] = label - if list_type is not None: - body["list_type"] = list_type.value - headers = { - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/ip-access-lists/{ip_access_list_id}", body=body, headers=headers) - + if enabled is not None: body['enabled'] = enabled + if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] + if label is not None: body['label'] = label + if list_type is not None: body['list_type'] = list_type.value + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body + + , headers=headers + ) + + + class LlmProxyPartnerPoweredAccountAPI: """Determines if partner powered models are enabled or not for a specific account""" - + def __init__(self, api_client): self._api = api_client + - def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredAccount: - """Get the enable partner powered AI features account setting. + - Gets the enable partner powered AI features account setting. + + + + + def get(self + + , * + , etag: Optional[str] = None) -> LlmProxyPartnerPoweredAccount: + """Get the enable partner powered AI features account setting. + + Gets the enable partner powered AI features account setting. + :param etag: str (optional) etag used for versioning. 
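# Illustrative usage sketch for the IP access list methods above, assuming a
# workspace admin and a configured WorkspaceClient; the label and CIDR values
# are placeholders, and the feature must still be enabled via workspaceconf.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import ListType

w = WorkspaceClient()

# One CIDR counts as a single value toward the combined 1000-value quota.
created = w.ip_access_lists.create(
    label="office-egress",
    list_type=ListType.ALLOW,
    ip_addresses=["203.0.113.0/24"],
)

# PATCH semantics: only the fields passed here are changed.
w.ip_access_lists.update(created.ip_access_list.list_id, enabled=False)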
The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredAccount` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default', query=query + + , headers=headers + ) return LlmProxyPartnerPoweredAccount.from_dict(res) - def update( - self, allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str - ) -> LlmProxyPartnerPoweredAccount: - """Update the enable partner powered AI features account setting. + + + + def update(self + , allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str + ) -> LlmProxyPartnerPoweredAccount: + """Update the enable partner powered AI features account setting. + Updates the enable partner powered AI features account setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredAccount` @@ -8039,78 +8206,81 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`LlmProxyPartnerPoweredAccount` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default', body=body + + , headers=headers + ) return LlmProxyPartnerPoweredAccount.from_dict(res) - -class LlmProxyPartnerPoweredEnforceAPI: + + +class LlmProxyPartnerPoweredEnforceAPI: """Determines if the account-level partner-powered setting value is enforced upon the workspace-level partner-powered setting""" - + def __init__(self, api_client): self._api = api_client + - def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredEnforce: - """Get the enforcement status of partner powered AI features account setting. + - Gets the enforcement status of partner powered AI features account setting. + + + + + def get(self + + , * + , etag: Optional[str] = None) -> LlmProxyPartnerPoweredEnforce: + """Get the enforcement status of partner powered AI features account setting. + + Gets the enforcement status of partner powered AI features account setting. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default', query=query + + , headers=headers + ) return LlmProxyPartnerPoweredEnforce.from_dict(res) - def update( - self, allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str - ) -> LlmProxyPartnerPoweredEnforce: - """Update the enforcement status of partner powered AI features account setting. + + + + def update(self + , allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str + ) -> LlmProxyPartnerPoweredEnforce: + """Update the enforcement status of partner powered AI features account setting. + Updates the enable enforcement status of partner powered AI features account setting. 
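# Illustrative read-modify-write sketch for the partner-powered AI account
# settings above; the accessor path and the `boolean_val` field are
# assumptions, so check the generated dataclasses before relying on them.
from databricks.sdk import AccountClient

a = AccountClient()

setting = a.settings.llm_proxy_partner_powered_account.get()
setting.boolean_val.value = False  # assumed field path on the setting object
a.settings.llm_proxy_partner_powered_account.update(
    allow_missing=True,  # always true for the Settings API, per the docstring
    setting=setting,
    field_mask="boolean_val.value",  # explicit mask rather than "*"
)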
- + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredEnforce` @@ -8120,101 +8290,112 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default', body=body + + , headers=headers + ) return LlmProxyPartnerPoweredEnforce.from_dict(res) - + + class LlmProxyPartnerPoweredWorkspaceAPI: """Determines if partner powered models are enabled or not for a specific workspace""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse: - """Delete the enable partner powered AI features workspace setting. + - Reverts the enable partner powered AI features workspace setting to its default value. + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse: + """Delete the enable partner powered AI features workspace setting. + + Reverts the enable partner powered AI features workspace setting to its default value. + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- + :returns: :class:`DeleteLlmProxyPartnerPoweredWorkspaceResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", "/api/2.0/settings/types/llm_proxy_partner_powered/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/llm_proxy_partner_powered/names/default', query=query + + , headers=headers + ) return DeleteLlmProxyPartnerPoweredWorkspaceResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredWorkspace: - """Get the enable partner powered AI features workspace setting. + + + + def get(self + + , * + , etag: Optional[str] = None) -> LlmProxyPartnerPoweredWorkspace: + """Get the enable partner powered AI features workspace setting. + Gets the enable partner powered AI features workspace setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredWorkspace` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/llm_proxy_partner_powered/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/llm_proxy_partner_powered/names/default', query=query + + , headers=headers + ) return LlmProxyPartnerPoweredWorkspace.from_dict(res) - def update( - self, allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str - ) -> LlmProxyPartnerPoweredWorkspace: - """Update the enable partner powered AI features workspace setting. + + + + def update(self + , allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str + ) -> LlmProxyPartnerPoweredWorkspace: + """Update the enable partner powered AI features workspace setting. + Updates the enable partner powered AI features workspace setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredWorkspace` @@ -8224,298 +8405,314 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`LlmProxyPartnerPoweredWorkspace` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/llm_proxy_partner_powered/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/llm_proxy_partner_powered/names/default', body=body + + , headers=headers + ) return LlmProxyPartnerPoweredWorkspace.from_dict(res) - + + class NetworkConnectivityAPI: """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources. This API provides stable subnets for your workspace so that you can configure your firewalls on your Azure Storage accounts to allow access from Databricks. You can also use the API to provision private endpoints for Databricks to privately connect serverless compute resources to your Azure resources using Azure Private Link. See [configure serverless secure connectivity]. - - [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security - """ - + + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security""" + def __init__(self, api_client): self._api = api_client + - def create_network_connectivity_configuration( - self, network_connectivity_config: CreateNetworkConnectivityConfiguration - ) -> NetworkConnectivityConfiguration: - """Create a network connectivity configuration. + + + + + + + def create_network_connectivity_configuration(self + , network_connectivity_config: CreateNetworkConnectivityConfiguration + ) -> NetworkConnectivityConfiguration: + """Create a network connectivity configuration. + Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when accessing your Azure Storage accounts. You can also use a network connectivity configuration to create Databricks managed private endpoints so that Databricks serverless compute resources privately access your resources. - + **IMPORTANT**: After you create the network connectivity configuration, you must assign one or more workspaces to the new network connectivity configuration. You can share one network connectivity configuration with multiple workspaces from the same Azure region within the same Databricks account. See [configure serverless secure connectivity]. - + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security - + :param network_connectivity_config: :class:`CreateNetworkConnectivityConfiguration` Properties of the new network connectivity configuration. 
- + :returns: :class:`NetworkConnectivityConfiguration` """ body = network_connectivity_config.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs", body=body, headers=headers - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs', body=body + + , headers=headers + ) return NetworkConnectivityConfiguration.from_dict(res) - def create_private_endpoint_rule( - self, network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule - ) -> NccAzurePrivateEndpointRule: - """Create a private endpoint rule. + + + + def create_private_endpoint_rule(self + , network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule + ) -> NccPrivateEndpointRule: + """Create a private endpoint rule. + Create a private endpoint rule for the specified network connectivity config object. Once the object is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure resource. - + **IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to complete the connection. To get the information of the private endpoint created, make a `GET` request on the new private endpoint rule. See [serverless private link]. - + [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule: :class:`CreatePrivateEndpointRule` Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization. - - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` """ body = private_endpoint_rule.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules", - body=body, - headers=headers, - ) - return NccAzurePrivateEndpointRule.from_dict(res) - - def delete_network_connectivity_configuration(self, network_connectivity_config_id: str): - """Delete a network connectivity configuration. + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules', body=body + + , headers=headers + ) + return NccPrivateEndpointRule.from_dict(res) - Deletes a network connectivity configuration. + + + + def delete_network_connectivity_configuration(self + , network_connectivity_config_id: str + ): + """Delete a network connectivity configuration. + + Deletes a network connectivity configuration. + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. 
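# Illustrative sketch of the flow above: create an NCC, then request a private
# endpoint to an Azure resource. The request-object field names (`name`,
# `region`, `resource_id`, `group_id`) are assumptions to verify against the
# generated settings module; the Azure resource ID is a placeholder.
from databricks.sdk import AccountClient
from databricks.sdk.service.settings import (CreateNetworkConnectivityConfiguration,
                                             CreatePrivateEndpointRule)

a = AccountClient()

ncc = a.network_connectivity.create_network_connectivity_configuration(
    network_connectivity_config=CreateNetworkConnectivityConfiguration(
        name="serverless-egress", region="westus"))

# The endpoint remains PENDING until it is approved in the Azure portal.
rule = a.network_connectivity.create_private_endpoint_rule(
    network_connectivity_config_id=ncc.network_connectivity_config_id,
    private_endpoint_rule=CreatePrivateEndpointRule(
        resource_id="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account>",
        group_id="blob"))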
- - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}", - headers=headers, - ) + + + - def delete_private_endpoint_rule( - self, network_connectivity_config_id: str, private_endpoint_rule_id: str - ) -> NccAzurePrivateEndpointRule: + def delete_private_endpoint_rule(self + , network_connectivity_config_id: str, private_endpoint_rule_id: str + ) -> NccPrivateEndpointRule: """Delete a private endpoint rule. - + Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is set to `true` and the private endpoint is not available to your serverless compute resources. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule_id: str Your private endpoint rule ID. - - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}' + + , headers=headers + ) + return NccPrivateEndpointRule.from_dict(res) - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}", - headers=headers, - ) - return NccAzurePrivateEndpointRule.from_dict(res) + + + - def get_network_connectivity_configuration( - self, network_connectivity_config_id: str - ) -> NetworkConnectivityConfiguration: + def get_network_connectivity_configuration(self + , network_connectivity_config_id: str + ) -> NetworkConnectivityConfiguration: """Get a network connectivity configuration. - + Gets a network connectivity configuration. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. - + :returns: :class:`NetworkConnectivityConfiguration` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}' + + , headers=headers + ) return NetworkConnectivityConfiguration.from_dict(res) - def get_private_endpoint_rule( - self, network_connectivity_config_id: str, private_endpoint_rule_id: str - ) -> NccAzurePrivateEndpointRule: - """Gets a private endpoint rule. + + + + def get_private_endpoint_rule(self + , network_connectivity_config_id: str, private_endpoint_rule_id: str + ) -> NccPrivateEndpointRule: + """Gets a private endpoint rule. + Gets the private endpoint rule. + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. 
:param private_endpoint_rule_id: str Your private endpoint rule ID. - - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` """ + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}' + + , headers=headers + ) + return NccPrivateEndpointRule.from_dict(res) - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}", - headers=headers, - ) - return NccAzurePrivateEndpointRule.from_dict(res) + + + - def list_network_connectivity_configurations( - self, *, page_token: Optional[str] = None - ) -> Iterator[NetworkConnectivityConfiguration]: + def list_network_connectivity_configurations(self + + , * + , page_token: Optional[str] = None) -> Iterator[NetworkConnectivityConfiguration]: """List network connectivity configurations. - + Gets an array of network connectivity configurations. - + :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`NetworkConnectivityConfiguration` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs", - query=query, - headers=headers, - ) - if "items" in json: - for v in json["items"]: - yield NetworkConnectivityConfiguration.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_private_endpoint_rules( - self, network_connectivity_config_id: str, *, page_token: Optional[str] = None - ) -> Iterator[NccAzurePrivateEndpointRule]: - """List private endpoint rules. + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs', query=query + + , headers=headers + ) + if 'items' in json: + for v in json['items']: + yield NetworkConnectivityConfiguration.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Gets an array of private endpoint rules. + + + + def list_private_endpoint_rules(self + , network_connectivity_config_id: str + , * + , page_token: Optional[str] = None) -> Iterator[NccPrivateEndpointRule]: + """List private endpoint rules. + + Gets an array of private endpoint rules. + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param page_token: str (optional) Pagination token to go to next page based on previous query. 
- - :returns: Iterator over :class:`NccAzurePrivateEndpointRule` + + :returns: Iterator over :class:`NccPrivateEndpointRule` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules", - query=query, - headers=headers, - ) - if "items" in json: - for v in json["items"]: - yield NccAzurePrivateEndpointRule.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_ncc_azure_private_endpoint_rule_public( - self, - network_connectivity_config_id: str, - private_endpoint_rule_id: str, - private_endpoint_rule: UpdatePrivateEndpointRule, - update_mask: str, - ) -> NccAzurePrivateEndpointRule: - """Update a private endpoint rule. + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules', query=query + + , headers=headers + ) + if 'items' in json: + for v in json['items']: + yield NccPrivateEndpointRule.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update_private_endpoint_rule(self + , network_connectivity_config_id: str, private_endpoint_rule_id: str, private_endpoint_rule: UpdatePrivateEndpointRule, update_mask: str + ) -> NccPrivateEndpointRule: + """Update a private endpoint rule. + Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed resources is allowed to be updated. - + :param network_connectivity_config_id: str - Your Network Connectivity Configuration ID. + The ID of a network connectivity configuration, which is the parent resource of this private + endpoint rule object. :param private_endpoint_rule_id: str Your private endpoint rule ID. :param private_endpoint_rule: :class:`UpdatePrivateEndpointRule` @@ -8527,28 +8724,22 @@ def update_ncc_azure_private_endpoint_rule_public( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. 
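# Illustrative pagination sketch for the listing methods above: the generated
# iterators follow `next_page_token` internally, so callers simply loop. The
# NCC ID is a placeholder.
from databricks.sdk import AccountClient

a = AccountClient()

for rule in a.network_connectivity.list_private_endpoint_rules(
        network_connectivity_config_id="<ncc-id>"):
    # rule_id and connection_state are fields on the returned rule objects.
    print(rule.rule_id, rule.connection_state)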
- - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` """ body = private_endpoint_rule.as_dict() query = {} - if update_mask is not None: - query["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}", - query=query, - body=body, - headers=headers, - ) - return NccAzurePrivateEndpointRule.from_dict(res) - + if update_mask is not None: query['update_mask'] = update_mask + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}', query=query, body=body + + , headers=headers + ) + return NccPrivateEndpointRule.from_dict(res) + + class NetworkPoliciesAPI: """These APIs manage network policies for this account. Network policies control which network destinations can be accessed from the Databricks environment. Each Databricks account includes a default policy named @@ -8556,333 +8747,411 @@ class NetworkPoliciesAPI: assignment, and is automatically associated with each newly created workspace. 'default-policy' is reserved and cannot be deleted, but it can be updated to customize the default network access rules for your account.""" - + def __init__(self, api_client): self._api = api_client + - def create_network_policy_rpc(self, network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy: - """Create a network policy. + + + + + + + def create_network_policy_rpc(self + , network_policy: AccountNetworkPolicy + ) -> AccountNetworkPolicy: + """Create a network policy. + Creates a new network policy to manage which network destinations can be accessed from the Databricks environment. - + :param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` """ body = network_policy.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/accounts/{self._api.account_id}/network-policies", body=body, headers=headers - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/network-policies', body=body + + , headers=headers + ) return AccountNetworkPolicy.from_dict(res) - def delete_network_policy_rpc(self, network_policy_id: str): - """Delete a network policy. + + + + def delete_network_policy_rpc(self + , network_policy_id: str + ): + """Delete a network policy. + Deletes a network policy. Cannot be called on 'default-policy'. - + :param network_policy_id: str The unique identifier of the network policy to delete. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", f"/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}", headers=headers - ) + + + - def get_network_policy_rpc(self, network_policy_id: str) -> AccountNetworkPolicy: + def get_network_policy_rpc(self + , network_policy_id: str + ) -> AccountNetworkPolicy: """Get a network policy. 
- + Gets a network policy. - + :param network_policy_id: str The unique identifier of the network policy to retrieve. - + :returns: :class:`AccountNetworkPolicy` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}' + + , headers=headers + ) return AccountNetworkPolicy.from_dict(res) - def list_network_policies_rpc(self, *, page_token: Optional[str] = None) -> Iterator[AccountNetworkPolicy]: - """List network policies. + + + + def list_network_policies_rpc(self + + , * + , page_token: Optional[str] = None) -> Iterator[AccountNetworkPolicy]: + """List network policies. + Gets an array of network policies. - + :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`AccountNetworkPolicy` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/network-policies", query=query, headers=headers - ) - if "items" in json: - for v in json["items"]: - yield AccountNetworkPolicy.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_network_policy_rpc( - self, network_policy_id: str, network_policy: AccountNetworkPolicy - ) -> AccountNetworkPolicy: - """Update a network policy. + json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-policies', query=query + + , headers=headers + ) + if 'items' in json: + for v in json['items']: + yield AccountNetworkPolicy.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Updates a network policy. This allows you to modify the configuration of a network policy. + + + + def update_network_policy_rpc(self + , network_policy_id: str, network_policy: AccountNetworkPolicy + ) -> AccountNetworkPolicy: + """Update a network policy. + + Updates a network policy. This allows you to modify the configuration of a network policy. + :param network_policy_id: str The unique identifier for the network policy. :param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` """ body = network_policy.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}", - body=body, - headers=headers, - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}', body=body + + , headers=headers + ) return AccountNetworkPolicy.from_dict(res) - + + class NotificationDestinationsAPI: """The notification destinations API lets you programmatically manage a workspace's notification destinations. Notification destinations are used to send notifications for query alerts and jobs to destinations outside of Databricks. 
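# Illustrative sketch for the network policy methods above; 'default-policy'
# is the reserved account-wide policy described in the class docstring, and
# the `a.network_policies` accessor name is an assumption.
from databricks.sdk import AccountClient

a = AccountClient()

default_policy = a.network_policies.get_network_policy_rpc(
    network_policy_id="default-policy")
for policy in a.network_policies.list_network_policies_rpc():
    print(policy.network_policy_id)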
Only workspace admins can create, update, and delete notification destinations.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, *, config: Optional[Config] = None, display_name: Optional[str] = None) -> NotificationDestination: - """Create a notification destination. + - Creates a notification destination. Requires workspace admin permissions. + + + + + def create(self + + , * + , config: Optional[Config] = None, display_name: Optional[str] = None) -> NotificationDestination: + """Create a notification destination. + + Creates a notification destination. Requires workspace admin permissions. + :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. - + :returns: :class:`NotificationDestination` """ body = {} - if config is not None: - body["config"] = config.as_dict() - if display_name is not None: - body["display_name"] = display_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/notification-destinations", body=body, headers=headers) + if config is not None: body['config'] = config.as_dict() + if display_name is not None: body['display_name'] = display_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/notification-destinations', body=body + + , headers=headers + ) return NotificationDestination.from_dict(res) - def delete(self, id: str): - """Delete a notification destination. - - Deletes a notification destination. Requires workspace admin permissions. + + + + def delete(self + , id: str + ): + """Delete a notification destination. + + Deletes a notification destination. Requires workspace admin permissions. + :param id: str - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/notification-destinations/{id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/notification-destinations/{id}", headers=headers) + + + - def get(self, id: str) -> NotificationDestination: + def get(self + , id: str + ) -> NotificationDestination: """Get a notification destination. - + Gets a notification destination. - + :param id: str - + :returns: :class:`NotificationDestination` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/notification-destinations/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/notification-destinations/{id}' + + , headers=headers + ) return NotificationDestination.from_dict(res) - def list( - self, *, page_size: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ListNotificationDestinationsResult]: - """List notification destinations. + + + + def list(self + + , * + , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListNotificationDestinationsResult]: + """List notification destinations. + Lists notification destinations. 
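# Illustrative sketch for the notification destination methods above; Config
# must wrap exactly one nested config, shown here with the email variant
# (nested field names are assumptions to verify in the settings module).
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import Config, EmailConfig

w = WorkspaceClient()

dest = w.notification_destinations.create(
    display_name="on-call email",
    config=Config(email=EmailConfig(addresses=["oncall@example.com"])))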
- + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListNotificationDestinationsResult` """ - + query = {} - if page_size is not None: - query["page_size"] = page_size - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/notification-destinations", query=query, headers=headers) - if "results" in json: - for v in json["results"]: - yield ListNotificationDestinationsResult.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, id: str, *, config: Optional[Config] = None, display_name: Optional[str] = None - ) -> NotificationDestination: - """Update a notification destination. + json = self._api.do('GET','/api/2.0/notification-destinations', query=query + + , headers=headers + ) + if 'results' in json: + for v in json['results']: + yield ListNotificationDestinationsResult.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , id: str + , * + , config: Optional[Config] = None, display_name: Optional[str] = None) -> NotificationDestination: + """Update a notification destination. + Updates a notification destination. Requires workspace admin permissions. At least one field is required in the request body. - + :param id: str UUID identifying notification destination. :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. - + :returns: :class:`NotificationDestination` """ body = {} - if config is not None: - body["config"] = config.as_dict() - if display_name is not None: - body["display_name"] = display_name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/notification-destinations/{id}", body=body, headers=headers) + if config is not None: body['config'] = config.as_dict() + if display_name is not None: body['display_name'] = display_name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/notification-destinations/{id}', body=body + + , headers=headers + ) return NotificationDestination.from_dict(res) - + + class PersonalComputeAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can change the setting to instead let individual workspaces configure access control (DELEGATE). - + There is only one instance of this setting per account. Since this setting has a default value, this setting is present on all accounts even though it's never set on a given account. Deletion reverts the value of the setting back to the default value.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeletePersonalComputeSettingResponse: - """Delete Personal Compute setting. 
+ - Reverts back the Personal Compute setting value to default (ON) + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeletePersonalComputeSettingResponse: + """Delete Personal Compute setting. + + Reverts the Personal Compute setting value back to the default (ON). + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeletePersonalComputeSettingResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default', query=query + + , headers=headers + ) return DeletePersonalComputeSettingResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> PersonalComputeSetting: - """Get Personal Compute setting. + + + + def get(self + + , * + , etag: Optional[str] = None) -> PersonalComputeSetting: + """Get Personal Compute setting. + Gets the value of the Personal Compute setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`PersonalComputeSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default", - query=query, - headers=headers, - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default', query=query + + , headers=headers + ) return PersonalComputeSetting.from_dict(res) - def update(self, allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting: - """Update Personal Compute setting. + + + + def update(self + , allow_missing: bool, setting: PersonalComputeSetting, field_mask: str + ) -> PersonalComputeSetting: + """Update Personal Compute setting. + Updates the value of the Personal Compute setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`PersonalComputeSetting` @@ -8892,34 +9161,27 @@ def update(self, allow_missing: bool, setting: PersonalComputeSetting, field_mas `author.given_name`). 
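# Illustrative sketch for the Personal Compute setting above; the nested
# `personal_compute.value` field path and the enum names are assumptions based
# on the generated settings module.
from databricks.sdk import AccountClient
from databricks.sdk.service.settings import (PersonalComputeMessage,
                                             PersonalComputeMessageEnum,
                                             PersonalComputeSetting)

a = AccountClient()

a.settings.personal_compute.update(
    allow_missing=True,
    setting=PersonalComputeSetting(
        personal_compute=PersonalComputeMessage(value=PersonalComputeMessageEnum.DELEGATE)),
    field_mask="personal_compute.value")

# Deleting the setting reverts the account to the default (ON).
a.settings.personal_compute.delete()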
Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`PersonalComputeSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default", - body=body, - headers=headers, - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default', body=body + + , headers=headers + ) return PersonalComputeSetting.from_dict(res) - + + class RestrictWorkspaceAdminsAPI: """The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With the setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on @@ -8930,77 +9192,95 @@ class RestrictWorkspaceAdminsAPI: service principals they have the Service Principal User role on. They can also only change a job owner to themselves. And they can change the job run_as setting to themselves or to a service principal on which they have the Service Principal User role.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, *, etag: Optional[str] = None) -> DeleteRestrictWorkspaceAdminsSettingResponse: - """Delete the restrict workspace admins setting. + + + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteRestrictWorkspaceAdminsSettingResponse: + """Delete the restrict workspace admins setting. + Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- + :returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", "/api/2.0/settings/types/restrict_workspace_admins/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/restrict_workspace_admins/names/default', query=query + + , headers=headers + ) return DeleteRestrictWorkspaceAdminsSettingResponse.from_dict(res) - def get(self, *, etag: Optional[str] = None) -> RestrictWorkspaceAdminsSetting: - """Get the restrict workspace admins setting. + + + + def get(self + + , * + , etag: Optional[str] = None) -> RestrictWorkspaceAdminsSetting: + """Get the restrict workspace admins setting. + Gets the restrict workspace admins setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`RestrictWorkspaceAdminsSetting` """ - + query = {} - if etag is not None: - query["etag"] = etag - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", "/api/2.0/settings/types/restrict_workspace_admins/names/default", query=query, headers=headers - ) + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/restrict_workspace_admins/names/default', query=query + + , headers=headers + ) return RestrictWorkspaceAdminsSetting.from_dict(res) - def update( - self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str - ) -> RestrictWorkspaceAdminsSetting: - """Update the restrict workspace admins setting. + + + + def update(self + , allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str + ) -> RestrictWorkspaceAdminsSetting: + """Update the restrict workspace admins setting. + Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`RestrictWorkspaceAdminsSetting` @@ -9010,41 +9290,38 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
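# Illustrative etag retry sketch for the setting above: read for a fresh etag,
# PATCH, and retry once on a 409 conflict. The nested message and status names
# are assumptions; ResourceConflict is the SDK's mapping of HTTP 409.
from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import ResourceConflict
from databricks.sdk.service.settings import (RestrictWorkspaceAdminsMessage,
                                             RestrictWorkspaceAdminsMessageStatus,
                                             RestrictWorkspaceAdminsSetting)

w = WorkspaceClient()

def restrict_admins() -> RestrictWorkspaceAdminsSetting:
    setting = RestrictWorkspaceAdminsSetting(
        restrict_workspace_admins=RestrictWorkspaceAdminsMessage(
            status=RestrictWorkspaceAdminsMessageStatus.RESTRICT_TOKENS_AND_JOB_RUN_AS),
        etag=w.settings.restrict_workspace_admins.get().etag)
    try:
        return w.settings.restrict_workspace_admins.update(
            allow_missing=True, setting=setting,
            field_mask="restrict_workspace_admins.status")
    except ResourceConflict:
        # 409: a concurrent write happened; re-read for a fresh etag and retry.
        setting.etag = w.settings.restrict_workspace_admins.get().etag
        return w.settings.restrict_workspace_admins.update(
            allow_missing=True, setting=setting,
            field_mask="restrict_workspace_admins.status")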
- + :returns: :class:`RestrictWorkspaceAdminsSetting` """ body = {} - if allow_missing is not None: - body["allow_missing"] = allow_missing - if field_mask is not None: - body["field_mask"] = field_mask - if setting is not None: - body["setting"] = setting.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", "/api/2.0/settings/types/restrict_workspace_admins/names/default", body=body, headers=headers - ) + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/restrict_workspace_admins/names/default', body=body + + , headers=headers + ) return RestrictWorkspaceAdminsSetting.from_dict(res) - + + class SettingsAPI: """Workspace Settings API allows users to manage settings at the workspace level.""" - + def __init__(self, api_client): self._api = api_client - + self._aibi_dashboard_embedding_access_policy = AibiDashboardEmbeddingAccessPolicyAPI(self._api) self._aibi_dashboard_embedding_approved_domains = AibiDashboardEmbeddingApprovedDomainsAPI(self._api) self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api) self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api) + self._dashboard_email_subscriptions = DashboardEmailSubscriptionsAPI(self._api) self._default_namespace = DefaultNamespaceAPI(self._api) self._disable_legacy_access = DisableLegacyAccessAPI(self._api) self._disable_legacy_dbfs = DisableLegacyDbfsAPI(self._api) @@ -9054,421 +9331,648 @@ def __init__(self, api_client): self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api) self._llm_proxy_partner_powered_workspace = LlmProxyPartnerPoweredWorkspaceAPI(self._api) self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api) + self._sql_results_download = SqlResultsDownloadAPI(self._api) + @property def aibi_dashboard_embedding_access_policy(self) -> AibiDashboardEmbeddingAccessPolicyAPI: """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level.""" return self._aibi_dashboard_embedding_access_policy - + @property def aibi_dashboard_embedding_approved_domains(self) -> AibiDashboardEmbeddingApprovedDomainsAPI: """Controls the list of domains approved to host the embedded AI/BI dashboards.""" return self._aibi_dashboard_embedding_approved_domains - + @property def automatic_cluster_update(self) -> AutomaticClusterUpdateAPI: """Controls whether automatic cluster update is enabled for the current workspace.""" return self._automatic_cluster_update - + @property def compliance_security_profile(self) -> ComplianceSecurityProfileAPI: """Controls whether to enable the compliance security profile for the current workspace.""" return self._compliance_security_profile - + + @property + def dashboard_email_subscriptions(self) -> DashboardEmailSubscriptionsAPI: + """Controls whether schedules or workload tasks for refreshing AI/BI Dashboards in the workspace can send subscription emails containing PDFs and/or images of the dashboard.""" + return self._dashboard_email_subscriptions + @property def default_namespace(self) -> DefaultNamespaceAPI: """The default namespace setting API allows users to configure the default namespace for a Databricks workspace.""" return 
self._default_namespace - + @property def disable_legacy_access(self) -> DisableLegacyAccessAPI: """'Disabling legacy access' has the following impacts: 1.""" return self._disable_legacy_access - + @property def disable_legacy_dbfs(self) -> DisableLegacyDbfsAPI: """Disabling legacy DBFS has the following implications: 1.""" return self._disable_legacy_dbfs - + @property def enable_export_notebook(self) -> EnableExportNotebookAPI: """Controls whether users can export notebooks and files from the Workspace UI.""" return self._enable_export_notebook - + @property def enable_notebook_table_clipboard(self) -> EnableNotebookTableClipboardAPI: """Controls whether users can copy tabular data to the clipboard via the UI.""" return self._enable_notebook_table_clipboard - + @property def enable_results_downloading(self) -> EnableResultsDownloadingAPI: """Controls whether users can download notebook results.""" return self._enable_results_downloading - + @property def enhanced_security_monitoring(self) -> EnhancedSecurityMonitoringAPI: """Controls whether enhanced security monitoring is enabled for the current workspace.""" return self._enhanced_security_monitoring - + @property def llm_proxy_partner_powered_workspace(self) -> LlmProxyPartnerPoweredWorkspaceAPI: """Determines if partner powered models are enabled or not for a specific workspace.""" return self._llm_proxy_partner_powered_workspace - + @property def restrict_workspace_admins(self) -> RestrictWorkspaceAdminsAPI: """The Restrict Workspace Admins setting lets you control the capabilities of workspace admins.""" return self._restrict_workspace_admins + + @property + def sql_results_download(self) -> SqlResultsDownloadAPI: + """Controls whether users within the workspace are allowed to download results from the SQL Editor and AI/BI Dashboards UIs.""" + return self._sql_results_download + + + + + +class SqlResultsDownloadAPI: + """Controls whether users within the workspace are allowed to download results from the SQL Editor and AI/BI + Dashboards UIs. By default, this setting is enabled (set to `true`)""" + + def __init__(self, api_client): + self._api = api_client + + + + + + + + + + def delete(self + + , * + , etag: Optional[str] = None) -> DeleteSqlResultsDownloadResponse: + """Delete the SQL Results Download setting. + + Reverts the SQL Results Download setting to its default value. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteSqlResultsDownloadResponse` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE','/api/2.0/settings/types/sql_results_download/names/default', query=query + + , headers=headers + ) + return DeleteSqlResultsDownloadResponse.from_dict(res) + + + + + + def get(self + + , * + , etag: Optional[str] = None) -> SqlResultsDownload: + """Get the SQL Results Download setting. + + Gets the SQL Results Download setting. + + :param etag: str (optional) + etag used for versioning. 
The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`SqlResultsDownload` + """ + + query = {} + if etag is not None: query['etag'] = etag + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/settings/types/sql_results_download/names/default', query=query + + , headers=headers + ) + return SqlResultsDownload.from_dict(res) + + + + + def update(self + , allow_missing: bool, setting: SqlResultsDownload, field_mask: str + ) -> SqlResultsDownload: + """Update the SQL Results Download setting. + + Updates the SQL Results Download setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`SqlResultsDownload` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`SqlResultsDownload` + """ + body = {} + if allow_missing is not None: body['allow_missing'] = allow_missing + if field_mask is not None: body['field_mask'] = field_mask + if setting is not None: body['setting'] = setting.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/settings/types/sql_results_download/names/default', body=body + + , headers=headers + ) + return SqlResultsDownload.from_dict(res) + + class TokenManagementAPI: """Enables administrators to get all tokens and delete tokens for other users. Admins can either get every token, get a specific token by ID, or get all tokens for a particular user.""" - + def __init__(self, api_client): self._api = api_client + - def create_obo_token( - self, application_id: str, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None - ) -> CreateOboTokenResponse: - """Create on-behalf token. + - Creates a token on behalf of a service principal. + + + + + def create_obo_token(self + , application_id: str + , * + , comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateOboTokenResponse: + """Create on-behalf token. + + Creates a token on behalf of a service principal. + :param application_id: str Application ID of the service principal. :param comment: str (optional) Comment that describes the purpose of the token. :param lifetime_seconds: int (optional) The number of seconds before the token expires. 
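# A sketch (not generated code) of disabling the SQL Results Download setting via
# the wrapper above. The boolean_val.value shape is an assumption based on the
# other boolean settings in this module, not something this hunk shows.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
setting = w.settings.sql_results_download.get()
setting.boolean_val.value = False  # assumed message shape
w.settings.sql_results_download.update(
    allow_missing=True,
    setting=setting,
    field_mask="boolean_val.value",  # assumed field path
)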
- + :returns: :class:`CreateOboTokenResponse` """ body = {} - if application_id is not None: - body["application_id"] = application_id - if comment is not None: - body["comment"] = comment - if lifetime_seconds is not None: - body["lifetime_seconds"] = lifetime_seconds - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/token-management/on-behalf-of/tokens", body=body, headers=headers) + if application_id is not None: body['application_id'] = application_id + if comment is not None: body['comment'] = comment + if lifetime_seconds is not None: body['lifetime_seconds'] = lifetime_seconds + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/token-management/on-behalf-of/tokens', body=body + + , headers=headers + ) return CreateOboTokenResponse.from_dict(res) - def delete(self, token_id: str): - """Delete a token. + + + + def delete(self + , token_id: str + ): + """Delete a token. + Deletes a token, specified by its ID. - + :param token_id: str The ID of the token to revoke. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.0/token-management/tokens/{token_id}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.0/token-management/tokens/{token_id}", headers=headers) + + + - def get(self, token_id: str) -> GetTokenResponse: + def get(self + , token_id: str + ) -> GetTokenResponse: """Get token info. - + Gets information about a token, specified by its ID. - + :param token_id: str The ID of the token to get. - + :returns: :class:`GetTokenResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/token-management/tokens/{token_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/token-management/tokens/{token_id}' + + , headers=headers + ) return GetTokenResponse.from_dict(res) + + + + def get_permission_levels(self) -> GetTokenPermissionLevelsResponse: """Get token permission levels. - + Gets the permission levels that a user can have on an object. - + :returns: :class:`GetTokenPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/permissions/authorization/tokens/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/permissions/authorization/tokens/permissionLevels' + , headers=headers + ) return GetTokenPermissionLevelsResponse.from_dict(res) + + + + def get_permissions(self) -> TokenPermissions: """Get token permissions. - + Gets the permissions of all tokens. Tokens can inherit permissions from their root object. - + :returns: :class:`TokenPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/permissions/authorization/tokens", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/permissions/authorization/tokens' + , headers=headers + ) return TokenPermissions.from_dict(res) - def list( - self, *, created_by_id: Optional[int] = None, created_by_username: Optional[str] = None - ) -> Iterator[TokenInfo]: - """List all tokens. + + + + def list(self + + , * + , created_by_id: Optional[int] = None, created_by_username: Optional[str] = None) -> Iterator[TokenInfo]: + """List all tokens. + Lists all tokens associated with the specified workspace or user. 
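# Usage sketch (not generated code) for the token-management calls above; the
# application ID and username are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
obo = w.token_management.create_obo_token(
    application_id="00000000-0000-0000-0000-000000000000",  # placeholder SP id
    comment="CI pipeline token",
    lifetime_seconds=3600,
)
for info in w.token_management.list(created_by_username="ci-bot@example.com"):
    print(info.token_id, info.comment)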
- + :param created_by_id: int (optional) User ID of the user that created the token. :param created_by_username: str (optional) Username of the user that created the token. - + :returns: Iterator over :class:`TokenInfo` """ - + query = {} - if created_by_id is not None: - query["created_by_id"] = created_by_id - if created_by_username is not None: - query["created_by_username"] = created_by_username - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/token-management/tokens", query=query, headers=headers) + if created_by_id is not None: query['created_by_id'] = created_by_id + if created_by_username is not None: query['created_by_username'] = created_by_username + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/token-management/tokens', query=query + + , headers=headers + ) parsed = ListTokensResponse.from_dict(json).token_infos return parsed if parsed is not None else [] + - def set_permissions( - self, *, access_control_list: Optional[List[TokenAccessControlRequest]] = None - ) -> TokenPermissions: - """Set token permissions. + + + + def set_permissions(self + + , * + , access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions: + """Set token permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", "/api/2.0/permissions/authorization/tokens", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT','/api/2.0/permissions/authorization/tokens', body=body + + , headers=headers + ) return TokenPermissions.from_dict(res) - def update_permissions( - self, *, access_control_list: Optional[List[TokenAccessControlRequest]] = None - ) -> TokenPermissions: - """Update token permissions. + + + + def update_permissions(self + + , * + , access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions: + """Update token permissions. + Updates the permissions on all tokens. Tokens can inherit permissions from their root object. 
- + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", "/api/2.0/permissions/authorization/tokens", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH','/api/2.0/permissions/authorization/tokens', body=body + + , headers=headers + ) return TokenPermissions.from_dict(res) - + + class TokensAPI: """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateTokenResponse: - """Create a user token. + + + + + + + def create(self + + , * + , comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateTokenResponse: + """Create a user token. + Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns an error **QUOTA_EXCEEDED**. - + :param comment: str (optional) Optional description to attach to the token. :param lifetime_seconds: int (optional) The lifetime of the token, in seconds. - + If the lifetime is not specified, this token remains valid indefinitely. - + :returns: :class:`CreateTokenResponse` """ body = {} - if comment is not None: - body["comment"] = comment - if lifetime_seconds is not None: - body["lifetime_seconds"] = lifetime_seconds - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/token/create", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if lifetime_seconds is not None: body['lifetime_seconds'] = lifetime_seconds + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/token/create', body=body + + , headers=headers + ) return CreateTokenResponse.from_dict(res) - def delete(self, token_id: str): - """Revoke token. + + + + def delete(self + , token_id: str + ): + """Revoke token. + Revokes an access token. - + If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. - + :param token_id: str The ID of the token to be revoked. - - + + """ body = {} - if token_id is not None: - body["token_id"] = token_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if token_id is not None: body['token_id'] = token_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/token/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/token/delete", body=body, headers=headers) + + + def list(self) -> Iterator[PublicTokenInfo]: """List tokens. - + Lists all the valid tokens for a user-workspace pair. 
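# Personal-access-token lifecycle sketch (not generated code) for the TokensAPI
# wrapper above: create a short-lived token, enumerate, revoke. The token_info
# shape on the create response is assumed from the generated dataclasses, not
# shown in this hunk.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
created = w.tokens.create(comment="temporary token", lifetime_seconds=900)
for t in w.tokens.list():
    print(t.token_id, t.comment)
w.tokens.delete(token_id=created.token_info.token_id)  # assumed response shape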
- + :returns: Iterator over :class:`PublicTokenInfo` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/token/list", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/token/list' + , headers=headers + ) parsed = ListPublicTokensResponse.from_dict(json).token_infos return parsed if parsed is not None else [] + - + + class WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" - + def __init__(self, api_client): self._api = api_client + - def get_status(self, keys: str) -> WorkspaceConf: - """Check configuration status. + - Gets the configuration status for a workspace. + - :param keys: str + + + def get_status(self + , keys: str + ) -> WorkspaceConf: + """Check configuration status. + + Gets the configuration status for a workspace. + + :param keys: str + :returns: Dict[str,str] """ - + query = {} - if keys is not None: - query["keys"] = keys - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/workspace-conf", query=query, headers=headers) + if keys is not None: query['keys'] = keys + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/workspace-conf', query=query + + , headers=headers + ) return res - def set_status(self, contents: Dict[str, str]): - """Enable/disable features. + + + + def set_status(self, contents: Dict[str,str] + + ): + """Enable/disable features. + Sets the configuration status for a workspace, including enabling or disabling it. - - - + + + """ + + headers = {'Content-Type': 'application/json',} + + self._api.do('PATCH','/api/2.0/workspace-conf', body=contents + + , headers=headers + ) + - headers = { - "Content-Type": "application/json", - } - - self._api.do("PATCH", "/api/2.0/workspace-conf", body=contents, headers=headers) - - + + class WorkspaceNetworkConfigurationAPI: - """These APIs allow configuration of network settings for Databricks workspaces. Each workspace is always - associated with exactly one network policy that controls which network destinations can be accessed from - the Databricks environment. By default, workspaces are associated with the 'default-policy' network - policy. You cannot create or delete a workspace's network configuration, only update it to associate the - workspace with a different policy.""" - + """These APIs allow configuration of network settings for Databricks workspaces by selecting which network + policy to associate with the workspace. Each workspace is always associated with exactly one network + policy that controls which network destinations can be accessed from the Databricks environment. By + default, workspaces are associated with the 'default-policy' network policy. You cannot create or delete a + workspace's network option, only update it to associate the workspace with a different policy""" + def __init__(self, api_client): self._api = api_client + + + - def get_workspace_network_option_rpc(self, workspace_id: int) -> WorkspaceNetworkOption: - """Get workspace network configuration. + - Gets the network configuration for a workspace. Every workspace has exactly one network policy - binding, with 'default-policy' used if no explicit assignment exists. + + + def get_workspace_network_option_rpc(self + , workspace_id: int + ) -> WorkspaceNetworkOption: + """Get workspace network option. + + Gets the network option for a workspace. 
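# WorkspaceConfAPI is deliberately loose-typed: get_status takes a comma-separated
# key list and returns a plain str -> str map, and set_status PATCHes raw contents.
# A sketch (not generated code); enableIpAccessLists is one commonly documented
# key, used here only as an example.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
status = w.workspace_conf.get_status(keys="enableIpAccessLists")
print(status)  # e.g. {'enableIpAccessLists': 'false'}
w.workspace_conf.set_status({"enableIpAccessLists": "true"})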
Every workspace has exactly one network policy binding, with + 'default-policy' used if no explicit assignment exists. + :param workspace_id: int The workspace ID. - + :returns: :class:`WorkspaceNetworkOption` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network' + + , headers=headers + ) return WorkspaceNetworkOption.from_dict(res) - def update_workspace_network_option_rpc( - self, workspace_id: int, workspace_network_option: WorkspaceNetworkOption - ) -> WorkspaceNetworkOption: - """Update workspace network configuration. - - Updates the network configuration for a workspace. This operation associates the workspace with the - specified network policy. To revert to the default policy, specify 'default-policy' as the - network_policy_id. + + + + def update_workspace_network_option_rpc(self + , workspace_id: int, workspace_network_option: WorkspaceNetworkOption + ) -> WorkspaceNetworkOption: + """Update workspace network option. + + Updates the network option for a workspace. This operation associates the workspace with the specified + network policy. To revert to the default policy, specify 'default-policy' as the network_policy_id. + :param workspace_id: int The workspace ID. :param workspace_network_option: :class:`WorkspaceNetworkOption` - + :returns: :class:`WorkspaceNetworkOption` """ body = workspace_network_option.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", - f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network", - body=body, - headers=headers, - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network', body=body + + , headers=headers + ) return WorkspaceNetworkOption.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 09bf080f5..eb2e9a734 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -1,244 +1,218 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
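# The workspace network option calls above are account-level. A sketch (not
# generated code) of re-pointing a workspace at the default policy, assuming the
# accessor is exposed on AccountClient as workspace_network_configuration; the
# workspace ID is a placeholder.
from databricks.sdk import AccountClient
from databricks.sdk.service.settings import WorkspaceNetworkOption

a = AccountClient()
a.workspace_network_configuration.update_workspace_network_option_rpc(
    workspace_id=1234567890,
    workspace_network_option=WorkspaceNetworkOption(
        workspace_id=1234567890,
        network_policy_id="default-policy",
    ),
)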
from __future__ import annotations - -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') from databricks.sdk.service import catalog # all definitions in this file are in alphabetical order - class AuthenticationType(Enum): """The delta sharing authentication type.""" - - DATABRICKS = "DATABRICKS" - OAUTH_CLIENT_CREDENTIALS = "OAUTH_CLIENT_CREDENTIALS" - TOKEN = "TOKEN" - + + DATABRICKS = 'DATABRICKS' + OAUTH_CLIENT_CREDENTIALS = 'OAUTH_CLIENT_CREDENTIALS' + OIDC_FEDERATION = 'OIDC_FEDERATION' + TOKEN = 'TOKEN' class ColumnTypeName(Enum): """UC supported column types Copied from - https://src.dev.databricks.com/databricks/universe@23a85902bb58695ab9293adc9f327b0714b55e72/-/blob/managed-catalog/api/messages/table.proto?L68 - """ - - ARRAY = "ARRAY" - BINARY = "BINARY" - BOOLEAN = "BOOLEAN" - BYTE = "BYTE" - CHAR = "CHAR" - DATE = "DATE" - DECIMAL = "DECIMAL" - DOUBLE = "DOUBLE" - FLOAT = "FLOAT" - INT = "INT" - INTERVAL = "INTERVAL" - LONG = "LONG" - MAP = "MAP" - NULL = "NULL" - SHORT = "SHORT" - STRING = "STRING" - STRUCT = "STRUCT" - TABLE_TYPE = "TABLE_TYPE" - TIMESTAMP = "TIMESTAMP" - TIMESTAMP_NTZ = "TIMESTAMP_NTZ" - USER_DEFINED_TYPE = "USER_DEFINED_TYPE" - VARIANT = "VARIANT" + https://src.dev.databricks.com/databricks/universe@23a85902bb58695ab9293adc9f327b0714b55e72/-/blob/managed-catalog/api/messages/table.proto?L68""" + + ARRAY = 'ARRAY' + BINARY = 'BINARY' + BOOLEAN = 'BOOLEAN' + BYTE = 'BYTE' + CHAR = 'CHAR' + DATE = 'DATE' + DECIMAL = 'DECIMAL' + DOUBLE = 'DOUBLE' + FLOAT = 'FLOAT' + INT = 'INT' + INTERVAL = 'INTERVAL' + LONG = 'LONG' + MAP = 'MAP' + NULL = 'NULL' + SHORT = 'SHORT' + STRING = 'STRING' + STRUCT = 'STRUCT' + TABLE_TYPE = 'TABLE_TYPE' + TIMESTAMP = 'TIMESTAMP' + TIMESTAMP_NTZ = 'TIMESTAMP_NTZ' + USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' + VARIANT = 'VARIANT' + + @dataclass class CreateProvider: name: str """The name of the Provider.""" - + authentication_type: AuthenticationType """The delta sharing authentication type.""" - + comment: Optional[str] = None """Description about the provider.""" - + recipient_profile_str: Optional[str] = None """This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - + def as_dict(self) -> dict: """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if 
self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str return body def as_shallow_dict(self) -> dict: """Serializes the CreateProvider into a shallow dictionary of its immediate attributes.""" body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateProvider: """Deserializes the CreateProvider from a dictionary.""" - return cls( - authentication_type=_enum(d, "authentication_type", AuthenticationType), - comment=d.get("comment", None), - name=d.get("name", None), - recipient_profile_str=d.get("recipient_profile_str", None), - ) + return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), comment=d.get('comment', None), name=d.get('name', None), recipient_profile_str=d.get('recipient_profile_str', None)) + + @dataclass class CreateRecipient: name: str """Name of Recipient.""" - + authentication_type: AuthenticationType """The delta sharing authentication type.""" - + comment: Optional[str] = None """Description about the recipient.""" - + data_recipient_global_metastore_id: Optional[str] = None """The global Unity Catalog metastore id provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token, in epoch milliseconds.""" - + ip_access_list: Optional[IpAccessList] = None """IP Access List""" - + owner: Optional[str] = None """Username of the recipient owner.""" - + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None """Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write.""" - + sharing_code: Optional[str] = None """The one-time sharing code provided by the data recipient. 
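# The serialization contract for these request dataclasses, as the generated code
# above shows: as_dict() emits enum values as strings and drops None fields, and
# from_dict() reverses it. A quick round-trip check (not generated code, no API
# call involved):
from databricks.sdk.service.sharing import AuthenticationType, CreateProvider

req = CreateProvider(name="acme", authentication_type=AuthenticationType.TOKEN,
                     comment="partner feed")
assert req.as_dict() == {"authentication_type": "TOKEN",
                         "comment": "partner feed",
                         "name": "acme"}
assert (CreateProvider.from_dict(req.as_dict()).authentication_type
        is AuthenticationType.TOKEN)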
This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + def as_dict(self) -> dict: """Serializes the CreateRecipient into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.data_recipient_global_metastore_id is not None: - body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs.as_dict() - if self.sharing_code is not None: - body["sharing_code"] = self.sharing_code + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value + if self.comment is not None: body['comment'] = self.comment + if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict() + if self.sharing_code is not None: body['sharing_code'] = self.sharing_code return body def as_shallow_dict(self) -> dict: """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes.""" body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type - if self.comment is not None: - body["comment"] = self.comment - if self.data_recipient_global_metastore_id is not None: - body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs - if self.sharing_code is not None: - body["sharing_code"] = self.sharing_code + if self.authentication_type is not None: body['authentication_type'] = self.authentication_type + if self.comment is not None: body['comment'] = self.comment + if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + if self.name is not None: body['name'] = self.name + if self.owner is not None: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.sharing_code is not None: body['sharing_code'] = self.sharing_code return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRecipient: """Deserializes the CreateRecipient from a dictionary.""" - return cls( - 
authentication_type=_enum(d, "authentication_type", AuthenticationType), - comment=d.get("comment", None), - data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None), - expiration_time=d.get("expiration_time", None), - ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), - name=d.get("name", None), - owner=d.get("owner", None), - properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), - sharing_code=d.get("sharing_code", None), - ) + return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), comment=d.get('comment', None), data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), owner=d.get('owner', None), properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs), sharing_code=d.get('sharing_code', None)) + + @dataclass class CreateShare: name: str """Name of the share.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + storage_root: Optional[str] = None """Storage root URL for the share.""" - + def as_dict(self) -> dict: """Serializes the CreateShare into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.storage_root is not None: body['storage_root'] = self.storage_root return body def as_shallow_dict(self) -> dict: """Serializes the CreateShare into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.storage_root is not None: body['storage_root'] = self.storage_root return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateShare: """Deserializes the CreateShare from a dictionary.""" - return cls(comment=d.get("comment", None), name=d.get("name", None), storage_root=d.get("storage_root", None)) + return cls(comment=d.get('comment', None), name=d.get('name', None), storage_root=d.get('storage_root', None)) + + + + + + + + + + + @dataclass @@ -257,505 +231,405 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + @dataclass class DeltaSharingDependency: """Represents a UC dependency.""" - + function: Optional[DeltaSharingFunctionDependency] = None """A Function in UC as a dependency.""" - + table: Optional[DeltaSharingTableDependency] = None """A Table in UC as a dependency.""" - + def as_dict(self) -> dict: """Serializes the DeltaSharingDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function: - body["function"] = self.function.as_dict() - if self.table: - body["table"] = self.table.as_dict() + if self.function: body['function'] = self.function.as_dict() + if self.table: body['table'] = self.table.as_dict() return body def 
as_shallow_dict(self) -> dict: """Serializes the DeltaSharingDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.function: - body["function"] = self.function - if self.table: - body["table"] = self.table + if self.function: body['function'] = self.function + if self.table: body['table'] = self.table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingDependency: """Deserializes the DeltaSharingDependency from a dictionary.""" - return cls( - function=_from_dict(d, "function", DeltaSharingFunctionDependency), - table=_from_dict(d, "table", DeltaSharingTableDependency), - ) + return cls(function=_from_dict(d, 'function', DeltaSharingFunctionDependency), table=_from_dict(d, 'table', DeltaSharingTableDependency)) + + @dataclass class DeltaSharingDependencyList: """Represents a list of dependencies.""" - + dependencies: Optional[List[DeltaSharingDependency]] = None """An array of Dependency.""" - + def as_dict(self) -> dict: """Serializes the DeltaSharingDependencyList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dependencies: - body["dependencies"] = [v.as_dict() for v in self.dependencies] + if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies] return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSharingDependencyList into a shallow dictionary of its immediate attributes.""" body = {} - if self.dependencies: - body["dependencies"] = self.dependencies + if self.dependencies: body['dependencies'] = self.dependencies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingDependencyList: """Deserializes the DeltaSharingDependencyList from a dictionary.""" - return cls(dependencies=_repeated_dict(d, "dependencies", DeltaSharingDependency)) + return cls(dependencies=_repeated_dict(d, 'dependencies', DeltaSharingDependency)) + + @dataclass class DeltaSharingFunction: aliases: Optional[List[RegisteredModelAlias]] = None """The aliases of the registered model.""" - + comment: Optional[str] = None """The comment of the function.""" - + data_type: Optional[ColumnTypeName] = None """The data type of the function.""" - + dependency_list: Optional[DeltaSharingDependencyList] = None """The dependency list of the function.""" - + full_data_type: Optional[str] = None """The full data type of the function.""" - + id: Optional[str] = None """The id of the function.""" - + input_params: Optional[FunctionParameterInfos] = None """The function parameter information.""" - + name: Optional[str] = None """The name of the function.""" - + properties: Optional[str] = None """The properties of the function.""" - + routine_definition: Optional[str] = None """The routine definition of the function.""" - + schema: Optional[str] = None """The name of the schema that the function belongs to.""" - + securable_kind: Optional[SharedSecurableKind] = None """The securable kind of the function.""" - + share: Optional[str] = None """The name of the share that the function belongs to.""" - + share_id: Optional[str] = None """The id of the share that the function belongs to.""" - + storage_location: Optional[str] = None """The storage location of the function.""" - + tags: Optional[List[catalog.TagKeyValue]] = None """The tags of the function.""" - + def as_dict(self) -> dict: """Serializes the DeltaSharingFunction into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aliases: - body["aliases"] = [v.as_dict() for v in self.aliases] - if
self.comment is not None: - body["comment"] = self.comment - if self.data_type is not None: - body["data_type"] = self.data_type.value - if self.dependency_list: - body["dependency_list"] = self.dependency_list.as_dict() - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.id is not None: - body["id"] = self.id - if self.input_params: - body["input_params"] = self.input_params.as_dict() - if self.name is not None: - body["name"] = self.name - if self.properties is not None: - body["properties"] = self.properties - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.schema is not None: - body["schema"] = self.schema - if self.securable_kind is not None: - body["securable_kind"] = self.securable_kind.value - if self.share is not None: - body["share"] = self.share - if self.share_id is not None: - body["share_id"] = self.share_id - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] + if self.comment is not None: body['comment'] = self.comment + if self.data_type is not None: body['data_type'] = self.data_type.value + if self.dependency_list: body['dependency_list'] = self.dependency_list.as_dict() + if self.full_data_type is not None: body['full_data_type'] = self.full_data_type + if self.id is not None: body['id'] = self.id + if self.input_params: body['input_params'] = self.input_params.as_dict() + if self.name is not None: body['name'] = self.name + if self.properties is not None: body['properties'] = self.properties + if self.routine_definition is not None: body['routine_definition'] = self.routine_definition + if self.schema is not None: body['schema'] = self.schema + if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value + if self.share is not None: body['share'] = self.share + if self.share_id is not None: body['share_id'] = self.share_id + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSharingFunction into a shallow dictionary of its immediate attributes.""" body = {} - if self.aliases: - body["aliases"] = self.aliases - if self.comment is not None: - body["comment"] = self.comment - if self.data_type is not None: - body["data_type"] = self.data_type - if self.dependency_list: - body["dependency_list"] = self.dependency_list - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.id is not None: - body["id"] = self.id - if self.input_params: - body["input_params"] = self.input_params - if self.name is not None: - body["name"] = self.name - if self.properties is not None: - body["properties"] = self.properties - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.schema is not None: - body["schema"] = self.schema - if self.securable_kind is not None: - body["securable_kind"] = self.securable_kind - if self.share is not None: - body["share"] = self.share - if self.share_id is not None: - body["share_id"] = self.share_id - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.tags: - body["tags"] = self.tags + if self.aliases: body['aliases'] = self.aliases + if self.comment is not 
None: body['comment'] = self.comment + if self.data_type is not None: body['data_type'] = self.data_type + if self.dependency_list: body['dependency_list'] = self.dependency_list + if self.full_data_type is not None: body['full_data_type'] = self.full_data_type + if self.id is not None: body['id'] = self.id + if self.input_params: body['input_params'] = self.input_params + if self.name is not None: body['name'] = self.name + if self.properties is not None: body['properties'] = self.properties + if self.routine_definition is not None: body['routine_definition'] = self.routine_definition + if self.schema is not None: body['schema'] = self.schema + if self.securable_kind is not None: body['securable_kind'] = self.securable_kind + if self.share is not None: body['share'] = self.share + if self.share_id is not None: body['share_id'] = self.share_id + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingFunction: """Deserializes the DeltaSharingFunction from a dictionary.""" - return cls( - aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - comment=d.get("comment", None), - data_type=_enum(d, "data_type", ColumnTypeName), - dependency_list=_from_dict(d, "dependency_list", DeltaSharingDependencyList), - full_data_type=d.get("full_data_type", None), - id=d.get("id", None), - input_params=_from_dict(d, "input_params", FunctionParameterInfos), - name=d.get("name", None), - properties=d.get("properties", None), - routine_definition=d.get("routine_definition", None), - schema=d.get("schema", None), - securable_kind=_enum(d, "securable_kind", SharedSecurableKind), - share=d.get("share", None), - share_id=d.get("share_id", None), - storage_location=d.get("storage_location", None), - tags=_repeated_dict(d, "tags", catalog.TagKeyValue), - ) + return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), comment=d.get('comment', None), data_type=_enum(d, 'data_type', ColumnTypeName), dependency_list=_from_dict(d, 'dependency_list', DeltaSharingDependencyList), full_data_type=d.get('full_data_type', None), id=d.get('id', None), input_params=_from_dict(d, 'input_params', FunctionParameterInfos), name=d.get('name', None), properties=d.get('properties', None), routine_definition=d.get('routine_definition', None), schema=d.get('schema', None), securable_kind=_enum(d, 'securable_kind', SharedSecurableKind), share=d.get('share', None), share_id=d.get('share_id', None), storage_location=d.get('storage_location', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue)) + + @dataclass class DeltaSharingFunctionDependency: """A Function in UC as a dependency.""" - + function_name: Optional[str] = None - + schema_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the DeltaSharingFunctionDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.function_name is not None: body['function_name'] = self.function_name + if self.schema_name is not None: body['schema_name'] = self.schema_name return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSharingFunctionDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if 
self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.function_name is not None: body['function_name'] = self.function_name + if self.schema_name is not None: body['schema_name'] = self.schema_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingFunctionDependency: """Deserializes the DeltaSharingFunctionDependency from a dictionary.""" - return cls(function_name=d.get("function_name", None), schema_name=d.get("schema_name", None)) + return cls(function_name=d.get('function_name', None), schema_name=d.get('schema_name', None)) + + @dataclass class DeltaSharingTableDependency: """A Table in UC as a dependency.""" - + schema_name: Optional[str] = None - + table_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the DeltaSharingTableDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.table_name is not None: - body["table_name"] = self.table_name + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name is not None: body['table_name'] = self.table_name return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSharingTableDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.table_name is not None: - body["table_name"] = self.table_name + if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.table_name is not None: body['table_name'] = self.table_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingTableDependency: """Deserializes the DeltaSharingTableDependency from a dictionary.""" - return cls(schema_name=d.get("schema_name", None), table_name=d.get("table_name", None)) + return cls(schema_name=d.get('schema_name', None), table_name=d.get('table_name', None)) + + @dataclass class FederationPolicy: comment: Optional[str] = None """Description of the policy. This is a user-provided description.""" - + create_time: Optional[str] = None """System-generated timestamp indicating when the policy was created.""" - + id: Optional[str] = None """Unique, immutable system-generated identifier for the federation policy.""" - + name: Optional[str] = None """Name of the federation policy. A recipient can have multiple policies with different names. 
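# The dependency dataclasses above form a small tagged union: each
# DeltaSharingDependency carries either a table or a function. A sketch (not
# generated code) of deserializing and walking one, using only fields shown in
# this file:
from databricks.sdk.service.sharing import DeltaSharingDependencyList

deps = DeltaSharingDependencyList.from_dict({
    "dependencies": [
        {"table": {"schema_name": "sales", "table_name": "orders"}},
        {"function": {"schema_name": "sales", "function_name": "mask_pii"}},
    ]
})
for dep in deps.dependencies:
    if dep.table:
        print("table:", dep.table.schema_name, dep.table.table_name)
    elif dep.function:
        print("function:", dep.function.schema_name, dep.function.function_name)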
The name must contain only lowercase alphanumeric characters, numbers, and hyphens.""" - + oidc_policy: Optional[OidcFederationPolicy] = None """Specifies the policy to use for validating OIDC claims in the federated tokens.""" - + update_time: Optional[str] = None """System-generated timestamp indicating when the policy was last updated.""" - + def as_dict(self) -> dict: """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.create_time is not None: - body["create_time"] = self.create_time - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.oidc_policy: - body["oidc_policy"] = self.oidc_policy.as_dict() - if self.update_time is not None: - body["update_time"] = self.update_time + if self.comment is not None: body['comment'] = self.comment + if self.create_time is not None: body['create_time'] = self.create_time + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.create_time is not None: - body["create_time"] = self.create_time - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.oidc_policy: - body["oidc_policy"] = self.oidc_policy - if self.update_time is not None: - body["update_time"] = self.update_time + if self.comment is not None: body['comment'] = self.comment + if self.create_time is not None: body['create_time'] = self.create_time + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.oidc_policy: body['oidc_policy'] = self.oidc_policy + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FederationPolicy: """Deserializes the FederationPolicy from a dictionary.""" - return cls( - comment=d.get("comment", None), - create_time=d.get("create_time", None), - id=d.get("id", None), - name=d.get("name", None), - oidc_policy=_from_dict(d, "oidc_policy", OidcFederationPolicy), - update_time=d.get("update_time", None), - ) + return cls(comment=d.get('comment', None), create_time=d.get('create_time', None), id=d.get('id', None), name=d.get('name', None), oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy), update_time=d.get('update_time', None)) + + @dataclass class FunctionParameterInfo: """Represents a parameter of a function. 
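# A sketch (not generated code) of building a FederationPolicy from a wire
# payload. The oidc_policy keys are assumptions about OidcFederationPolicy
# (issuer/subject-style claims); that dataclass is not defined in this hunk.
from databricks.sdk.service.sharing import FederationPolicy

policy = FederationPolicy.from_dict({
    "name": "partner-oidc",
    "comment": "tokens issued by the partner IdP",
    "oidc_policy": {
        "issuer": "https://idp.example.com",  # assumed field
        "subject_claim": "sub",               # assumed field
        "subject": "delta-sharing-client",    # assumed field
    },
})
print(policy.name, policy.oidc_policy.issuer)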
The same message is used for both input and output columns.""" - + comment: Optional[str] = None """The comment of the parameter.""" - + name: Optional[str] = None """The name of the parameter.""" - + parameter_default: Optional[str] = None """The default value of the parameter.""" - + parameter_mode: Optional[FunctionParameterMode] = None """The mode of the function parameter.""" - + parameter_type: Optional[FunctionParameterType] = None """The type of the function parameter.""" - + position: Optional[int] = None """The position of the parameter.""" - + type_interval_type: Optional[str] = None """The interval type of the parameter type.""" - + type_json: Optional[str] = None """The type of the parameter in JSON format.""" - + type_name: Optional[ColumnTypeName] = None """The type of the parameter in Enum format.""" - + type_precision: Optional[int] = None """The precision of the parameter type.""" - + type_scale: Optional[int] = None """The scale of the parameter type.""" - + type_text: Optional[str] = None """The type of the parameter in text format.""" - + def as_dict(self) -> dict: """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode.value - if self.parameter_type is not None: - body["parameter_type"] = self.parameter_type.value - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name.value - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.parameter_default is not None: body['parameter_default'] = self.parameter_default + if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode.value + if self.parameter_type is not None: body['parameter_type'] = self.parameter_type.value + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_json is not None: body['type_json'] = self.type_json + if self.type_name is not None: body['type_name'] = self.type_name.value + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text return body def as_shallow_dict(self) -> dict: """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode - if self.parameter_type is not None: - 
body["parameter_type"] = self.parameter_type - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.parameter_default is not None: body['parameter_default'] = self.parameter_default + if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode + if self.parameter_type is not None: body['parameter_type'] = self.parameter_type + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_json is not None: body['type_json'] = self.type_json + if self.type_name is not None: body['type_name'] = self.type_name + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: """Deserializes the FunctionParameterInfo from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - parameter_default=d.get("parameter_default", None), - parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode), - parameter_type=_enum(d, "parameter_type", FunctionParameterType), - position=d.get("position", None), - type_interval_type=d.get("type_interval_type", None), - type_json=d.get("type_json", None), - type_name=_enum(d, "type_name", ColumnTypeName), - type_precision=d.get("type_precision", None), - type_scale=d.get("type_scale", None), - type_text=d.get("type_text", None), - ) + return cls(comment=d.get('comment', None), name=d.get('name', None), parameter_default=d.get('parameter_default', None), parameter_mode=_enum(d, 'parameter_mode', FunctionParameterMode), parameter_type=_enum(d, 'parameter_type', FunctionParameterType), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_json=d.get('type_json', None), type_name=_enum(d, 'type_name', ColumnTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None)) + + @dataclass class FunctionParameterInfos: parameters: Optional[List[FunctionParameterInfo]] = None """The list of parameters of the function.""" - + def as_dict(self) -> dict: """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] return body def as_shallow_dict(self) -> dict: """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: - body["parameters"] = self.parameters + if self.parameters: body['parameters'] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos:
         """Deserializes the FunctionParameterInfos from a dictionary."""
-        return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo))
-
+        return cls(parameters=_repeated_dict(d, 'parameters', FunctionParameterInfo))
+

-class FunctionParameterMode(Enum):
-    IN = "IN"
-    INOUT = "INOUT"
-    OUT = "OUT"
+class FunctionParameterMode(Enum):
+
+
+    IN = 'IN'
+    INOUT = 'INOUT'
+    OUT = 'OUT'


 class FunctionParameterType(Enum):
+
+
+    COLUMN = 'COLUMN'
+    PARAM = 'PARAM'
+

-    COLUMN = "COLUMN"
-    PARAM = "PARAM"


 @dataclass
@@ -774,6 +648,17 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> GetActivationUrlInfoResponse:
         """Deserializes the GetActivationUrlInfoResponse from a dictionary."""
         return cls()
+
+
+
+
+
+
+
+
+
+
+

 @dataclass
@@ -781,35 +666,30 @@ class GetRecipientSharePermissionsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     permissions_out: Optional[List[ShareToPrivilegeAssignment]] = None
     """An array of data share permissions for a recipient."""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetRecipientSharePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.permissions_out:
-            body["permissions_out"] = [v.as_dict() for v in self.permissions_out]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetRecipientSharePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.permissions_out:
-            body["permissions_out"] = self.permissions_out
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.permissions_out: body['permissions_out'] = self.permissions_out
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetRecipientSharePermissionsResponse:
         """Deserializes the GetRecipientSharePermissionsResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None),
-            permissions_out=_repeated_dict(d, "permissions_out", ShareToPrivilegeAssignment),
-        )
+        return cls(next_page_token=d.get('next_page_token', None), permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment))
+
+
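The reformatted serializers above are behavior-neutral: `as_dict` still lowers enum members to their `.value` strings for the JSON body, and `from_dict` restores them through the `_enum` helper. A minimal round-trip sketch, assuming the installed SDK exposes this module as `databricks.sdk.service.sharing` and that its `ColumnTypeName` enum defines a `LONG` member:

```python
# Round-trip sketch for the generated serializers (module path and the
# LONG member of ColumnTypeName are assumptions based on this diff).
from databricks.sdk.service.sharing import (ColumnTypeName,
                                            FunctionParameterInfo,
                                            FunctionParameterMode)

param = FunctionParameterInfo(
    name="id",
    position=0,
    parameter_mode=FunctionParameterMode.IN,
    type_name=ColumnTypeName.LONG,
    type_text="bigint",
)

payload = param.as_dict()
assert payload["parameter_mode"] == "IN"  # enums serialize to strings

restored = FunctionParameterInfo.from_dict(payload)
assert restored.parameter_mode is FunctionParameterMode.IN  # and back
```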
 @dataclass
@@ -817,145 +697,134 @@ class GetSharePermissionsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     privilege_assignments: Optional[List[PrivilegeAssignment]] = None
     """The privileges assigned to each principal"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetSharePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.privilege_assignments:
-            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetSharePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.privilege_assignments:
-            body["privilege_assignments"] = self.privilege_assignments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetSharePermissionsResponse:
         """Deserializes the GetSharePermissionsResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None),
-            privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment),
-        )
+        return cls(next_page_token=d.get('next_page_token', None), privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment))
+
+
+
+
+
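Both permission responses follow the SDK's standard pagination contract: an absent `next_page_token` means the last page. A consumption sketch, where `fetch_page` is a hypothetical stand-in for whatever call returns a `GetSharePermissionsResponse`-shaped dict:

```python
# Pagination sketch; `fetch_page` is a hypothetical callable, not an SDK API.
from typing import Iterator, Optional

from databricks.sdk.service.sharing import (GetSharePermissionsResponse,
                                            PrivilegeAssignment)


def iter_share_permissions(fetch_page) -> Iterator[PrivilegeAssignment]:
    token: Optional[str] = None
    while True:
        resp = GetSharePermissionsResponse.from_dict(fetch_page(page_token=token))
        yield from resp.privilege_assignments or []
        token = resp.next_page_token
        if not token:  # absent token: no more pages
            return
```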
Limit of 100.""" - + def as_dict(self) -> dict: """Serializes the IpAccessList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_ip_addresses: - body["allowed_ip_addresses"] = [v for v in self.allowed_ip_addresses] + if self.allowed_ip_addresses: body['allowed_ip_addresses'] = [v for v in self.allowed_ip_addresses] return body def as_shallow_dict(self) -> dict: """Serializes the IpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_ip_addresses: - body["allowed_ip_addresses"] = self.allowed_ip_addresses + if self.allowed_ip_addresses: body['allowed_ip_addresses'] = self.allowed_ip_addresses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> IpAccessList: """Deserializes the IpAccessList from a dictionary.""" - return cls(allowed_ip_addresses=d.get("allowed_ip_addresses", None)) + return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None)) + + + + + @dataclass class ListFederationPoliciesResponse: next_page_token: Optional[str] = None - + policies: Optional[List[FederationPolicy]] = None - + def as_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.policies: - body["policies"] = [v.as_dict() for v in self.policies] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = [v.as_dict() for v in self.policies] return body def as_shallow_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.policies: - body["policies"] = self.policies + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.policies: body['policies'] = self.policies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListFederationPoliciesResponse: """Deserializes the ListFederationPoliciesResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", FederationPolicy) - ) + return cls(next_page_token=d.get('next_page_token', None), policies=_repeated_dict(d, 'policies', FederationPolicy)) + + + + + @dataclass class ListProviderShareAssetsResponse: """Response to ListProviderShareAssets, which contains the list of assets of a share.""" - + functions: Optional[List[DeltaSharingFunction]] = None """The list of functions in the share.""" - + notebooks: Optional[List[NotebookFile]] = None """The list of notebooks in the share.""" - + tables: Optional[List[Table]] = None """The list of tables in the share.""" - + volumes: Optional[List[Volume]] = None """The list of volumes in the share.""" - + def as_dict(self) -> dict: """Serializes the ListProviderShareAssetsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.functions: - body["functions"] = [v.as_dict() for v in self.functions] - if self.notebooks: - body["notebooks"] = [v.as_dict() for v in self.notebooks] - if self.tables: - body["tables"] = [v.as_dict() for v in self.tables] - if self.volumes: - body["volumes"] = [v.as_dict() for v in self.volumes] + if self.functions: body['functions'] = [v.as_dict() for v in self.functions] + if self.notebooks: body['notebooks'] = [v.as_dict() 
 @dataclass
 class ListProviderShareAssetsResponse:
     """Response to ListProviderShareAssets, which contains the list of assets of a share."""
-
+
     functions: Optional[List[DeltaSharingFunction]] = None
     """The list of functions in the share."""
-
+
     notebooks: Optional[List[NotebookFile]] = None
     """The list of notebooks in the share."""
-
+
     tables: Optional[List[Table]] = None
     """The list of tables in the share."""
-
+
     volumes: Optional[List[Volume]] = None
     """The list of volumes in the share."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProviderShareAssetsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.functions:
-            body["functions"] = [v.as_dict() for v in self.functions]
-        if self.notebooks:
-            body["notebooks"] = [v.as_dict() for v in self.notebooks]
-        if self.tables:
-            body["tables"] = [v.as_dict() for v in self.tables]
-        if self.volumes:
-            body["volumes"] = [v.as_dict() for v in self.volumes]
+        if self.functions: body['functions'] = [v.as_dict() for v in self.functions]
+        if self.notebooks: body['notebooks'] = [v.as_dict() for v in self.notebooks]
+        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
+        if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderShareAssetsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.functions:
-            body["functions"] = self.functions
-        if self.notebooks:
-            body["notebooks"] = self.notebooks
-        if self.tables:
-            body["tables"] = self.tables
-        if self.volumes:
-            body["volumes"] = self.volumes
+        if self.functions: body['functions'] = self.functions
+        if self.notebooks: body['notebooks'] = self.notebooks
+        if self.tables: body['tables'] = self.tables
+        if self.volumes: body['volumes'] = self.volumes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProviderShareAssetsResponse:
         """Deserializes the ListProviderShareAssetsResponse from a dictionary."""
-        return cls(
-            functions=_repeated_dict(d, "functions", DeltaSharingFunction),
-            notebooks=_repeated_dict(d, "notebooks", NotebookFile),
-            tables=_repeated_dict(d, "tables", Table),
-            volumes=_repeated_dict(d, "volumes", Volume),
-        )
+        return cls(functions=_repeated_dict(d, 'functions', DeltaSharingFunction), notebooks=_repeated_dict(d, 'notebooks', NotebookFile), tables=_repeated_dict(d, 'tables', Table), volumes=_repeated_dict(d, 'volumes', Volume))
+
+

 @dataclass
@@ -963,32 +832,33 @@ class ListProviderSharesResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     shares: Optional[List[ProviderShare]] = None
     """An array of provider shares."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProviderSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.shares:
-            body["shares"] = [v.as_dict() for v in self.shares]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderSharesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.shares:
-            body["shares"] = self.shares
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = self.shares
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProviderSharesResponse:
         """Deserializes the ListProviderSharesResponse from a dictionary."""
-        return cls(next_page_token=d.get("next_page_token", None), shares=_repeated_dict(d, "shares", ProviderShare))
+        return cls(next_page_token=d.get('next_page_token', None), shares=_repeated_dict(d, 'shares', ProviderShare))
+
+
+
+
+

 @dataclass
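`_repeated_dict` maps each element of a JSON list through the element type's `from_dict`, so nested assets come back typed. A deserialization sketch for `ListProviderShareAssetsResponse` (payload values are illustrative):

```python
# Nested deserialization sketch: list elements come back as typed objects.
from databricks.sdk.service.sharing import ListProviderShareAssetsResponse

payload = {
    "notebooks": [{"name": "daily_report", "share": "sales_share"}],
    "tables": [],
}
assets = ListProviderShareAssetsResponse.from_dict(payload)
print(assets.notebooks[0].name)  # -> daily_report (a NotebookFile instance)
```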
@@ -996,34 +866,33 @@ class ListProvidersResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     providers: Optional[List[ProviderInfo]] = None
     """An array of provider information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.providers:
-            body["providers"] = [v.as_dict() for v in self.providers]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.providers:
-            body["providers"] = self.providers
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = self.providers
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None), providers=_repeated_dict(d, "providers", ProviderInfo)
-        )
+        return cls(next_page_token=d.get('next_page_token', None), providers=_repeated_dict(d, 'providers', ProviderInfo))
+
+
+
+
+

 @dataclass
@@ -1031,34 +900,33 @@ class ListRecipientsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     recipients: Optional[List[RecipientInfo]] = None
     """An array of recipient information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListRecipientsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.recipients:
-            body["recipients"] = [v.as_dict() for v in self.recipients]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListRecipientsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.recipients:
-            body["recipients"] = self.recipients
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.recipients: body['recipients'] = self.recipients
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListRecipientsResponse:
         """Deserializes the ListRecipientsResponse from a dictionary."""
-        return cls(
-            next_page_token=d.get("next_page_token", None), recipients=_repeated_dict(d, "recipients", RecipientInfo)
-        )
+        return cls(next_page_token=d.get('next_page_token', None), recipients=_repeated_dict(d, 'recipients', RecipientInfo))
+
+
+
+
+

 @dataclass
@@ -1066,99 +934,80 @@ class ListSharesResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     shares: Optional[List[ShareInfo]] = None
     """An array of data share information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.shares:
-            body["shares"] = [v.as_dict() for v in self.shares]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListSharesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.shares:
-            body["shares"] = self.shares
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = self.shares
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListSharesResponse:
         """Deserializes the ListSharesResponse from a dictionary."""
-        return cls(next_page_token=d.get("next_page_token", None), shares=_repeated_dict(d, "shares", ShareInfo))
+        return cls(next_page_token=d.get('next_page_token', None), shares=_repeated_dict(d, 'shares', ShareInfo))
+
+

 @dataclass
 class NotebookFile:
     comment: Optional[str] = None
     """The comment of the notebook file."""
-
+
     id: Optional[str] = None
     """The id of the notebook file."""
-
+
     name: Optional[str] = None
     """Name of the notebook file."""
-
+
     share: Optional[str] = None
     """The name of the share that the notebook file belongs to."""
-
+
     share_id: Optional[str] = None
     """The id of the share that the notebook file belongs to."""
-
+
     tags: Optional[List[catalog.TagKeyValue]] = None
     """The tags of the notebook file."""
-
+
     def as_dict(self) -> dict:
         """Serializes the NotebookFile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.id is not None:
-            body["id"] = self.id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.share is not None:
-            body["share"] = self.share
-        if self.share_id is not None:
-            body["share_id"] = self.share_id
-        if self.tags:
-            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.comment is not None: body['comment'] = self.comment
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.share is not None: body['share'] = self.share
+        if self.share_id is not None: body['share_id'] = self.share_id
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NotebookFile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.id is not None:
-            body["id"] = self.id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.share is not None:
-            body["share"] = self.share
-        if self.share_id is not None:
-            body["share_id"] = self.share_id
+        if self.comment is not None: body['comment'] = self.comment
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.share is not None: body['share'] = self.share
+        if self.share_id is not None: body['share_id'] = self.share_id
-        if self.tags:
-            body["tags"] = self.tags
+        if self.tags: body['tags'] = self.tags
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NotebookFile:
         """Deserializes the NotebookFile from a dictionary."""
-        return cls(
-            comment=d.get("comment", None),
-            id=d.get("id", None),
-            name=d.get("name", None),
-            share=d.get("share", None),
-            share_id=d.get("share_id", None),
-            tags=_repeated_dict(d, "tags", catalog.TagKeyValue),
-        )
+        return cls(comment=d.get('comment', None), id=d.get('id', None), name=d.get('name', None), share=d.get('share', None), share_id=d.get('share_id', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue))
+
+

 @dataclass
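`NotebookFile` reuses `catalog.TagKeyValue` for its tags, so the catalog module's types flow through sharing payloads unchanged. A small sketch, assuming `TagKeyValue` keeps its `key`/`value` fields:

```python
# Cross-module tag sketch; field names on TagKeyValue are assumptions
# taken from the catalog module referenced in this diff.
from databricks.sdk.service import catalog
from databricks.sdk.service.sharing import NotebookFile

nb = NotebookFile(
    name="churn_model",
    share="ml_share",
    tags=[catalog.TagKeyValue(key="owner", value="data-eng")],
)
assert nb.as_dict()["tags"] == [{"key": "owner", "value": "data-eng"}]
```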
@@ -1166,10 +1015,10 @@ class OidcFederationPolicy:
     """Specifies the policy to use for validating OIDC claims in your federated tokens from Delta
     Sharing Clients. Refer to https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed
     for more details."""
-
+
     issuer: str
     """The required token issuer, as specified in the 'iss' claim of federated tokens."""
-
+
     subject_claim: str
     """The claim that contains the subject of the token. Depending on the identity provider and the
     use case (U2M or M2M), this can vary: - For Entra ID (AAD): * U2M flow (group access): Use `groups`.
@@ -1178,7 +1027,7 @@ class OidcFederationPolicy:
     Supported `subject_claim` values are: - `oid`: Object ID of the user. - `azp`: Client ID of the
     OAuth app. - `groups`: Object ID of the group. - `sub`: Subject identifier for other use cases."""
-
+
     subject: str
     """The required token subject, as specified in the subject claim of federated tokens. The subject
     claim identifies the identity of the user or machine accessing the resource. Examples for Entra
@@ -1186,637 +1035,504 @@ class OidcFederationPolicy:
     ID of the group in Entra ID. - U2M flow (user access): If the subject claim is `oid`, this must
     be the Object ID of the user in Entra ID. - M2M flow (OAuth App access): If the subject claim is
     `azp`, this must be the client ID of the OAuth app registered in Entra ID."""
-
+
     audiences: Optional[List[str]] = None
     """The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience
     identifier is intended to represent the recipient of the token. Can be any non-empty string
     value. As long as the audience in the token matches at least one audience in the policy, the
     token is considered a match."""
-
+
     def as_dict(self) -> dict:
         """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.audiences:
-            body["audiences"] = [v for v in self.audiences]
-        if self.issuer is not None:
-            body["issuer"] = self.issuer
-        if self.subject is not None:
-            body["subject"] = self.subject
-        if self.subject_claim is not None:
-            body["subject_claim"] = self.subject_claim
+        if self.audiences: body['audiences'] = [v for v in self.audiences]
+        if self.issuer is not None: body['issuer'] = self.issuer
+        if self.subject is not None: body['subject'] = self.subject
+        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.audiences:
-            body["audiences"] = self.audiences
-        if self.issuer is not None:
-            body["issuer"] = self.issuer
-        if self.subject is not None:
-            body["subject"] = self.subject
-        if self.subject_claim is not None:
-            body["subject_claim"] = self.subject_claim
+        if self.audiences: body['audiences'] = self.audiences
+        if self.issuer is not None: body['issuer'] = self.issuer
+        if self.subject is not None: body['subject'] = self.subject
+        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OidcFederationPolicy:
         """Deserializes the OidcFederationPolicy from a dictionary."""
-        return cls(
-            audiences=d.get("audiences", None),
-            issuer=d.get("issuer", None),
-            subject=d.get("subject", None),
-            subject_claim=d.get("subject_claim", None),
-        )
+        return cls(audiences=d.get('audiences', None), issuer=d.get('issuer', None), subject=d.get('subject', None), subject_claim=d.get('subject_claim', None))
+
+

 @dataclass
 class Partition:
     values: Optional[List[PartitionValue]] = None
     """An array of partition values."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Partition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.values:
-            body["values"] = [v.as_dict() for v in self.values]
+        if self.values: body['values'] = [v.as_dict() for v in self.values]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Partition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.values:
-            body["values"] = self.values
+        if self.values: body['values'] = self.values
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Partition:
         """Deserializes the Partition from a dictionary."""
-        return cls(values=_repeated_dict(d, "values", PartitionValue))
+        return cls(values=_repeated_dict(d, 'values', PartitionValue))
+
+
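Tying the `OidcFederationPolicy` docstrings above together, a construction sketch for the Entra ID M2M flow: `subject_claim` of `azp` paired with the OAuth app's client ID (the tenant ID, client ID, and audience below are placeholders, not real values):

```python
# M2M federation policy sketch per the field docstrings above;
# all identifiers are placeholders.
from databricks.sdk.service.sharing import OidcFederationPolicy

policy = OidcFederationPolicy(
    issuer="https://login.microsoftonline.com/<tenant-id>/v2.0",
    subject_claim="azp",
    subject="<entra-app-client-id>",
    audiences=["https://my-sharing-audience"],
)
body = policy.as_dict()  # ready to embed in a create-policy request
```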
 @dataclass
 class PartitionValue:
     name: Optional[str] = None
     """The name of the partition column."""
-
+
     op: Optional[PartitionValueOp] = None
     """The operator to apply for the value."""
-
+
     recipient_property_key: Optional[str] = None
     """The key of a Delta Sharing recipient's property. For example "databricks-account-id". When
     this field is set, field `value` can not be set."""
-
+
     value: Optional[str] = None
     """The value of the partition column. When this value is not set, it means `null` value. When
     this field is set, field `recipient_property_key` can not be set."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PartitionValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.op is not None:
-            body["op"] = self.op.value
-        if self.recipient_property_key is not None:
-            body["recipient_property_key"] = self.recipient_property_key
-        if self.value is not None:
-            body["value"] = self.value
+        if self.name is not None: body['name'] = self.name
+        if self.op is not None: body['op'] = self.op.value
+        if self.recipient_property_key is not None: body['recipient_property_key'] = self.recipient_property_key
+        if self.value is not None: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PartitionValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None:
-            body["name"] = self.name
-        if self.op is not None:
-            body["op"] = self.op
-        if self.recipient_property_key is not None:
-            body["recipient_property_key"] = self.recipient_property_key
-        if self.value is not None:
-            body["value"] = self.value
+        if self.name is not None: body['name'] = self.name
+        if self.op is not None: body['op'] = self.op
+        if self.recipient_property_key is not None: body['recipient_property_key'] = self.recipient_property_key
+        if self.value is not None: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PartitionValue:
         """Deserializes the PartitionValue from a dictionary."""
-        return cls(
-            name=d.get("name", None),
-            op=_enum(d, "op", PartitionValueOp),
-            recipient_property_key=d.get("recipient_property_key", None),
-            value=d.get("value", None),
-        )
-
+        return cls(name=d.get('name', None), op=_enum(d, 'op', PartitionValueOp), recipient_property_key=d.get('recipient_property_key', None), value=d.get('value', None))
+

-class PartitionValueOp(Enum):
-    EQUAL = "EQUAL"
-    LIKE = "LIKE"
+class PartitionValueOp(Enum):
+
+
+    EQUAL = 'EQUAL'
+    LIKE = 'LIKE'
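Per the docstrings above, `recipient_property_key` and `value` are mutually exclusive on a `PartitionValue`: each entry either matches a literal or is resolved per recipient from its properties. A partition-spec sketch:

```python
# Partition spec sketch: each PartitionValue sets exactly one of
# `value` (literal) or `recipient_property_key` (resolved per recipient).
from databricks.sdk.service.sharing import (Partition, PartitionValue,
                                            PartitionValueOp)

partition = Partition(values=[
    # Literal match on a partition column.
    PartitionValue(name="region", op=PartitionValueOp.EQUAL, value="emea"),
    # Resolved from the querying recipient's properties at share time.
    PartitionValue(name="account", op=PartitionValueOp.EQUAL,
                   recipient_property_key="databricks-account-id"),
])
```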
 @dataclass
 class PermissionsChange:
     add: Optional[List[str]] = None
     """The set of privileges to add."""
-
+
     principal: Optional[str] = None
     """The principal whose privileges we are changing."""
-
+
     remove: Optional[List[str]] = None
     """The set of privileges to remove."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.add:
-            body["add"] = [v for v in self.add]
-        if self.principal is not None:
-            body["principal"] = self.principal
-        if self.remove:
-            body["remove"] = [v for v in self.remove]
+        if self.add: body['add'] = [v for v in self.add]
+        if self.principal is not None: body['principal'] = self.principal
+        if self.remove: body['remove'] = [v for v in self.remove]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.add:
-            body["add"] = self.add
-        if self.principal is not None:
-            body["principal"] = self.principal
-        if self.remove:
-            body["remove"] = self.remove
+        if self.add: body['add'] = self.add
+        if self.principal is not None: body['principal'] = self.principal
+        if self.remove: body['remove'] = self.remove
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange:
         """Deserializes the PermissionsChange from a dictionary."""
-        return cls(add=d.get("add", None), principal=d.get("principal", None), remove=d.get("remove", None))
-
+        return cls(add=d.get('add', None), principal=d.get('principal', None), remove=d.get('remove', None))
+

-class Privilege(Enum):
-    ACCESS = "ACCESS"
-    ALL_PRIVILEGES = "ALL_PRIVILEGES"
-    APPLY_TAG = "APPLY_TAG"
-    CREATE = "CREATE"
-    CREATE_CATALOG = "CREATE_CATALOG"
-    CREATE_CONNECTION = "CREATE_CONNECTION"
-    CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
-    CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"
-    CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME"
-    CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG"
-    CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE"
-    CREATE_FUNCTION = "CREATE_FUNCTION"
-    CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE"
-    CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW"
-    CREATE_MODEL = "CREATE_MODEL"
-    CREATE_PROVIDER = "CREATE_PROVIDER"
-    CREATE_RECIPIENT = "CREATE_RECIPIENT"
-    CREATE_SCHEMA = "CREATE_SCHEMA"
-    CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL"
-    CREATE_SHARE = "CREATE_SHARE"
-    CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL"
-    CREATE_TABLE = "CREATE_TABLE"
-    CREATE_VIEW = "CREATE_VIEW"
-    CREATE_VOLUME = "CREATE_VOLUME"
-    EXECUTE = "EXECUTE"
-    MANAGE = "MANAGE"
-    MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
-    MODIFY = "MODIFY"
-    READ_FILES = "READ_FILES"
-    READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
-    READ_VOLUME = "READ_VOLUME"
-    REFRESH = "REFRESH"
-    SELECT = "SELECT"
-    SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION"
-    USAGE = "USAGE"
-    USE_CATALOG = "USE_CATALOG"
-    USE_CONNECTION = "USE_CONNECTION"
-    USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS"
-    USE_PROVIDER = "USE_PROVIDER"
-    USE_RECIPIENT = "USE_RECIPIENT"
-    USE_SCHEMA = "USE_SCHEMA"
-    USE_SHARE = "USE_SHARE"
-    WRITE_FILES = "WRITE_FILES"
-    WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES"
-    WRITE_VOLUME = "WRITE_VOLUME"
+class Privilege(Enum):
+
+
+    ACCESS = 'ACCESS'
+    ALL_PRIVILEGES = 'ALL_PRIVILEGES'
+    APPLY_TAG = 'APPLY_TAG'
+    CREATE = 'CREATE'
+    CREATE_CATALOG = 'CREATE_CATALOG'
+    CREATE_CONNECTION = 'CREATE_CONNECTION'
+    CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION'
+    CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
+    CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
+    CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
+    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
+    CREATE_FUNCTION = 'CREATE_FUNCTION'
+    CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
+    CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
+    CREATE_MODEL = 'CREATE_MODEL'
+    CREATE_PROVIDER = 'CREATE_PROVIDER'
+    CREATE_RECIPIENT = 'CREATE_RECIPIENT'
+    CREATE_SCHEMA = 'CREATE_SCHEMA'
+    CREATE_SERVICE_CREDENTIAL = 'CREATE_SERVICE_CREDENTIAL'
+    CREATE_SHARE = 'CREATE_SHARE'
+    CREATE_STORAGE_CREDENTIAL = 'CREATE_STORAGE_CREDENTIAL'
+    CREATE_TABLE = 'CREATE_TABLE'
+    CREATE_VIEW = 'CREATE_VIEW'
+    CREATE_VOLUME = 'CREATE_VOLUME'
+    EXECUTE = 'EXECUTE'
+    MANAGE = 'MANAGE'
+    MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST'
+    MODIFY = 'MODIFY'
+    READ_FILES = 'READ_FILES'
+    READ_PRIVATE_FILES = 'READ_PRIVATE_FILES'
+    READ_VOLUME = 'READ_VOLUME'
+    REFRESH = 'REFRESH'
+    SELECT = 'SELECT'
+    SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION'
+    USAGE = 'USAGE'
+    USE_CATALOG = 'USE_CATALOG'
+    USE_CONNECTION = 'USE_CONNECTION'
+    USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS'
+    USE_PROVIDER = 'USE_PROVIDER'
+    USE_RECIPIENT = 'USE_RECIPIENT'
+    USE_SCHEMA = 'USE_SCHEMA'
+    USE_SHARE = 'USE_SHARE'
+    WRITE_FILES = 'WRITE_FILES'
+    WRITE_PRIVATE_FILES = 'WRITE_PRIVATE_FILES'
+    WRITE_VOLUME = 'WRITE_VOLUME'
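Note that `PermissionsChange.add`/`.remove` are typed as plain string lists, while `Privilege` is an enum, so callers pass `.value` when building a change. A sketch:

```python
# Permissions-change sketch: the wire format wants privilege names as
# strings, hence .value on the Privilege members.
from databricks.sdk.service.sharing import PermissionsChange, Privilege

change = PermissionsChange(
    principal="data-consumers",
    add=[Privilege.SELECT.value, Privilege.USE_SCHEMA.value],
    remove=[Privilege.MODIFY.value],
)
assert change.as_dict()["add"] == ["SELECT", "USE_SCHEMA"]
```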
name).""" - + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" - + def as_dict(self) -> dict: """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = [v.value for v in self.privileges] + if self.principal is not None: body['principal'] = self.principal + if self.privileges: body['privileges'] = [v.value for v in self.privileges] return body def as_shallow_dict(self) -> dict: """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = self.privileges + if self.principal is not None: body['principal'] = self.principal + if self.privileges: body['privileges'] = self.privileges return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) + return cls(principal=d.get('principal', None), privileges=_repeated_enum(d, 'privileges', Privilege)) + + @dataclass class ProviderInfo: authentication_type: Optional[AuthenticationType] = None """The delta sharing authentication type.""" - + cloud: Optional[str] = None """Cloud vendor of the provider's UC metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + comment: Optional[str] = None """Description about the provider.""" - + created_at: Optional[int] = None """Time at which this Provider was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of Provider creator.""" - + data_provider_global_metastore_id: Optional[str] = None """The global UC metastore id of the data provider. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" - + metastore_id: Optional[str] = None """UUID of the provider's UC metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + name: Optional[str] = None """The name of the Provider.""" - + owner: Optional[str] = None """Username of Provider owner.""" - + recipient_profile: Optional[RecipientProfile] = None """The recipient profile. This field is only present when the authentication_type is `TOKEN` or `OAUTH_CLIENT_CREDENTIALS`.""" - + recipient_profile_str: Optional[str] = None """This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - + region: Optional[str] = None """Cloud region of the provider's UC metastore. 
-
+
     updated_at: Optional[int] = None
     """Time at which this Provider was last updated, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of user who last modified Provider."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authentication_type is not None:
-            body["authentication_type"] = self.authentication_type.value
-        if self.cloud is not None:
-            body["cloud"] = self.cloud
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.data_provider_global_metastore_id is not None:
-            body["data_provider_global_metastore_id"] = self.data_provider_global_metastore_id
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.recipient_profile:
-            body["recipient_profile"] = self.recipient_profile.as_dict()
-        if self.recipient_profile_str is not None:
-            body["recipient_profile_str"] = self.recipient_profile_str
-        if self.region is not None:
-            body["region"] = self.region
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_provider_global_metastore_id is not None: body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile.as_dict()
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.region is not None: body['region'] = self.region
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authentication_type is not None:
-            body["authentication_type"] = self.authentication_type
-        if self.cloud is not None:
-            body["cloud"] = self.cloud
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.data_provider_global_metastore_id is not None:
-            body["data_provider_global_metastore_id"] = self.data_provider_global_metastore_id
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.recipient_profile:
-            body["recipient_profile"] = self.recipient_profile
-        if self.recipient_profile_str is not None:
-            body["recipient_profile_str"] = self.recipient_profile_str
-        if self.region is not None:
-            body["region"] = self.region
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_provider_global_metastore_id is not None: body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.region is not None: body['region'] = self.region
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
-        return cls(
-            authentication_type=_enum(d, "authentication_type", AuthenticationType),
-            cloud=d.get("cloud", None),
-            comment=d.get("comment", None),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            data_provider_global_metastore_id=d.get("data_provider_global_metastore_id", None),
-            metastore_id=d.get("metastore_id", None),
-            name=d.get("name", None),
-            owner=d.get("owner", None),
-            recipient_profile=_from_dict(d, "recipient_profile", RecipientProfile),
-            recipient_profile_str=d.get("recipient_profile_str", None),
-            region=d.get("region", None),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-        )
+        return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), cloud=d.get('cloud', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_provider_global_metastore_id=d.get('data_provider_global_metastore_id', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), recipient_profile=_from_dict(d, 'recipient_profile', RecipientProfile), recipient_profile_str=d.get('recipient_profile_str', None), region=d.get('region', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
+
+
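Per the field docstrings above, the metastore-scoped fields of `ProviderInfo` are only populated for **DATABRICKS** authentication. A deserialization sketch with an illustrative payload:

```python
# Provider record sketch; the metastore id below is a placeholder.
from databricks.sdk.service.sharing import AuthenticationType, ProviderInfo

provider = ProviderInfo.from_dict({
    "name": "acme_provider",
    "authentication_type": "DATABRICKS",
    "data_provider_global_metastore_id": "aws:us-west-2:<metastore-uuid>",
})
assert provider.authentication_type is AuthenticationType.DATABRICKS
```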
dictionary.""" - return cls(name=d.get("name", None)) + return cls(name=d.get('name', None)) + + @dataclass class RecipientInfo: activated: Optional[bool] = None """A boolean status field showing whether the Recipient's activation URL has been exercised or not.""" - + activation_url: Optional[str] = None """Full activation url to retrieve the access token. It will be empty if the token is already retrieved.""" - + authentication_type: Optional[AuthenticationType] = None """The delta sharing authentication type.""" - + cloud: Optional[str] = None """Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + comment: Optional[str] = None """Description about the recipient.""" - + created_at: Optional[int] = None """Time at which this recipient was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of recipient creator.""" - + data_recipient_global_metastore_id: Optional[str] = None """The global Unity Catalog metastore id provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token, in epoch milliseconds.""" - + ip_access_list: Optional[IpAccessList] = None """IP Access List""" - + metastore_id: Optional[str] = None """Unique identifier of recipient's Unity Catalog Metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + name: Optional[str] = None """Name of Recipient.""" - + owner: Optional[str] = None """Username of the recipient owner.""" - + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None """Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write.""" - + region: Optional[str] = None """Cloud region of the recipient's Unity Catalog Metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + sharing_code: Optional[str] = None """The one-time sharing code provided by the data recipient. 
-
+
     tokens: Optional[List[RecipientTokenInfo]] = None
     """This field is only present when the __authentication_type__ is **TOKEN**."""
-
+
     updated_at: Optional[int] = None
     """Time at which the recipient was updated, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of recipient updater."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RecipientInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activated is not None:
-            body["activated"] = self.activated
-        if self.activation_url is not None:
-            body["activation_url"] = self.activation_url
-        if self.authentication_type is not None:
-            body["authentication_type"] = self.authentication_type.value
-        if self.cloud is not None:
-            body["cloud"] = self.cloud
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.data_recipient_global_metastore_id is not None:
-            body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.ip_access_list:
-            body["ip_access_list"] = self.ip_access_list.as_dict()
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties_kvpairs:
-            body["properties_kvpairs"] = self.properties_kvpairs.as_dict()
-        if self.region is not None:
-            body["region"] = self.region
-        if self.sharing_code is not None:
-            body["sharing_code"] = self.sharing_code
-        if self.tokens:
-            body["tokens"] = [v.as_dict() for v in self.tokens]
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.activated is not None: body['activated'] = self.activated
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict()
+        if self.region is not None: body['region'] = self.region
+        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+        if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens]
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RecipientInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activated is not None:
-            body["activated"] = self.activated
-        if self.activation_url is not None:
-            body["activation_url"] = self.activation_url
-        if self.authentication_type is not None:
-            body["authentication_type"] = self.authentication_type
-        if self.cloud is not None:
-            body["cloud"] = self.cloud
-        if self.comment is not None:
-            body["comment"] = self.comment
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.created_by is not None:
-            body["created_by"] = self.created_by
-        if self.data_recipient_global_metastore_id is not None:
-            body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id
-        if self.expiration_time is not None:
-            body["expiration_time"] = self.expiration_time
-        if self.ip_access_list:
-            body["ip_access_list"] = self.ip_access_list
-        if self.metastore_id is not None:
-            body["metastore_id"] = self.metastore_id
-        if self.name is not None:
-            body["name"] = self.name
-        if self.owner is not None:
-            body["owner"] = self.owner
-        if self.properties_kvpairs:
-            body["properties_kvpairs"] = self.properties_kvpairs
-        if self.region is not None:
-            body["region"] = self.region
-        if self.sharing_code is not None:
-            body["sharing_code"] = self.sharing_code
-        if self.tokens:
-            body["tokens"] = self.tokens
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
-        if self.updated_by is not None:
-            body["updated_by"] = self.updated_by
+        if self.activated is not None: body['activated'] = self.activated
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        if self.region is not None: body['region'] = self.region
+        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+        if self.tokens: body['tokens'] = self.tokens
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RecipientInfo:
         """Deserializes the RecipientInfo from a dictionary."""
-        return cls(
-            activated=d.get("activated", None),
-            activation_url=d.get("activation_url", None),
-            authentication_type=_enum(d, "authentication_type", AuthenticationType),
-            cloud=d.get("cloud", None),
-            comment=d.get("comment", None),
-            created_at=d.get("created_at", None),
-            created_by=d.get("created_by", None),
-            data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None),
-            expiration_time=d.get("expiration_time", None),
-            ip_access_list=_from_dict(d, "ip_access_list", IpAccessList),
-            metastore_id=d.get("metastore_id", None),
-            name=d.get("name", None),
-            owner=d.get("owner", None),
-            properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs),
-            region=d.get("region", None),
-            sharing_code=d.get("sharing_code", None),
-            tokens=_repeated_dict(d, "tokens", RecipientTokenInfo),
-            updated_at=d.get("updated_at", None),
-            updated_by=d.get("updated_by", None),
-        )
+        return cls(activated=d.get('activated', None), activation_url=d.get('activation_url', None), authentication_type=_enum(d, 'authentication_type', AuthenticationType), cloud=d.get('cloud', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs), region=d.get('region', None), sharing_code=d.get('sharing_code', None), tokens=_repeated_dict(d, 'tokens', RecipientTokenInfo), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
+
+

 @dataclass
 class RecipientProfile:
     bearer_token: Optional[str] = None
     """The token used to authorize the recipient."""
-
+
     endpoint: Optional[str] = None
     """The endpoint for the share to be used by the recipient."""
-
+
     share_credentials_version: Optional[int] = None
     """The version number of the recipient's credentials on a share."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RecipientProfile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bearer_token is not None:
-            body["bearer_token"] = self.bearer_token
-        if self.endpoint is not None:
-            body["endpoint"] = self.endpoint
-        if self.share_credentials_version is not None:
-            body["share_credentials_version"] = self.share_credentials_version
+        if self.bearer_token is not None: body['bearer_token'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.share_credentials_version is not None: body['share_credentials_version'] = self.share_credentials_version
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RecipientProfile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bearer_token is not None:
-            body["bearer_token"] = self.bearer_token
-        if self.endpoint is not None:
-            body["endpoint"] = self.endpoint
-        if self.share_credentials_version is not None:
-            body["share_credentials_version"] = self.share_credentials_version
+        if self.bearer_token is not None: body['bearer_token'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.share_credentials_version is not None: body['share_credentials_version'] = self.share_credentials_version
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RecipientProfile:
         """Deserializes the RecipientProfile from a dictionary."""
-        return cls(
-            bearer_token=d.get("bearer_token", None),
-            endpoint=d.get("endpoint", None),
-            share_credentials_version=d.get("share_credentials_version", None),
-        )
+        return cls(bearer_token=d.get('bearer_token', None), endpoint=d.get('endpoint', None), share_credentials_version=d.get('share_credentials_version', None))
+
+
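`RecipientProfile` carries the same three fields as an open Delta Sharing profile file, though in snake_case rather than the protocol's camelCase (compare `RetrieveTokenResponse` below, which serializes with camelCase keys). A sketch of writing a profile file from it; the mapping to the `config.share` format is an assumption based on the Delta Sharing protocol, and the token/endpoint values are placeholders:

```python
# Profile-file sketch; key names follow the open Delta Sharing protocol.
import json

from databricks.sdk.service.sharing import RecipientProfile

profile = RecipientProfile(
    bearer_token="<redacted>",
    endpoint="https://example.cloud.databricks.com/api/2.0/delta-sharing/metastores/<id>",
    share_credentials_version=1,
)
with open("config.share", "w") as f:
    json.dump({
        "shareCredentialsVersion": profile.share_credentials_version,
        "endpoint": profile.endpoint,
        "bearerToken": profile.bearer_token,
    }, f)
```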
"""Full activation URL to retrieve the access token. It will be empty if the token is already retrieved.""" - + created_at: Optional[int] = None """Time at which this recipient token was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of recipient token creator.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token in epoch milliseconds.""" - + id: Optional[str] = None """Unique ID of the recipient token.""" - + updated_at: Optional[int] = None """Time at which this recipient token was updated, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of recipient token updater.""" - + def as_dict(self) -> dict: """Serializes the RecipientTokenInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activation_url is not None: - body["activation_url"] = self.activation_url - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.id is not None: - body["id"] = self.id - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.activation_url is not None: body['activation_url'] = self.activation_url + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.id is not None: body['id'] = self.id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the RecipientTokenInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.activation_url is not None: - body["activation_url"] = self.activation_url - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.id is not None: - body["id"] = self.id - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.activation_url is not None: body['activation_url'] = self.activation_url + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.id is not None: body['id'] = self.id + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RecipientTokenInfo: """Deserializes the RecipientTokenInfo from a dictionary.""" - return cls( - activation_url=d.get("activation_url", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - expiration_time=d.get("expiration_time", None), - id=d.get("id", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(activation_url=d.get('activation_url', None), created_at=d.get('created_at', None), 
created_by=d.get('created_by', None), expiration_time=d.get('expiration_time', None), id=d.get('id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + @dataclass class RegisteredModelAlias: alias_name: Optional[str] = None """Name of the alias.""" - + version_num: Optional[int] = None """Numeric model version that alias will reference.""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alias_name is not None: - body["alias_name"] = self.alias_name - if self.version_num is not None: - body["version_num"] = self.version_num + if self.alias_name is not None: body['alias_name'] = self.alias_name + if self.version_num is not None: body['version_num'] = self.version_num return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" body = {} - if self.alias_name is not None: - body["alias_name"] = self.alias_name - if self.version_num is not None: - body["version_num"] = self.version_num + if self.alias_name is not None: body['alias_name'] = self.alias_name + if self.version_num is not None: body['version_num'] = self.version_num return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: """Deserializes the RegisteredModelAlias from a dictionary.""" - return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) + return cls(alias_name=d.get('alias_name', None), version_num=d.get('version_num', None)) + + + + + @dataclass class RetrieveTokenResponse: bearer_token: Optional[str] = None """The token used to authorize the recipient.""" - + endpoint: Optional[str] = None """The endpoint for the share to be used by the recipient.""" - + expiration_time: Optional[str] = None """Expiration timestamp of the token in epoch milliseconds.""" - + share_credentials_version: Optional[int] = None """These field names must follow the delta sharing protocol.""" - + def as_dict(self) -> dict: """Serializes the RetrieveTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bearer_token is not None: - body["bearerToken"] = self.bearer_token - if self.endpoint is not None: - body["endpoint"] = self.endpoint - if self.expiration_time is not None: - body["expirationTime"] = self.expiration_time - if self.share_credentials_version is not None: - body["shareCredentialsVersion"] = self.share_credentials_version + if self.bearer_token is not None: body['bearerToken'] = self.bearer_token + if self.endpoint is not None: body['endpoint'] = self.endpoint + if self.expiration_time is not None: body['expirationTime'] = self.expiration_time + if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version return body def as_shallow_dict(self) -> dict: """Serializes the RetrieveTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bearer_token is not None: - body["bearerToken"] = self.bearer_token - if self.endpoint is not None: - body["endpoint"] = self.endpoint - if self.expiration_time is not None: - body["expirationTime"] = self.expiration_time - if self.share_credentials_version is not None: - body["shareCredentialsVersion"] = self.share_credentials_version + if self.bearer_token is not None: body['bearerToken'] = self.bearer_token + if self.endpoint is not None: body['endpoint'] = self.endpoint + if self.expiration_time is not 
None: body['expirationTime'] = self.expiration_time + if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RetrieveTokenResponse: """Deserializes the RetrieveTokenResponse from a dictionary.""" - return cls( - bearer_token=d.get("bearerToken", None), - endpoint=d.get("endpoint", None), - expiration_time=d.get("expirationTime", None), - share_credentials_version=d.get("shareCredentialsVersion", None), - ) + return cls(bearer_token=d.get('bearerToken', None), endpoint=d.get('endpoint', None), expiration_time=d.get('expirationTime', None), share_credentials_version=d.get('shareCredentialsVersion', None)) + + @dataclass @@ -1984,195 +1670,160 @@ class RotateRecipientToken: """The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of the existing token only to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire the existing token immediately; a negative number will return an error.""" - + name: Optional[str] = None """The name of the Recipient.""" - + def as_dict(self) -> dict: """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.existing_token_expire_in_seconds is not None: - body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds - if self.name is not None: - body["name"] = self.name + if self.existing_token_expire_in_seconds is not None: body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.existing_token_expire_in_seconds is not None: - body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds - if self.name is not None: - body["name"] = self.name + if self.existing_token_expire_in_seconds is not None: body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RotateRecipientToken: """Deserializes the RotateRecipientToken from a dictionary.""" - return cls( - existing_token_expire_in_seconds=d.get("existing_token_expire_in_seconds", None), name=d.get("name", None) - ) + return cls(existing_token_expire_in_seconds=d.get('existing_token_expire_in_seconds', None), name=d.get('name', None)) + + @dataclass class SecurablePropertiesKvPairs: """An object with __properties__ containing a map of key-value properties attached to the securable.""" - - properties: Dict[str, str] + + properties: Dict[str,str] """A map of key-value properties attached to the securable.""" - + def as_dict(self) -> dict: """Serializes the SecurablePropertiesKvPairs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.properties: - body["properties"] = self.properties + if self.properties: body['properties'] = self.properties return body def as_shallow_dict(self) -> dict: """Serializes the SecurablePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" body = {} - if self.properties: - body["properties"] = self.properties + if self.properties: body['properties'] = self.properties return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SecurablePropertiesKvPairs: """Deserializes the
SecurablePropertiesKvPairs from a dictionary.""" - return cls(properties=d.get("properties", None)) + return cls(properties=d.get('properties', None)) + + @dataclass class ShareInfo: comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this share was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of share creator.""" - + name: Optional[str] = None """Name of the share.""" - + objects: Optional[List[SharedDataObject]] = None """A list of shared data objects within the share.""" - + owner: Optional[str] = None """Username of current owner of share.""" - + storage_location: Optional[str] = None """Storage Location URL (full path) for the share.""" - + storage_root: Optional[str] = None """Storage root URL for the share.""" - + updated_at: Optional[int] = None """Time at which this share was updated, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of share updater.""" - + def as_dict(self) -> dict: """Serializes the ShareInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.name is not None: - body["name"] = self.name - if self.objects: - body["objects"] = [v.as_dict() for v in self.objects] - if self.owner is not None: - body["owner"] = self.owner - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not None: body['created_by'] = self.created_by + if self.name is not None: body['name'] = self.name + if self.objects: body['objects'] = [v.as_dict() for v in self.objects] + if self.owner is not None: body['owner'] = self.owner + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the ShareInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.name is not None: - body["name"] = self.name - if self.objects: - body["objects"] = self.objects - if self.owner is not None: - body["owner"] = self.owner - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.comment is not None: body['comment'] = self.comment + if self.created_at is not None: body['created_at'] = self.created_at + if self.created_by is not 
None: body['created_by'] = self.created_by + if self.name is not None: body['name'] = self.name + if self.objects: body['objects'] = self.objects + if self.owner is not None: body['owner'] = self.owner + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.updated_by is not None: body['updated_by'] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ShareInfo: """Deserializes the ShareInfo from a dictionary.""" - return cls( - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - name=d.get("name", None), - objects=_repeated_dict(d, "objects", SharedDataObject), - owner=d.get("owner", None), - storage_location=d.get("storage_location", None), - storage_root=d.get("storage_root", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - ) + return cls(comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), name=d.get('name', None), objects=_repeated_dict(d, 'objects', SharedDataObject), owner=d.get('owner', None), storage_location=d.get('storage_location', None), storage_root=d.get('storage_root', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) + + + + + @dataclass class ShareToPrivilegeAssignment: privilege_assignments: Optional[List[PrivilegeAssignment]] = None """The privileges assigned to the principal.""" - + share_name: Optional[str] = None """The share name.""" - + def as_dict(self) -> dict: """Serializes the ShareToPrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] - if self.share_name is not None: - body["share_name"] = self.share_name + if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] + if self.share_name is not None: body['share_name'] = self.share_name return body def as_shallow_dict(self) -> dict: """Serializes the ShareToPrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments - if self.share_name is not None: - body["share_name"] = self.share_name + if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments + if self.share_name is not None: body['share_name'] = self.share_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ShareToPrivilegeAssignment: """Deserializes the ShareToPrivilegeAssignment from a dictionary.""" - return cls( - privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment), - share_name=d.get("share_name", None), - ) + return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment), share_name=d.get('share_name', None)) + + @dataclass @@ -2180,39 +1831,39 @@ class SharedDataObject: name: str """A fully qualified name that uniquely identifies a data object. 
For example, a table's fully qualified name is in the format of `<catalog>.<schema>.<table>`,""" - + added_at: Optional[int] = None """The time when this data object is added to the share, in epoch milliseconds.""" - + added_by: Optional[str] = None """Username of the sharer.""" - + cdf_enabled: Optional[bool] = None """Whether to enable cdf or indicate if cdf is enabled on the shared object.""" - + comment: Optional[str] = None """A user-provided comment when adding the data object to the share.""" - + content: Optional[str] = None """The content of the notebook file when the data object type is NOTEBOOK_FILE. This should be base64 encoded. Required for adding a NOTEBOOK_FILE, optional for updating, ignored for other types.""" - + data_object_type: Optional[SharedDataObjectDataObjectType] = None """The type of the data object.""" - + history_data_sharing_status: Optional[SharedDataObjectHistoryDataSharingStatus] = None """Whether to enable or disable sharing of data history. If not specified, the default is **DISABLED**.""" - + partitions: Optional[List[Partition]] = None """Array of partitions for the shared data.""" - + shared_as: Optional[str] = None """A user-provided new name for the data object within the share. If this new name is not provided, the object's original name will be used as the `shared_as` name. The `shared_as` name must be unique within a share. For tables, the new name must follow the format of
`<schema>.<table>`.""" - + start_version: Optional[int] = None """The start version associated with the object. This allows data providers to control the lowest object version that is accessible by clients. If specified, clients can query snapshots or @@ -2220,278 +1871,203 @@ class SharedDataObject: the version of the object at the time it was added to the share. NOTE: The start_version should be <= the `current` version of the object.""" - + status: Optional[SharedDataObjectStatus] = None """One of: **ACTIVE**, **PERMISSION_DENIED**.""" - + string_shared_as: Optional[str] = None """A user-provided new name for the shared object within the share. If this new name is not provided, the object's original name will be used as the `string_shared_as` name. The `string_shared_as` name must be unique for objects of the same type within a Share. For notebooks, the new name should be the new notebook file name.""" - + def as_dict(self) -> dict: """Serializes the SharedDataObject into a dictionary suitable for use as a JSON request body.""" body = {} - if self.added_at is not None: - body["added_at"] = self.added_at - if self.added_by is not None: - body["added_by"] = self.added_by - if self.cdf_enabled is not None: - body["cdf_enabled"] = self.cdf_enabled - if self.comment is not None: - body["comment"] = self.comment - if self.content is not None: - body["content"] = self.content - if self.data_object_type is not None: - body["data_object_type"] = self.data_object_type.value - if self.history_data_sharing_status is not None: - body["history_data_sharing_status"] = self.history_data_sharing_status.value - if self.name is not None: - body["name"] = self.name - if self.partitions: - body["partitions"] = [v.as_dict() for v in self.partitions] - if self.shared_as is not None: - body["shared_as"] = self.shared_as - if self.start_version is not None: - body["start_version"] = self.start_version - if self.status is not None: - body["status"] = self.status.value - if self.string_shared_as is not None: - body["string_shared_as"] = self.string_shared_as + if self.added_at is not None: body['added_at'] = self.added_at + if self.added_by is not None: body['added_by'] = self.added_by + if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled + if self.comment is not None: body['comment'] = self.comment + if self.content is not None: body['content'] = self.content + if self.data_object_type is not None: body['data_object_type'] = self.data_object_type.value + if self.history_data_sharing_status is not None: body['history_data_sharing_status'] = self.history_data_sharing_status.value + if self.name is not None: body['name'] = self.name + if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions] + if self.shared_as is not None: body['shared_as'] = self.shared_as + if self.start_version is not None: body['start_version'] = self.start_version + if self.status is not None: body['status'] = self.status.value + if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as return body def as_shallow_dict(self) -> dict: """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes.""" body = {} - if self.added_at is not None: - body["added_at"] = self.added_at - if self.added_by is not None: - body["added_by"] = self.added_by - if self.cdf_enabled is not None: - body["cdf_enabled"] = self.cdf_enabled - if self.comment is not None: - body["comment"] = self.comment - if self.content is not None: - body["content"] = self.content - if
self.data_object_type is not None: - body["data_object_type"] = self.data_object_type - if self.history_data_sharing_status is not None: - body["history_data_sharing_status"] = self.history_data_sharing_status - if self.name is not None: - body["name"] = self.name - if self.partitions: - body["partitions"] = self.partitions - if self.shared_as is not None: - body["shared_as"] = self.shared_as - if self.start_version is not None: - body["start_version"] = self.start_version - if self.status is not None: - body["status"] = self.status - if self.string_shared_as is not None: - body["string_shared_as"] = self.string_shared_as + if self.added_at is not None: body['added_at'] = self.added_at + if self.added_by is not None: body['added_by'] = self.added_by + if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled + if self.comment is not None: body['comment'] = self.comment + if self.content is not None: body['content'] = self.content + if self.data_object_type is not None: body['data_object_type'] = self.data_object_type + if self.history_data_sharing_status is not None: body['history_data_sharing_status'] = self.history_data_sharing_status + if self.name is not None: body['name'] = self.name + if self.partitions: body['partitions'] = self.partitions + if self.shared_as is not None: body['shared_as'] = self.shared_as + if self.start_version is not None: body['start_version'] = self.start_version + if self.status is not None: body['status'] = self.status + if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SharedDataObject: """Deserializes the SharedDataObject from a dictionary.""" - return cls( - added_at=d.get("added_at", None), - added_by=d.get("added_by", None), - cdf_enabled=d.get("cdf_enabled", None), - comment=d.get("comment", None), - content=d.get("content", None), - data_object_type=_enum(d, "data_object_type", SharedDataObjectDataObjectType), - history_data_sharing_status=_enum( - d, "history_data_sharing_status", SharedDataObjectHistoryDataSharingStatus - ), - name=d.get("name", None), - partitions=_repeated_dict(d, "partitions", Partition), - shared_as=d.get("shared_as", None), - start_version=d.get("start_version", None), - status=_enum(d, "status", SharedDataObjectStatus), - string_shared_as=d.get("string_shared_as", None), - ) - + return cls(added_at=d.get('added_at', None), added_by=d.get('added_by', None), cdf_enabled=d.get('cdf_enabled', None), comment=d.get('comment', None), content=d.get('content', None), data_object_type=_enum(d, 'data_object_type', SharedDataObjectDataObjectType), history_data_sharing_status=_enum(d, 'history_data_sharing_status', SharedDataObjectHistoryDataSharingStatus), name=d.get('name', None), partitions=_repeated_dict(d, 'partitions', Partition), shared_as=d.get('shared_as', None), start_version=d.get('start_version', None), status=_enum(d, 'status', SharedDataObjectStatus), string_shared_as=d.get('string_shared_as', None)) + -class SharedDataObjectDataObjectType(Enum): - FEATURE_SPEC = "FEATURE_SPEC" - FUNCTION = "FUNCTION" - MATERIALIZED_VIEW = "MATERIALIZED_VIEW" - MODEL = "MODEL" - NOTEBOOK_FILE = "NOTEBOOK_FILE" - SCHEMA = "SCHEMA" - STREAMING_TABLE = "STREAMING_TABLE" - TABLE = "TABLE" - VIEW = "VIEW" +class SharedDataObjectDataObjectType(Enum): + + + FEATURE_SPEC = 'FEATURE_SPEC' + FUNCTION = 'FUNCTION' + MATERIALIZED_VIEW = 'MATERIALIZED_VIEW' + MODEL = 'MODEL' + NOTEBOOK_FILE = 'NOTEBOOK_FILE' + SCHEMA = 'SCHEMA' + 
STREAMING_TABLE = 'STREAMING_TABLE' + TABLE = 'TABLE' + VIEW = 'VIEW' class SharedDataObjectHistoryDataSharingStatus(Enum): - - DISABLED = "DISABLED" - ENABLED = "ENABLED" - + + + DISABLED = 'DISABLED' + ENABLED = 'ENABLED' class SharedDataObjectStatus(Enum): - - ACTIVE = "ACTIVE" - PERMISSION_DENIED = "PERMISSION_DENIED" - + + + ACTIVE = 'ACTIVE' + PERMISSION_DENIED = 'PERMISSION_DENIED' @dataclass class SharedDataObjectUpdate: action: Optional[SharedDataObjectUpdateAction] = None """One of: **ADD**, **REMOVE**, **UPDATE**.""" - + data_object: Optional[SharedDataObject] = None """The data object that is being added, removed, or updated.""" - + def as_dict(self) -> dict: """Serializes the SharedDataObjectUpdate into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action is not None: - body["action"] = self.action.value - if self.data_object: - body["data_object"] = self.data_object.as_dict() + if self.action is not None: body['action'] = self.action.value + if self.data_object: body['data_object'] = self.data_object.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SharedDataObjectUpdate into a shallow dictionary of its immediate attributes.""" body = {} - if self.action is not None: - body["action"] = self.action - if self.data_object: - body["data_object"] = self.data_object + if self.action is not None: body['action'] = self.action + if self.data_object: body['data_object'] = self.data_object return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SharedDataObjectUpdate: """Deserializes the SharedDataObjectUpdate from a dictionary.""" - return cls( - action=_enum(d, "action", SharedDataObjectUpdateAction), - data_object=_from_dict(d, "data_object", SharedDataObject), - ) - + return cls(action=_enum(d, 'action', SharedDataObjectUpdateAction), data_object=_from_dict(d, 'data_object', SharedDataObject)) + -class SharedDataObjectUpdateAction(Enum): - ADD = "ADD" - REMOVE = "REMOVE" - UPDATE = "UPDATE" +class SharedDataObjectUpdateAction(Enum): + + + ADD = 'ADD' + REMOVE = 'REMOVE' + UPDATE = 'UPDATE' class SharedSecurableKind(Enum): """The SecurableKind of a delta-shared object.""" - - FUNCTION_FEATURE_SPEC = "FUNCTION_FEATURE_SPEC" - FUNCTION_REGISTERED_MODEL = "FUNCTION_REGISTERED_MODEL" - FUNCTION_STANDARD = "FUNCTION_STANDARD" - + + FUNCTION_FEATURE_SPEC = 'FUNCTION_FEATURE_SPEC' + FUNCTION_REGISTERED_MODEL = 'FUNCTION_REGISTERED_MODEL' + FUNCTION_STANDARD = 'FUNCTION_STANDARD' @dataclass class Table: comment: Optional[str] = None """The comment of the table.""" - + id: Optional[str] = None """The id of the table.""" - + internal_attributes: Optional[TableInternalAttributes] = None """Internal information for D2D sharing that should not be disclosed to external users.""" - + materialization_namespace: Optional[str] = None """The catalog and schema of the materialized table""" - + materialized_table_name: Optional[str] = None """The name of a materialized table.""" - + name: Optional[str] = None """The name of the table.""" - + schema: Optional[str] = None """The name of the schema that the table belongs to.""" - + share: Optional[str] = None """The name of the share that the table belongs to.""" - + share_id: Optional[str] = None """The id of the share that the table belongs to.""" - + tags: Optional[List[catalog.TagKeyValue]] = None """The Tags of the table.""" - + def as_dict(self) -> dict: """Serializes the Table into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - 
body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - if self.internal_attributes: - body["internal_attributes"] = self.internal_attributes.as_dict() - if self.materialization_namespace is not None: - body["materialization_namespace"] = self.materialization_namespace - if self.materialized_table_name is not None: - body["materialized_table_name"] = self.materialized_table_name - if self.name is not None: - body["name"] = self.name - if self.schema is not None: - body["schema"] = self.schema - if self.share is not None: - body["share"] = self.share - if self.share_id is not None: - body["share_id"] = self.share_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.comment is not None: body['comment'] = self.comment + if self.id is not None: body['id'] = self.id + if self.internal_attributes: body['internal_attributes'] = self.internal_attributes.as_dict() + if self.materialization_namespace is not None: body['materialization_namespace'] = self.materialization_namespace + if self.materialized_table_name is not None: body['materialized_table_name'] = self.materialized_table_name + if self.name is not None: body['name'] = self.name + if self.schema is not None: body['schema'] = self.schema + if self.share is not None: body['share'] = self.share + if self.share_id is not None: body['share_id'] = self.share_id + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Table into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - if self.internal_attributes: - body["internal_attributes"] = self.internal_attributes - if self.materialization_namespace is not None: - body["materialization_namespace"] = self.materialization_namespace - if self.materialized_table_name is not None: - body["materialized_table_name"] = self.materialized_table_name - if self.name is not None: - body["name"] = self.name - if self.schema is not None: - body["schema"] = self.schema - if self.share is not None: - body["share"] = self.share - if self.share_id is not None: - body["share_id"] = self.share_id - if self.tags: - body["tags"] = self.tags + if self.comment is not None: body['comment'] = self.comment + if self.id is not None: body['id'] = self.id + if self.internal_attributes: body['internal_attributes'] = self.internal_attributes + if self.materialization_namespace is not None: body['materialization_namespace'] = self.materialization_namespace + if self.materialized_table_name is not None: body['materialized_table_name'] = self.materialized_table_name + if self.name is not None: body['name'] = self.name + if self.schema is not None: body['schema'] = self.schema + if self.share is not None: body['share'] = self.share + if self.share_id is not None: body['share_id'] = self.share_id + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Table: """Deserializes the Table from a dictionary.""" - return cls( - comment=d.get("comment", None), - id=d.get("id", None), - internal_attributes=_from_dict(d, "internal_attributes", TableInternalAttributes), - materialization_namespace=d.get("materialization_namespace", None), - materialized_table_name=d.get("materialized_table_name", None), - name=d.get("name", None), - schema=d.get("schema", None), - share=d.get("share", None), - share_id=d.get("share_id", None), - 
tags=_repeated_dict(d, "tags", catalog.TagKeyValue), - ) + return cls(comment=d.get('comment', None), id=d.get('id', None), internal_attributes=_from_dict(d, 'internal_attributes', TableInternalAttributes), materialization_namespace=d.get('materialization_namespace', None), materialized_table_name=d.get('materialized_table_name', None), name=d.get('name', None), schema=d.get('schema', None), share=d.get('share', None), share_id=d.get('share_id', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue)) + + @dataclass class TableInternalAttributes: """Internal information for D2D sharing that should not be disclosed to external users.""" - + parent_storage_location: Optional[str] = None """Will be populated in the reconciliation response for VIEW and FOREIGN_TABLE, with the value of the parent UC entity's storage_location, following the same logic as getManagedEntityPath in @@ -2499,473 +2075,387 @@ class TableInternalAttributes: VIEW/FOREIGN_TABLE for D2O queries. The value will be used on the recipient side to be whitelisted when SEG is enabled on the workspace of the recipient, to allow the recipient users to query this shared VIEW/FOREIGN_TABLE.""" - + storage_location: Optional[str] = None """The cloud storage location of a shared table with DIRECTORY_BASED_TABLE type.""" - + type: Optional[TableInternalAttributesSharedTableType] = None """The type of the shared table.""" - + view_definition: Optional[str] = None """The view definition of a shared view. DEPRECATED.""" - + def as_dict(self) -> dict: """Serializes the TableInternalAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parent_storage_location is not None: - body["parent_storage_location"] = self.parent_storage_location - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.type is not None: - body["type"] = self.type.value - if self.view_definition is not None: - body["view_definition"] = self.view_definition + if self.parent_storage_location is not None: body['parent_storage_location'] = self.parent_storage_location + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.type is not None: body['type'] = self.type.value + if self.view_definition is not None: body['view_definition'] = self.view_definition return body def as_shallow_dict(self) -> dict: """Serializes the TableInternalAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.parent_storage_location is not None: - body["parent_storage_location"] = self.parent_storage_location - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.type is not None: - body["type"] = self.type - if self.view_definition is not None: - body["view_definition"] = self.view_definition + if self.parent_storage_location is not None: body['parent_storage_location'] = self.parent_storage_location + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.type is not None: body['type'] = self.type + if self.view_definition is not None: body['view_definition'] = self.view_definition return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableInternalAttributes: """Deserializes the TableInternalAttributes from a dictionary.""" - return cls( - parent_storage_location=d.get("parent_storage_location", None), - storage_location=d.get("storage_location", None), - type=_enum(d, "type", TableInternalAttributesSharedTableType), -
view_definition=d.get("view_definition", None), - ) + return cls(parent_storage_location=d.get('parent_storage_location', None), storage_location=d.get('storage_location', None), type=_enum(d, 'type', TableInternalAttributesSharedTableType), view_definition=d.get('view_definition', None)) + + class TableInternalAttributesSharedTableType(Enum): + + + DIRECTORY_BASED_TABLE = 'DIRECTORY_BASED_TABLE' + FILE_BASED_TABLE = 'FILE_BASED_TABLE' + FOREIGN_TABLE = 'FOREIGN_TABLE' + MATERIALIZED_VIEW = 'MATERIALIZED_VIEW' + STREAMING_TABLE = 'STREAMING_TABLE' + VIEW = 'VIEW' + - DIRECTORY_BASED_TABLE = "DIRECTORY_BASED_TABLE" - FILE_BASED_TABLE = "FILE_BASED_TABLE" - FOREIGN_TABLE = "FOREIGN_TABLE" - MATERIALIZED_VIEW = "MATERIALIZED_VIEW" - STREAMING_TABLE = "STREAMING_TABLE" - VIEW = "VIEW" @dataclass class UpdateProvider: comment: Optional[str] = None """Description about the provider.""" - + name: Optional[str] = None """Name of the provider.""" - + new_name: Optional[str] = None """New name for the provider.""" - + owner: Optional[str] = None """Username of Provider owner.""" - + recipient_profile_str: Optional[str] = None """This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - + def as_dict(self) -> dict: """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProvider: """Deserializes the UpdateProvider from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - recipient_profile_str=d.get("recipient_profile_str", None), - ) + return cls(comment=d.get('comment', None), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), recipient_profile_str=d.get('recipient_profile_str', None)) + + @dataclass class UpdateRecipient: comment: Optional[str] = None """Description about the 
recipient.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token, in epoch milliseconds.""" - + ip_access_list: Optional[IpAccessList] = None """IP Access List""" - + name: Optional[str] = None """Name of the recipient.""" - + new_name: Optional[str] = None """New name for the recipient.""" - + owner: Optional[str] = None """Username of the recipient owner.""" - + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None """Recipient properties as a map of string key-value pairs. When provided in an update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write.""" - + def as_dict(self) -> dict: """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs.as_dict() + if self.comment is not None: body['comment'] = self.comment + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs + if self.comment is not None: body['comment'] = self.comment + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time + if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRecipient: """Deserializes the UpdateRecipient from a dictionary.""" - return cls( - comment=d.get("comment", None), - expiration_time=d.get("expiration_time", None), - ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), - ) + return cls(comment=d.get('comment', None), expiration_time=d.get('expiration_time', None),
ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs)) + + @dataclass class UpdateShare: comment: Optional[str] = None """User-provided free-form text description.""" - + name: Optional[str] = None """The name of the share.""" - + new_name: Optional[str] = None """New name for the share.""" - + owner: Optional[str] = None """Username of current owner of share.""" - + storage_root: Optional[str] = None """Storage root URL for the share.""" - + updates: Optional[List[SharedDataObjectUpdate]] = None """Array of shared data object updates.""" - + def as_dict(self) -> dict: """Serializes the UpdateShare into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updates: - body["updates"] = [v.as_dict() for v in self.updates] + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updates: body['updates'] = [v.as_dict() for v in self.updates] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateShare into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updates: - body["updates"] = self.updates + if self.comment is not None: body['comment'] = self.comment + if self.name is not None: body['name'] = self.name + if self.new_name is not None: body['new_name'] = self.new_name + if self.owner is not None: body['owner'] = self.owner + if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.updates: body['updates'] = self.updates return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateShare: """Deserializes the UpdateShare from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - storage_root=d.get("storage_root", None), - updates=_repeated_dict(d, "updates", SharedDataObjectUpdate), - ) + return cls(comment=d.get('comment', None), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), storage_root=d.get('storage_root', None), updates=_repeated_dict(d, 'updates', SharedDataObjectUpdate)) + + @dataclass class UpdateSharePermissions: changes: Optional[List[PermissionsChange]] = None - """Array of permission changes.""" - + """Array of permissions change objects.""" + name: Optional[str] = None """The name of the share.""" - + omit_permissions_list: Optional[bool] = None """Optional. 
Whether to return the latest permissions list of the share in the response.""" - + def as_dict(self) -> dict: """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.changes: - body["changes"] = [v.as_dict() for v in self.changes] - if self.name is not None: - body["name"] = self.name - if self.omit_permissions_list is not None: - body["omit_permissions_list"] = self.omit_permissions_list + if self.changes: body['changes'] = [v.as_dict() for v in self.changes] + if self.name is not None: body['name'] = self.name + if self.omit_permissions_list is not None: body['omit_permissions_list'] = self.omit_permissions_list return body def as_shallow_dict(self) -> dict: """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.changes: - body["changes"] = self.changes - if self.name is not None: - body["name"] = self.name - if self.omit_permissions_list is not None: - body["omit_permissions_list"] = self.omit_permissions_list + if self.changes: body['changes'] = self.changes + if self.name is not None: body['name'] = self.name + if self.omit_permissions_list is not None: body['omit_permissions_list'] = self.omit_permissions_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateSharePermissions: """Deserializes the UpdateSharePermissions from a dictionary.""" - return cls( - changes=_repeated_dict(d, "changes", PermissionsChange), - name=d.get("name", None), - omit_permissions_list=d.get("omit_permissions_list", None), - ) + return cls(changes=_repeated_dict(d, 'changes', PermissionsChange), name=d.get('name', None), omit_permissions_list=d.get('omit_permissions_list', None)) + + @dataclass class UpdateSharePermissionsResponse: privilege_assignments: Optional[List[PrivilegeAssignment]] = None """The privileges assigned to each principal""" - + def as_dict(self) -> dict: """Serializes the UpdateSharePermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] + if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateSharePermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments + if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateSharePermissionsResponse: """Deserializes the UpdateSharePermissionsResponse from a dictionary.""" - return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment)) + return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment)) + + @dataclass class Volume: comment: Optional[str] = None """The comment of the volume.""" - + id: Optional[str] = None """This id maps to the shared_volume_id in the database. The recipient needs the shared_volume_id for reconciliation, to check whether this volume already exists in the recipient's DB.""" - + internal_attributes: Optional[VolumeInternalAttributes] = None """Internal attributes for D2D sharing that should not be disclosed to external users.""" - + name: Optional[str] = None """The name of the volume.""" - + schema: Optional[str] = None """The
name of the schema that the volume belongs to.""" - + share: Optional[str] = None """The name of the share that the volume belongs to.""" - + share_id: Optional[str] = None """/ The id of the share that the volume belongs to.""" - + tags: Optional[List[catalog.TagKeyValue]] = None """The tags of the volume.""" - + def as_dict(self) -> dict: """Serializes the Volume into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - if self.internal_attributes: - body["internal_attributes"] = self.internal_attributes.as_dict() - if self.name is not None: - body["name"] = self.name - if self.schema is not None: - body["schema"] = self.schema - if self.share is not None: - body["share"] = self.share - if self.share_id is not None: - body["share_id"] = self.share_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.comment is not None: body['comment'] = self.comment + if self.id is not None: body['id'] = self.id + if self.internal_attributes: body['internal_attributes'] = self.internal_attributes.as_dict() + if self.name is not None: body['name'] = self.name + if self.schema is not None: body['schema'] = self.schema + if self.share is not None: body['share'] = self.share + if self.share_id is not None: body['share_id'] = self.share_id + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Volume into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - if self.internal_attributes: - body["internal_attributes"] = self.internal_attributes - if self.name is not None: - body["name"] = self.name - if self.schema is not None: - body["schema"] = self.schema - if self.share is not None: - body["share"] = self.share - if self.share_id is not None: - body["share_id"] = self.share_id - if self.tags: - body["tags"] = self.tags + if self.comment is not None: body['comment'] = self.comment + if self.id is not None: body['id'] = self.id + if self.internal_attributes: body['internal_attributes'] = self.internal_attributes + if self.name is not None: body['name'] = self.name + if self.schema is not None: body['schema'] = self.schema + if self.share is not None: body['share'] = self.share + if self.share_id is not None: body['share_id'] = self.share_id + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Volume: """Deserializes the Volume from a dictionary.""" - return cls( - comment=d.get("comment", None), - id=d.get("id", None), - internal_attributes=_from_dict(d, "internal_attributes", VolumeInternalAttributes), - name=d.get("name", None), - schema=d.get("schema", None), - share=d.get("share", None), - share_id=d.get("share_id", None), - tags=_repeated_dict(d, "tags", catalog.TagKeyValue), - ) + return cls(comment=d.get('comment', None), id=d.get('id', None), internal_attributes=_from_dict(d, 'internal_attributes', VolumeInternalAttributes), name=d.get('name', None), schema=d.get('schema', None), share=d.get('share', None), share_id=d.get('share_id', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue)) + + @dataclass class VolumeInternalAttributes: """Internal information for D2D sharing that should not be disclosed to external users.""" - + storage_location: Optional[str] = None """The cloud storage location 
of the volume""" - + type: Optional[str] = None """The type of the shared volume.""" - + def as_dict(self) -> dict: """Serializes the VolumeInternalAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.type is not None: - body["type"] = self.type + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.type is not None: body['type'] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the VolumeInternalAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.type is not None: - body["type"] = self.type + if self.storage_location is not None: body['storage_location'] = self.storage_location + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VolumeInternalAttributes: """Deserializes the VolumeInternalAttributes from a dictionary.""" - return cls(storage_location=d.get("storage_location", None), type=d.get("type", None)) + return cls(storage_location=d.get('storage_location', None), type=d.get('type', None)) + + + + class ProvidersAPI: """A data provider is an object representing the organization in the real world who shares the data. A provider contains shares which further contain the shared data.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - name: str, - authentication_type: AuthenticationType, - *, - comment: Optional[str] = None, - recipient_profile_str: Optional[str] = None, - ) -> ProviderInfo: - """Create an auth provider. + + + + + + + def create(self + , name: str, authentication_type: AuthenticationType + , * + , comment: Optional[str] = None, recipient_profile_str: Optional[str] = None) -> ProviderInfo: + """Create an auth provider. + Creates a new authentication provider minimally based on a name and authentication type. The caller must be an admin on the metastore. - + :param name: str The name of the Provider. :param authentication_type: :class:`AuthenticationType` @@ -2975,74 +2465,88 @@ def create( :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. - + :returns: :class:`ProviderInfo` """ body = {} - if authentication_type is not None: - body["authentication_type"] = authentication_type.value - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if recipient_profile_str is not None: - body["recipient_profile_str"] = recipient_profile_str - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/providers", body=body, headers=headers) + if authentication_type is not None: body['authentication_type'] = authentication_type.value + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if recipient_profile_str is not None: body['recipient_profile_str'] = recipient_profile_str + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/providers', body=body + + , headers=headers + ) return ProviderInfo.from_dict(res) - def delete(self, name: str): - """Delete a provider. 
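A brief usage aside for the regenerated `create` method above, which maps its keyword arguments directly onto the JSON body of `POST /api/2.1/unity-catalog/providers`. A minimal sketch, assuming a `WorkspaceClient` configured from the environment; the provider name and profile JSON below are illustrative, not part of this patch:

import json

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import AuthenticationType

w = WorkspaceClient()  # resolves credentials from env vars or ~/.databrickscfg

# Hypothetical recipient profile; in practice this is the contents of the
# credential file obtained through the sharing activation flow.
profile = json.dumps({
    "shareCredentialsVersion": 1,
    "bearerToken": "<redacted>",
    "endpoint": "https://sharing.example.com",
})

provider = w.providers.create(
    name="example-provider",  # illustrative name
    authentication_type=AuthenticationType.TOKEN,
    comment="created via the Python SDK",
    recipient_profile_str=profile,  # required for TOKEN authentication
)
print(provider.name)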
+ + + + def delete(self + , name: str + ): + """Delete a provider. + Deletes an authentication provider, if the caller is a metastore admin or is the owner of the provider. - + :param name: str Name of the provider. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/providers/{name}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/providers/{name}", headers=headers) + + + - def get(self, name: str) -> ProviderInfo: + def get(self + , name: str + ) -> ProviderInfo: """Get a provider. - + Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the provider. - + :param name: str Name of the provider. - + :returns: :class:`ProviderInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/providers/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/providers/{name}' + + , headers=headers + ) return ProviderInfo.from_dict(res) - def list( - self, - *, - data_provider_global_metastore_id: Optional[str] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[ProviderInfo]: - """List providers. + + + + def list(self + + , * + , data_provider_global_metastore_id: Optional[str] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: + """List providers. + Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There is no guarantee of a specific ordering of the elements in the array. - + :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. @@ -3056,47 +2560,44 @@ def list( from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
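The `list` docstring above describes token-based pagination; the generated body just below loops on `next_page_token` and yields each page's items, so callers consume a flat iterator and never touch tokens. A short sketch, reusing the `w` client from the earlier aside:

# max_results only bounds each page request; the iterator keeps fetching
# pages until a response carries no next_page_token.
for p in w.providers.list(max_results=50):
    print(p.name)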
- + :returns: Iterator over :class:`ProviderInfo` """ - + query = {} - if data_provider_global_metastore_id is not None: - query["data_provider_global_metastore_id"] = data_provider_global_metastore_id - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if data_provider_global_metastore_id is not None: query['data_provider_global_metastore_id'] = data_provider_global_metastore_id + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + if "max_results" not in query: query['max_results'] = 0 while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/providers", query=query, headers=headers) - if "providers" in json: - for v in json["providers"]: - yield ProviderInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def list_provider_share_assets( - self, - provider_name: str, - share_name: str, - *, - function_max_results: Optional[int] = None, - notebook_max_results: Optional[int] = None, - table_max_results: Optional[int] = None, - volume_max_results: Optional[int] = None, - ) -> ListProviderShareAssetsResponse: - """List assets by provider share. + json = self._api.do('GET','/api/2.1/unity-catalog/providers', query=query + + , headers=headers + ) + if 'providers' in json: + for v in json['providers']: + yield ProviderInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def list_provider_share_assets(self + , provider_name: str, share_name: str + , * + , function_max_results: Optional[int] = None, notebook_max_results: Optional[int] = None, table_max_results: Optional[int] = None, volume_max_results: Optional[int] = None) -> ListProviderShareAssetsResponse: + """List assets by provider share. + Get arrays of assets associated with a specified provider's share. The caller is the recipient of the share. - + :param provider_name: str The name of the provider who owns the share. :param share_name: str @@ -3109,37 +2610,37 @@ def list_provider_share_assets( Maximum number of tables to return. :param volume_max_results: int (optional) Maximum number of volumes to return. 
- + :returns: :class:`ListProviderShareAssetsResponse` """ - + query = {} - if function_max_results is not None: - query["function_max_results"] = function_max_results - if notebook_max_results is not None: - query["notebook_max_results"] = notebook_max_results - if table_max_results is not None: - query["table_max_results"] = table_max_results - if volume_max_results is not None: - query["volume_max_results"] = volume_max_results - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.1/data-sharing/providers/{provider_name}/shares/{share_name}", query=query, headers=headers - ) + if function_max_results is not None: query['function_max_results'] = function_max_results + if notebook_max_results is not None: query['notebook_max_results'] = notebook_max_results + if table_max_results is not None: query['table_max_results'] = table_max_results + if volume_max_results is not None: query['volume_max_results'] = volume_max_results + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/data-sharing/providers/{provider_name}/shares/{share_name}', query=query + + , headers=headers + ) return ListProviderShareAssetsResponse.from_dict(res) - def list_shares( - self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[ProviderShare]: - """List shares by Provider. + + + + def list_shares(self + , name: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderShare]: + """List shares by Provider. + Gets an array of a specified provider's shares within the metastore where: - + * the caller is a metastore admin, or * the caller is the owner. - + :param name: str Name of the provider in which to list shares. :param max_results: int (optional) @@ -3152,45 +2653,44 @@ def list_shares( response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ProviderShare` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + if "max_results" not in query: query['max_results'] = 0 while True: - json = self._api.do("GET", f"/api/2.1/unity-catalog/providers/{name}/shares", query=query, headers=headers) - if "shares" in json: - for v in json["shares"]: - yield ProviderShare.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, - name: str, - *, - comment: Optional[str] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - recipient_profile_str: Optional[str] = None, - ) -> ProviderInfo: - """Update a provider. 
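# --- Illustrative aside (not part of the generated patch) ---
# Sketch of list_provider_share_assets() with per-asset-type caps; the provider
# and share names below are placeholders. The four *_max_results parameters cap
# each asset type (functions, notebooks, tables, volumes) independently.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
assets = w.providers.list_provider_share_assets(
    provider_name="acme-provider",
    share_name="sales-share",
    table_max_results=10,
    volume_max_results=10,
)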
+ json = self._api.do('GET',f'/api/2.1/unity-catalog/providers/{name}/shares', query=query + + , headers=headers + ) + if 'shares' in json: + for v in json['shares']: + yield ProviderShare.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , name: str + , * + , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, recipient_profile_str: Optional[str] = None) -> ProviderInfo: + """Update a provider. + Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. If the update changes the provider name, the caller must be both a metastore admin and the owner of the provider. - + :param name: str Name of the provider. :param comment: str (optional) @@ -3202,79 +2702,92 @@ def update( :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. - + :returns: :class:`ProviderInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - if recipient_profile_str is not None: - body["recipient_profile_str"] = recipient_profile_str - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/providers/{name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if recipient_profile_str is not None: body['recipient_profile_str'] = recipient_profile_str + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/providers/{name}', body=body + + , headers=headers + ) return ProviderInfo.from_dict(res) - + + class RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data provider to download the credential file that includes the access token. The recipient will then use the credential file to establish a secure connection with the provider to receive the shared data. - + Note that you can download the credential file only once. Recipients should treat the downloaded credential as a secret and must not share it outside of their organization.""" - + def __init__(self, api_client): self._api = api_client + - def get_activation_url_info(self, activation_url: str): - """Get a share activation URL. + - Gets an activation URL for a share. + + + + + def get_activation_url_info(self + , activation_url: str + ): + """Get a share activation URL. + + Gets an activation URL for a share. + :param activation_url: str The one time activation url. It also accepts activation token. 
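# --- Illustrative aside (not part of the generated patch) ---
# Open-sharing activation sketch using the retrieve_token endpoint defined just
# below: the recipient exchanges the one-time activation URL (or token) for an
# access token. Per the RecipientActivationAPI docstring above, the credential
# can be retrieved only once, so persist it securely. The URL is a placeholder.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
token_response = w.recipient_activation.retrieve_token(
    activation_url="<one-time-activation-url>"
)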
- - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('GET',f'/api/2.1/unity-catalog/public/data_sharing_activation_info/{activation_url}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "GET", f"/api/2.1/unity-catalog/public/data_sharing_activation_info/{activation_url}", headers=headers - ) + + + - def retrieve_token(self, activation_url: str) -> RetrieveTokenResponse: + def retrieve_token(self + , activation_url: str + ) -> RetrieveTokenResponse: """Get an access token. - + Retrieve access token with an activation url. This is a public API without any authentication. - + :param activation_url: str The one time activation url. It also accepts activation token. - + :returns: :class:`RetrieveTokenResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.1/unity-catalog/public/data_sharing_activation/{activation_url}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/public/data_sharing_activation/{activation_url}' + + , headers=headers + ) return RetrieveTokenResponse.from_dict(res) - + + class RecipientFederationPoliciesAPI: """The Recipient Federation Policies APIs are only applicable in the open sharing model where the recipient object has the authentication type of `OIDC_RECIPIENT`, enabling data sharing from Databricks to @@ -3291,148 +2804,175 @@ class RecipientFederationPoliciesAPI: Multi-Factor Authentication (MFA), and enhances security by minimizing the risk of credential leakage through the use of short-lived, expiring tokens. It is designed for strong identity governance, secure cross-platform data sharing, and reduced operational overhead for credential management. - + For more information, see https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security and https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed""" - + def __init__(self, api_client): self._api = api_client + - def create(self, recipient_name: str, policy: FederationPolicy) -> FederationPolicy: - """Create recipient federation policy. + + + + + + + def create(self + , recipient_name: str, policy: FederationPolicy + ) -> FederationPolicy: + """Create recipient federation policy. + Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must be the owner of the recipient. When sharing data from Databricks to non-Databricks clients, you can define a federation policy to authenticate non-Databricks recipients. The federation policy validates OIDC claims in federated tokens and is defined at the recipient level. This enables secretless sharing clients to authenticate using OIDC tokens. - + Supported scenarios for federation policies: 1. **User-to-Machine (U2M) flow** (e.g., PowerBI): A user accesses a resource using their own identity. 2. **Machine-to-Machine (M2M) flow** (e.g., OAuth App): An OAuth App accesses a resource using its own identity, typically for tasks like running nightly jobs. 
- + For an overview, refer to: - Blog post: Overview of feature: https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security - + For detailed configuration guides based on your use case: - Creating a Federation Policy as a provider: https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed - Configuration and usage for Machine-to-Machine (M2M) applications (e.g., Python Delta Sharing Client): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-m2m - Configuration and usage for User-to-Machine (U2M) applications (e.g., PowerBI): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-u2m - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being created. :param policy: :class:`FederationPolicy` - + :returns: :class:`FederationPolicy` """ body = policy.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies", body=body, headers=headers - ) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies', body=body + + , headers=headers + ) return FederationPolicy.from_dict(res) - def delete(self, recipient_name: str, name: str): - """Delete recipient federation policy. + + + + def delete(self + , recipient_name: str, name: str + ): + """Delete recipient federation policy. + Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being deleted. :param name: str Name of the policy. This is the name of the policy to be deleted. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do( - "DELETE", f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}", headers=headers - ) + + + - def get_federation_policy(self, recipient_name: str, name: str) -> FederationPolicy: + def get_federation_policy(self + , recipient_name: str, name: str + ) -> FederationPolicy: """Get recipient federation policy. - + Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being retrieved. :param name: str Name of the policy. This is the name of the policy to be retrieved. 
- + :returns: :class:`FederationPolicy` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}' + + , headers=headers + ) return FederationPolicy.from_dict(res) - def list( - self, recipient_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> Iterator[FederationPolicy]: - """List recipient federation policies. + + + + def list(self + , recipient_name: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + """List recipient federation policies. + Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policies are being listed. :param max_results: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do( - "GET", - f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies", - query=query, - headers=headers, - ) - if "policies" in json: - for v in json["policies"]: - yield FederationPolicy.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update( - self, recipient_name: str, name: str, policy: FederationPolicy, *, update_mask: Optional[str] = None - ) -> FederationPolicy: - """Update recipient federation policy. + json = self._api.do('GET',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies', query=query + + , headers=headers + ) + if 'policies' in json: + for v in json['policies']: + yield FederationPolicy.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def update(self + , recipient_name: str, name: str, policy: FederationPolicy + , * + , update_mask: Optional[str] = None) -> FederationPolicy: + """Update recipient federation policy. + Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being updated. :param name: str @@ -3444,64 +2984,57 @@ def update( should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'comment,oidc_policy.audiences'. 
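# --- Illustrative sketch (not part of the generated patch) ---
# Partial update with update_mask: only the masked fields are replaced, while
# an omitted mask makes every field set on `policy` overwrite the existing
# policy. The docstring's own example mask is 'comment,oidc_policy.audiences';
# here only the comment is touched. Names are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import FederationPolicy

w = WorkspaceClient()
updated = w.recipient_federation_policies.update(
    recipient_name="oidc-recipient",
    name="m2m-nightly-sync",
    policy=FederationPolicy(comment="rotated client credentials"),
    update_mask="comment",
)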
- + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if update_mask is not None: - query["update_mask"] = update_mask - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", - f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}", - query=query, - body=body, - headers=headers, - ) + if update_mask is not None: query['update_mask'] = update_mask + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}', query=query, body=body + + , headers=headers + ) return FederationPolicy.from_dict(res) - + + class RecipientsAPI: """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares. The way how sharing works differs depending on whether or not your recipient has access to a Databricks workspace that is enabled for Unity Catalog: - + - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier is the key identifier that enables the secure connection. This sharing mode is called **Databricks-to-Databricks sharing**. - + - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you create a recipient object, Databricks generates an activation link you can send to the recipient. The recipient follows the activation link to download the credential file, and then uses the credential file to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, - name: str, - authentication_type: AuthenticationType, - *, - comment: Optional[str] = None, - data_recipient_global_metastore_id: Optional[str] = None, - expiration_time: Optional[int] = None, - ip_access_list: Optional[IpAccessList] = None, - owner: Optional[str] = None, - properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, - sharing_code: Optional[str] = None, - ) -> RecipientInfo: - """Create a share recipient. + + + + + + + def create(self + , name: str, authentication_type: AuthenticationType + , * + , comment: Optional[str] = None, data_recipient_global_metastore_id: Optional[str] = None, expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, sharing_code: Optional[str] = None) -> RecipientInfo: + """Create a share recipient. + Creates a new recipient with the delta sharing authentication type in the metastore. The caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. - + :param name: str Name of Recipient. :param authentication_type: :class:`AuthenticationType` @@ -3525,85 +3058,94 @@ def create( :param sharing_code: str (optional) The one-time sharing code provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. 
- + :returns: :class:`RecipientInfo` """ body = {} - if authentication_type is not None: - body["authentication_type"] = authentication_type.value - if comment is not None: - body["comment"] = comment - if data_recipient_global_metastore_id is not None: - body["data_recipient_global_metastore_id"] = data_recipient_global_metastore_id - if expiration_time is not None: - body["expiration_time"] = expiration_time - if ip_access_list is not None: - body["ip_access_list"] = ip_access_list.as_dict() - if name is not None: - body["name"] = name - if owner is not None: - body["owner"] = owner - if properties_kvpairs is not None: - body["properties_kvpairs"] = properties_kvpairs.as_dict() - if sharing_code is not None: - body["sharing_code"] = sharing_code - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/recipients", body=body, headers=headers) + if authentication_type is not None: body['authentication_type'] = authentication_type.value + if comment is not None: body['comment'] = comment + if data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id + if expiration_time is not None: body['expiration_time'] = expiration_time + if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() + if name is not None: body['name'] = name + if owner is not None: body['owner'] = owner + if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict() + if sharing_code is not None: body['sharing_code'] = sharing_code + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/recipients', body=body + + , headers=headers + ) return RecipientInfo.from_dict(res) - def delete(self, name: str): - """Delete a share recipient. + + + + def delete(self + , name: str + ): + """Delete a share recipient. + Deletes the specified recipient from the metastore. The caller must be the owner of the recipient. - + :param name: str Name of the recipient. - - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/recipients/{name}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/recipients/{name}", headers=headers) + + + - def get(self, name: str) -> RecipientInfo: + def get(self + , name: str + ) -> RecipientInfo: """Get a share recipient. - + Gets a share recipient from the metastore if: - + * the caller is the owner of the share recipient, or: * is a metastore admin - + :param name: str Name of the recipient. - + :returns: :class:`RecipientInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/recipients/{name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/recipients/{name}' + + , headers=headers + ) return RecipientInfo.from_dict(res) - def list( - self, - *, - data_recipient_global_metastore_id: Optional[str] = None, - max_results: Optional[int] = None, - page_token: Optional[str] = None, - ) -> Iterator[RecipientInfo]: - """List share recipients. + + + + def list(self + + , * + , data_recipient_global_metastore_id: Optional[str] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[RecipientInfo]: + """List share recipients. 
+ Gets an array of all share recipients within the current metastore where: - + * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. @@ -3617,66 +3159,75 @@ def list( from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`RecipientInfo` """ - + query = {} - if data_recipient_global_metastore_id is not None: - query["data_recipient_global_metastore_id"] = data_recipient_global_metastore_id - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if data_recipient_global_metastore_id is not None: query['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + if "max_results" not in query: query['max_results'] = 0 while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/recipients", query=query, headers=headers) - if "recipients" in json: - for v in json["recipients"]: - yield RecipientInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo: - """Rotate a token. + json = self._api.do('GET','/api/2.1/unity-catalog/recipients', query=query + + , headers=headers + ) + if 'recipients' in json: + for v in json['recipients']: + yield RecipientInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def rotate_token(self + , name: str, existing_token_expire_in_seconds: int + ) -> RecipientInfo: + """Rotate a token. + Refreshes the specified recipient's delta sharing authentication token with the provided token info. The caller must be the owner of the recipient. - + :param name: str The name of the Recipient. :param existing_token_expire_in_seconds: int The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire the existing token immediately, negative number will return an error. 
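# --- Illustrative sketch (not part of the generated patch) ---
# Token rotation: existing_token_expire_in_seconds can only shorten the current
# token's lifetime (0 expires it immediately; negative values error, per the
# docstring above). A short grace window lets consumers cut over to the new token.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
rotated = w.recipients.rotate_token(
    name="analytics-partner",
    existing_token_expire_in_seconds=3600,  # old token remains valid for 1 hour
)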
- + :returns: :class:`RecipientInfo` """ body = {} - if existing_token_expire_in_seconds is not None: - body["existing_token_expire_in_seconds"] = existing_token_expire_in_seconds - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.1/unity-catalog/recipients/{name}/rotate-token", body=body, headers=headers) + if existing_token_expire_in_seconds is not None: body['existing_token_expire_in_seconds'] = existing_token_expire_in_seconds + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.1/unity-catalog/recipients/{name}/rotate-token', body=body + + , headers=headers + ) return RecipientInfo.from_dict(res) - def share_permissions( - self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> GetRecipientSharePermissionsResponse: - """Get recipient share permissions. + + + + def share_permissions(self + , name: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> GetRecipientSharePermissionsResponse: + """Get recipient share permissions. + Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the owner of the Recipient. - + :param name: str The name of the Recipient. :param max_results: int (optional) @@ -3689,41 +3240,35 @@ def share_permissions( unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetRecipientSharePermissionsResponse` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.1/unity-catalog/recipients/{name}/share-permissions", query=query, headers=headers - ) + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/recipients/{name}/share-permissions', query=query + + , headers=headers + ) return GetRecipientSharePermissionsResponse.from_dict(res) - def update( - self, - name: str, - *, - comment: Optional[str] = None, - expiration_time: Optional[int] = None, - ip_access_list: Optional[IpAccessList] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, - ) -> RecipientInfo: - """Update a share recipient. + + + + def update(self + , name: str + , * + , comment: Optional[str] = None, expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, new_name: Optional[str] = None, owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None) -> RecipientInfo: + """Update a share recipient. + Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. If the recipient name will be updated, the user must be both a metastore admin and the owner of the recipient. - + :param name: str Name of the recipient. :param comment: str (optional) @@ -3740,115 +3285,142 @@ def update( Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write. 
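# --- Illustrative sketch (not part of the generated patch) ---
# properties_kvpairs is replaced wholesale on update, so adding a single key is
# a read-modify-write, as the docstring above notes. Assumes
# SecurablePropertiesKvPairs carries its map in a `properties` attribute.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import SecurablePropertiesKvPairs

w = WorkspaceClient()
current = w.recipients.get(name="analytics-partner")
props = dict(current.properties_kvpairs.properties) if current.properties_kvpairs else {}
props["country"] = "US"  # hypothetical key
w.recipients.update(
    name="analytics-partner",
    properties_kvpairs=SecurablePropertiesKvPairs(properties=props),
)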
- + :returns: :class:`RecipientInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if expiration_time is not None: - body["expiration_time"] = expiration_time - if ip_access_list is not None: - body["ip_access_list"] = ip_access_list.as_dict() - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - if properties_kvpairs is not None: - body["properties_kvpairs"] = properties_kvpairs.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/recipients/{name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if expiration_time is not None: body['expiration_time'] = expiration_time + if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/recipients/{name}', body=body + + , headers=headers + ) return RecipientInfo.from_dict(res) - + + class SharesAPI: """A share is a container instantiated with :method:shares/create. Once created you can iteratively register a collection of existing data assets defined within the metastore using :method:shares/update. You can register data assets under their original name, qualified by their original schema, or provide alternate exposed names.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, name: str, *, comment: Optional[str] = None, storage_root: Optional[str] = None) -> ShareInfo: - """Create a share. + + + + + + + def create(self + , name: str + , * + , comment: Optional[str] = None, storage_root: Optional[str] = None) -> ShareInfo: + """Create a share. + Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. - + :param name: str Name of the share. :param comment: str (optional) User-provided free-form text description. :param storage_root: str (optional) Storage root URL for the share. - + :returns: :class:`ShareInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if name is not None: - body["name"] = name - if storage_root is not None: - body["storage_root"] = storage_root - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.1/unity-catalog/shares", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if name is not None: body['name'] = name + if storage_root is not None: body['storage_root'] = storage_root + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.1/unity-catalog/shares', body=body + + , headers=headers + ) return ShareInfo.from_dict(res) - def delete(self, name: str): - """Delete a share. + + + + def delete(self + , name: str + ): + """Delete a share. + Deletes a data object share from the metastore. The caller must be an owner of the share. - + :param name: str The name of the share. 
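# --- Illustrative sketch (not part of the generated patch) ---
# Creating an empty share; data objects are registered afterwards with
# update(), as the SharesAPI class docstring above describes.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
share = w.shares.create(name="sales-share", comment="Q3 sales tables")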
- - + + """ - + headers = {} + + self._api.do('DELETE',f'/api/2.1/unity-catalog/shares/{name}' + + , headers=headers + ) + - self._api.do("DELETE", f"/api/2.1/unity-catalog/shares/{name}", headers=headers) + + + - def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> ShareInfo: + def get(self + , name: str + , * + , include_shared_data: Optional[bool] = None) -> ShareInfo: """Get a share. - + Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param include_shared_data: bool (optional) Query for data to include in the share. - + :returns: :class:`ShareInfo` """ - + query = {} - if include_shared_data is not None: - query["include_shared_data"] = include_shared_data - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/shares/{name}", query=query, headers=headers) + if include_shared_data is not None: query['include_shared_data'] = include_shared_data + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/shares/{name}', query=query + + , headers=headers + ) return ShareInfo.from_dict(res) - def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ShareInfo]: - """List shares. + + + + def list(self + + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ShareInfo]: + """List shares. + Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of shares to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum of this @@ -3859,38 +3431,43 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ShareInfo` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - if "max_results" not in query: - query["max_results"] = 0 + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + if "max_results" not in query: query['max_results'] = 0 while True: - json = self._api.do("GET", "/api/2.1/unity-catalog/shares", query=query, headers=headers) - if "shares" in json: - for v in json["shares"]: - yield ShareInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def share_permissions( - self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None - ) -> GetSharePermissionsResponse: - """Get permissions. 
+ json = self._api.do('GET','/api/2.1/unity-catalog/shares', query=query + + , headers=headers + ) + if 'shares' in json: + for v in json['shares']: + yield ShareInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + + + + + def share_permissions(self + , name: str + , * + , max_results: Optional[int] = None, page_token: Optional[str] = None) -> GetSharePermissionsResponse: + """Get permissions. + Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param max_results: int (optional) @@ -3903,50 +3480,47 @@ def share_permissions( unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetSharePermissionsResponse` """ - + query = {} - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.1/unity-catalog/shares/{name}/permissions", query=query, headers=headers) + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.1/unity-catalog/shares/{name}/permissions', query=query + + , headers=headers + ) return GetSharePermissionsResponse.from_dict(res) - def update( - self, - name: str, - *, - comment: Optional[str] = None, - new_name: Optional[str] = None, - owner: Optional[str] = None, - storage_root: Optional[str] = None, - updates: Optional[List[SharedDataObjectUpdate]] = None, - ) -> ShareInfo: - """Update a share. + + + + def update(self + , name: str + , * + , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, storage_root: Optional[str] = None, updates: Optional[List[SharedDataObjectUpdate]] = None) -> ShareInfo: + """Update a share. + Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + In the case that the share name is changed, **updateShare** requires that the caller is both the share owner and a metastore admin. - + If there are notebook files in the share, the __storage_root__ field cannot be updated. - + For each table that is added through this method, the share owner must also have **SELECT** privilege on the table. This privilege must be maintained indefinitely for recipients to be able to access the table. Typically, you should use a group as the share owner. - + Table removals through **update** do not require additional privileges. - + :param name: str The name of the share. :param comment: str (optional) @@ -3959,61 +3533,58 @@ def update( Storage root URL for the share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) Array of shared data object updates. 
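# --- Illustrative sketch (not part of the generated patch) ---
# Registering a table on the share, then granting the recipient SELECT. As the
# update() docstring above stresses, the share owner needs a durable SELECT
# privilege on every added table, so a group owner is recommended. The
# SharedDataObject* class names and the ShareInfo.objects attribute are
# assumptions drawn from the current sharing module.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import (PermissionsChange, SharedDataObject,
                                            SharedDataObjectUpdate,
                                            SharedDataObjectUpdateAction)

w = WorkspaceClient()
w.shares.update(
    name="sales-share",
    updates=[
        SharedDataObjectUpdate(
            action=SharedDataObjectUpdateAction.ADD,
            data_object=SharedDataObject(name="main.sales.orders"),  # a table
        )
    ],
)
w.shares.update_permissions(
    name="sales-share",
    changes=[PermissionsChange(principal="analytics-partner", add=["SELECT"])],
)
# include_shared_data=True returns the registered objects along with the share.
share = w.shares.get(name="sales-share", include_shared_data=True)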
- + :returns: :class:`ShareInfo` """ body = {} - if comment is not None: - body["comment"] = comment - if new_name is not None: - body["new_name"] = new_name - if owner is not None: - body["owner"] = owner - if storage_root is not None: - body["storage_root"] = storage_root - if updates is not None: - body["updates"] = [v.as_dict() for v in updates] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/shares/{name}", body=body, headers=headers) + if comment is not None: body['comment'] = comment + if new_name is not None: body['new_name'] = new_name + if owner is not None: body['owner'] = owner + if storage_root is not None: body['storage_root'] = storage_root + if updates is not None: body['updates'] = [v.as_dict() for v in updates] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/shares/{name}', body=body + + , headers=headers + ) return ShareInfo.from_dict(res) - def update_permissions( - self, - name: str, - *, - changes: Optional[List[PermissionsChange]] = None, - omit_permissions_list: Optional[bool] = None, - ) -> UpdateSharePermissionsResponse: - """Update permissions. + + + + def update_permissions(self + , name: str + , * + , changes: Optional[List[PermissionsChange]] = None, omit_permissions_list: Optional[bool] = None) -> UpdateSharePermissionsResponse: + """Update permissions. + Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. - + For new recipient grants, the user must also be the recipient owner or metastore admin. recipient revocations do not require additional privileges. - + :param name: str The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) - Array of permission changes. + Array of permissions change objects. :param omit_permissions_list: bool (optional) Optional. Whether to return the latest permissions list of the share in the response. - + :returns: :class:`UpdateSharePermissionsResponse` """ body = {} - if changes is not None: - body["changes"] = [v.as_dict() for v in changes] - if omit_permissions_list is not None: - body["omit_permissions_list"] = omit_permissions_list - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.1/unity-catalog/shares/{name}/permissions", body=body, headers=headers) + if changes is not None: body['changes'] = [v.as_dict() for v in changes] + if omit_permissions_list is not None: body['omit_permissions_list'] = omit_permissions_list + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.1/unity-catalog/shares/{name}/permissions', body=body + + , headers=headers + ) return UpdateSharePermissionsResponse.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index 0cef4c2e4..cf3dc44bf 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -1,1078 +1,852 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AccessControl: group_name: Optional[str] = None - + permission_level: Optional[PermissionLevel] = None """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query * `CAN_MANAGE`: Can manage the query""" - + user_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AccessControl into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AccessControl into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccessControl: """Deserializes the AccessControl from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", PermissionLevel), - user_name=d.get("user_name", None), - ) - + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), user_name=d.get('user_name', None)) + -class Aggregation(Enum): - AVG = "AVG" - COUNT = "COUNT" - COUNT_DISTINCT = "COUNT_DISTINCT" - MAX = "MAX" - MEDIAN = "MEDIAN" - MIN = "MIN" - STDDEV = "STDDEV" - SUM = "SUM" +class Aggregation(Enum): + + + AVG = 'AVG' + COUNT = 'COUNT' + COUNT_DISTINCT = 'COUNT_DISTINCT' + MAX = 'MAX' + MEDIAN = 'MEDIAN' + MIN = 'MIN' + STDDEV = 'STDDEV' + SUM = 'SUM' @dataclass class Alert: condition: Optional[AlertCondition] = None """Trigger conditions of the alert.""" - + create_time: Optional[str] = None """The timestamp indicating when the alert was created.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. 
See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + id: Optional[str] = None """UUID identifying the alert.""" - + lifecycle_state: Optional[LifecycleState] = None """The workspace state of the alert. Used for tracking trashed status.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when alert returns back to normal.""" - + owner_user_name: Optional[str] = None """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the alert.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - + state: Optional[AlertState] = None """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not yet been evaluated or ran into an error during the last evaluation.""" - + trigger_time: Optional[str] = None """Timestamp when the alert was last triggered, if the alert has been triggered before.""" - + update_time: Optional[str] = None """The timestamp indicating when the alert was updated.""" - + def as_dict(self) -> dict: """Serializes the Alert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition: - body["condition"] = self.condition.as_dict() - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state.value - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_id is not None: - body["query_id"] = self.query_id - if self.seconds_to_retrigger is not None: - body["seconds_to_retrigger"] = self.seconds_to_retrigger - if self.state is not None: - body["state"] = self.state.value - if self.trigger_time is not None: - body["trigger_time"] = self.trigger_time - if self.update_time is not None: - body["update_time"] = self.update_time + if self.condition: body['condition'] = self.condition.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = 
self.lifecycle_state.value + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state.value + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the Alert into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition: - body["condition"] = self.condition - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_id is not None: - body["query_id"] = self.query_id - if self.seconds_to_retrigger is not None: - body["seconds_to_retrigger"] = self.seconds_to_retrigger - if self.state is not None: - body["state"] = self.state - if self.trigger_time is not None: - body["trigger_time"] = self.trigger_time - if self.update_time is not None: - body["update_time"] = self.update_time + if self.condition: body['condition'] = self.condition + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Alert: """Deserializes the Alert from a dictionary.""" - return cls( - condition=_from_dict(d, "condition", AlertCondition), - create_time=d.get("create_time", None), - custom_body=d.get("custom_body", None), - custom_subject=d.get("custom_subject", None), - display_name=d.get("display_name", None), - id=d.get("id", None), - lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), - notify_on_ok=d.get("notify_on_ok", None), - 
owner_user_name=d.get("owner_user_name", None), - parent_path=d.get("parent_path", None), - query_id=d.get("query_id", None), - seconds_to_retrigger=d.get("seconds_to_retrigger", None), - state=_enum(d, "state", AlertState), - trigger_time=d.get("trigger_time", None), - update_time=d.get("update_time", None), - ) + return cls(condition=_from_dict(d, 'condition', AlertCondition), create_time=d.get('create_time', None), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), display_name=d.get('display_name', None), id=d.get('id', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), notify_on_ok=d.get('notify_on_ok', None), owner_user_name=d.get('owner_user_name', None), parent_path=d.get('parent_path', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None), state=_enum(d, 'state', AlertState), trigger_time=d.get('trigger_time', None), update_time=d.get('update_time', None)) + + @dataclass class AlertCondition: empty_result_state: Optional[AlertState] = None """Alert state if result is empty.""" - + op: Optional[AlertOperator] = None """Operator used for comparison in alert evaluation.""" - + operand: Optional[AlertConditionOperand] = None """Name of the column from the query result to use for comparison in alert evaluation.""" - + threshold: Optional[AlertConditionThreshold] = None """Threshold value used for comparison in alert evaluation.""" - + def as_dict(self) -> dict: """Serializes the AlertCondition into a dictionary suitable for use as a JSON request body.""" body = {} - if self.empty_result_state is not None: - body["empty_result_state"] = self.empty_result_state.value - if self.op is not None: - body["op"] = self.op.value - if self.operand: - body["operand"] = self.operand.as_dict() - if self.threshold: - body["threshold"] = self.threshold.as_dict() + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value + if self.op is not None: body['op'] = self.op.value + if self.operand: body['operand'] = self.operand.as_dict() + if self.threshold: body['threshold'] = self.threshold.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertCondition into a shallow dictionary of its immediate attributes.""" body = {} - if self.empty_result_state is not None: - body["empty_result_state"] = self.empty_result_state - if self.op is not None: - body["op"] = self.op - if self.operand: - body["operand"] = self.operand - if self.threshold: - body["threshold"] = self.threshold + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state + if self.op is not None: body['op'] = self.op + if self.operand: body['operand'] = self.operand + if self.threshold: body['threshold'] = self.threshold return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertCondition: """Deserializes the AlertCondition from a dictionary.""" - return cls( - empty_result_state=_enum(d, "empty_result_state", AlertState), - op=_enum(d, "op", AlertOperator), - operand=_from_dict(d, "operand", AlertConditionOperand), - threshold=_from_dict(d, "threshold", AlertConditionThreshold), - ) + return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState), op=_enum(d, 'op', AlertOperator), operand=_from_dict(d, 'operand', AlertConditionOperand), threshold=_from_dict(d, 'threshold', AlertConditionThreshold)) + + @dataclass class AlertConditionOperand: column: Optional[AlertOperandColumn] = None - + def as_dict(self) -> dict: """Serializes 
the AlertConditionOperand into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column: - body["column"] = self.column.as_dict() + if self.column: body['column'] = self.column.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertConditionOperand into a shallow dictionary of its immediate attributes.""" body = {} - if self.column: - body["column"] = self.column + if self.column: body['column'] = self.column return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertConditionOperand: """Deserializes the AlertConditionOperand from a dictionary.""" - return cls(column=_from_dict(d, "column", AlertOperandColumn)) + return cls(column=_from_dict(d, 'column', AlertOperandColumn)) + + @dataclass class AlertConditionThreshold: value: Optional[AlertOperandValue] = None - + def as_dict(self) -> dict: """Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value: - body["value"] = self.value.as_dict() + if self.value: body['value'] = self.value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertConditionThreshold into a shallow dictionary of its immediate attributes.""" body = {} - if self.value: - body["value"] = self.value + if self.value: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertConditionThreshold: """Deserializes the AlertConditionThreshold from a dictionary.""" - return cls(value=_from_dict(d, "value", AlertOperandValue)) + return cls(value=_from_dict(d, 'value', AlertOperandValue)) + + class AlertEvaluationState(Enum): """UNSPECIFIED - default unspecify value for proto enum, do not use it in the code UNKNOWN - alert not yet evaluated TRIGGERED - alert is triggered OK - alert is not triggered ERROR - alert evaluation failed""" - - ERROR = "ERROR" - OK = "OK" - TRIGGERED = "TRIGGERED" - UNKNOWN = "UNKNOWN" - + + ERROR = 'ERROR' + OK = 'OK' + TRIGGERED = 'TRIGGERED' + UNKNOWN = 'UNKNOWN' @dataclass class AlertOperandColumn: name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the AlertOperandColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertOperandColumn: """Deserializes the AlertOperandColumn from a dictionary.""" - return cls(name=d.get("name", None)) + return cls(name=d.get('name', None)) + + @dataclass class AlertOperandValue: bool_value: Optional[bool] = None - + double_value: Optional[float] = None - + string_value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bool_value is not None: - body["bool_value"] = self.bool_value - if self.double_value is not None: - body["double_value"] = self.double_value - if self.string_value is not None: - body["string_value"] = self.string_value + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.double_value is not None: body['double_value'] = self.double_value + if self.string_value is not None: 
body['string_value'] = self.string_value return body def as_shallow_dict(self) -> dict: """Serializes the AlertOperandValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.bool_value is not None: - body["bool_value"] = self.bool_value - if self.double_value is not None: - body["double_value"] = self.double_value - if self.string_value is not None: - body["string_value"] = self.string_value + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.double_value is not None: body['double_value'] = self.double_value + if self.string_value is not None: body['string_value'] = self.string_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertOperandValue: """Deserializes the AlertOperandValue from a dictionary.""" - return cls( - bool_value=d.get("bool_value", None), - double_value=d.get("double_value", None), - string_value=d.get("string_value", None), - ) - + return cls(bool_value=d.get('bool_value', None), double_value=d.get('double_value', None), string_value=d.get('string_value', None)) + -class AlertOperator(Enum): - EQUAL = "EQUAL" - GREATER_THAN = "GREATER_THAN" - GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL" - IS_NULL = "IS_NULL" - LESS_THAN = "LESS_THAN" - LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL" - NOT_EQUAL = "NOT_EQUAL" +class AlertOperator(Enum): + + + EQUAL = 'EQUAL' + GREATER_THAN = 'GREATER_THAN' + GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' + IS_NULL = 'IS_NULL' + LESS_THAN = 'LESS_THAN' + LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' + NOT_EQUAL = 'NOT_EQUAL' @dataclass class AlertOptions: """Alert configuration options.""" - + column: str """Name of column in the query result to compare in alert evaluation.""" - + op: str """Operator used to compare in alert evaluation: `>`, `>=`, `<`, `<=`, `==`, `!=`""" - + value: Any """Value used to compare in alert evaluation. Supported types include strings (eg. 'foobar'), floats (eg. 123.4), and booleans (true).""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This includes email subject, Slack notification header, etc. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + empty_result_state: Optional[AlertOptionsEmptyResultState] = None """State that alert evaluates to when query result is empty.""" - + muted: Optional[bool] = None """Whether or not the alert is muted. 
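A minimal sketch of how the condition types above compose, assuming they are importable from databricks.sdk.service.sql; the column name and threshold value here are invented for illustration:

from databricks.sdk.service.sql import (AlertCondition, AlertConditionOperand,
                                        AlertConditionThreshold, AlertOperandColumn,
                                        AlertOperandValue, AlertOperator)

# Trigger when the (hypothetical) "value" column exceeds 100.
cond = AlertCondition(
    op=AlertOperator.GREATER_THAN,
    operand=AlertConditionOperand(column=AlertOperandColumn(name="value")),
    threshold=AlertConditionThreshold(value=AlertOperandValue(double_value=100.0)),
)
# as_dict() emits only the fields that are set, rendering enums as strings.
assert cond.as_dict() == {
    "op": "GREATER_THAN",
    "operand": {"column": {"name": "value"}},
    "threshold": {"value": {"double_value": 100.0}},
}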
If an alert is muted, it will not notify users and notification destinations when triggered.""" - + def as_dict(self) -> dict: """Serializes the AlertOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column is not None: - body["column"] = self.column - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.empty_result_state is not None: - body["empty_result_state"] = self.empty_result_state.value - if self.muted is not None: - body["muted"] = self.muted - if self.op is not None: - body["op"] = self.op - if self.value: - body["value"] = self.value + if self.column is not None: body['column'] = self.column + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value + if self.muted is not None: body['muted'] = self.muted + if self.op is not None: body['op'] = self.op + if self.value: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the AlertOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.column is not None: - body["column"] = self.column - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.empty_result_state is not None: - body["empty_result_state"] = self.empty_result_state - if self.muted is not None: - body["muted"] = self.muted - if self.op is not None: - body["op"] = self.op - if self.value: - body["value"] = self.value + if self.column is not None: body['column'] = self.column + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state + if self.muted is not None: body['muted'] = self.muted + if self.op is not None: body['op'] = self.op + if self.value: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertOptions: """Deserializes the AlertOptions from a dictionary.""" - return cls( - column=d.get("column", None), - custom_body=d.get("custom_body", None), - custom_subject=d.get("custom_subject", None), - empty_result_state=_enum(d, "empty_result_state", AlertOptionsEmptyResultState), - muted=d.get("muted", None), - op=d.get("op", None), - value=d.get("value", None), - ) + return cls(column=d.get('column', None), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), empty_result_state=_enum(d, 'empty_result_state', AlertOptionsEmptyResultState), muted=d.get('muted', None), op=d.get('op', None), value=d.get('value', None)) + + class AlertOptionsEmptyResultState(Enum): """State that alert evaluates to when query result is empty.""" - - OK = "ok" - TRIGGERED = "triggered" - UNKNOWN = "unknown" - + + OK = 'ok' + TRIGGERED = 'triggered' + UNKNOWN = 'unknown' @dataclass class AlertQuery: created_at: Optional[str] = None """The timestamp when this query was created.""" - + data_source_id: Optional[str] = None """Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. 
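Note that this legacy enum serializes to lowercase strings, unlike AlertEvaluationState above, which uses uppercase values; a small sketch with invented field values:

from databricks.sdk.service.sql import AlertOptions, AlertOptionsEmptyResultState

opts = AlertOptions(column="value", op=">", value=100.0,
                    empty_result_state=AlertOptionsEmptyResultState.TRIGGERED)
# The legacy enum's .value is lowercase, so the request body is too.
assert opts.as_dict()["empty_result_state"] == "triggered"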
[Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + id: Optional[str] = None """Query ID.""" - + is_archived: Optional[bool] = None """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear in search results. If this boolean is `true`, the `options` property for this query includes a `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" - + is_draft: Optional[bool] = None """Whether the query is a draft. Draft queries only appear in list views for their owners. Visualizations from draft queries cannot appear on dashboards.""" - + is_safe: Optional[bool] = None """Text parameter types are not safe from SQL injection for all types of data source. Set this Boolean parameter to `true` if a query either does not use any text type parameters or uses a data source type where text type parameters are handled safely.""" - + name: Optional[str] = None """The title of this query that appears in list views, widget headings, and on the query page.""" - + options: Optional[QueryOptions] = None - + query: Optional[str] = None """The text of the query to be run.""" - + tags: Optional[List[str]] = None - + updated_at: Optional[str] = None """The timestamp at which this query was last updated.""" - + user_id: Optional[int] = None """The ID of the user who owns the query.""" - + def as_dict(self) -> dict: """Serializes the AlertQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.is_archived is not None: - body["is_archived"] = self.is_archived - if self.is_draft is not None: - body["is_draft"] = self.is_draft - if self.is_safe is not None: - body["is_safe"] = self.is_safe - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.query is not None: - body["query"] = self.query - if self.tags: - body["tags"] = [v for v in self.tags] - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user_id is not None: - body["user_id"] = self.user_id + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.query is not None: body['query'] = self.query + if self.tags: body['tags'] = [v for v in self.tags] + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user_id is not None: body['user_id'] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the AlertQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = 
self.created_at - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.is_archived is not None: - body["is_archived"] = self.is_archived - if self.is_draft is not None: - body["is_draft"] = self.is_draft - if self.is_safe is not None: - body["is_safe"] = self.is_safe - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query is not None: - body["query"] = self.query - if self.tags: - body["tags"] = self.tags - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user_id is not None: - body["user_id"] = self.user_id + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query is not None: body['query'] = self.query + if self.tags: body['tags'] = self.tags + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user_id is not None: body['user_id'] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertQuery: """Deserializes the AlertQuery from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - data_source_id=d.get("data_source_id", None), - description=d.get("description", None), - id=d.get("id", None), - is_archived=d.get("is_archived", None), - is_draft=d.get("is_draft", None), - is_safe=d.get("is_safe", None), - name=d.get("name", None), - options=_from_dict(d, "options", QueryOptions), - query=d.get("query", None), - tags=d.get("tags", None), - updated_at=d.get("updated_at", None), - user_id=d.get("user_id", None), - ) - + return cls(created_at=d.get('created_at', None), data_source_id=d.get('data_source_id', None), description=d.get('description', None), id=d.get('id', None), is_archived=d.get('is_archived', None), is_draft=d.get('is_draft', None), is_safe=d.get('is_safe', None), name=d.get('name', None), options=_from_dict(d, 'options', QueryOptions), query=d.get('query', None), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user_id=d.get('user_id', None)) + -class AlertState(Enum): - OK = "OK" - TRIGGERED = "TRIGGERED" - UNKNOWN = "UNKNOWN" +class AlertState(Enum): + + + OK = 'OK' + TRIGGERED = 'TRIGGERED' + UNKNOWN = 'UNKNOWN' @dataclass class AlertV2: create_time: Optional[str] = None """The timestamp indicating when the alert was created.""" - + custom_description: Optional[str] = None """Custom description for the alert. Supports mustache templates.""" - + custom_summary: Optional[str] = None """Custom summary for the alert. Supports mustache templates.""" - + display_name: Optional[str] = None """The display name of the alert.""" - + evaluation: Optional[AlertV2Evaluation] = None - + id: Optional[str] = None """UUID identifying the alert.""" - + lifecycle_state: Optional[LifecycleState] = None """Indicates whether the query is trashed.""" - + owner_user_name: Optional[str] = None """The owner's username.
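A round-trip sketch for the legacy AlertQuery type above, with invented identifiers; because unset fields are omitted on both sides, serialization round-trips the original payload:

from databricks.sdk.service.sql import AlertQuery

q = AlertQuery.from_dict({"id": "q1", "name": "Daily revenue", "tags": ["finance"]})
# Fields left as None are dropped by as_dict(), so the input comes back unchanged.
assert q.as_dict() == {"id": "q1", "name": "Daily revenue", "tags": ["finance"]}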
This field is set to "Unavailable" if the user has been deleted.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the alert. Can only be set on create, and cannot be updated.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_user_name: Optional[str] = None """The run as username. This field is set to "Unavailable" if the user has been deleted.""" - + schedule: Optional[CronSchedule] = None - + update_time: Optional[str] = None """The timestamp indicating when the alert was updated.""" - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the alert.""" - + def as_dict(self) -> dict: """Serializes the AlertV2 into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_description is not None: - body["custom_description"] = self.custom_description - if self.custom_summary is not None: - body["custom_summary"] = self.custom_summary - if self.display_name is not None: - body["display_name"] = self.display_name - if self.evaluation: - body["evaluation"] = self.evaluation.as_dict() - if self.id is not None: - body["id"] = self.id - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state.value - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_user_name is not None: - body["run_as_user_name"] = self.run_as_user_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_description is not None: body['custom_description'] = self.custom_description + if self.custom_summary is not None: body['custom_summary'] = self.custom_summary + if self.display_name is not None: body['display_name'] = self.display_name + if self.evaluation: body['evaluation'] = self.evaluation.as_dict() + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name + if self.schedule: body['schedule'] = self.schedule.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2 into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_description is not None: - body["custom_description"] = self.custom_description - if self.custom_summary is not None: - body["custom_summary"] = self.custom_summary - if self.display_name is not None: - body["display_name"] = self.display_name - if self.evaluation: - body["evaluation"] = self.evaluation - if self.id is not None: - body["id"] = self.id - if 
self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_user_name is not None: - body["run_as_user_name"] = self.run_as_user_name - if self.schedule: - body["schedule"] = self.schedule - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_description is not None: body['custom_description'] = self.custom_description + if self.custom_summary is not None: body['custom_summary'] = self.custom_summary + if self.display_name is not None: body['display_name'] = self.display_name + if self.evaluation: body['evaluation'] = self.evaluation + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name + if self.schedule: body['schedule'] = self.schedule + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2: """Deserializes the AlertV2 from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - custom_description=d.get("custom_description", None), - custom_summary=d.get("custom_summary", None), - display_name=d.get("display_name", None), - evaluation=_from_dict(d, "evaluation", AlertV2Evaluation), - id=d.get("id", None), - lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), - owner_user_name=d.get("owner_user_name", None), - parent_path=d.get("parent_path", None), - query_text=d.get("query_text", None), - run_as_user_name=d.get("run_as_user_name", None), - schedule=_from_dict(d, "schedule", CronSchedule), - update_time=d.get("update_time", None), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(create_time=d.get('create_time', None), custom_description=d.get('custom_description', None), custom_summary=d.get('custom_summary', None), display_name=d.get('display_name', None), evaluation=_from_dict(d, 'evaluation', AlertV2Evaluation), id=d.get('id', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), owner_user_name=d.get('owner_user_name', None), parent_path=d.get('parent_path', None), query_text=d.get('query_text', None), run_as_user_name=d.get('run_as_user_name', None), schedule=_from_dict(d, 'schedule', CronSchedule), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None)) + + @dataclass class AlertV2Evaluation: comparison_operator: Optional[ComparisonOperator] = None """Operator used for comparison in alert evaluation.""" - + empty_result_state: Optional[AlertEvaluationState] = None """Alert state if result is empty.""" - + last_evaluated_at: Optional[str] = None """Timestamp of the last evaluation.""" - + notification: Optional[AlertV2Notification] = None """User or Notification Destination to notify 
when alert is triggered.""" - + source: Optional[AlertV2OperandColumn] = None """Source column from result to use to evaluate alert""" - + state: Optional[AlertEvaluationState] = None """Latest state of alert evaluation.""" - + threshold: Optional[AlertV2Operand] = None """Threshold to user for alert evaluation, can be a column or a value.""" - + def as_dict(self) -> dict: """Serializes the AlertV2Evaluation into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comparison_operator is not None: - body["comparison_operator"] = self.comparison_operator.value - if self.empty_result_state is not None: - body["empty_result_state"] = self.empty_result_state.value - if self.last_evaluated_at is not None: - body["last_evaluated_at"] = self.last_evaluated_at - if self.notification: - body["notification"] = self.notification.as_dict() - if self.source: - body["source"] = self.source.as_dict() - if self.state is not None: - body["state"] = self.state.value - if self.threshold: - body["threshold"] = self.threshold.as_dict() + if self.comparison_operator is not None: body['comparison_operator'] = self.comparison_operator.value + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value + if self.last_evaluated_at is not None: body['last_evaluated_at'] = self.last_evaluated_at + if self.notification: body['notification'] = self.notification.as_dict() + if self.source: body['source'] = self.source.as_dict() + if self.state is not None: body['state'] = self.state.value + if self.threshold: body['threshold'] = self.threshold.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Evaluation into a shallow dictionary of its immediate attributes.""" body = {} - if self.comparison_operator is not None: - body["comparison_operator"] = self.comparison_operator - if self.empty_result_state is not None: - body["empty_result_state"] = self.empty_result_state - if self.last_evaluated_at is not None: - body["last_evaluated_at"] = self.last_evaluated_at - if self.notification: - body["notification"] = self.notification - if self.source: - body["source"] = self.source - if self.state is not None: - body["state"] = self.state - if self.threshold: - body["threshold"] = self.threshold + if self.comparison_operator is not None: body['comparison_operator'] = self.comparison_operator + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state + if self.last_evaluated_at is not None: body['last_evaluated_at'] = self.last_evaluated_at + if self.notification: body['notification'] = self.notification + if self.source: body['source'] = self.source + if self.state is not None: body['state'] = self.state + if self.threshold: body['threshold'] = self.threshold return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Evaluation: """Deserializes the AlertV2Evaluation from a dictionary.""" - return cls( - comparison_operator=_enum(d, "comparison_operator", ComparisonOperator), - empty_result_state=_enum(d, "empty_result_state", AlertEvaluationState), - last_evaluated_at=d.get("last_evaluated_at", None), - notification=_from_dict(d, "notification", AlertV2Notification), - source=_from_dict(d, "source", AlertV2OperandColumn), - state=_enum(d, "state", AlertEvaluationState), - threshold=_from_dict(d, "threshold", AlertV2Operand), - ) + return cls(comparison_operator=_enum(d, 'comparison_operator', ComparisonOperator), empty_result_state=_enum(d, 'empty_result_state', AlertEvaluationState), 
last_evaluated_at=d.get('last_evaluated_at', None), notification=_from_dict(d, 'notification', AlertV2Notification), source=_from_dict(d, 'source', AlertV2OperandColumn), state=_enum(d, 'state', AlertEvaluationState), threshold=_from_dict(d, 'threshold', AlertV2Operand)) + + @dataclass class AlertV2Notification: notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when alert returns back to normal.""" - + retrigger_seconds: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - + subscriptions: Optional[List[AlertV2Subscription]] = None - + def as_dict(self) -> dict: """Serializes the AlertV2Notification into a dictionary suitable for use as a JSON request body.""" body = {} - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.retrigger_seconds is not None: - body["retrigger_seconds"] = self.retrigger_seconds - if self.subscriptions: - body["subscriptions"] = [v.as_dict() for v in self.subscriptions] + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.retrigger_seconds is not None: body['retrigger_seconds'] = self.retrigger_seconds + if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Notification into a shallow dictionary of its immediate attributes.""" body = {} - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.retrigger_seconds is not None: - body["retrigger_seconds"] = self.retrigger_seconds - if self.subscriptions: - body["subscriptions"] = self.subscriptions + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.retrigger_seconds is not None: body['retrigger_seconds'] = self.retrigger_seconds + if self.subscriptions: body['subscriptions'] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Notification: """Deserializes the AlertV2Notification from a dictionary.""" - return cls( - notify_on_ok=d.get("notify_on_ok", None), - retrigger_seconds=d.get("retrigger_seconds", None), - subscriptions=_repeated_dict(d, "subscriptions", AlertV2Subscription), - ) + return cls(notify_on_ok=d.get('notify_on_ok', None), retrigger_seconds=d.get('retrigger_seconds', None), subscriptions=_repeated_dict(d, 'subscriptions', AlertV2Subscription)) + + @dataclass class AlertV2Operand: column: Optional[AlertV2OperandColumn] = None - + value: Optional[AlertV2OperandValue] = None - + def as_dict(self) -> dict: """Serializes the AlertV2Operand into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column: - body["column"] = self.column.as_dict() - if self.value: - body["value"] = self.value.as_dict() + if self.column: body['column'] = self.column.as_dict() + if self.value: body['value'] = self.value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Operand into a shallow dictionary of its immediate attributes.""" body = {} - if self.column: - body["column"] = self.column - if self.value: - body["value"] = self.value + if self.column: body['column'] = self.column + if self.value: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Operand: """Deserializes the AlertV2Operand from a dictionary.""" - return cls( - column=_from_dict(d, "column", 
AlertV2OperandColumn), value=_from_dict(d, "value", AlertV2OperandValue) - ) + return cls(column=_from_dict(d, 'column', AlertV2OperandColumn), value=_from_dict(d, 'value', AlertV2OperandValue)) + + @dataclass class AlertV2OperandColumn: aggregation: Optional[Aggregation] = None - + display: Optional[str] = None - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertV2OperandColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aggregation is not None: - body["aggregation"] = self.aggregation.value - if self.display is not None: - body["display"] = self.display - if self.name is not None: - body["name"] = self.name + if self.aggregation is not None: body['aggregation'] = self.aggregation.value + if self.display is not None: body['display'] = self.display + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2OperandColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.aggregation is not None: - body["aggregation"] = self.aggregation - if self.display is not None: - body["display"] = self.display - if self.name is not None: - body["name"] = self.name + if self.aggregation is not None: body['aggregation'] = self.aggregation + if self.display is not None: body['display'] = self.display + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2OperandColumn: """Deserializes the AlertV2OperandColumn from a dictionary.""" - return cls( - aggregation=_enum(d, "aggregation", Aggregation), display=d.get("display", None), name=d.get("name", None) - ) + return cls(aggregation=_enum(d, 'aggregation', Aggregation), display=d.get('display', None), name=d.get('name', None)) + + @dataclass class AlertV2OperandValue: bool_value: Optional[bool] = None - + double_value: Optional[float] = None - + string_value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertV2OperandValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bool_value is not None: - body["bool_value"] = self.bool_value - if self.double_value is not None: - body["double_value"] = self.double_value - if self.string_value is not None: - body["string_value"] = self.string_value + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.double_value is not None: body['double_value'] = self.double_value + if self.string_value is not None: body['string_value'] = self.string_value return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2OperandValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.bool_value is not None: - body["bool_value"] = self.bool_value - if self.double_value is not None: - body["double_value"] = self.double_value - if self.string_value is not None: - body["string_value"] = self.string_value + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.double_value is not None: body['double_value'] = self.double_value + if self.string_value is not None: body['string_value'] = self.string_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2OperandValue: """Deserializes the AlertV2OperandValue from a dictionary.""" - return cls( - bool_value=d.get("bool_value", None), - double_value=d.get("double_value", None), - string_value=d.get("string_value", None), - ) + return cls(bool_value=d.get('bool_value', None), 
double_value=d.get('double_value', None), string_value=d.get('string_value', None)) + + @dataclass class AlertV2Subscription: destination_id: Optional[str] = None - + user_email: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertV2Subscription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id - if self.user_email is not None: - body["user_email"] = self.user_email + if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.user_email is not None: body['user_email'] = self.user_email return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Subscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: - body["destination_id"] = self.destination_id - if self.user_email is not None: - body["user_email"] = self.user_email + if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.user_email is not None: body['user_email'] = self.user_email return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Subscription: """Deserializes the AlertV2Subscription from a dictionary.""" - return cls(destination_id=d.get("destination_id", None), user_email=d.get("user_email", None)) + return cls(destination_id=d.get('destination_id', None), user_email=d.get('user_email', None)) + + @dataclass class BaseChunkInfo: """Describes metadata for a particular chunk, within a result set; this structure is used both within a manifest, and when fetching individual chunk data or links.""" - + byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" - + chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" - + row_count: Optional[int] = None """The number of rows within the result chunk.""" - + row_offset: Optional[int] = None """The starting row offset within the result set.""" - + def as_dict(self) -> dict: """Serializes the BaseChunkInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.byte_count is not None: - body["byte_count"] = self.byte_count - if self.chunk_index is not None: - body["chunk_index"] = self.chunk_index - if self.row_count is not None: - body["row_count"] = self.row_count - if self.row_offset is not None: - body["row_offset"] = self.row_offset + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset return body def as_shallow_dict(self) -> dict: """Serializes the BaseChunkInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.byte_count is not None: - body["byte_count"] = self.byte_count - if self.chunk_index is not None: - body["chunk_index"] = self.chunk_index - if self.row_count is not None: - body["row_count"] = self.row_count - if self.row_offset is not None: - body["row_offset"] = self.row_offset + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> BaseChunkInfo: """Deserializes the BaseChunkInfo from a dictionary.""" - return cls( - byte_count=d.get("byte_count", None), - chunk_index=d.get("chunk_index", None), - row_count=d.get("row_count", None), - row_offset=d.get("row_offset", None), - ) + return cls(byte_count=d.get('byte_count', None), chunk_index=d.get('chunk_index', None), row_count=d.get('row_count', None), row_offset=d.get('row_offset', None)) + + + + + @dataclass @@ -1091,468 +865,377 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelExecutionResponse: """Deserializes the CancelExecutionResponse from a dictionary.""" return cls() + + @dataclass class Channel: """Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be chosen only when `dbsql_version` is specified.""" - + dbsql_version: Optional[str] = None - + name: Optional[ChannelName] = None - + def as_dict(self) -> dict: """Serializes the Channel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbsql_version is not None: - body["dbsql_version"] = self.dbsql_version - if self.name is not None: - body["name"] = self.name.value + if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version + if self.name is not None: body['name'] = self.name.value return body def as_shallow_dict(self) -> dict: """Serializes the Channel into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbsql_version is not None: - body["dbsql_version"] = self.dbsql_version - if self.name is not None: - body["name"] = self.name + if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Channel: """Deserializes the Channel from a dictionary.""" - return cls(dbsql_version=d.get("dbsql_version", None), name=_enum(d, "name", ChannelName)) + return cls(dbsql_version=d.get('dbsql_version', None), name=_enum(d, 'name', ChannelName)) + + @dataclass class ChannelInfo: """Details about a Channel.""" - + dbsql_version: Optional[str] = None """DB SQL Version the Channel is mapped to.""" - + name: Optional[ChannelName] = None """Name of the channel""" - + def as_dict(self) -> dict: """Serializes the ChannelInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbsql_version is not None: - body["dbsql_version"] = self.dbsql_version - if self.name is not None: - body["name"] = self.name.value + if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version + if self.name is not None: body['name'] = self.name.value return body def as_shallow_dict(self) -> dict: """Serializes the ChannelInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbsql_version is not None: - body["dbsql_version"] = self.dbsql_version - if self.name is not None: - body["name"] = self.name + if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ChannelInfo: """Deserializes the ChannelInfo from a dictionary.""" - return cls(dbsql_version=d.get("dbsql_version", None), name=_enum(d, "name", ChannelName)) - + return cls(dbsql_version=d.get('dbsql_version', None), name=_enum(d, 'name', ChannelName)) + -class ChannelName(Enum): - CHANNEL_NAME_CURRENT = "CHANNEL_NAME_CURRENT" - CHANNEL_NAME_CUSTOM = 
"CHANNEL_NAME_CUSTOM" - CHANNEL_NAME_PREVIEW = "CHANNEL_NAME_PREVIEW" - CHANNEL_NAME_PREVIOUS = "CHANNEL_NAME_PREVIOUS" +class ChannelName(Enum): + + + CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' + CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' + CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW' + CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS' @dataclass class ClientConfig: allow_custom_js_visualizations: Optional[bool] = None - + allow_downloads: Optional[bool] = None - + allow_external_shares: Optional[bool] = None - + allow_subscriptions: Optional[bool] = None - + date_format: Optional[str] = None - + date_time_format: Optional[str] = None - + disable_publish: Optional[bool] = None - + enable_legacy_autodetect_types: Optional[bool] = None - + feature_show_permissions_control: Optional[bool] = None - + hide_plotly_mode_bar: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the ClientConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_custom_js_visualizations is not None: - body["allow_custom_js_visualizations"] = self.allow_custom_js_visualizations - if self.allow_downloads is not None: - body["allow_downloads"] = self.allow_downloads - if self.allow_external_shares is not None: - body["allow_external_shares"] = self.allow_external_shares - if self.allow_subscriptions is not None: - body["allow_subscriptions"] = self.allow_subscriptions - if self.date_format is not None: - body["date_format"] = self.date_format - if self.date_time_format is not None: - body["date_time_format"] = self.date_time_format - if self.disable_publish is not None: - body["disable_publish"] = self.disable_publish - if self.enable_legacy_autodetect_types is not None: - body["enable_legacy_autodetect_types"] = self.enable_legacy_autodetect_types - if self.feature_show_permissions_control is not None: - body["feature_show_permissions_control"] = self.feature_show_permissions_control - if self.hide_plotly_mode_bar is not None: - body["hide_plotly_mode_bar"] = self.hide_plotly_mode_bar + if self.allow_custom_js_visualizations is not None: body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations + if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads + if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares + if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions + if self.date_format is not None: body['date_format'] = self.date_format + if self.date_time_format is not None: body['date_time_format'] = self.date_time_format + if self.disable_publish is not None: body['disable_publish'] = self.disable_publish + if self.enable_legacy_autodetect_types is not None: body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types + if self.feature_show_permissions_control is not None: body['feature_show_permissions_control'] = self.feature_show_permissions_control + if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar return body def as_shallow_dict(self) -> dict: """Serializes the ClientConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_custom_js_visualizations is not None: - body["allow_custom_js_visualizations"] = self.allow_custom_js_visualizations - if self.allow_downloads is not None: - body["allow_downloads"] = self.allow_downloads - if self.allow_external_shares is not None: - body["allow_external_shares"] = 
self.allow_external_shares - if self.allow_subscriptions is not None: - body["allow_subscriptions"] = self.allow_subscriptions - if self.date_format is not None: - body["date_format"] = self.date_format - if self.date_time_format is not None: - body["date_time_format"] = self.date_time_format - if self.disable_publish is not None: - body["disable_publish"] = self.disable_publish - if self.enable_legacy_autodetect_types is not None: - body["enable_legacy_autodetect_types"] = self.enable_legacy_autodetect_types - if self.feature_show_permissions_control is not None: - body["feature_show_permissions_control"] = self.feature_show_permissions_control - if self.hide_plotly_mode_bar is not None: - body["hide_plotly_mode_bar"] = self.hide_plotly_mode_bar + if self.allow_custom_js_visualizations is not None: body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations + if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads + if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares + if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions + if self.date_format is not None: body['date_format'] = self.date_format + if self.date_time_format is not None: body['date_time_format'] = self.date_time_format + if self.disable_publish is not None: body['disable_publish'] = self.disable_publish + if self.enable_legacy_autodetect_types is not None: body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types + if self.feature_show_permissions_control is not None: body['feature_show_permissions_control'] = self.feature_show_permissions_control + if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClientConfig: """Deserializes the ClientConfig from a dictionary.""" - return cls( - allow_custom_js_visualizations=d.get("allow_custom_js_visualizations", None), - allow_downloads=d.get("allow_downloads", None), - allow_external_shares=d.get("allow_external_shares", None), - allow_subscriptions=d.get("allow_subscriptions", None), - date_format=d.get("date_format", None), - date_time_format=d.get("date_time_format", None), - disable_publish=d.get("disable_publish", None), - enable_legacy_autodetect_types=d.get("enable_legacy_autodetect_types", None), - feature_show_permissions_control=d.get("feature_show_permissions_control", None), - hide_plotly_mode_bar=d.get("hide_plotly_mode_bar", None), - ) + return cls(allow_custom_js_visualizations=d.get('allow_custom_js_visualizations', None), allow_downloads=d.get('allow_downloads', None), allow_external_shares=d.get('allow_external_shares', None), allow_subscriptions=d.get('allow_subscriptions', None), date_format=d.get('date_format', None), date_time_format=d.get('date_time_format', None), disable_publish=d.get('disable_publish', None), enable_legacy_autodetect_types=d.get('enable_legacy_autodetect_types', None), feature_show_permissions_control=d.get('feature_show_permissions_control', None), hide_plotly_mode_bar=d.get('hide_plotly_mode_bar', None)) + + @dataclass class ColumnInfo: name: Optional[str] = None """The name of the column.""" - + position: Optional[int] = None """The ordinal position of the column (starting at position 0).""" - + type_interval_type: Optional[str] = None """The format of the interval type.""" - + type_name: Optional[ColumnInfoTypeName] = None """The name of the base data type. 
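Because ClientConfig tests `is not None` rather than truthiness for its boolean flags, an explicit False survives serialization; a quick sketch with invented values:

from databricks.sdk.service.sql import ClientConfig

cfg = ClientConfig.from_dict({"allow_downloads": True, "hide_plotly_mode_bar": False})
# False is kept because the serializer checks `is not None`, not truthiness.
assert cfg.as_dict() == {"allow_downloads": True, "hide_plotly_mode_bar": False}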
This doesn't include details for complex types such as STRUCT, MAP or ARRAY.""" - + type_precision: Optional[int] = None """Specifies the number of digits in a number. This applies to the DECIMAL type.""" - + type_scale: Optional[int] = None """Specifies the number of digits to the right of the decimal point in a number. This applies to the DECIMAL type.""" - + type_text: Optional[str] = None """The full SQL type specification.""" - + def as_dict(self) -> dict: """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_name is not None: - body["type_name"] = self.type_name.value - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_name is not None: body['type_name'] = self.type_name.value + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text return body def as_shallow_dict(self) -> dict: """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_name is not None: - body["type_name"] = self.type_name - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.name is not None: body['name'] = self.name + if self.position is not None: body['position'] = self.position + if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type + if self.type_name is not None: body['type_name'] = self.type_name + if self.type_precision is not None: body['type_precision'] = self.type_precision + if self.type_scale is not None: body['type_scale'] = self.type_scale + if self.type_text is not None: body['type_text'] = self.type_text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" - return cls( - name=d.get("name", None), - position=d.get("position", None), - type_interval_type=d.get("type_interval_type", None), - type_name=_enum(d, "type_name", ColumnInfoTypeName), - type_precision=d.get("type_precision", None), - type_scale=d.get("type_scale", None), - type_text=d.get("type_text", None), - ) + return cls(name=d.get('name', None), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_name=_enum(d, 'type_name', ColumnInfoTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None)) + + class ColumnInfoTypeName(Enum): 
"""The name of the base data type. This doesn't include details for complex types such as STRUCT, MAP or ARRAY.""" - - ARRAY = "ARRAY" - BINARY = "BINARY" - BOOLEAN = "BOOLEAN" - BYTE = "BYTE" - CHAR = "CHAR" - DATE = "DATE" - DECIMAL = "DECIMAL" - DOUBLE = "DOUBLE" - FLOAT = "FLOAT" - INT = "INT" - INTERVAL = "INTERVAL" - LONG = "LONG" - MAP = "MAP" - NULL = "NULL" - SHORT = "SHORT" - STRING = "STRING" - STRUCT = "STRUCT" - TIMESTAMP = "TIMESTAMP" - USER_DEFINED_TYPE = "USER_DEFINED_TYPE" - + + ARRAY = 'ARRAY' + BINARY = 'BINARY' + BOOLEAN = 'BOOLEAN' + BYTE = 'BYTE' + CHAR = 'CHAR' + DATE = 'DATE' + DECIMAL = 'DECIMAL' + DOUBLE = 'DOUBLE' + FLOAT = 'FLOAT' + INT = 'INT' + INTERVAL = 'INTERVAL' + LONG = 'LONG' + MAP = 'MAP' + NULL = 'NULL' + SHORT = 'SHORT' + STRING = 'STRING' + STRUCT = 'STRUCT' + TIMESTAMP = 'TIMESTAMP' + USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' class ComparisonOperator(Enum): - - EQUAL = "EQUAL" - GREATER_THAN = "GREATER_THAN" - GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL" - IS_NOT_NULL = "IS_NOT_NULL" - IS_NULL = "IS_NULL" - LESS_THAN = "LESS_THAN" - LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL" - NOT_EQUAL = "NOT_EQUAL" - + + + EQUAL = 'EQUAL' + GREATER_THAN = 'GREATER_THAN' + GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' + IS_NOT_NULL = 'IS_NOT_NULL' + IS_NULL = 'IS_NULL' + LESS_THAN = 'LESS_THAN' + LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' + NOT_EQUAL = 'NOT_EQUAL' @dataclass class CreateAlert: name: str """Name of the alert.""" - + options: AlertOptions """Alert configuration options.""" - + query_id: str """Query ID.""" - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + rearm: Optional[int] = None """Number of seconds after being triggered before the alert rearms itself and can be triggered again. 
If `null`, alert will never be triggered again.""" - + def as_dict(self) -> dict: """Serializes the CreateAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.parent is not None: - body["parent"] = self.parent - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.query_id is not None: body['query_id'] = self.query_id + if self.rearm is not None: body['rearm'] = self.rearm return body def as_shallow_dict(self) -> dict: """Serializes the CreateAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.query_id is not None: body['query_id'] = self.query_id + if self.rearm is not None: body['rearm'] = self.rearm return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAlert: """Deserializes the CreateAlert from a dictionary.""" - return cls( - name=d.get("name", None), - options=_from_dict(d, "options", AlertOptions), - parent=d.get("parent", None), - query_id=d.get("query_id", None), - rearm=d.get("rearm", None), - ) + return cls(name=d.get('name', None), options=_from_dict(d, 'options', AlertOptions), parent=d.get('parent', None), query_id=d.get('query_id', None), rearm=d.get('rearm', None)) + + @dataclass class CreateAlertRequest: alert: Optional[CreateAlertRequestAlert] = None - + auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve alert display name conflicts. 
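The v2 evaluation types compose the same way as the legacy condition types; a sketch assuming imports from databricks.sdk.service.sql, with invented column and threshold values:

from databricks.sdk.service.sql import (AlertV2Evaluation, AlertV2Operand,
                                        AlertV2OperandColumn, AlertV2OperandValue,
                                        ComparisonOperator)

# Evaluate a (hypothetical) "value" column against a fixed threshold of 100.
ev = AlertV2Evaluation(
    comparison_operator=ComparisonOperator.GREATER_THAN,
    source=AlertV2OperandColumn(name="value"),
    threshold=AlertV2Operand(value=AlertV2OperandValue(double_value=100.0)),
)
assert ev.as_dict() == {
    "comparison_operator": "GREATER_THAN",
    "source": {"name": "value"},
    "threshold": {"value": {"double_value": 100.0}},
}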
Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name.""" - + def as_dict(self) -> dict: """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert: - body["alert"] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.alert: body['alert'] = self.alert.as_dict() + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert: - body["alert"] = self.alert - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.alert: body['alert'] = self.alert + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequest: """Deserializes the CreateAlertRequest from a dictionary.""" - return cls( - alert=_from_dict(d, "alert", CreateAlertRequestAlert), - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - ) + return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert), auto_resolve_display_name=d.get('auto_resolve_display_name', None)) + + @dataclass class CreateAlertRequestAlert: condition: Optional[AlertCondition] = None """Trigger conditions of the alert.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when alert returns back to normal.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the alert.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. 
If 0 or not specified, the alert will not be triggered again.""" - + def as_dict(self) -> dict: """Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition: - body["condition"] = self.condition.as_dict() - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.display_name is not None: - body["display_name"] = self.display_name - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_id is not None: - body["query_id"] = self.query_id - if self.seconds_to_retrigger is not None: - body["seconds_to_retrigger"] = self.seconds_to_retrigger + if self.condition: body['condition'] = self.condition.as_dict() + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger return body def as_shallow_dict(self) -> dict: """Serializes the CreateAlertRequestAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition: - body["condition"] = self.condition - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.display_name is not None: - body["display_name"] = self.display_name - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_id is not None: - body["query_id"] = self.query_id - if self.seconds_to_retrigger is not None: - body["seconds_to_retrigger"] = self.seconds_to_retrigger + if self.condition: body['condition'] = self.condition + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequestAlert: """Deserializes the CreateAlertRequestAlert from a dictionary.""" - return cls( - condition=_from_dict(d, "condition", AlertCondition), - custom_body=d.get("custom_body", None), - custom_subject=d.get("custom_subject", None), - display_name=d.get("display_name", None), - notify_on_ok=d.get("notify_on_ok", None), - parent_path=d.get("parent_path", None), - query_id=d.get("query_id", None), - seconds_to_retrigger=d.get("seconds_to_retrigger", None), - ) + return cls(condition=_from_dict(d, 'condition', AlertCondition), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), 
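Putting the create-request types together; a sketch with a hypothetical display name and query UUID:

from databricks.sdk.service.sql import CreateAlertRequest, CreateAlertRequestAlert

req = CreateAlertRequest(
    auto_resolve_display_name=True,
    alert=CreateAlertRequestAlert(
        display_name="Revenue below threshold",  # hypothetical name
        query_id="00000000-0000-0000-0000-000000000000",  # hypothetical query UUID
        seconds_to_retrigger=600,
    ),
)
body = req.as_dict()  # nests alert.as_dict() under the "alert" key
assert body["alert"]["seconds_to_retrigger"] == 600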
display_name=d.get('display_name', None), notify_on_ok=d.get('notify_on_ok', None), parent_path=d.get('parent_path', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None)) + + + + + @dataclass @@ -1560,225 +1243,222 @@ class CreateQueryRequest: auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve query display name conflicts. Otherwise, fail the request if the query's display name conflicts with an existing query's display name.""" - + query: Optional[CreateQueryRequestQuery] = None - + def as_dict(self) -> dict: """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.query: - body["query"] = self.query.as_dict() + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.query: body['query'] = self.query.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.query: - body["query"] = self.query + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.query: body['query'] = self.query return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequest: """Deserializes the CreateQueryRequest from a dictionary.""" - return cls( - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - query=_from_dict(d, "query", CreateQueryRequestQuery), - ) + return cls(auto_resolve_display_name=d.get('auto_resolve_display_name', None), query=_from_dict(d, 'query', CreateQueryRequestQuery)) + + @dataclass class CreateQueryRequestQuery: apply_auto_limit: Optional[bool] = None """Whether to apply a 1000 row limit to the query result.""" - + catalog: Optional[str] = None """Name of the catalog where this query will be executed.""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + display_name: Optional[str] = None """Display name of the query that appears in list views, widget headings, and on the query page.""" - + parameters: Optional[List[QueryParameter]] = None """List of query parameter definitions.""" - + parent_path: Optional[str] = None """Workspace path of the workspace folder containing the object.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_mode: Optional[RunAsMode] = None """Sets the "Run as" role for the object.""" - + schema: Optional[str] = None """Name of the schema where this query will be executed.""" - + tags: Optional[List[str]] = None - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the query.""" - + def as_dict(self) -> dict: """Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_auto_limit is not None: - body["apply_auto_limit"] = self.apply_auto_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parameters: - body["parameters"] = 
[v.as_dict() for v in self.parameters] - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_mode is not None: - body["run_as_mode"] = self.run_as_mode.value - if self.schema is not None: - body["schema"] = self.schema - if self.tags: - body["tags"] = [v for v in self.tags] - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateQueryRequestQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_auto_limit is not None: - body["apply_auto_limit"] = self.apply_auto_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parameters: - body["parameters"] = self.parameters - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_mode is not None: - body["run_as_mode"] = self.run_as_mode - if self.schema is not None: - body["schema"] = self.schema - if self.tags: - body["tags"] = self.tags - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.parameters: body['parameters'] = self.parameters + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = self.tags + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequestQuery: """Deserializes the CreateQueryRequestQuery from a dictionary.""" - return cls( - apply_auto_limit=d.get("apply_auto_limit", None), - catalog=d.get("catalog", None), - description=d.get("description", None), - display_name=d.get("display_name", None), - parameters=_repeated_dict(d, "parameters", QueryParameter), - parent_path=d.get("parent_path", None), - query_text=d.get("query_text", None), - run_as_mode=_enum(d, "run_as_mode", RunAsMode), - schema=d.get("schema", None), - tags=d.get("tags", None), 
- warehouse_id=d.get("warehouse_id", None), - ) + return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), description=d.get('description', None), display_name=d.get('display_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), parent_path=d.get('parent_path', None), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), warehouse_id=d.get('warehouse_id', None)) + + + + +@dataclass +class CreateQueryVisualizationsLegacyRequest: + """Add visualization to a query""" + + query_id: str + """The identifier returned by :method:queries/create""" + + type: str + """The type of visualization: chart, table, pivot table, and so on.""" + + options: Any + """The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON.""" + + description: Optional[str] = None + """A short description of this visualization. This is not displayed in the UI.""" + + name: Optional[str] = None + """The name of the visualization that appears on dashboards and the query screen.""" + + def as_dict(self) -> dict: + """Serializes the CreateQueryVisualizationsLegacyRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query_id is not None: body['query_id'] = self.query_id + if self.type is not None: body['type'] = self.type + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateQueryVisualizationsLegacyRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query_id is not None: body['query_id'] = self.query_id + if self.type is not None: body['type'] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateQueryVisualizationsLegacyRequest: + """Deserializes the CreateQueryVisualizationsLegacyRequest from a dictionary.""" + return cls(description=d.get('description', None), name=d.get('name', None), options=d.get('options', None), query_id=d.get('query_id', None), type=d.get('type', None)) + + @dataclass class CreateVisualizationRequest: visualization: Optional[CreateVisualizationRequestVisualization] = None - + def as_dict(self) -> dict: """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.visualization: - body["visualization"] = self.visualization.as_dict() + if self.visualization: body['visualization'] = self.visualization.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.visualization: - body["visualization"] = self.visualization + if self.visualization: body['visualization'] = self.visualization return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequest: """Deserializes the CreateVisualizationRequest from a dictionary.""" - return cls(visualization=_from_dict(d, "visualization", CreateVisualizationRequestVisualization)) + return cls(visualization=_from_dict(d, 
'visualization', CreateVisualizationRequestVisualization)) + + @dataclass class CreateVisualizationRequestVisualization: display_name: Optional[str] = None """The display name of the visualization.""" - + query_id: Optional[str] = None """UUID of the query that the visualization is attached to.""" - + serialized_options: Optional[str] = None """The visualization options vary widely from one visualization type to the next and are unsupported. Databricks does not recommend modifying visualization options directly.""" - + serialized_query_plan: Optional[str] = None """The visualization query plan varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying the visualization query plan directly.""" - + type: Optional[str] = None """The type of visualization: counter, table, funnel, and so on.""" - + def as_dict(self) -> dict: """Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.query_id is not None: - body["query_id"] = self.query_id - if self.serialized_options is not None: - body["serialized_options"] = self.serialized_options - if self.serialized_query_plan is not None: - body["serialized_query_plan"] = self.serialized_query_plan - if self.type is not None: - body["type"] = self.type + if self.display_name is not None: body['display_name'] = self.display_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the CreateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.query_id is not None: - body["query_id"] = self.query_id - if self.serialized_options is not None: - body["serialized_options"] = self.serialized_options - if self.serialized_query_plan is not None: - body["serialized_query_plan"] = self.serialized_query_plan - if self.type is not None: - body["type"] = self.type + if self.display_name is not None: body['display_name'] = self.display_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequestVisualization: """Deserializes the CreateVisualizationRequestVisualization from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - query_id=d.get("query_id", None), - serialized_options=d.get("serialized_options", None), - serialized_query_plan=d.get("serialized_query_plan", None), - type=d.get("type", None), - ) + return cls(display_name=d.get('display_name', None), query_id=d.get('query_id', None), serialized_options=d.get('serialized_options', None), serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None)) + + @dataclass @@ -1791,10 +1471,10 @@ class CreateWarehouseRequest: non-serverless warehouses - 0
indicates no autostop. Defaults to 120 mins""" - + channel: Optional[Channel] = None """Channel Details""" - + cluster_size: Optional[str] = None """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, @@ -1802,28 +1482,28 @@ class CreateWarehouseRequest: Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large""" - + creator_name: Optional[str] = None """warehouse creator name""" - + enable_photon: Optional[bool] = None """Configures whether the warehouse should use Photon optimized clusters. Defaults to false.""" - + enable_serverless_compute: Optional[bool] = None """Configures whether the warehouse should use serverless compute""" - + instance_profile_arn: Optional[str] = None """Deprecated. Instance profile used to pass IAM role to the cluster""" - + max_num_clusters: Optional[int] = None """Maximum number of clusters that the autoscaler will create to handle concurrent queries. Supported values: - Must be >= min_num_clusters - Must be <= 30. Defaults to min_clusters if unset.""" - + min_num_clusters: Optional[int] = None """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce @@ -1833,463 +1513,338 @@ class CreateWarehouseRequest: Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) Defaults to 1""" - + name: Optional[str] = None """Logical name for the cluster. Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - + spot_instance_policy: Optional[SpotInstancePolicy] = None """Configurations whether the warehouse should use spot instances.""" - + tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45.""" - + warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`. 
If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - + def as_dict(self) -> dict: """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel.as_dict() + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value + if self.tags: body['tags'] = self.tags.as_dict() + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.tags: - 
body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseRequest: """Deserializes the CreateWarehouseRequest from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - instance_profile_arn=d.get("instance_profile_arn", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", CreateWarehouseRequestWarehouseType), - ) + return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), instance_profile_arn=d.get('instance_profile_arn', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', None), name=d.get('name', None), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', CreateWarehouseRequestWarehouseType)) + + class CreateWarehouseRequestWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - - CLASSIC = "CLASSIC" - PRO = "PRO" - TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" - + + CLASSIC = 'CLASSIC' + PRO = 'PRO' + TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' @dataclass class CreateWarehouseResponse: id: Optional[str] = None """Id for the SQL warehouse. 
This value is unique across all SQL warehouses.""" - + def as_dict(self) -> dict: """Serializes the CreateWarehouseResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id + if self.id is not None: body['id'] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseResponse: """Deserializes the CreateWarehouseResponse from a dictionary.""" - return cls(id=d.get("id", None)) + return cls(id=d.get('id', None)) + + @dataclass class CreateWidget: dashboard_id: str """Dashboard ID returned by :method:dashboards/create.""" - + options: WidgetOptions - + width: int """Width of a widget""" - + id: Optional[str] = None """Widget ID returned by :method:dashboardwidgets/create""" - + text: Optional[str] = None """If this is a textbox widget, the application displays this text. This field is ignored if the widget contains a visualization in the `visualization` field.""" - + visualization_id: Optional[str] = None """Query Visualization ID returned by :method:queryvisualizations/create.""" - + def as_dict(self) -> dict: """Serializes the CreateWidget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.options: - body["options"] = self.options.as_dict() - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.id is not None: body['id'] = self.id + if self.options: body['options'] = self.options.as_dict() + if self.text is not None: body['text'] = self.text + if self.visualization_id is not None: body['visualization_id'] = self.visualization_id + if self.width is not None: body['width'] = self.width return body def as_shallow_dict(self) -> dict: """Serializes the CreateWidget into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.options: - body["options"] = self.options - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.id is not None: body['id'] = self.id + if self.options: body['options'] = self.options + if self.text is not None: body['text'] = self.text + if self.visualization_id is not None: body['visualization_id'] = self.visualization_id + if self.width is not None: body['width'] = self.width return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWidget: """Deserializes the CreateWidget from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - id=d.get("id", None), - options=_from_dict(d, "options", WidgetOptions), - text=d.get("text", None), - visualization_id=d.get("visualization_id", None), - width=d.get("width", None), - ) + return
cls(dashboard_id=d.get('dashboard_id', None), id=d.get('id', None), options=_from_dict(d, 'options', WidgetOptions), text=d.get('text', None), visualization_id=d.get('visualization_id', None), width=d.get('width', None)) + + @dataclass class CronSchedule: pause_status: Optional[SchedulePauseStatus] = None """Indicate whether this schedule is paused or not.""" - + quartz_cron_schedule: Optional[str] = None """A cron expression using quartz syntax that specifies the schedule for this pipeline. Should use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html""" - + timezone_id: Optional[str] = None """A Java timezone id. The schedule will be resolved using this timezone. This will be combined with the quartz_cron_schedule to determine the schedule. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.""" - + def as_dict(self) -> dict: """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status.value - if self.quartz_cron_schedule is not None: - body["quartz_cron_schedule"] = self.quartz_cron_schedule - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.pause_status is not None: body['pause_status'] = self.pause_status.value + if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: - body["pause_status"] = self.pause_status - if self.quartz_cron_schedule is not None: - body["quartz_cron_schedule"] = self.quartz_cron_schedule - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id + if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule + if self.timezone_id is not None: body['timezone_id'] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" - return cls( - pause_status=_enum(d, "pause_status", SchedulePauseStatus), - quartz_cron_schedule=d.get("quartz_cron_schedule", None), - timezone_id=d.get("timezone_id", None), - ) + return cls(pause_status=_enum(d, 'pause_status', SchedulePauseStatus), quartz_cron_schedule=d.get('quartz_cron_schedule', None), timezone_id=d.get('timezone_id', None)) + + @dataclass class Dashboard: """A JSON representing a dashboard containing widgets of visualizations and text boxes.""" - + can_edit: Optional[bool] = None """Whether the authenticated user can edit the query definition.""" - + created_at: Optional[str] = None """Timestamp when this dashboard was created.""" - + dashboard_filters_enabled: Optional[bool] = None """In the web application, query filters that share a name are coupled to a single selection box if this value is `true`.""" - + id: Optional[str] = None """The ID for this dashboard.""" - + is_archived: Optional[bool] = None """Indicates whether a dashboard is trashed. Trashed dashboards won't appear in list views. If this boolean is `true`, the `options` property for this dashboard includes a `moved_to_trash_at` timestamp. 
Items in trash are permanently deleted after 30 days.""" - + is_draft: Optional[bool] = None """Whether a dashboard is a draft. Draft dashboards only appear in list views for their owners.""" - + is_favorite: Optional[bool] = None """Indicates whether this query object appears in the current user's favorites list. This flag determines whether the star icon for favorites is selected.""" - + name: Optional[str] = None """The title of the dashboard that appears in list views and at the top of the dashboard page.""" - + options: Optional[DashboardOptions] = None - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + permission_tier: Optional[PermissionLevel] = None """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query * `CAN_MANAGE`: Can manage the query""" - + slug: Optional[str] = None """URL slug. Usually mirrors the query name with dashes (`-`) instead of spaces. Appears in the URL for this query.""" - + tags: Optional[List[str]] = None - + updated_at: Optional[str] = None """Timestamp when this dashboard was last updated.""" - + user: Optional[User] = None - + user_id: Optional[int] = None """The ID of the user who owns the dashboard.""" - + widgets: Optional[List[Widget]] = None - + def as_dict(self) -> dict: """Serializes the Dashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_edit is not None: - body["can_edit"] = self.can_edit - if self.created_at is not None: - body["created_at"] = self.created_at - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.id is not None: - body["id"] = self.id - if self.is_archived is not None: - body["is_archived"] = self.is_archived - if self.is_draft is not None: - body["is_draft"] = self.is_draft - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.parent is not None: - body["parent"] = self.parent - if self.permission_tier is not None: - body["permission_tier"] = self.permission_tier.value - if self.slug is not None: - body["slug"] = self.slug - if self.tags: - body["tags"] = [v for v in self.tags] - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user: - body["user"] = self.user.as_dict() - if self.user_id is not None: - body["user_id"] = self.user_id - if self.widgets: - body["widgets"] = [v.as_dict() for v in self.widgets] + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value + if self.slug is not None: body['slug'] = self.slug + if self.tags: body['tags'] = [v for v in self.tags] + if self.updated_at is not None: body['updated_at'] = self.updated_at 
+ if self.user: body['user'] = self.user.as_dict() + if self.user_id is not None: body['user_id'] = self.user_id + if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets] return body def as_shallow_dict(self) -> dict: """Serializes the Dashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.can_edit is not None: - body["can_edit"] = self.can_edit - if self.created_at is not None: - body["created_at"] = self.created_at - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.id is not None: - body["id"] = self.id - if self.is_archived is not None: - body["is_archived"] = self.is_archived - if self.is_draft is not None: - body["is_draft"] = self.is_draft - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.permission_tier is not None: - body["permission_tier"] = self.permission_tier - if self.slug is not None: - body["slug"] = self.slug - if self.tags: - body["tags"] = self.tags - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user: - body["user"] = self.user - if self.user_id is not None: - body["user_id"] = self.user_id - if self.widgets: - body["widgets"] = self.widgets + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier + if self.slug is not None: body['slug'] = self.slug + if self.tags: body['tags'] = self.tags + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user + if self.user_id is not None: body['user_id'] = self.user_id + if self.widgets: body['widgets'] = self.widgets return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Dashboard: """Deserializes the Dashboard from a dictionary.""" - return cls( - can_edit=d.get("can_edit", None), - created_at=d.get("created_at", None), - dashboard_filters_enabled=d.get("dashboard_filters_enabled", None), - id=d.get("id", None), - is_archived=d.get("is_archived", None), - is_draft=d.get("is_draft", None), - is_favorite=d.get("is_favorite", None), - name=d.get("name", None), - options=_from_dict(d, "options", DashboardOptions), - parent=d.get("parent", None), - permission_tier=_enum(d, "permission_tier", PermissionLevel), - slug=d.get("slug", None), - tags=d.get("tags", None), - updated_at=d.get("updated_at", None), - user=_from_dict(d, "user", User), - user_id=d.get("user_id", None), - widgets=_repeated_dict(d, "widgets", Widget), - ) + return cls(can_edit=d.get('can_edit', None), created_at=d.get('created_at', None), dashboard_filters_enabled=d.get('dashboard_filters_enabled', None), id=d.get('id', None), 
is_archived=d.get('is_archived', None), is_draft=d.get('is_draft', None), is_favorite=d.get('is_favorite', None), name=d.get('name', None), options=_from_dict(d, 'options', DashboardOptions), parent=d.get('parent', None), permission_tier=_enum(d, 'permission_tier', PermissionLevel), slug=d.get('slug', None), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User), user_id=d.get('user_id', None), widgets=_repeated_dict(d, 'widgets', Widget)) + + @dataclass class DashboardEditContent: dashboard_id: Optional[str] = None - + name: Optional[str] = None """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the DashboardEditContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.name is not None: - body["name"] = self.name - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.name is not None: body['name'] = self.name + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.name is not None: - body["name"] = self.name - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.name is not None: body['name'] = self.name + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardEditContent: """Deserializes the DashboardEditContent from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - name=d.get("name", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) + return cls(dashboard_id=d.get('dashboard_id', None), name=d.get('name', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None)) + + @dataclass @@ -2297,348 +1852,299 @@ class DashboardOptions: moved_to_trash_at: Optional[str] = None """The timestamp when this dashboard was moved to trash. Only present when the `is_archived` property is `true`. 
Trashed items are deleted after thirty days.""" - + def as_dict(self) -> dict: """Serializes the DashboardOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.moved_to_trash_at is not None: - body["moved_to_trash_at"] = self.moved_to_trash_at + if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at return body def as_shallow_dict(self) -> dict: """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.moved_to_trash_at is not None: - body["moved_to_trash_at"] = self.moved_to_trash_at + if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardOptions: """Deserializes the DashboardOptions from a dictionary.""" - return cls(moved_to_trash_at=d.get("moved_to_trash_at", None)) + return cls(moved_to_trash_at=d.get('moved_to_trash_at', None)) + + @dataclass class DashboardPostContent: name: str """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - + dashboard_filters_enabled: Optional[bool] = None """Indicates whether the dashboard filters are enabled""" - + is_favorite: Optional[bool] = None """Indicates whether this dashboard object should appear in the current user's favorites list.""" - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the DashboardPostContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.parent is not None: - body["parent"] = self.parent - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] + if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.name is not None: body['name'] = self.name + if self.parent is not None: body['parent'] = self.parent + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the DashboardPostContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.parent is not None: - body["parent"] = self.parent - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags + if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.name is not 
None: body['name'] = self.name + if self.parent is not None: body['parent'] = self.parent + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardPostContent: """Deserializes the DashboardPostContent from a dictionary.""" - return cls( - dashboard_filters_enabled=d.get("dashboard_filters_enabled", None), - is_favorite=d.get("is_favorite", None), - name=d.get("name", None), - parent=d.get("parent", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) + return cls(dashboard_filters_enabled=d.get('dashboard_filters_enabled', None), is_favorite=d.get('is_favorite', None), name=d.get('name', None), parent=d.get('parent', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None)) + + @dataclass class DataSource: """A JSON object representing a DBSQL data source / SQL warehouse.""" - + id: Optional[str] = None """Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + name: Optional[str] = None """The string name of this data source / SQL warehouse as it appears in the Databricks SQL web application.""" - + pause_reason: Optional[str] = None """Reserved for internal use.""" - + paused: Optional[int] = None """Reserved for internal use.""" - + supports_auto_limit: Optional[bool] = None """Reserved for internal use.""" - + syntax: Optional[str] = None """Reserved for internal use.""" - + type: Optional[str] = None """The type of data source. For SQL warehouses, this will be `databricks_internal`.""" - + view_only: Optional[bool] = None """Reserved for internal use.""" - + warehouse_id: Optional[str] = None """The ID of the associated SQL warehouse, if this data source is backed by a SQL warehouse.""" - + def as_dict(self) -> dict: """Serializes the DataSource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.pause_reason is not None: - body["pause_reason"] = self.pause_reason - if self.paused is not None: - body["paused"] = self.paused - if self.supports_auto_limit is not None: - body["supports_auto_limit"] = self.supports_auto_limit - if self.syntax is not None: - body["syntax"] = self.syntax - if self.type is not None: - body["type"] = self.type - if self.view_only is not None: - body["view_only"] = self.view_only - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.pause_reason is not None: body['pause_reason'] = self.pause_reason + if self.paused is not None: body['paused'] = self.paused + if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit + if self.syntax is not None: body['syntax'] = self.syntax + if self.type is not None: body['type'] = self.type + if self.view_only is not None: body['view_only'] = self.view_only + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the DataSource into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if 
self.pause_reason is not None: - body["pause_reason"] = self.pause_reason - if self.paused is not None: - body["paused"] = self.paused - if self.supports_auto_limit is not None: - body["supports_auto_limit"] = self.supports_auto_limit - if self.syntax is not None: - body["syntax"] = self.syntax - if self.type is not None: - body["type"] = self.type - if self.view_only is not None: - body["view_only"] = self.view_only - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.pause_reason is not None: body['pause_reason'] = self.pause_reason + if self.paused is not None: body['paused'] = self.paused + if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit + if self.syntax is not None: body['syntax'] = self.syntax + if self.type is not None: body['type'] = self.type + if self.view_only is not None: body['view_only'] = self.view_only + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataSource: """Deserializes the DataSource from a dictionary.""" - return cls( - id=d.get("id", None), - name=d.get("name", None), - pause_reason=d.get("pause_reason", None), - paused=d.get("paused", None), - supports_auto_limit=d.get("supports_auto_limit", None), - syntax=d.get("syntax", None), - type=d.get("type", None), - view_only=d.get("view_only", None), - warehouse_id=d.get("warehouse_id", None), - ) - + return cls(id=d.get('id', None), name=d.get('name', None), pause_reason=d.get('pause_reason', None), paused=d.get('paused', None), supports_auto_limit=d.get('supports_auto_limit', None), syntax=d.get('syntax', None), type=d.get('type', None), view_only=d.get('view_only', None), warehouse_id=d.get('warehouse_id', None)) + -class DatePrecision(Enum): - DAY_PRECISION = "DAY_PRECISION" - MINUTE_PRECISION = "MINUTE_PRECISION" - SECOND_PRECISION = "SECOND_PRECISION" +class DatePrecision(Enum): + + + DAY_PRECISION = 'DAY_PRECISION' + MINUTE_PRECISION = 'MINUTE_PRECISION' + SECOND_PRECISION = 'SECOND_PRECISION' @dataclass class DateRange: start: str - + end: str - + def as_dict(self) -> dict: """Serializes the DateRange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end is not None: - body["end"] = self.end - if self.start is not None: - body["start"] = self.start + if self.end is not None: body['end'] = self.end + if self.start is not None: body['start'] = self.start return body def as_shallow_dict(self) -> dict: """Serializes the DateRange into a shallow dictionary of its immediate attributes.""" body = {} - if self.end is not None: - body["end"] = self.end - if self.start is not None: - body["start"] = self.start + if self.end is not None: body['end'] = self.end + if self.start is not None: body['start'] = self.start return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DateRange: """Deserializes the DateRange from a dictionary.""" - return cls(end=d.get("end", None), start=d.get("start", None)) + return cls(end=d.get('end', None), start=d.get('start', None)) + + @dataclass class DateRangeValue: date_range_value: Optional[DateRange] = None """Manually specified date-time range value.""" - + dynamic_date_range_value: Optional[DateRangeValueDynamicDateRange] = None """Dynamic date-time range value based on current date-time.""" - + precision: Optional[DatePrecision] = None """Date-time precision to format the 
value into when the query is run. Defaults to DAY_PRECISION (YYYY-MM-DD).""" - + start_day_of_week: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.date_range_value: - body["date_range_value"] = self.date_range_value.as_dict() - if self.dynamic_date_range_value is not None: - body["dynamic_date_range_value"] = self.dynamic_date_range_value.value - if self.precision is not None: - body["precision"] = self.precision.value - if self.start_day_of_week is not None: - body["start_day_of_week"] = self.start_day_of_week + if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() + if self.dynamic_date_range_value is not None: body['dynamic_date_range_value'] = self.dynamic_date_range_value.value + if self.precision is not None: body['precision'] = self.precision.value + if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week return body def as_shallow_dict(self) -> dict: """Serializes the DateRangeValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.date_range_value: - body["date_range_value"] = self.date_range_value - if self.dynamic_date_range_value is not None: - body["dynamic_date_range_value"] = self.dynamic_date_range_value - if self.precision is not None: - body["precision"] = self.precision - if self.start_day_of_week is not None: - body["start_day_of_week"] = self.start_day_of_week + if self.date_range_value: body['date_range_value'] = self.date_range_value + if self.dynamic_date_range_value is not None: body['dynamic_date_range_value'] = self.dynamic_date_range_value + if self.precision is not None: body['precision'] = self.precision + if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DateRangeValue: """Deserializes the DateRangeValue from a dictionary.""" - return cls( - date_range_value=_from_dict(d, "date_range_value", DateRange), - dynamic_date_range_value=_enum(d, "dynamic_date_range_value", DateRangeValueDynamicDateRange), - precision=_enum(d, "precision", DatePrecision), - start_day_of_week=d.get("start_day_of_week", None), - ) - + return cls(date_range_value=_from_dict(d, 'date_range_value', DateRange), dynamic_date_range_value=_enum(d, 'dynamic_date_range_value', DateRangeValueDynamicDateRange), precision=_enum(d, 'precision', DatePrecision), start_day_of_week=d.get('start_day_of_week', None)) + -class DateRangeValueDynamicDateRange(Enum): - LAST_12_MONTHS = "LAST_12_MONTHS" - LAST_14_DAYS = "LAST_14_DAYS" - LAST_24_HOURS = "LAST_24_HOURS" - LAST_30_DAYS = "LAST_30_DAYS" - LAST_60_DAYS = "LAST_60_DAYS" - LAST_7_DAYS = "LAST_7_DAYS" - LAST_8_HOURS = "LAST_8_HOURS" - LAST_90_DAYS = "LAST_90_DAYS" - LAST_HOUR = "LAST_HOUR" - LAST_MONTH = "LAST_MONTH" - LAST_WEEK = "LAST_WEEK" - LAST_YEAR = "LAST_YEAR" - THIS_MONTH = "THIS_MONTH" - THIS_WEEK = "THIS_WEEK" - THIS_YEAR = "THIS_YEAR" - TODAY = "TODAY" - YESTERDAY = "YESTERDAY" +class DateRangeValueDynamicDateRange(Enum): + + + LAST_12_MONTHS = 'LAST_12_MONTHS' + LAST_14_DAYS = 'LAST_14_DAYS' + LAST_24_HOURS = 'LAST_24_HOURS' + LAST_30_DAYS = 'LAST_30_DAYS' + LAST_60_DAYS = 'LAST_60_DAYS' + LAST_7_DAYS = 'LAST_7_DAYS' + LAST_8_HOURS = 'LAST_8_HOURS' + LAST_90_DAYS = 'LAST_90_DAYS' + LAST_HOUR = 'LAST_HOUR' + LAST_MONTH = 'LAST_MONTH' + LAST_WEEK = 'LAST_WEEK' + LAST_YEAR = 'LAST_YEAR' + THIS_MONTH = 'THIS_MONTH' + THIS_WEEK = 
'THIS_WEEK' + THIS_YEAR = 'THIS_YEAR' + TODAY = 'TODAY' + YESTERDAY = 'YESTERDAY' @dataclass class DateValue: date_value: Optional[str] = None """Manually specified date-time value.""" - + dynamic_date_value: Optional[DateValueDynamicDate] = None """Dynamic date-time value based on current date-time.""" - + precision: Optional[DatePrecision] = None """Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION (YYYY-MM-DD).""" - + def as_dict(self) -> dict: """Serializes the DateValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.date_value is not None: - body["date_value"] = self.date_value - if self.dynamic_date_value is not None: - body["dynamic_date_value"] = self.dynamic_date_value.value - if self.precision is not None: - body["precision"] = self.precision.value + if self.date_value is not None: body['date_value'] = self.date_value + if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value.value + if self.precision is not None: body['precision'] = self.precision.value return body def as_shallow_dict(self) -> dict: """Serializes the DateValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.date_value is not None: - body["date_value"] = self.date_value - if self.dynamic_date_value is not None: - body["dynamic_date_value"] = self.dynamic_date_value - if self.precision is not None: - body["precision"] = self.precision + if self.date_value is not None: body['date_value'] = self.date_value + if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value + if self.precision is not None: body['precision'] = self.precision return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DateValue: """Deserializes the DateValue from a dictionary.""" - return cls( - date_value=d.get("date_value", None), - dynamic_date_value=_enum(d, "dynamic_date_value", DateValueDynamicDate), - precision=_enum(d, "precision", DatePrecision), - ) + return cls(date_value=d.get('date_value', None), dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate), precision=_enum(d, 'precision', DatePrecision)) + + class DateValueDynamicDate(Enum): + + + NOW = 'NOW' + YESTERDAY = 'YESTERDAY' + + + + + + + + + + + + + - NOW = "NOW" - YESTERDAY = "YESTERDAY" @dataclass @@ -2657,6 +2163,14 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + + + + + + + @dataclass @@ -2675,71 +2189,59 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteWarehouseResponse: """Deserializes the DeleteWarehouseResponse from a dictionary.""" return cls() + -class Disposition(Enum): - - EXTERNAL_LINKS = "EXTERNAL_LINKS" - INLINE = "INLINE" +class Disposition(Enum): + + + EXTERNAL_LINKS = 'EXTERNAL_LINKS' + INLINE = 'INLINE' @dataclass class EditAlert: name: str """Name of the alert.""" - + options: AlertOptions """Alert configuration options.""" - + query_id: str """Query ID.""" - + alert_id: Optional[str] = None - + rearm: Optional[int] = None """Number of seconds after being triggered before the alert rearms itself and can be triggered again. 
@dataclass class EditAlert: name: str """Name of the alert.""" - + options: AlertOptions """Alert configuration options.""" - + query_id: str """Query ID.""" - + alert_id: Optional[str] = None - + rearm: Optional[int] = None """Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again.""" - + def as_dict(self) -> dict: """Serializes the EditAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.query_id is not None: body['query_id'] = self.query_id + if self.rearm is not None: body['rearm'] = self.rearm return body def as_shallow_dict(self) -> dict: """Serializes the EditAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query_id is not None: body['query_id'] = self.query_id + if self.rearm is not None: body['rearm'] = self.rearm return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditAlert: """Deserializes the EditAlert from a dictionary.""" - return cls( - alert_id=d.get("alert_id", None), - name=d.get("name", None), - options=_from_dict(d, "options", AlertOptions), - query_id=d.get("query_id", None), - rearm=d.get("rearm", None), - ) + return cls(alert_id=d.get('alert_id', None), name=d.get('name', None), options=_from_dict(d, 'options', AlertOptions), query_id=d.get('query_id', None), rearm=d.get('rearm', None)) + + @dataclass @@ -2751,10 +2253,10 @@ class EditWarehouseRequest: Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. Defaults to 120 mins""" - + channel: Optional[Channel] = None """Channel Details""" - + cluster_size: Optional[str] = None """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, @@ -2762,31 +2264,31 @@ class EditWarehouseRequest: Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large""" - + creator_name: Optional[str] = None """warehouse creator name""" - + enable_photon: Optional[bool] = None """Configures whether the warehouse should use Photon optimized clusters. Defaults to false.""" - + enable_serverless_compute: Optional[bool] = None """Configures whether the warehouse should use serverless compute.""" - + id: Optional[str] = None """Required. Id of the warehouse to configure.""" - + instance_profile_arn: Optional[str] = None """Deprecated. Instance profile used to pass IAM role to the cluster""" - + max_num_clusters: Optional[int] = None """Maximum number of clusters that the autoscaler will create to handle concurrent queries. Supported values: - Must be >= min_num_clusters - Must be <= 30. Defaults to min_clusters if unset.""" - + min_num_clusters: Optional[int] = None """Minimum number of available clusters that will be maintained for this SQL warehouse.
Increasing this will ensure that a larger number of clusters are always running and therefore may reduce @@ -2796,120 +2298,78 @@ class EditWarehouseRequest: Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) Defaults to 1""" - + name: Optional[str] = None """Logical name for the cluster. Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - + spot_instance_policy: Optional[SpotInstancePolicy] = None """Configures whether the warehouse should use spot instances.""" - + tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45.""" - + warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - + def as_dict(self) -> dict: """Serializes the EditWarehouseRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel.as_dict() + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value + if self.tags: body['tags'] = self.tags.as_dict() + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes.""" body = {} -
if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.tags: - body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditWarehouseRequest: """Deserializes the EditWarehouseRequest from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - id=d.get("id", None), - instance_profile_arn=d.get("instance_profile_arn", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", EditWarehouseRequestWarehouseType), - ) + return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), id=d.get('id', None), instance_profile_arn=d.get('instance_profile_arn', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', 
None), name=d.get('name', None), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', EditWarehouseRequestWarehouseType)) + + class EditWarehouseRequestWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - - CLASSIC = "CLASSIC" - PRO = "PRO" - TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" - + + CLASSIC = 'CLASSIC' + PRO = 'PRO' + TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' @dataclass class EditWarehouseResponse: @@ -2927,13 +2387,15 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditWarehouseResponse: """Deserializes the EditWarehouseResponse from a dictionary.""" return cls() + + @dataclass class Empty: """Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm right now.""" - + def as_dict(self) -> dict: """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2948,96 +2410,82 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Empty: """Deserializes the Empty from a dictionary.""" return cls() + + @dataclass class EndpointConfPair: key: Optional[str] = None - + value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the EndpointConfPair into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointConfPair into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointConfPair: """Deserializes the EndpointConfPair from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + +
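`EditWarehouseRequest` above is the request body behind the warehouses `edit` call, and its docstrings carry the validation rules (`auto_stop_mins` must be 0 or >= 10, serverless needs `warehouse_type=PRO` together with `enable_serverless_compute=True`). A usage sketch, assuming a configured `WorkspaceClient`, a hypothetical warehouse ID, and the SDK's usual waiter semantics for `warehouses.edit`:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import EditWarehouseRequestWarehouseType

w = WorkspaceClient()  # credentials resolved from the environment or a profile

w.warehouses.edit(
    id="abcdef1234567890",  # hypothetical warehouse ID
    cluster_size="Small",
    auto_stop_mins=20,  # must be == 0 or >= 10 mins
    max_num_clusters=4,  # autoscaler upper bound, must be <= 30
    enable_serverless_compute=True,
    warehouse_type=EditWarehouseRequestWarehouseType.PRO,  # required for serverless
).result()  # block until the edited warehouse reaches a running state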
@dataclass class EndpointHealth: details: Optional[str] = None """Details about errors that are causing current degraded/failed status.""" - + failure_reason: Optional[TerminationReason] = None """The reason for failure to bring up clusters for this warehouse. This is available when status is 'FAILED' and sometimes when it is DEGRADED.""" - + message: Optional[str] = None """Deprecated. split into summary and details for security""" - + status: Optional[Status] = None """Health status of the warehouse.""" - + summary: Optional[str] = None """A short summary of the health status in case of degraded/failed warehouses.""" - + def as_dict(self) -> dict: """Serializes the EndpointHealth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.details is not None: - body["details"] = self.details - if self.failure_reason: - body["failure_reason"] = self.failure_reason.as_dict() - if self.message is not None: - body["message"] = self.message - if self.status is not None: - body["status"] = self.status.value - if self.summary is not None: - body["summary"] = self.summary + if self.details is not None: body['details'] = self.details + if self.failure_reason: body['failure_reason'] = self.failure_reason.as_dict() + if self.message is not None: body['message'] = self.message + if self.status is not None: body['status'] = self.status.value + if self.summary is not None: body['summary'] = self.summary return body def as_shallow_dict(self) -> dict: """Serializes the EndpointHealth into a shallow dictionary of its immediate attributes.""" body = {} - if self.details is not None: - body["details"] = self.details - if self.failure_reason: - body["failure_reason"] = self.failure_reason - if self.message is not None: - body["message"] = self.message - if self.status is not None: - body["status"] = self.status - if self.summary is not None: - body["summary"] = self.summary + if self.details is not None: body['details'] = self.details + if self.failure_reason: body['failure_reason'] = self.failure_reason + if self.message is not None: body['message'] = self.message + if self.status is not None: body['status'] = self.status + if self.summary is not None: body['summary'] = self.summary return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointHealth: """Deserializes the EndpointHealth from a dictionary.""" - return cls( - details=d.get("details", None), - failure_reason=_from_dict(d, "failure_reason", TerminationReason), - message=d.get("message", None), - status=_enum(d, "status", Status), - summary=d.get("summary", None), - ) + return cls(details=d.get('details', None), failure_reason=_from_dict(d, 'failure_reason', TerminationReason), message=d.get('message', None), status=_enum(d, 'status', Status), summary=d.get('summary', None)) + + @dataclass @@ -3049,10 +2497,10 @@ class EndpointInfo: Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. Defaults to 120 mins""" - + channel: Optional[Channel] = None """Channel Details""" - + cluster_size: Optional[str] = None """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, @@ -3060,37 +2508,37 @@ class EndpointInfo: Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large""" - + creator_name: Optional[str] = None """warehouse creator name""" - + enable_photon: Optional[bool] = None """Configures whether the warehouse should use Photon optimized clusters. Defaults to false.""" - + enable_serverless_compute: Optional[bool] = None """Configures whether the warehouse should use serverless compute""" - + health: Optional[EndpointHealth] = None """Optional health status.
Assume the warehouse is healthy if this field is not set.""" - + id: Optional[str] = None """unique identifier for warehouse""" - + instance_profile_arn: Optional[str] = None """Deprecated. Instance profile used to pass IAM role to the cluster""" - + jdbc_url: Optional[str] = None """the jdbc connection string for this warehouse""" - + max_num_clusters: Optional[int] = None """Maximum number of clusters that the autoscaler will create to handle concurrent queries. Supported values: - Must be >= min_num_clusters - Must be <= 30. Defaults to min_clusters if unset.""" - + min_num_clusters: Optional[int] = None """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce @@ -3100,284 +2548,214 @@ class EndpointInfo: Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) Defaults to 1""" - + name: Optional[str] = None """Logical name for the cluster. Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - + num_active_sessions: Optional[int] = None """Deprecated. current number of active sessions for the warehouse""" - + num_clusters: Optional[int] = None """current number of clusters running for the service""" - + odbc_params: Optional[OdbcParams] = None """ODBC parameters for the SQL warehouse""" - + spot_instance_policy: Optional[SpotInstancePolicy] = None """Configures whether the warehouse should use spot instances.""" - + state: Optional[State] = None """State of the warehouse""" - + tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45.""" - + warehouse_type: Optional[EndpointInfoWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`.
If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - + def as_dict(self) -> dict: """Serializes the EndpointInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.health: - body["health"] = self.health.as_dict() - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.jdbc_url is not None: - body["jdbc_url"] = self.jdbc_url - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.num_active_sessions is not None: - body["num_active_sessions"] = self.num_active_sessions - if self.num_clusters is not None: - body["num_clusters"] = self.num_clusters - if self.odbc_params: - body["odbc_params"] = self.odbc_params.as_dict() - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.state is not None: - body["state"] = self.state.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel.as_dict() + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.health: body['health'] = self.health.as_dict() + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions + if self.num_clusters is not None: body['num_clusters'] = self.num_clusters + if self.odbc_params: body['odbc_params'] = self.odbc_params.as_dict() + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value + if self.state is not None: body['state'] = self.state.value + if self.tags: body['tags'] = self.tags.as_dict() + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.health: - body["health"] = self.health - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.jdbc_url is not None: - body["jdbc_url"] = self.jdbc_url - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.num_active_sessions is not None: - body["num_active_sessions"] = self.num_active_sessions - if self.num_clusters is not None: - body["num_clusters"] = self.num_clusters - if self.odbc_params: - body["odbc_params"] = self.odbc_params - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.state is not None: - body["state"] = self.state - if self.tags: - body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.health: body['health'] = self.health + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions + if self.num_clusters is not None: body['num_clusters'] = self.num_clusters + if self.odbc_params: body['odbc_params'] = self.odbc_params + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.state is not None: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointInfo: """Deserializes the EndpointInfo from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - health=_from_dict(d, "health", EndpointHealth), - id=d.get("id", None), - 
instance_profile_arn=d.get("instance_profile_arn", None), - jdbc_url=d.get("jdbc_url", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - num_active_sessions=d.get("num_active_sessions", None), - num_clusters=d.get("num_clusters", None), - odbc_params=_from_dict(d, "odbc_params", OdbcParams), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - state=_enum(d, "state", State), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", EndpointInfoWarehouseType), - ) + return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), health=_from_dict(d, 'health', EndpointHealth), id=d.get('id', None), instance_profile_arn=d.get('instance_profile_arn', None), jdbc_url=d.get('jdbc_url', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', None), name=d.get('name', None), num_active_sessions=d.get('num_active_sessions', None), num_clusters=d.get('num_clusters', None), odbc_params=_from_dict(d, 'odbc_params', OdbcParams), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), state=_enum(d, 'state', State), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', EndpointInfoWarehouseType)) + + class EndpointInfoWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - - CLASSIC = "CLASSIC" - PRO = "PRO" - TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" - + + CLASSIC = 'CLASSIC' + PRO = 'PRO' + TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' @dataclass class EndpointTagPair: key: Optional[str] = None - + value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the EndpointTagPair into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointTagPair into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTagPair: """Deserializes the EndpointTagPair from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class EndpointTags: custom_tags: Optional[List[EndpointTagPair]] = None - + def as_dict(self) -> dict: """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: - body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] return body def as_shallow_dict(self) -> dict: """Serializes the EndpointTags into a shallow dictionary of its immediate attributes.""" body = 
{} - if self.custom_tags: - body["custom_tags"] = self.custom_tags + if self.custom_tags: body['custom_tags'] = self.custom_tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTags: """Deserializes the EndpointTags from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, "custom_tags", EndpointTagPair)) + return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair)) + + @dataclass class EnumValue: enum_options: Optional[str] = None """List of valid query parameter values, newline delimited.""" - + multi_values_options: Optional[MultiValuesOptions] = None """If specified, allows multiple values to be selected for this parameter.""" - + values: Optional[List[str]] = None """List of selected query parameter values.""" - + def as_dict(self) -> dict: """Serializes the EnumValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enum_options is not None: - body["enum_options"] = self.enum_options - if self.multi_values_options: - body["multi_values_options"] = self.multi_values_options.as_dict() - if self.values: - body["values"] = [v for v in self.values] + if self.enum_options is not None: body['enum_options'] = self.enum_options + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict() + if self.values: body['values'] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the EnumValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.enum_options is not None: - body["enum_options"] = self.enum_options - if self.multi_values_options: - body["multi_values_options"] = self.multi_values_options - if self.values: - body["values"] = self.values + if self.enum_options is not None: body['enum_options'] = self.enum_options + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options + if self.values: body['values'] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnumValue: """Deserializes the EnumValue from a dictionary.""" - return cls( - enum_options=d.get("enum_options", None), - multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions), - values=d.get("values", None), - ) + return cls(enum_options=d.get('enum_options', None), multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), values=d.get('values', None)) + + @dataclass class ExecuteStatementRequest: statement: str """The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.""" - + warehouse_id: str """Warehouse upon which to execute a statement. See also [What are SQL warehouses?] [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html""" - + byte_limit: Optional[int] = None """Applies the given byte limit to the statement's result size. Byte counts are based on internal data representations and might not match the final size in the requested `format`. If the result was truncated due to the byte limit, then `truncated` in the response is set to `true`. When using `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not explicitly set.""" - + catalog: Optional[str] = None """Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.
[`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html""" - + disposition: Optional[Disposition] = None - + format: Optional[Format] = None """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. @@ -3409,7 +2787,7 @@ class ExecuteStatementRequest: [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180""" - + on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None """When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution doesn't finish within this time, `on_wait_timeout` determines whether the execution @@ -3417,7 +2795,7 @@ class ExecuteStatementRequest: asynchronously and the call returns a statement ID which can be used for polling with :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled and the call returns with a `CANCELED` state.""" - + parameters: Optional[List[StatementParameterListItem]] = None """A list of parameters to pass into a SQL statement containing parameter markers. A parameter consists of a name, a value, and optionally a type. To represent a NULL value, the `value` field @@ -3446,17 +2824,17 @@ class ExecuteStatementRequest: [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html""" - + row_limit: Optional[int] = None """Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, it also sets the `truncated` field in the response to indicate whether the result was trimmed due to the limit or not.""" - + schema: Optional[str] = None """Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html""" - + wait_timeout: Optional[str] = None """The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set to 0 or to a value between 5 and 50. @@ -3470,77 +2848,45 @@ class ExecuteStatementRequest: returns immediately with a manifest and result data (or a `FAILED` state in case of an execution error). 
If the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached.""" - + def as_dict(self) -> dict: """Serializes the ExecuteStatementRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.byte_limit is not None: - body["byte_limit"] = self.byte_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.disposition is not None: - body["disposition"] = self.disposition.value - if self.format is not None: - body["format"] = self.format.value - if self.on_wait_timeout is not None: - body["on_wait_timeout"] = self.on_wait_timeout.value - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.row_limit is not None: - body["row_limit"] = self.row_limit - if self.schema is not None: - body["schema"] = self.schema - if self.statement is not None: - body["statement"] = self.statement - if self.wait_timeout is not None: - body["wait_timeout"] = self.wait_timeout - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.byte_limit is not None: body['byte_limit'] = self.byte_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.disposition is not None: body['disposition'] = self.disposition.value + if self.format is not None: body['format'] = self.format.value + if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout.value + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.row_limit is not None: body['row_limit'] = self.row_limit + if self.schema is not None: body['schema'] = self.schema + if self.statement is not None: body['statement'] = self.statement + if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.byte_limit is not None: - body["byte_limit"] = self.byte_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.disposition is not None: - body["disposition"] = self.disposition - if self.format is not None: - body["format"] = self.format - if self.on_wait_timeout is not None: - body["on_wait_timeout"] = self.on_wait_timeout - if self.parameters: - body["parameters"] = self.parameters - if self.row_limit is not None: - body["row_limit"] = self.row_limit - if self.schema is not None: - body["schema"] = self.schema - if self.statement is not None: - body["statement"] = self.statement - if self.wait_timeout is not None: - body["wait_timeout"] = self.wait_timeout - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.byte_limit is not None: body['byte_limit'] = self.byte_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.disposition is not None: body['disposition'] = self.disposition + if self.format is not None: body['format'] = self.format + if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout + if self.parameters: body['parameters'] = self.parameters + if self.row_limit is not None: body['row_limit'] = self.row_limit + if self.schema is not None: body['schema'] = self.schema + if self.statement is not None: body['statement'] = self.statement + if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout + if self.warehouse_id is not None: 
body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExecuteStatementRequest: """Deserializes the ExecuteStatementRequest from a dictionary.""" - return cls( - byte_limit=d.get("byte_limit", None), - catalog=d.get("catalog", None), - disposition=_enum(d, "disposition", Disposition), - format=_enum(d, "format", Format), - on_wait_timeout=_enum(d, "on_wait_timeout", ExecuteStatementRequestOnWaitTimeout), - parameters=_repeated_dict(d, "parameters", StatementParameterListItem), - row_limit=d.get("row_limit", None), - schema=d.get("schema", None), - statement=d.get("statement", None), - wait_timeout=d.get("wait_timeout", None), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(byte_limit=d.get('byte_limit', None), catalog=d.get('catalog', None), disposition=_enum(d, 'disposition', Disposition), format=_enum(d, 'format', Format), on_wait_timeout=_enum(d, 'on_wait_timeout', ExecuteStatementRequestOnWaitTimeout), parameters=_repeated_dict(d, 'parameters', StatementParameterListItem), row_limit=d.get('row_limit', None), schema=d.get('schema', None), statement=d.get('statement', None), wait_timeout=d.get('wait_timeout', None), warehouse_id=d.get('warehouse_id', None)) + + class ExecuteStatementRequestOnWaitTimeout(Enum): @@ -3550,299 +2896,272 @@ class ExecuteStatementRequestOnWaitTimeout(Enum): asynchronously and the call returns a statement ID which can be used for polling with :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled and the call returns with a `CANCELED` state.""" - - CANCEL = "CANCEL" - CONTINUE = "CONTINUE" - + + CANCEL = 'CANCEL' + CONTINUE = 'CONTINUE'
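`wait_timeout` and `on_wait_timeout` together give the hybrid synchronous/asynchronous model described above: block briefly, then either keep running in the background (`CONTINUE`) or abort (`CANCEL`). A sketch of the corresponding `statement_execution.execute_statement` call, assuming a configured `WorkspaceClient` and a hypothetical warehouse ID (keyword names mirror the `ExecuteStatementRequest` fields above):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import (
    ExecuteStatementRequestOnWaitTimeout,
    StatementParameterListItem,
)

w = WorkspaceClient()

# Block up to 30s; if the statement is still running, continue asynchronously
# and poll later by statement_id instead of canceling.
resp = w.statement_execution.execute_statement(
    statement="SELECT * FROM range(:n)",
    warehouse_id="abcdef1234567890",  # hypothetical
    parameters=[StatementParameterListItem(name="n", value="100", type="INT")],
    row_limit=100,  # sets `truncated` in the response if the limit trims rows
    wait_timeout="30s",
    on_wait_timeout=ExecuteStatementRequestOnWaitTimeout.CONTINUE,
)
print(resp.statement_id, resp.status.state if resp.status else None)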
@dataclass class ExternalLink: byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" - + chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" - + expiration: Optional[str] = None """Indicates the date-time that the given external link will expire and becomes invalid, after which point a new `external_link` must be requested.""" - + external_link: Optional[str] = None - - http_headers: Optional[Dict[str, str]] = None + + http_headers: Optional[Dict[str,str]] = None """HTTP headers that must be included with a GET request to the `external_link`. Each header is provided as a key-value pair. Headers are typically used to pass a decryption key to the external service. The values of these headers should be considered sensitive and the client should not expose these values in a log.""" - + next_chunk_index: Optional[int] = None """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no more chunks. The next chunk can be fetched with a :method:statementexecution/getStatementResultChunkN request.""" - + next_chunk_internal_link: Optional[str] = None """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no more chunks. This link is an absolute `path` to be joined with your `$DATABRICKS_HOST`, and should be treated as an opaque link. This is an alternative to using `next_chunk_index`.""" - + row_count: Optional[int] = None """The number of rows within the result chunk.""" - + row_offset: Optional[int] = None """The starting row offset within the result set.""" - + def as_dict(self) -> dict: """Serializes the ExternalLink into a dictionary suitable for use as a JSON request body.""" body = {} - if self.byte_count is not None: - body["byte_count"] = self.byte_count - if self.chunk_index is not None: - body["chunk_index"] = self.chunk_index - if self.expiration is not None: - body["expiration"] = self.expiration - if self.external_link is not None: - body["external_link"] = self.external_link - if self.http_headers: - body["http_headers"] = self.http_headers - if self.next_chunk_index is not None: - body["next_chunk_index"] = self.next_chunk_index - if self.next_chunk_internal_link is not None: - body["next_chunk_internal_link"] = self.next_chunk_internal_link - if self.row_count is not None: - body["row_count"] = self.row_count - if self.row_offset is not None: - body["row_offset"] = self.row_offset + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.expiration is not None: body['expiration'] = self.expiration + if self.external_link is not None: body['external_link'] = self.external_link + if self.http_headers: body['http_headers'] = self.http_headers + if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index + if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset return body def as_shallow_dict(self) -> dict: """Serializes the ExternalLink into a shallow dictionary of its immediate attributes.""" body = {} - if self.byte_count is not None: - body["byte_count"] = self.byte_count - if self.chunk_index is not None: - body["chunk_index"] = self.chunk_index - if self.expiration is not None: - body["expiration"] = self.expiration - if self.external_link is not None: - body["external_link"] = self.external_link - if self.http_headers: - body["http_headers"] = self.http_headers - if self.next_chunk_index is not None: - body["next_chunk_index"] = self.next_chunk_index - if self.next_chunk_internal_link is not None: - body["next_chunk_internal_link"] = self.next_chunk_internal_link - if self.row_count is not None: - body["row_count"] = self.row_count - if self.row_offset is not None: - body["row_offset"] = self.row_offset + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.expiration is not None: body['expiration'] = self.expiration + if self.external_link is not None: body['external_link'] = self.external_link + if self.http_headers: body['http_headers'] = self.http_headers + if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index + if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalLink: """Deserializes the ExternalLink from a dictionary.""" - return cls( - byte_count=d.get("byte_count", None), -
chunk_index=d.get("chunk_index", None), - expiration=d.get("expiration", None), - external_link=d.get("external_link", None), - http_headers=d.get("http_headers", None), - next_chunk_index=d.get("next_chunk_index", None), - next_chunk_internal_link=d.get("next_chunk_internal_link", None), - row_count=d.get("row_count", None), - row_offset=d.get("row_offset", None), - ) + return cls(byte_count=d.get('byte_count', None), chunk_index=d.get('chunk_index', None), expiration=d.get('expiration', None), external_link=d.get('external_link', None), http_headers=d.get('http_headers', None), next_chunk_index=d.get('next_chunk_index', None), next_chunk_internal_link=d.get('next_chunk_internal_link', None), row_count=d.get('row_count', None), row_offset=d.get('row_offset', None)) + + @dataclass class ExternalQuerySource: alert_id: Optional[str] = None """The canonical identifier for this SQL alert""" - + dashboard_id: Optional[str] = None """The canonical identifier for this Lakeview dashboard""" - + genie_space_id: Optional[str] = None """The canonical identifier for this Genie space""" - + job_info: Optional[ExternalQuerySourceJobInfo] = None - + legacy_dashboard_id: Optional[str] = None """The canonical identifier for this legacy dashboard""" - + notebook_id: Optional[str] = None """The canonical identifier for this notebook""" - + sql_query_id: Optional[str] = None """The canonical identifier for this SQL query""" - + def as_dict(self) -> dict: """Serializes the ExternalQuerySource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.genie_space_id is not None: - body["genie_space_id"] = self.genie_space_id - if self.job_info: - body["job_info"] = self.job_info.as_dict() - if self.legacy_dashboard_id is not None: - body["legacy_dashboard_id"] = self.legacy_dashboard_id - if self.notebook_id is not None: - body["notebook_id"] = self.notebook_id - if self.sql_query_id is not None: - body["sql_query_id"] = self.sql_query_id + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id + if self.job_info: body['job_info'] = self.job_info.as_dict() + if self.legacy_dashboard_id is not None: body['legacy_dashboard_id'] = self.legacy_dashboard_id + if self.notebook_id is not None: body['notebook_id'] = self.notebook_id + if self.sql_query_id is not None: body['sql_query_id'] = self.sql_query_id return body def as_shallow_dict(self) -> dict: """Serializes the ExternalQuerySource into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.genie_space_id is not None: - body["genie_space_id"] = self.genie_space_id - if self.job_info: - body["job_info"] = self.job_info - if self.legacy_dashboard_id is not None: - body["legacy_dashboard_id"] = self.legacy_dashboard_id - if self.notebook_id is not None: - body["notebook_id"] = self.notebook_id - if self.sql_query_id is not None: - body["sql_query_id"] = self.sql_query_id + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.genie_space_id is not None: body['genie_space_id'] 
= self.genie_space_id + if self.job_info: body['job_info'] = self.job_info + if self.legacy_dashboard_id is not None: body['legacy_dashboard_id'] = self.legacy_dashboard_id + if self.notebook_id is not None: body['notebook_id'] = self.notebook_id + if self.sql_query_id is not None: body['sql_query_id'] = self.sql_query_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalQuerySource: """Deserializes the ExternalQuerySource from a dictionary.""" - return cls( - alert_id=d.get("alert_id", None), - dashboard_id=d.get("dashboard_id", None), - genie_space_id=d.get("genie_space_id", None), - job_info=_from_dict(d, "job_info", ExternalQuerySourceJobInfo), - legacy_dashboard_id=d.get("legacy_dashboard_id", None), - notebook_id=d.get("notebook_id", None), - sql_query_id=d.get("sql_query_id", None), - ) + return cls(alert_id=d.get('alert_id', None), dashboard_id=d.get('dashboard_id', None), genie_space_id=d.get('genie_space_id', None), job_info=_from_dict(d, 'job_info', ExternalQuerySourceJobInfo), legacy_dashboard_id=d.get('legacy_dashboard_id', None), notebook_id=d.get('notebook_id', None), sql_query_id=d.get('sql_query_id', None)) + + @dataclass class ExternalQuerySourceJobInfo: job_id: Optional[str] = None """The canonical identifier for this job.""" - + job_run_id: Optional[str] = None """The canonical identifier of the run. This ID is unique across all runs of all jobs.""" - + job_task_run_id: Optional[str] = None """The canonical identifier of the task run.""" - + def as_dict(self) -> dict: """Serializes the ExternalQuerySourceJobInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_run_id is not None: - body["job_run_id"] = self.job_run_id - if self.job_task_run_id is not None: - body["job_task_run_id"] = self.job_task_run_id + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.job_task_run_id is not None: body['job_task_run_id'] = self.job_task_run_id return body def as_shallow_dict(self) -> dict: """Serializes the ExternalQuerySourceJobInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_run_id is not None: - body["job_run_id"] = self.job_run_id - if self.job_task_run_id is not None: - body["job_task_run_id"] = self.job_task_run_id + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.job_task_run_id is not None: body['job_task_run_id'] = self.job_task_run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalQuerySourceJobInfo: """Deserializes the ExternalQuerySourceJobInfo from a dictionary.""" - return cls( - job_id=d.get("job_id", None), - job_run_id=d.get("job_run_id", None), - job_task_run_id=d.get("job_task_run_id", None), - ) + return cls(job_id=d.get('job_id', None), job_run_id=d.get('job_run_id', None), job_task_run_id=d.get('job_task_run_id', None)) + + class Format(Enum): + + + ARROW_STREAM = 'ARROW_STREAM' + CSV = 'CSV' + JSON_ARRAY = 'JSON_ARRAY' + + - ARROW_STREAM = "ARROW_STREAM" - CSV = "CSV" - JSON_ARRAY = "JSON_ARRAY"
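`Disposition.EXTERNAL_LINKS` pairs with the `ExternalLink` type above: instead of inline rows, the response carries presigned URLs, each scoped to one result chunk. A fetch sketch, assuming `requests` is installed and that the statement response exposes the generated `result.external_links` list; the warehouse ID is hypothetical:

import requests

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import Disposition, Format

w = WorkspaceClient()

resp = w.statement_execution.execute_statement(
    statement="SELECT * FROM samples.nyctaxi.trips LIMIT 100000",
    warehouse_id="abcdef1234567890",  # hypothetical
    disposition=Disposition.EXTERNAL_LINKS,
    format=Format.ARROW_STREAM,
)

# result can be None if the statement is still running; polling is omitted here.
if resp.result and resp.result.external_links:
    for link in resp.result.external_links:
        # Links expire (see `expiration`); send http_headers verbatim and keep
        # them out of logs, per the field docstring above.
        chunk = requests.get(link.external_link, headers=link.http_headers or {})
        print(link.chunk_index, link.row_count, len(chunk.content))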
@dataclass class GetResponse: access_control_list: Optional[List[AccessControl]] = None - + object_id: Optional[str] = None """An object's type and UUID, separated by a forward slash (/) character.""" - + object_type: Optional[ObjectType] = None """A singular noun object type.""" - + def as_dict(self) -> dict: """Serializes the GetResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type.value + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type.value return body def as_shallow_dict(self) -> dict: """Serializes the GetResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetResponse: """Deserializes the GetResponse from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControl), - object_id=d.get("object_id", None), - object_type=_enum(d, "object_type", ObjectType), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl), object_id=d.get('object_id', None), object_type=_enum(d, 'object_type', ObjectType)) + + @dataclass class GetWarehousePermissionLevelsResponse: permission_levels: Optional[List[WarehousePermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetWarehousePermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetWarehousePermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetWarehousePermissionLevelsResponse: """Deserializes the GetWarehousePermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", WarehousePermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', WarehousePermissionsDescription)) + + @dataclass @@ -3854,10 +3173,10 @@ class GetWarehouseResponse: Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. Defaults to 120 mins""" - + channel: Optional[Channel] = None """Channel Details""" - + cluster_size: Optional[str] = None """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it.
If you want to increase the number of concurrent queries, @@ -3865,37 +3184,37 @@ class GetWarehouseResponse: Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large""" - + creator_name: Optional[str] = None """warehouse creator name""" - + enable_photon: Optional[bool] = None """Configures whether the warehouse should use Photon optimized clusters. Defaults to false.""" - + enable_serverless_compute: Optional[bool] = None """Configures whether the warehouse should use serverless compute""" - + health: Optional[EndpointHealth] = None """Optional health status. Assume the warehouse is healthy if this field is not set.""" - + id: Optional[str] = None """unique identifier for warehouse""" - + instance_profile_arn: Optional[str] = None """Deprecated. Instance profile used to pass IAM role to the cluster""" - + jdbc_url: Optional[str] = None """the jdbc connection string for this warehouse""" - + max_num_clusters: Optional[int] = None """Maximum number of clusters that the autoscaler will create to handle concurrent queries. Supported values: - Must be >= min_num_clusters - Must be <= 30. Defaults to min_clusters if unset.""" - + min_num_clusters: Optional[int] = None """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce @@ -3905,599 +3224,408 @@ class GetWarehouseResponse: Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) Defaults to 1""" - + name: Optional[str] = None """Logical name for the cluster. Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - + num_active_sessions: Optional[int] = None """Deprecated. current number of active sessions for the warehouse""" - + num_clusters: Optional[int] = None """current number of clusters running for the service""" - + odbc_params: Optional[OdbcParams] = None """ODBC parameters for the SQL warehouse""" - + spot_instance_policy: Optional[SpotInstancePolicy] = None """Configures whether the warehouse should use spot instances.""" - + state: Optional[State] = None """State of the warehouse""" - + tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45.""" - + warehouse_type: Optional[GetWarehouseResponseWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`.
If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - + def as_dict(self) -> dict: """Serializes the GetWarehouseResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.health: - body["health"] = self.health.as_dict() - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.jdbc_url is not None: - body["jdbc_url"] = self.jdbc_url - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.num_active_sessions is not None: - body["num_active_sessions"] = self.num_active_sessions - if self.num_clusters is not None: - body["num_clusters"] = self.num_clusters - if self.odbc_params: - body["odbc_params"] = self.odbc_params.as_dict() - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.state is not None: - body["state"] = self.state.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel.as_dict() + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.health: body['health'] = self.health.as_dict() + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions + if self.num_clusters is not None: body['num_clusters'] = self.num_clusters + if self.odbc_params: body['odbc_params'] = self.odbc_params.as_dict() + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value + if self.state is not None: body['state'] = self.state.value + if self.tags: body['tags'] = self.tags.as_dict() + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the GetWarehouseResponse into a shallow dictionary of its immediate attributes.""" 
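A minimal sketch of what the two serializers above return; the class and field names come from this module, the field values are made up:

from databricks.sdk.service.sql import GetWarehouseResponse, GetWarehouseResponseWarehouseType

resp = GetWarehouseResponse(
    name="analytics-wh",
    auto_stop_mins=120,
    cluster_size="X-Small",
    warehouse_type=GetWarehouseResponseWarehouseType.PRO,
)
# as_dict() is JSON-ready: enum fields are flattened to their wire strings.
assert resp.as_dict()["warehouse_type"] == "PRO"
# as_shallow_dict() keeps the live attribute values, so the enum member survives.
assert resp.as_shallow_dict()["warehouse_type"] is GetWarehouseResponseWarehouseType.PRO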
body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.health: - body["health"] = self.health - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.jdbc_url is not None: - body["jdbc_url"] = self.jdbc_url - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.num_active_sessions is not None: - body["num_active_sessions"] = self.num_active_sessions - if self.num_clusters is not None: - body["num_clusters"] = self.num_clusters - if self.odbc_params: - body["odbc_params"] = self.odbc_params - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.state is not None: - body["state"] = self.state - if self.tags: - body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute + if self.health: body['health'] = self.health + if self.id is not None: body['id'] = self.id + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions + if self.num_clusters is not None: body['num_clusters'] = self.num_clusters + if self.odbc_params: body['odbc_params'] = self.odbc_params + if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy + if self.state is not None: body['state'] = self.state + if self.tags: body['tags'] = self.tags + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetWarehouseResponse: """Deserializes the GetWarehouseResponse from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - health=_from_dict(d, "health", EndpointHealth), - 
id=d.get("id", None), - instance_profile_arn=d.get("instance_profile_arn", None), - jdbc_url=d.get("jdbc_url", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - num_active_sessions=d.get("num_active_sessions", None), - num_clusters=d.get("num_clusters", None), - odbc_params=_from_dict(d, "odbc_params", OdbcParams), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - state=_enum(d, "state", State), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", GetWarehouseResponseWarehouseType), - ) + return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), health=_from_dict(d, 'health', EndpointHealth), id=d.get('id', None), instance_profile_arn=d.get('instance_profile_arn', None), jdbc_url=d.get('jdbc_url', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', None), name=d.get('name', None), num_active_sessions=d.get('num_active_sessions', None), num_clusters=d.get('num_clusters', None), odbc_params=_from_dict(d, 'odbc_params', OdbcParams), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), state=_enum(d, 'state', State), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', GetWarehouseResponseWarehouseType)) + + class GetWarehouseResponseWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - - CLASSIC = "CLASSIC" - PRO = "PRO" - TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" - + + CLASSIC = 'CLASSIC' + PRO = 'PRO' + TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' @dataclass class GetWorkspaceWarehouseConfigResponse: channel: Optional[Channel] = None """Optional: Channel selection details""" - + config_param: Optional[RepeatedEndpointConfPairs] = None """Deprecated: Use sql_configuration_parameters""" - + data_access_config: Optional[List[EndpointConfPair]] = None """Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K""" - + enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to be converted to another type. 
Used by frontend to save specific type availability in the warehouse create and edit form UI.""" - + global_param: Optional[RepeatedEndpointConfPairs] = None """Deprecated: Use sql_configuration_parameters""" - + google_service_account: Optional[str] = None """GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage""" - + instance_profile_arn: Optional[str] = None """AWS Only: Instance profile used to pass IAM role to the cluster""" - + security_policy: Optional[GetWorkspaceWarehouseConfigResponseSecurityPolicy] = None """Security policy for warehouses""" - + sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None """SQL configuration parameters""" - + def as_dict(self) -> dict: """Serializes the GetWorkspaceWarehouseConfigResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.channel: - body["channel"] = self.channel.as_dict() - if self.config_param: - body["config_param"] = self.config_param.as_dict() - if self.data_access_config: - body["data_access_config"] = [v.as_dict() for v in self.data_access_config] - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types] - if self.global_param: - body["global_param"] = self.global_param.as_dict() - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy.value - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict() + if self.channel: body['channel'] = self.channel.as_dict() + if self.config_param: body['config_param'] = self.config_param.as_dict() + if self.data_access_config: body['data_access_config'] = [v.as_dict() for v in self.data_access_config] + if self.enabled_warehouse_types: body['enabled_warehouse_types'] = [v.as_dict() for v in self.enabled_warehouse_types] + if self.global_param: body['global_param'] = self.global_param.as_dict() + if self.google_service_account is not None: body['google_service_account'] = self.google_service_account + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.security_policy is not None: body['security_policy'] = self.security_policy.value + if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.channel: - body["channel"] = self.channel - if self.config_param: - body["config_param"] = self.config_param - if self.data_access_config: - body["data_access_config"] = self.data_access_config - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = self.enabled_warehouse_types - if self.global_param: - body["global_param"] = self.global_param - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = 
self.sql_configuration_parameters + if self.channel: body['channel'] = self.channel + if self.config_param: body['config_param'] = self.config_param + if self.data_access_config: body['data_access_config'] = self.data_access_config + if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types + if self.global_param: body['global_param'] = self.global_param + if self.google_service_account is not None: body['google_service_account'] = self.google_service_account + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.security_policy is not None: body['security_policy'] = self.security_policy + if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceWarehouseConfigResponse: """Deserializes the GetWorkspaceWarehouseConfigResponse from a dictionary.""" - return cls( - channel=_from_dict(d, "channel", Channel), - config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs), - data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair), - enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair), - global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs), - google_service_account=d.get("google_service_account", None), - instance_profile_arn=d.get("instance_profile_arn", None), - security_policy=_enum(d, "security_policy", GetWorkspaceWarehouseConfigResponseSecurityPolicy), - sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", RepeatedEndpointConfPairs), - ) + return cls(channel=_from_dict(d, 'channel', Channel), config_param=_from_dict(d, 'config_param', RepeatedEndpointConfPairs), data_access_config=_repeated_dict(d, 'data_access_config', EndpointConfPair), enabled_warehouse_types=_repeated_dict(d, 'enabled_warehouse_types', WarehouseTypePair), global_param=_from_dict(d, 'global_param', RepeatedEndpointConfPairs), google_service_account=d.get('google_service_account', None), instance_profile_arn=d.get('instance_profile_arn', None), security_policy=_enum(d, 'security_policy', GetWorkspaceWarehouseConfigResponseSecurityPolicy), sql_configuration_parameters=_from_dict(d, 'sql_configuration_parameters', RepeatedEndpointConfPairs)) + + class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum): """Security policy for warehouses""" - - DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL" - NONE = "NONE" - PASSTHROUGH = "PASSTHROUGH" - + + DATA_ACCESS_CONTROL = 'DATA_ACCESS_CONTROL' + NONE = 'NONE' + PASSTHROUGH = 'PASSTHROUGH' @dataclass class LegacyAlert: created_at: Optional[str] = None """Timestamp when the alert was created.""" - + id: Optional[str] = None """Alert ID.""" - + last_triggered_at: Optional[str] = None """Timestamp when the alert was last triggered.""" - + name: Optional[str] = None """Name of the alert.""" - + options: Optional[AlertOptions] = None """Alert configuration options.""" - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + query: Optional[AlertQuery] = None - + rearm: Optional[int] = None """Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again.""" - + state: Optional[LegacyAlertState] = None """State of the alert. 
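One consequence of the truthiness guards on repeated fields, sketched with the config response above (the values are illustrative): an explicitly empty list serializes the same as an unset one.

from databricks.sdk.service.sql import (
    GetWorkspaceWarehouseConfigResponse,
    GetWorkspaceWarehouseConfigResponseSecurityPolicy,
)

cfg = GetWorkspaceWarehouseConfigResponse(data_access_config=[])
assert "data_access_config" not in cfg.as_dict()  # [] is falsy, so the key is dropped
cfg = GetWorkspaceWarehouseConfigResponse(
    security_policy=GetWorkspaceWarehouseConfigResponseSecurityPolicy.DATA_ACCESS_CONTROL
)
assert cfg.as_dict() == {"security_policy": "DATA_ACCESS_CONTROL"}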
Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" - + updated_at: Optional[str] = None """Timestamp when the alert was last updated.""" - + user: Optional[User] = None - + def as_dict(self) -> dict: """Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.id is not None: - body["id"] = self.id - if self.last_triggered_at is not None: - body["last_triggered_at"] = self.last_triggered_at - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.parent is not None: - body["parent"] = self.parent - if self.query: - body["query"] = self.query.as_dict() - if self.rearm is not None: - body["rearm"] = self.rearm - if self.state is not None: - body["state"] = self.state.value - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user: - body["user"] = self.user.as_dict() + if self.created_at is not None: body['created_at'] = self.created_at + if self.id is not None: body['id'] = self.id + if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.query: body['query'] = self.query.as_dict() + if self.rearm is not None: body['rearm'] = self.rearm + if self.state is not None: body['state'] = self.state.value + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the LegacyAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.id is not None: - body["id"] = self.id - if self.last_triggered_at is not None: - body["last_triggered_at"] = self.last_triggered_at - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query: - body["query"] = self.query - if self.rearm is not None: - body["rearm"] = self.rearm - if self.state is not None: - body["state"] = self.state - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user: - body["user"] = self.user + if self.created_at is not None: body['created_at'] = self.created_at + if self.id is not None: body['id'] = self.id + if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.query: body['query'] = self.query + if self.rearm is not None: body['rearm'] = self.rearm + if self.state is not None: body['state'] = self.state + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LegacyAlert: """Deserializes the LegacyAlert from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - id=d.get("id", None), - last_triggered_at=d.get("last_triggered_at", None), - name=d.get("name", None), - 
options=_from_dict(d, "options", AlertOptions), - parent=d.get("parent", None), - query=_from_dict(d, "query", AlertQuery), - rearm=d.get("rearm", None), - state=_enum(d, "state", LegacyAlertState), - updated_at=d.get("updated_at", None), - user=_from_dict(d, "user", User), - ) + return cls(created_at=d.get('created_at', None), id=d.get('id', None), last_triggered_at=d.get('last_triggered_at', None), name=d.get('name', None), options=_from_dict(d, 'options', AlertOptions), parent=d.get('parent', None), query=_from_dict(d, 'query', AlertQuery), rearm=d.get('rearm', None), state=_enum(d, 'state', LegacyAlertState), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User)) + + class LegacyAlertState(Enum): """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" - - OK = "ok" - TRIGGERED = "triggered" - UNKNOWN = "unknown" - + + OK = 'ok' + TRIGGERED = 'triggered' + UNKNOWN = 'unknown' @dataclass class LegacyQuery: can_edit: Optional[bool] = None """Describes whether the authenticated user is allowed to edit the definition of this query.""" - + created_at: Optional[str] = None """The timestamp when this query was created.""" - + data_source_id: Optional[str] = None """Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + id: Optional[str] = None """Query ID.""" - + is_archived: Optional[bool] = None """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear in search results. If this boolean is `true`, the `options` property for this query includes a `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" - + is_draft: Optional[bool] = None """Whether the query is a draft. Draft queries only appear in list views for their owners. Visualizations from draft queries cannot appear on dashboards.""" - + is_favorite: Optional[bool] = None """Whether this query object appears in the current user's favorites list. This flag determines whether the star icon for favorites is selected.""" - + is_safe: Optional[bool] = None """Text parameter types are not safe from SQL injection for all types of data source. Set this Boolean parameter to `true` if a query either does not use any text type parameters or uses a data source type where text type parameters are handled safely.""" - + last_modified_by: Optional[User] = None - + last_modified_by_id: Optional[int] = None """The ID of the user who last saved changes to this query.""" - + latest_query_data_id: Optional[str] = None """If there is a cached result for this query and user, this field includes the query result ID. 
If this query uses parameters, this field is always null.""" - + name: Optional[str] = None """The title of this query that appears in list views, widget headings, and on the query page.""" - + options: Optional[QueryOptions] = None - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + permission_tier: Optional[PermissionLevel] = None """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query * `CAN_MANAGE`: Can manage the query""" - + query: Optional[str] = None """The text of the query to be run.""" - + query_hash: Optional[str] = None """A SHA-256 hash of the query text along with the authenticated user ID.""" - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + updated_at: Optional[str] = None """The timestamp at which this query was last updated.""" - + user: Optional[User] = None - + user_id: Optional[int] = None """The ID of the user who owns the query.""" - + visualizations: Optional[List[LegacyVisualization]] = None - + def as_dict(self) -> dict: """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_edit is not None: - body["can_edit"] = self.can_edit - if self.created_at is not None: - body["created_at"] = self.created_at - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.is_archived is not None: - body["is_archived"] = self.is_archived - if self.is_draft is not None: - body["is_draft"] = self.is_draft - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.is_safe is not None: - body["is_safe"] = self.is_safe - if self.last_modified_by: - body["last_modified_by"] = self.last_modified_by.as_dict() - if self.last_modified_by_id is not None: - body["last_modified_by_id"] = self.last_modified_by_id - if self.latest_query_data_id is not None: - body["latest_query_data_id"] = self.latest_query_data_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.parent is not None: - body["parent"] = self.parent - if self.permission_tier is not None: - body["permission_tier"] = self.permission_tier.value - if self.query is not None: - body["query"] = self.query - if self.query_hash is not None: - body["query_hash"] = self.query_hash - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user: - body["user"] = self.user.as_dict() - if self.user_id is not None: - body["user_id"] = self.user_id - if self.visualizations: - body["visualizations"] = [v.as_dict() for v in self.visualizations] + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is 
not None: body['is_draft'] = self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict() + if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id + if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value + if self.query is not None: body['query'] = self.query + if self.query_hash is not None: body['query_hash'] = self.query_hash + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user.as_dict() + if self.user_id is not None: body['user_id'] = self.user_id + if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations] return body def as_shallow_dict(self) -> dict: """Serializes the LegacyQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.can_edit is not None: - body["can_edit"] = self.can_edit - if self.created_at is not None: - body["created_at"] = self.created_at - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.is_archived is not None: - body["is_archived"] = self.is_archived - if self.is_draft is not None: - body["is_draft"] = self.is_draft - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.is_safe is not None: - body["is_safe"] = self.is_safe - if self.last_modified_by: - body["last_modified_by"] = self.last_modified_by - if self.last_modified_by_id is not None: - body["last_modified_by_id"] = self.last_modified_by_id - if self.latest_query_data_id is not None: - body["latest_query_data_id"] = self.latest_query_data_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.permission_tier is not None: - body["permission_tier"] = self.permission_tier - if self.query is not None: - body["query"] = self.query - if self.query_hash is not None: - body["query_hash"] = self.query_hash - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.user: - body["user"] = self.user - if self.user_id is not None: - body["user_id"] = self.user_id - if self.visualizations: - body["visualizations"] = self.visualizations + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = 
self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.last_modified_by: body['last_modified_by'] = self.last_modified_by + if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id + if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier + if self.query is not None: body['query'] = self.query + if self.query_hash is not None: body['query_hash'] = self.query_hash + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user + if self.user_id is not None: body['user_id'] = self.user_id + if self.visualizations: body['visualizations'] = self.visualizations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LegacyQuery: """Deserializes the LegacyQuery from a dictionary.""" - return cls( - can_edit=d.get("can_edit", None), - created_at=d.get("created_at", None), - data_source_id=d.get("data_source_id", None), - description=d.get("description", None), - id=d.get("id", None), - is_archived=d.get("is_archived", None), - is_draft=d.get("is_draft", None), - is_favorite=d.get("is_favorite", None), - is_safe=d.get("is_safe", None), - last_modified_by=_from_dict(d, "last_modified_by", User), - last_modified_by_id=d.get("last_modified_by_id", None), - latest_query_data_id=d.get("latest_query_data_id", None), - name=d.get("name", None), - options=_from_dict(d, "options", QueryOptions), - parent=d.get("parent", None), - permission_tier=_enum(d, "permission_tier", PermissionLevel), - query=d.get("query", None), - query_hash=d.get("query_hash", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - updated_at=d.get("updated_at", None), - user=_from_dict(d, "user", User), - user_id=d.get("user_id", None), - visualizations=_repeated_dict(d, "visualizations", LegacyVisualization), - ) + return cls(can_edit=d.get('can_edit', None), created_at=d.get('created_at', None), data_source_id=d.get('data_source_id', None), description=d.get('description', None), id=d.get('id', None), is_archived=d.get('is_archived', None), is_draft=d.get('is_draft', None), is_favorite=d.get('is_favorite', None), is_safe=d.get('is_safe', None), last_modified_by=_from_dict(d, 'last_modified_by', User), last_modified_by_id=d.get('last_modified_by_id', None), latest_query_data_id=d.get('latest_query_data_id', None), name=d.get('name', None), options=_from_dict(d, 'options', QueryOptions), parent=d.get('parent', None), permission_tier=_enum(d, 'permission_tier', PermissionLevel), query=d.get('query', None), query_hash=d.get('query_hash', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User), user_id=d.get('user_id', None), visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization)) + + @dataclass @@ -4506,1065 +3634,846 @@ class LegacyVisualization: visualization by copying description objects received _from the API_ and then using them to create a new one with a POST request to the same endpoint. 
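The list comprehension in LegacyQuery.as_dict() copies repeated scalar fields, while as_shallow_dict() aliases them; a minimal sketch with made-up values:

from databricks.sdk.service.sql import LegacyQuery

q = LegacyQuery(name="daily rollup", tags=["finance", "daily"])
q.as_dict()["tags"].append("x")            # mutates a copy; the dataclass is untouched
assert q.tags == ["finance", "daily"]
q.as_shallow_dict()["tags"].append("y")    # mutates the aliased list in place
assert q.tags == ["finance", "daily", "y"]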
Databricks does not recommend constructing ad-hoc visualizations entirely in JSON.""" - + created_at: Optional[str] = None - + description: Optional[str] = None """A short description of this visualization. This is not displayed in the UI.""" - + id: Optional[str] = None """The UUID for this visualization.""" - + name: Optional[str] = None """The name of the visualization that appears on dashboards and the query screen.""" - + options: Optional[Any] = None """The options object varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization settings in JSON.""" - + query: Optional[LegacyQuery] = None - + type: Optional[str] = None """The type of visualization: chart, table, pivot table, and so on.""" - + updated_at: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query: - body["query"] = self.query.as_dict() - if self.type is not None: - body["type"] = self.type - if self.updated_at is not None: - body["updated_at"] = self.updated_at + if self.created_at is not None: body['created_at'] = self.created_at + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query: body['query'] = self.query.as_dict() + if self.type is not None: body['type'] = self.type + if self.updated_at is not None: body['updated_at'] = self.updated_at return body def as_shallow_dict(self) -> dict: """Serializes the LegacyVisualization into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.description is not None: - body["description"] = self.description - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query: - body["query"] = self.query - if self.type is not None: - body["type"] = self.type - if self.updated_at is not None: - body["updated_at"] = self.updated_at + if self.created_at is not None: body['created_at'] = self.created_at + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query: body['query'] = self.query + if self.type is not None: body['type'] = self.type + if self.updated_at is not None: body['updated_at'] = self.updated_at return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LegacyVisualization: """Deserializes the LegacyVisualization from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - description=d.get("description", None), - id=d.get("id", None), - name=d.get("name", None), - options=d.get("options", None), - query=_from_dict(d, "query", LegacyQuery), - type=d.get("type", None), - updated_at=d.get("updated_at", None), - ) + return cls(created_at=d.get('created_at', None), description=d.get('description', None), 
id=d.get('id', None), name=d.get('name', None), options=d.get('options', None), query=_from_dict(d, 'query', LegacyQuery), type=d.get('type', None), updated_at=d.get('updated_at', None)) + + class LifecycleState(Enum): + + + ACTIVE = 'ACTIVE' + TRASHED = 'TRASHED' + - ACTIVE = "ACTIVE" - TRASHED = "TRASHED" @dataclass class ListAlertsResponse: next_page_token: Optional[str] = None - + results: Optional[List[ListAlertsResponseAlert]] = None - + def as_dict(self) -> dict: """Serializes the ListAlertsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = [v.as_dict() for v in self.results] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ListAlertsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = self.results + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAlertsResponse: """Deserializes the ListAlertsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - results=_repeated_dict(d, "results", ListAlertsResponseAlert), - ) + return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', ListAlertsResponseAlert)) + + @dataclass class ListAlertsResponseAlert: condition: Optional[AlertCondition] = None """Trigger conditions of the alert.""" - + create_time: Optional[str] = None """The timestamp indicating when the alert was created.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + id: Optional[str] = None """UUID identifying the alert.""" - + lifecycle_state: Optional[LifecycleState] = None """The workspace state of the alert. Used for tracking trashed status.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when alert returns back to normal.""" - + owner_user_name: Optional[str] = None """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - + state: Optional[AlertState] = None """Current state of the alert's trigger status. 
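ListAlertsResponse above carries the usual token-based paging pair; a hypothetical drain loop, where fetch_page stands in for whatever transport returns one page as a dict (fetch_page is an assumption, not part of this SDK):

from typing import Any, Callable, Dict, Iterator, Optional

from databricks.sdk.service.sql import ListAlertsResponse, ListAlertsResponseAlert

def iter_alerts(fetch_page: Callable[[Optional[str]], Dict[str, Any]]) -> Iterator[ListAlertsResponseAlert]:
    token: Optional[str] = None
    while True:
        page = ListAlertsResponse.from_dict(fetch_page(token))
        yield from page.results or []      # results may be None on an empty page
        token = page.next_page_token
        if not token:                      # an absent or empty token ends the listing
            return

The same loop shape applies to the other List*Response types in this file.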
This field is set to UNKNOWN if the alert has not yet been evaluated or ran into an error during the last evaluation.""" - + trigger_time: Optional[str] = None """Timestamp when the alert was last triggered, if the alert has been triggered before.""" - + update_time: Optional[str] = None """The timestamp indicating when the alert was updated.""" - + def as_dict(self) -> dict: """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition: - body["condition"] = self.condition.as_dict() - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state.value - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.query_id is not None: - body["query_id"] = self.query_id - if self.seconds_to_retrigger is not None: - body["seconds_to_retrigger"] = self.seconds_to_retrigger - if self.state is not None: - body["state"] = self.state.value - if self.trigger_time is not None: - body["trigger_time"] = self.trigger_time - if self.update_time is not None: - body["update_time"] = self.update_time + if self.condition: body['condition'] = self.condition.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state.value + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the ListAlertsResponseAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition: - body["condition"] = self.condition - if self.create_time is not None: - body["create_time"] = self.create_time - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.query_id is not None: - body["query_id"] = self.query_id - if 
self.seconds_to_retrigger is not None: - body["seconds_to_retrigger"] = self.seconds_to_retrigger - if self.state is not None: - body["state"] = self.state - if self.trigger_time is not None: - body["trigger_time"] = self.trigger_time - if self.update_time is not None: - body["update_time"] = self.update_time + if self.condition: body['condition'] = self.condition + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAlertsResponseAlert: """Deserializes the ListAlertsResponseAlert from a dictionary.""" - return cls( - condition=_from_dict(d, "condition", AlertCondition), - create_time=d.get("create_time", None), - custom_body=d.get("custom_body", None), - custom_subject=d.get("custom_subject", None), - display_name=d.get("display_name", None), - id=d.get("id", None), - lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), - notify_on_ok=d.get("notify_on_ok", None), - owner_user_name=d.get("owner_user_name", None), - query_id=d.get("query_id", None), - seconds_to_retrigger=d.get("seconds_to_retrigger", None), - state=_enum(d, "state", AlertState), - trigger_time=d.get("trigger_time", None), - update_time=d.get("update_time", None), - ) + return cls(condition=_from_dict(d, 'condition', AlertCondition), create_time=d.get('create_time', None), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), display_name=d.get('display_name', None), id=d.get('id', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), notify_on_ok=d.get('notify_on_ok', None), owner_user_name=d.get('owner_user_name', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None), state=_enum(d, 'state', AlertState), trigger_time=d.get('trigger_time', None), update_time=d.get('update_time', None)) + + + + + @dataclass class ListAlertsV2Response: next_page_token: Optional[str] = None - + results: Optional[List[AlertV2]] = None - + def as_dict(self) -> dict: """Serializes the ListAlertsV2Response into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = [v.as_dict() for v in self.results] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ListAlertsV2Response into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not 
None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = self.results + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2Response: """Deserializes the ListAlertsV2Response from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", AlertV2)) + return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', AlertV2)) + + + + + class ListOrder(Enum): + + + CREATED_AT = 'created_at' + NAME = 'name' + + + + - CREATED_AT = "created_at" - NAME = "name" @dataclass class ListQueriesResponse: has_next_page: Optional[bool] = None """Whether there is another page of results.""" - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results.""" - + res: Optional[List[QueryInfo]] = None - + def as_dict(self) -> dict: """Serializes the ListQueriesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.has_next_page is not None: - body["has_next_page"] = self.has_next_page - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.res: - body["res"] = [v.as_dict() for v in self.res] + if self.has_next_page is not None: body['has_next_page'] = self.has_next_page + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.res: body['res'] = [v.as_dict() for v in self.res] return body def as_shallow_dict(self) -> dict: """Serializes the ListQueriesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.has_next_page is not None: - body["has_next_page"] = self.has_next_page - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.res: - body["res"] = self.res + if self.has_next_page is not None: body['has_next_page'] = self.has_next_page + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.res: body['res'] = self.res return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListQueriesResponse: """Deserializes the ListQueriesResponse from a dictionary.""" - return cls( - has_next_page=d.get("has_next_page", None), - next_page_token=d.get("next_page_token", None), - res=_repeated_dict(d, "res", QueryInfo), - ) + return cls(has_next_page=d.get('has_next_page', None), next_page_token=d.get('next_page_token', None), res=_repeated_dict(d, 'res', QueryInfo)) + + + + + @dataclass class ListQueryObjectsResponse: next_page_token: Optional[str] = None - + results: Optional[List[ListQueryObjectsResponseQuery]] = None - + def as_dict(self) -> dict: """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = [v.as_dict() for v in self.results] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ListQueryObjectsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.results: - body["results"] = 
self.results + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListQueryObjectsResponse: """Deserializes the ListQueryObjectsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - results=_repeated_dict(d, "results", ListQueryObjectsResponseQuery), - ) + return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery)) + + @dataclass class ListQueryObjectsResponseQuery: apply_auto_limit: Optional[bool] = None """Whether to apply a 1000 row limit to the query result.""" - + catalog: Optional[str] = None """Name of the catalog where this query will be executed.""" - + create_time: Optional[str] = None """Timestamp when this query was created.""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + display_name: Optional[str] = None """Display name of the query that appears in list views, widget headings, and on the query page.""" - + id: Optional[str] = None """UUID identifying the query.""" - + last_modifier_user_name: Optional[str] = None """Username of the user who last saved changes to this query.""" - + lifecycle_state: Optional[LifecycleState] = None """Indicates whether the query is trashed.""" - + owner_user_name: Optional[str] = None """Username of the user that owns the query.""" - + parameters: Optional[List[QueryParameter]] = None """List of query parameter definitions.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_mode: Optional[RunAsMode] = None """Sets the "Run as" role for the object.""" - + schema: Optional[str] = None """Name of the schema where this query will be executed.""" - + tags: Optional[List[str]] = None - + update_time: Optional[str] = None """Timestamp when this query was last updated.""" - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the query.""" - + def as_dict(self) -> dict: """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_auto_limit is not None: - body["apply_auto_limit"] = self.apply_auto_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.create_time is not None: - body["create_time"] = self.create_time - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.last_modifier_user_name is not None: - body["last_modifier_user_name"] = self.last_modifier_user_name - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state.value - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_mode is not None: - body["run_as_mode"] = self.run_as_mode.value - if self.schema is not None: - body["schema"] = self.schema - if self.tags: - body["tags"] = [v for v in self.tags] - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.apply_auto_limit is not None: 
body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponseQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_auto_limit is not None:
-            body["apply_auto_limit"] = self.apply_auto_limit
-        if self.catalog is not None:
-            body["catalog"] = self.catalog
-        if self.create_time is not None:
-            body["create_time"] = self.create_time
-        if self.description is not None:
-            body["description"] = self.description
-        if self.display_name is not None:
-            body["display_name"] = self.display_name
-        if self.id is not None:
-            body["id"] = self.id
-        if self.last_modifier_user_name is not None:
-            body["last_modifier_user_name"] = self.last_modifier_user_name
-        if self.lifecycle_state is not None:
-            body["lifecycle_state"] = self.lifecycle_state
-        if self.owner_user_name is not None:
-            body["owner_user_name"] = self.owner_user_name
-        if self.parameters:
-            body["parameters"] = self.parameters
-        if self.query_text is not None:
-            body["query_text"] = self.query_text
-        if self.run_as_mode is not None:
-            body["run_as_mode"] = self.run_as_mode
-        if self.schema is not None:
-            body["schema"] = self.schema
-        if self.tags:
-            body["tags"] = self.tags
-        if self.update_time is not None:
-            body["update_time"] = self.update_time
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListQueryObjectsResponseQuery:
         """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
-        return cls(
-            apply_auto_limit=d.get("apply_auto_limit", None),
-            catalog=d.get("catalog", None),
-            create_time=d.get("create_time", None),
-            description=d.get("description", None),
-            display_name=d.get("display_name", None),
-            id=d.get("id", None),
-            last_modifier_user_name=d.get("last_modifier_user_name", None),
-            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
-            owner_user_name=d.get("owner_user_name", None),
-            parameters=_repeated_dict(d, "parameters", QueryParameter),
-            query_text=d.get("query_text", None),
-            run_as_mode=_enum(d, "run_as_mode", RunAsMode),
-            schema=d.get("schema", None),
-            tags=d.get("tags", None),
-            update_time=d.get("update_time", None),
-            warehouse_id=d.get("warehouse_id", None),
-        )
+        return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), create_time=d.get('create_time', None), description=d.get('description', None), display_name=d.get('display_name', None), id=d.get('id', None), last_modifier_user_name=d.get('last_modifier_user_name', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), owner_user_name=d.get('owner_user_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None))
+
+
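# --- Illustrative sketch, not part of this patch: the regenerated as_dict /
# --- from_dict pair above is designed to round-trip the JSON wire format.
# --- Class and field names come from the hunk above; the values are made up.
from databricks.sdk.service.sql import ListQueryObjectsResponseQuery

q = ListQueryObjectsResponseQuery(display_name='daily revenue', query_text='SELECT 1')
restored = ListQueryObjectsResponseQuery.from_dict(q.as_dict())
assert restored.display_name == q.display_name  # lossless for plain fields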
 @dataclass
 class ListResponse:
     count: Optional[int] = None
     """The total number of dashboards."""
-
+
     page: Optional[int] = None
     """The current page being displayed."""
-
+
     page_size: Optional[int] = None
     """The number of dashboards per page."""
-
+
     results: Optional[List[Dashboard]] = None
     """List of dashboards returned."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.count is not None:
-            body["count"] = self.count
-        if self.page is not None:
-            body["page"] = self.page
-        if self.page_size is not None:
-            body["page_size"] = self.page_size
-        if self.results:
-            body["results"] = [v.as_dict() for v in self.results]
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.count is not None:
-            body["count"] = self.count
-        if self.page is not None:
-            body["page"] = self.page
-        if self.page_size is not None:
-            body["page_size"] = self.page_size
-        if self.results:
-            body["results"] = self.results
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = self.results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
-        return cls(
-            count=d.get("count", None),
-            page=d.get("page", None),
-            page_size=d.get("page_size", None),
-            results=_repeated_dict(d, "results", Dashboard),
-        )
+        return cls(count=d.get('count', None), page=d.get('page', None), page_size=d.get('page_size', None), results=_repeated_dict(d, 'results', Dashboard))
+
+
+
+

 @dataclass
 class ListVisualizationsForQueryResponse:
     next_page_token: Optional[str] = None
-
+
     results: Optional[List[Visualization]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.results:
-            body["results"] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListVisualizationsForQueryResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        if self.results:
-            body["results"] = self.results
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListVisualizationsForQueryResponse:
         """Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
-        return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", Visualization))
+        return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', Visualization))
+
+
+
+

 @dataclass
 class ListWarehousesResponse:
     warehouses: Optional[List[EndpointInfo]] = None
     """A list of warehouses and their configurations."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.warehouses:
-            body["warehouses"] = [v.as_dict() for v in self.warehouses]
+        if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.warehouses:
-            body["warehouses"] = self.warehouses
+        if self.warehouses: body['warehouses'] = self.warehouses
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListWarehousesResponse:
         """Deserializes the ListWarehousesResponse from a dictionary."""
-        return cls(warehouses=_repeated_dict(d, "warehouses", EndpointInfo))
+        return cls(warehouses=_repeated_dict(d, 'warehouses', EndpointInfo))
+
+

 @dataclass
 class MultiValuesOptions:
     prefix: Optional[str] = None
     """Character that prefixes each selected parameter value."""
-
+
     separator: Optional[str] = None
     """Character that separates each selected parameter value.

     Defaults to a comma."""
-
+
     suffix: Optional[str] = None
     """Character that suffixes each selected parameter value."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MultiValuesOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.prefix is not None:
-            body["prefix"] = self.prefix
-        if self.separator is not None:
-            body["separator"] = self.separator
-        if self.suffix is not None:
-            body["suffix"] = self.suffix
+        if self.prefix is not None: body['prefix'] = self.prefix
+        if self.separator is not None: body['separator'] = self.separator
+        if self.suffix is not None: body['suffix'] = self.suffix
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MultiValuesOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.prefix is not None:
-            body["prefix"] = self.prefix
-        if self.separator is not None:
-            body["separator"] = self.separator
-        if self.suffix is not None:
-            body["suffix"] = self.suffix
+        if self.prefix is not None: body['prefix'] = self.prefix
+        if self.separator is not None: body['separator'] = self.separator
+        if self.suffix is not None: body['suffix'] = self.suffix
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MultiValuesOptions:
         """Deserializes the MultiValuesOptions from a dictionary."""
-        return cls(prefix=d.get("prefix", None), separator=d.get("separator", None), suffix=d.get("suffix", None))
+        return cls(prefix=d.get('prefix', None), separator=d.get('separator', None), suffix=d.get('suffix', None))
+
+
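# --- Illustrative sketch, not part of this patch: per the docstrings above,
# --- prefix/separator/suffix describe how multi-select dropdown values are
# --- spliced into SQL text. The rendering below is assumed semantics, shown
# --- only to make the three fields concrete.
from databricks.sdk.service.sql import MultiValuesOptions

opts = MultiValuesOptions(prefix="'", separator=",", suffix="'")
values = ['us-east', 'us-west']
rendered = opts.separator.join(f"{opts.prefix}{v}{opts.suffix}" for v in values)
assert rendered == "'us-east','us-west'"  # e.g. inside an IN (...) clause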
 @dataclass
 class NumericValue:
     value: Optional[float] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the NumericValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None:
-            body["value"] = self.value
+        if self.value is not None: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NumericValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None:
-            body["value"] = self.value
+        if self.value is not None: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NumericValue:
         """Deserializes the NumericValue from a dictionary."""
-        return cls(value=d.get("value", None))
+        return cls(value=d.get('value', None))
+
+

 class ObjectType(Enum):
     """A singular noun object type."""
-
-    ALERT = "alert"
-    DASHBOARD = "dashboard"
-    DATA_SOURCE = "data_source"
-    QUERY = "query"
-
+
+    ALERT = 'alert'
+    DASHBOARD = 'dashboard'
+    DATA_SOURCE = 'data_source'
+    QUERY = 'query'

 class ObjectTypePlural(Enum):
     """Always a plural of the object type."""
-
-    ALERTS = "alerts"
-    DASHBOARDS = "dashboards"
-    DATA_SOURCES = "data_sources"
-    QUERIES = "queries"
-
+
+    ALERTS = 'alerts'
+    DASHBOARDS = 'dashboards'
+    DATA_SOURCES = 'data_sources'
+    QUERIES = 'queries'

 @dataclass
 class OdbcParams:
     hostname: Optional[str] = None
-
+
     path: Optional[str] = None
-
+
     port: Optional[int] = None
-
+
     protocol: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the OdbcParams into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.hostname is not None:
-            body["hostname"] = self.hostname
-        if self.path is not None:
-            body["path"] = self.path
-        if self.port is not None:
-            body["port"] = self.port
-        if self.protocol is not None:
-            body["protocol"] = self.protocol
+        if self.hostname is not None: body['hostname'] = self.hostname
+        if self.path is not None: body['path'] = self.path
+        if self.port is not None: body['port'] = self.port
+        if self.protocol is not None: body['protocol'] = self.protocol
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OdbcParams into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.hostname is not None:
-            body["hostname"] = self.hostname
-        if self.path is not None:
-            body["path"] = self.path
-        if self.port is not None:
-            body["port"] = self.port
-        if self.protocol is not None:
-            body["protocol"] = self.protocol
+        if self.hostname is not None: body['hostname'] = self.hostname
+        if self.path is not None: body['path'] = self.path
+        if self.port is not None: body['port'] = self.port
+        if self.protocol is not None: body['protocol'] = self.protocol
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OdbcParams:
         """Deserializes the OdbcParams from a dictionary."""
-        return cls(
-            hostname=d.get("hostname", None),
-            path=d.get("path", None),
-            port=d.get("port", None),
-            protocol=d.get("protocol", None),
-        )
+        return cls(hostname=d.get('hostname', None), path=d.get('path', None), port=d.get('port', None), protocol=d.get('protocol', None))
+
+

 class OwnableObjectType(Enum):
     """The singular form of the type of object which can be owned."""
-
-    ALERT = "alert"
-    DASHBOARD = "dashboard"
-    QUERY = "query"
-
+
+    ALERT = 'alert'
+    DASHBOARD = 'dashboard'
+    QUERY = 'query'

 @dataclass
 class Parameter:
     enum_options: Optional[str] = None
     """List of valid parameter values, newline delimited. Only applies for dropdown list
     parameters."""
-
+
     multi_values_options: Optional[MultiValuesOptions] = None
     """If specified, allows multiple values to be selected for this parameter. Only applies to
     dropdown list and query-based dropdown list parameters."""
-
+
     name: Optional[str] = None
     """The literal parameter marker that appears between double curly braces in the query text."""
-
+
     query_id: Optional[str] = None
     """The UUID of the query that provides the parameter values. Only applies for query-based
     dropdown list parameters."""
-
+
     title: Optional[str] = None
     """The text displayed in a parameter picking widget."""
-
+
     type: Optional[ParameterType] = None
     """Parameters can have several different types."""
-
+
     value: Optional[Any] = None
     """The default value for this parameter."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Parameter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enum_options is not None:
-            body["enumOptions"] = self.enum_options
-        if self.multi_values_options:
-            body["multiValuesOptions"] = self.multi_values_options.as_dict()
-        if self.name is not None:
-            body["name"] = self.name
-        if self.query_id is not None:
-            body["queryId"] = self.query_id
-        if self.title is not None:
-            body["title"] = self.title
-        if self.type is not None:
-            body["type"] = self.type.value
-        if self.value:
-            body["value"] = self.value
+        if self.enum_options is not None: body['enumOptions'] = self.enum_options
+        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.query_id is not None: body['queryId'] = self.query_id
+        if self.title is not None: body['title'] = self.title
+        if self.type is not None: body['type'] = self.type.value
+        if self.value: body['value'] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Parameter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enum_options is not None:
-            body["enumOptions"] = self.enum_options
-        if self.multi_values_options:
-            body["multiValuesOptions"] = self.multi_values_options
-        if self.name is not None:
-            body["name"] = self.name
-        if self.query_id is not None:
-            body["queryId"] = self.query_id
-        if self.title is not None:
-            body["title"] = self.title
-        if self.type is not None:
-            body["type"] = self.type
-        if self.value:
-            body["value"] = self.value
+        if self.enum_options is not None: body['enumOptions'] = self.enum_options
+        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options
+        if self.name is not None: body['name'] = self.name
+        if self.query_id is not None: body['queryId'] = self.query_id
+        if self.title is not None: body['title'] = self.title
+        if self.type is not None: body['type'] = self.type
+        if self.value: body['value'] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Parameter:
         """Deserializes the Parameter from a dictionary."""
-        return cls(
-            enum_options=d.get("enumOptions", None),
-            multi_values_options=_from_dict(d, "multiValuesOptions", MultiValuesOptions),
-            name=d.get("name", None),
-            query_id=d.get("queryId", None),
-            title=d.get("title", None),
-            type=_enum(d, "type", ParameterType),
-            value=d.get("value", None),
-        )
+        return cls(enum_options=d.get('enumOptions', None), multi_values_options=_from_dict(d, 'multiValuesOptions', MultiValuesOptions), name=d.get('name', None), query_id=d.get('queryId', None), title=d.get('title', None), type=_enum(d, 'type', ParameterType), value=d.get('value', None))
+
+

 class ParameterType(Enum):
     """Parameters can have several different types."""
-
-    DATETIME = "datetime"
-    ENUM = "enum"
-    NUMBER = "number"
-    QUERY = "query"
-    TEXT = "text"
-
+
+    DATETIME = 'datetime'
+    ENUM = 'enum'
+    NUMBER = 'number'
+    QUERY = 'query'
+    TEXT = 'text'
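# --- Illustrative sketch, not part of this patch: a legacy dropdown-list
# --- parameter as it crosses the wire. Note the camelCase JSON keys
# --- (enumOptions, multiValuesOptions) versus the snake_case attributes;
# --- the field values below are made up.
from databricks.sdk.service.sql import Parameter, ParameterType

p = Parameter(name='region', title='Region', type=ParameterType.ENUM,
              enum_options='us-east\nus-west', value='us-east')
assert p.as_dict()['enumOptions'] == 'us-east\nus-west'
assert Parameter.from_dict(p.as_dict()).type is ParameterType.ENUM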
CAN_EDIT = "CAN_EDIT" - CAN_MANAGE = "CAN_MANAGE" - CAN_RUN = "CAN_RUN" - CAN_VIEW = "CAN_VIEW" - + + CAN_EDIT = 'CAN_EDIT' + CAN_MANAGE = 'CAN_MANAGE' + CAN_RUN = 'CAN_RUN' + CAN_VIEW = 'CAN_VIEW' class PlansState(Enum): """Possible Reasons for which we have not saved plans in the database""" - - EMPTY = "EMPTY" - EXISTS = "EXISTS" - IGNORED_LARGE_PLANS_SIZE = "IGNORED_LARGE_PLANS_SIZE" - IGNORED_SMALL_DURATION = "IGNORED_SMALL_DURATION" - IGNORED_SPARK_PLAN_TYPE = "IGNORED_SPARK_PLAN_TYPE" - UNKNOWN = "UNKNOWN" - + + EMPTY = 'EMPTY' + EXISTS = 'EXISTS' + IGNORED_LARGE_PLANS_SIZE = 'IGNORED_LARGE_PLANS_SIZE' + IGNORED_SMALL_DURATION = 'IGNORED_SMALL_DURATION' + IGNORED_SPARK_PLAN_TYPE = 'IGNORED_SPARK_PLAN_TYPE' + UNKNOWN = 'UNKNOWN' @dataclass class Query: apply_auto_limit: Optional[bool] = None """Whether to apply a 1000 row limit to the query result.""" - + catalog: Optional[str] = None """Name of the catalog where this query will be executed.""" - + create_time: Optional[str] = None """Timestamp when this query was created.""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + display_name: Optional[str] = None """Display name of the query that appears in list views, widget headings, and on the query page.""" - + id: Optional[str] = None """UUID identifying the query.""" - + last_modifier_user_name: Optional[str] = None """Username of the user who last saved changes to this query.""" - + lifecycle_state: Optional[LifecycleState] = None """Indicates whether the query is trashed.""" - + owner_user_name: Optional[str] = None """Username of the user that owns the query.""" - + parameters: Optional[List[QueryParameter]] = None """List of query parameter definitions.""" - + parent_path: Optional[str] = None """Workspace path of the workspace folder containing the object.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_mode: Optional[RunAsMode] = None """Sets the "Run as" role for the object.""" - + schema: Optional[str] = None """Name of the schema where this query will be executed.""" - + tags: Optional[List[str]] = None - + update_time: Optional[str] = None """Timestamp when this query was last updated.""" - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the query.""" - + def as_dict(self) -> dict: """Serializes the Query into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_auto_limit is not None: - body["apply_auto_limit"] = self.apply_auto_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.create_time is not None: - body["create_time"] = self.create_time - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.last_modifier_user_name is not None: - body["last_modifier_user_name"] = self.last_modifier_user_name - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state.value - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_mode is not None: - body["run_as_mode"] = self.run_as_mode.value - if self.schema 
is not None: - body["schema"] = self.schema - if self.tags: - body["tags"] = [v for v in self.tags] - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the Query into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_auto_limit is not None: - body["apply_auto_limit"] = self.apply_auto_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.create_time is not None: - body["create_time"] = self.create_time - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.last_modifier_user_name is not None: - body["last_modifier_user_name"] = self.last_modifier_user_name - if self.lifecycle_state is not None: - body["lifecycle_state"] = self.lifecycle_state - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parameters: - body["parameters"] = self.parameters - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_mode is not None: - body["run_as_mode"] = self.run_as_mode - if self.schema is not None: - body["schema"] = self.schema - if self.tags: - body["tags"] = self.tags - if self.update_time is not None: - body["update_time"] = self.update_time - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = 
self.lifecycle_state + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = self.parameters + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = self.tags + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Query: """Deserializes the Query from a dictionary.""" - return cls( - apply_auto_limit=d.get("apply_auto_limit", None), - catalog=d.get("catalog", None), - create_time=d.get("create_time", None), - description=d.get("description", None), - display_name=d.get("display_name", None), - id=d.get("id", None), - last_modifier_user_name=d.get("last_modifier_user_name", None), - lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), - owner_user_name=d.get("owner_user_name", None), - parameters=_repeated_dict(d, "parameters", QueryParameter), - parent_path=d.get("parent_path", None), - query_text=d.get("query_text", None), - run_as_mode=_enum(d, "run_as_mode", RunAsMode), - schema=d.get("schema", None), - tags=d.get("tags", None), - update_time=d.get("update_time", None), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), create_time=d.get('create_time', None), description=d.get('description', None), display_name=d.get('display_name', None), id=d.get('id', None), last_modifier_user_name=d.get('last_modifier_user_name', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), owner_user_name=d.get('owner_user_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), parent_path=d.get('parent_path', None), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None)) + + @dataclass class QueryBackedValue: multi_values_options: Optional[MultiValuesOptions] = None """If specified, allows multiple values to be selected for this parameter.""" - + query_id: Optional[str] = None """UUID of the query that provides the parameter values.""" - + values: Optional[List[str]] = None """List of selected query parameter values.""" - + def as_dict(self) -> dict: """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.multi_values_options: - body["multi_values_options"] = self.multi_values_options.as_dict() - if self.query_id is not None: - body["query_id"] = self.query_id - if self.values: - body["values"] = [v for v in self.values] + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict() + if self.query_id is not None: body['query_id'] = self.query_id + if self.values: body['values'] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.multi_values_options: - body["multi_values_options"] = self.multi_values_options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.values: - 
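# --- Illustrative sketch, not part of this patch: as_dict() recursively
# --- serializes nested values (enums flatten to .value, lists of dataclasses
# --- go through v.as_dict()), while as_shallow_dict() keeps the live Python
# --- objects. Directly derivable from the Query methods above.
from databricks.sdk.service.sql import LifecycleState, Query

q = Query(display_name='orders', lifecycle_state=LifecycleState.ACTIVE)
assert q.as_dict()['lifecycle_state'] == LifecycleState.ACTIVE.value  # wire string
assert q.as_shallow_dict()['lifecycle_state'] is LifecycleState.ACTIVE  # enum kept as-is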
body["values"] = self.values + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options + if self.query_id is not None: body['query_id'] = self.query_id + if self.values: body['values'] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryBackedValue: """Deserializes the QueryBackedValue from a dictionary.""" - return cls( - multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions), - query_id=d.get("query_id", None), - values=d.get("values", None), - ) + return cls(multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), query_id=d.get('query_id', None), values=d.get('values', None)) + + @dataclass @@ -5574,664 +4483,475 @@ class QueryEditContent: warehouse ID. [Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + name: Optional[str] = None """The title of this query that appears in list views, widget headings, and on the query page.""" - + options: Optional[Any] = None """Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It can be overridden at runtime.""" - + query: Optional[str] = None """The text of the query to be run.""" - + query_id: Optional[str] = None - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the QueryEditContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query is not None: - body["query"] = self.query - if self.query_id is not None: - body["query_id"] = self.query_id - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query is not None: body['query'] = self.query + if self.query_id is not None: body['query_id'] = self.query_id + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query is not None: - body["query"] = self.query - if self.query_id is not None: - body["query_id"] = self.query_id - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if 
self.tags: - body["tags"] = self.tags + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query is not None: body['query'] = self.query + if self.query_id is not None: body['query_id'] = self.query_id + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryEditContent: """Deserializes the QueryEditContent from a dictionary.""" - return cls( - data_source_id=d.get("data_source_id", None), - description=d.get("description", None), - name=d.get("name", None), - options=d.get("options", None), - query=d.get("query", None), - query_id=d.get("query_id", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) + return cls(data_source_id=d.get('data_source_id', None), description=d.get('description', None), name=d.get('name', None), options=d.get('options', None), query=d.get('query', None), query_id=d.get('query_id', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None)) + + @dataclass class QueryFilter: query_start_time_range: Optional[TimeRange] = None """A range filter for query submitted time. The time range must be <= 30 days.""" - + statement_ids: Optional[List[str]] = None """A list of statement IDs.""" - + statuses: Optional[List[QueryStatus]] = None - + user_ids: Optional[List[int]] = None """A list of user IDs who ran the queries.""" - + warehouse_ids: Optional[List[str]] = None """A list of warehouse IDs.""" - + def as_dict(self) -> dict: """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.query_start_time_range: - body["query_start_time_range"] = self.query_start_time_range.as_dict() - if self.statement_ids: - body["statement_ids"] = [v for v in self.statement_ids] - if self.statuses: - body["statuses"] = [v.value for v in self.statuses] - if self.user_ids: - body["user_ids"] = [v for v in self.user_ids] - if self.warehouse_ids: - body["warehouse_ids"] = [v for v in self.warehouse_ids] + if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict() + if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] + if self.statuses: body['statuses'] = [v.value for v in self.statuses] + if self.user_ids: body['user_ids'] = [v for v in self.user_ids] + if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids] return body def as_shallow_dict(self) -> dict: """Serializes the QueryFilter into a shallow dictionary of its immediate attributes.""" body = {} - if self.query_start_time_range: - body["query_start_time_range"] = self.query_start_time_range - if self.statement_ids: - body["statement_ids"] = self.statement_ids - if self.statuses: - body["statuses"] = self.statuses - if self.user_ids: - body["user_ids"] = self.user_ids - if self.warehouse_ids: - body["warehouse_ids"] = self.warehouse_ids + if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range + if self.statement_ids: body['statement_ids'] = self.statement_ids + if self.statuses: body['statuses'] = self.statuses + if self.user_ids: body['user_ids'] = self.user_ids + if self.warehouse_ids: body['warehouse_ids'] = self.warehouse_ids return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> QueryFilter: """Deserializes the QueryFilter from a dictionary.""" - return cls( - query_start_time_range=_from_dict(d, "query_start_time_range", TimeRange), - statement_ids=d.get("statement_ids", None), - statuses=_repeated_enum(d, "statuses", QueryStatus), - user_ids=d.get("user_ids", None), - warehouse_ids=d.get("warehouse_ids", None), - ) + return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), statement_ids=d.get('statement_ids', None), statuses=_repeated_enum(d, 'statuses', QueryStatus), user_ids=d.get('user_ids', None), warehouse_ids=d.get('warehouse_ids', None)) + + @dataclass class QueryInfo: channel_used: Optional[ChannelInfo] = None """SQL Warehouse channel information at the time of query execution""" - + client_application: Optional[str] = None """Client application that ran the statement. For example: Databricks SQL Editor, Tableau, and Power BI. This field is derived from information provided by client applications. While values are expected to remain static over time, this cannot be guaranteed.""" - + duration: Optional[int] = None """Total execution time of the statement ( excluding result fetch time ).""" - + endpoint_id: Optional[str] = None """Alias for `warehouse_id`.""" - + error_message: Optional[str] = None """Message describing why the query could not complete.""" - + executed_as_user_id: Optional[int] = None """The ID of the user whose credentials were used to run the query.""" - + executed_as_user_name: Optional[str] = None """The email address or username of the user whose credentials were used to run the query.""" - + execution_end_time_ms: Optional[int] = None """The time execution of the query ended.""" - + is_final: Optional[bool] = None """Whether more updates for the query are expected.""" - + lookup_key: Optional[str] = None """A key that can be used to look up query details.""" - + metrics: Optional[QueryMetrics] = None """Metrics about query execution.""" - + plans_state: Optional[PlansState] = None """Whether plans exist for the execution, or the reason why they are missing""" - + query_end_time_ms: Optional[int] = None """The time the query ended.""" - + query_id: Optional[str] = None """The query ID.""" - + query_source: Optional[ExternalQuerySource] = None """A struct that contains key-value pairs representing Databricks entities that were involved in the execution of this statement, such as jobs, notebooks, or dashboards. This field only records Databricks entities.""" - + query_start_time_ms: Optional[int] = None """The time the query started.""" - + query_text: Optional[str] = None """The text of the query.""" - + rows_produced: Optional[int] = None """The number of results returned by the query.""" - + spark_ui_url: Optional[str] = None """URL to the Spark UI query plan.""" - + statement_type: Optional[QueryStatementType] = None """Type of statement for this query""" - + status: Optional[QueryStatus] = None """Query status with one the following values: - `QUEUED`: Query has been received and queued. - `RUNNING`: Query has started. - `CANCELED`: Query has been cancelled by the user. - `FAILED`: Query has failed. 
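# --- Illustrative sketch, not part of this patch: a QueryFilter as it would
# --- be built for the query-history list API. The epoch-millisecond values
# --- and warehouse ID are made up.
from databricks.sdk.service.sql import QueryFilter, QueryStatus, TimeRange

f = QueryFilter(
    query_start_time_range=TimeRange(start_time_ms=1717200000000, end_time_ms=1717286400000),
    statuses=[QueryStatus.FINISHED, QueryStatus.FAILED],
    warehouse_ids=['abc123'])
# Enum members flatten to their wire values in the request body.
assert f.as_dict()['statuses'] == ['FINISHED', 'FAILED']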
 @dataclass
 class QueryInfo:
     channel_used: Optional[ChannelInfo] = None
     """SQL Warehouse channel information at the time of query execution"""
-
+
     client_application: Optional[str] = None
     """Client application that ran the statement. For example: Databricks SQL Editor, Tableau, and
     Power BI. This field is derived from information provided by client applications. While values
     are expected to remain static over time, this cannot be guaranteed."""
-
+
     duration: Optional[int] = None
     """Total execution time of the statement (excluding result fetch time)."""
-
+
     endpoint_id: Optional[str] = None
     """Alias for `warehouse_id`."""
-
+
     error_message: Optional[str] = None
     """Message describing why the query could not complete."""
-
+
     executed_as_user_id: Optional[int] = None
     """The ID of the user whose credentials were used to run the query."""
-
+
     executed_as_user_name: Optional[str] = None
     """The email address or username of the user whose credentials were used to run the query."""
-
+
     execution_end_time_ms: Optional[int] = None
     """The time execution of the query ended."""
-
+
     is_final: Optional[bool] = None
     """Whether more updates for the query are expected."""
-
+
     lookup_key: Optional[str] = None
     """A key that can be used to look up query details."""
-
+
     metrics: Optional[QueryMetrics] = None
     """Metrics about query execution."""
-
+
     plans_state: Optional[PlansState] = None
     """Whether plans exist for the execution, or the reason why they are missing"""
-
+
     query_end_time_ms: Optional[int] = None
     """The time the query ended."""
-
+
     query_id: Optional[str] = None
     """The query ID."""
-
+
     query_source: Optional[ExternalQuerySource] = None
     """A struct that contains key-value pairs representing Databricks entities that were involved in
     the execution of this statement, such as jobs, notebooks, or dashboards. This field only
     records Databricks entities."""
-
+
     query_start_time_ms: Optional[int] = None
     """The time the query started."""
-
+
     query_text: Optional[str] = None
     """The text of the query."""
-
+
     rows_produced: Optional[int] = None
     """The number of results returned by the query."""
-
+
     spark_ui_url: Optional[str] = None
     """URL to the Spark UI query plan."""
-
+
     statement_type: Optional[QueryStatementType] = None
     """Type of statement for this query"""
-
+
     status: Optional[QueryStatus] = None
     """Query status with one of the following values: - `QUEUED`: Query has been received and queued. -
     `RUNNING`: Query has started. - `CANCELED`: Query has been cancelled by the user. - `FAILED`:
     Query has failed. - `FINISHED`: Query has completed."""
-
+
     user_id: Optional[int] = None
     """The ID of the user who ran the query."""
-
+
     user_name: Optional[str] = None
     """The email address or username of the user who ran the query."""
-
+
     warehouse_id: Optional[str] = None
     """Warehouse ID."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.channel_used:
-            body["channel_used"] = self.channel_used.as_dict()
-        if self.client_application is not None:
-            body["client_application"] = self.client_application
-        if self.duration is not None:
-            body["duration"] = self.duration
-        if self.endpoint_id is not None:
-            body["endpoint_id"] = self.endpoint_id
-        if self.error_message is not None:
-            body["error_message"] = self.error_message
-        if self.executed_as_user_id is not None:
-            body["executed_as_user_id"] = self.executed_as_user_id
-        if self.executed_as_user_name is not None:
-            body["executed_as_user_name"] = self.executed_as_user_name
-        if self.execution_end_time_ms is not None:
-            body["execution_end_time_ms"] = self.execution_end_time_ms
-        if self.is_final is not None:
-            body["is_final"] = self.is_final
-        if self.lookup_key is not None:
-            body["lookup_key"] = self.lookup_key
-        if self.metrics:
-            body["metrics"] = self.metrics.as_dict()
-        if self.plans_state is not None:
-            body["plans_state"] = self.plans_state.value
-        if self.query_end_time_ms is not None:
-            body["query_end_time_ms"] = self.query_end_time_ms
-        if self.query_id is not None:
-            body["query_id"] = self.query_id
-        if self.query_source:
-            body["query_source"] = self.query_source.as_dict()
-        if self.query_start_time_ms is not None:
-            body["query_start_time_ms"] = self.query_start_time_ms
-        if self.query_text is not None:
-            body["query_text"] = self.query_text
-        if self.rows_produced is not None:
-            body["rows_produced"] = self.rows_produced
-        if self.spark_ui_url is not None:
-            body["spark_ui_url"] = self.spark_ui_url
-        if self.statement_type is not None:
-            body["statement_type"] = self.statement_type.value
-        if self.status is not None:
-            body["status"] = self.status.value
-        if self.user_id is not None:
-            body["user_id"] = self.user_id
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.channel_used: body['channel_used'] = self.channel_used.as_dict()
+        if self.client_application is not None: body['client_application'] = self.client_application
+        if self.duration is not None: body['duration'] = self.duration
+        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id
+        if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name
+        if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms
+        if self.is_final is not None: body['is_final'] = self.is_final
+        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        if self.metrics: body['metrics'] = self.metrics.as_dict()
+        if self.plans_state is not None: body['plans_state'] = self.plans_state.value
+        if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.query_source: body['query_source'] = self.query_source.as_dict()
+        if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
+        if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url
+        if self.statement_type is not None: body['statement_type'] = self.statement_type.value
+        if self.status is not None: body['status'] = self.status.value
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueryInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.channel_used:
-            body["channel_used"] = self.channel_used
-        if self.client_application is not None:
-            body["client_application"] = self.client_application
-        if self.duration is not None:
-            body["duration"] = self.duration
-        if self.endpoint_id is not None:
-            body["endpoint_id"] = self.endpoint_id
-        if self.error_message is not None:
-            body["error_message"] = self.error_message
-        if self.executed_as_user_id is not None:
-            body["executed_as_user_id"] = self.executed_as_user_id
-        if self.executed_as_user_name is not None:
-            body["executed_as_user_name"] = self.executed_as_user_name
-        if self.execution_end_time_ms is not None:
-            body["execution_end_time_ms"] = self.execution_end_time_ms
-        if self.is_final is not None:
-            body["is_final"] = self.is_final
-        if self.lookup_key is not None:
-            body["lookup_key"] = self.lookup_key
-        if self.metrics:
-            body["metrics"] = self.metrics
-        if self.plans_state is not None:
-            body["plans_state"] = self.plans_state
-        if self.query_end_time_ms is not None:
-            body["query_end_time_ms"] = self.query_end_time_ms
-        if self.query_id is not None:
-            body["query_id"] = self.query_id
-        if self.query_source:
-            body["query_source"] = self.query_source
-        if self.query_start_time_ms is not None:
-            body["query_start_time_ms"] = self.query_start_time_ms
-        if self.query_text is not None:
-            body["query_text"] = self.query_text
-        if self.rows_produced is not None:
-            body["rows_produced"] = self.rows_produced
-        if self.spark_ui_url is not None:
-            body["spark_ui_url"] = self.spark_ui_url
-        if self.statement_type is not None:
-            body["statement_type"] = self.statement_type
-        if self.status is not None:
-            body["status"] = self.status
-        if self.user_id is not None:
-            body["user_id"] = self.user_id
-        if self.user_name is not None:
-            body["user_name"] = self.user_name
-        if self.warehouse_id is not None:
-            body["warehouse_id"] = self.warehouse_id
+        if self.channel_used: body['channel_used'] = self.channel_used
+        if self.client_application is not None: body['client_application'] = self.client_application
+        if self.duration is not None: body['duration'] = self.duration
+        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id
+        if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name
+        if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms
+        if self.is_final is not None: body['is_final'] = self.is_final
+        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        if self.metrics: body['metrics'] = self.metrics
+        if self.plans_state is not None: body['plans_state'] = self.plans_state
+        if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.query_source: body['query_source'] = self.query_source
+        if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
+        if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url
+        if self.statement_type is not None: body['statement_type'] = self.statement_type
+        if self.status is not None: body['status'] = self.status
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueryInfo:
         """Deserializes the QueryInfo from a dictionary."""
-        return cls(
-            channel_used=_from_dict(d, "channel_used", ChannelInfo),
-            client_application=d.get("client_application", None),
-            duration=d.get("duration", None),
-            endpoint_id=d.get("endpoint_id", None),
-            error_message=d.get("error_message", None),
-            executed_as_user_id=d.get("executed_as_user_id", None),
-            executed_as_user_name=d.get("executed_as_user_name", None),
-            execution_end_time_ms=d.get("execution_end_time_ms", None),
-            is_final=d.get("is_final", None),
-            lookup_key=d.get("lookup_key", None),
-            metrics=_from_dict(d, "metrics", QueryMetrics),
-            plans_state=_enum(d, "plans_state", PlansState),
-            query_end_time_ms=d.get("query_end_time_ms", None),
-            query_id=d.get("query_id", None),
-            query_source=_from_dict(d, "query_source", ExternalQuerySource),
-            query_start_time_ms=d.get("query_start_time_ms", None),
-            query_text=d.get("query_text", None),
-            rows_produced=d.get("rows_produced", None),
-            spark_ui_url=d.get("spark_ui_url", None),
-            statement_type=_enum(d, "statement_type", QueryStatementType),
-            status=_enum(d, "status", QueryStatus),
-            user_id=d.get("user_id", None),
-            user_name=d.get("user_name", None),
-            warehouse_id=d.get("warehouse_id", None),
-        )
+        return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo), client_application=d.get('client_application', None), duration=d.get('duration', None), endpoint_id=d.get('endpoint_id', None), error_message=d.get('error_message', None), executed_as_user_id=d.get('executed_as_user_id', None), executed_as_user_name=d.get('executed_as_user_name', None), execution_end_time_ms=d.get('execution_end_time_ms', None), is_final=d.get('is_final', None), lookup_key=d.get('lookup_key', None), metrics=_from_dict(d, 'metrics', QueryMetrics), plans_state=_enum(d, 'plans_state', PlansState), query_end_time_ms=d.get('query_end_time_ms', None), query_id=d.get('query_id', None), query_source=_from_dict(d, 'query_source', ExternalQuerySource), query_start_time_ms=d.get('query_start_time_ms', None), query_text=d.get('query_text', None), rows_produced=d.get('rows_produced', None), spark_ui_url=d.get('spark_ui_url', None), statement_type=_enum(d, 'statement_type', QueryStatementType), status=_enum(d, 'status', QueryStatus), user_id=d.get('user_id', None), user_name=d.get('user_name', None), warehouse_id=d.get('warehouse_id', None))
+
+
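# --- Illustrative sketch, not part of this patch: rebuilding a QueryInfo
# --- from a trimmed, made-up query-history payload; nested keys are routed
# --- through _from_dict/_enum by the generated from_dict above.
from databricks.sdk.service.sql import QueryInfo, QueryStatus

info = QueryInfo.from_dict({
    'query_id': '1234-abcd',
    'status': 'FINISHED',
    'metrics': {'total_time_ms': 5300, 'rows_produced_count': 42},
})
assert info.status is QueryStatus.FINISHED
assert info.metrics.total_time_ms == 5300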
 @dataclass
 class QueryList:
     count: Optional[int] = None
     """The total number of queries."""
-
+
     page: Optional[int] = None
     """The page number that is currently displayed."""
-
+
     page_size: Optional[int] = None
     """The number of queries per page."""
-
+
     results: Optional[List[LegacyQuery]] = None
     """List of queries returned."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QueryList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.count is not None:
-            body["count"] = self.count
-        if self.page is not None:
-            body["page"] = self.page
-        if self.page_size is not None:
-            body["page_size"] = self.page_size
-        if self.results:
-            body["results"] = [v.as_dict() for v in self.results]
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueryList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.count is not None:
-            body["count"] = self.count
-        if self.page is not None:
-            body["page"] = self.page
-        if self.page_size is not None:
-            body["page_size"] = self.page_size
-        if self.results:
-            body["results"] = self.results
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = self.results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueryList:
         """Deserializes the QueryList from a dictionary."""
-        return cls(
-            count=d.get("count", None),
-            page=d.get("page", None),
-            page_size=d.get("page_size", None),
-            results=_repeated_dict(d, "results", LegacyQuery),
-        )
+        return cls(count=d.get('count', None), page=d.get('page', None), page_size=d.get('page_size', None), results=_repeated_dict(d, 'results', LegacyQuery))
+
+

 @dataclass
 class QueryMetrics:
     """A query metric that encapsulates a set of measurements for a single query. Metrics come from
     the driver and are stored in the history service database."""
-
+
     compilation_time_ms: Optional[int] = None
     """Time spent loading metadata and optimizing the query, in milliseconds."""
-
+
     execution_time_ms: Optional[int] = None
     """Time spent executing the query, in milliseconds."""
-
+
     network_sent_bytes: Optional[int] = None
     """Total amount of data sent over the network between executor nodes during shuffle, in bytes."""
-
+
     overloading_queue_start_timestamp: Optional[int] = None
     """Timestamp of when the query was enqueued waiting while the warehouse was at max load. This
     field is optional and will not appear if the query skipped the overloading queue."""
-
+
     photon_total_time_ms: Optional[int] = None
     """Total execution time for all individual Photon query engine tasks in the query, in
     milliseconds."""
-
+
     provisioning_queue_start_timestamp: Optional[int] = None
     """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the
     warehouse. This field is optional and will not appear if the query skipped the provisioning
     queue."""
-
+
     pruned_bytes: Optional[int] = None
     """Total number of bytes in all tables not read due to pruning"""
-
+
     pruned_files_count: Optional[int] = None
     """Total number of files from all tables not read due to pruning"""
-
+
     query_compilation_start_timestamp: Optional[int] = None
     """Timestamp of when the underlying compute started compilation of the query."""
-
+
     read_bytes: Optional[int] = None
     """Total size of data read by the query, in bytes."""
-
+
     read_cache_bytes: Optional[int] = None
     """Size of persistent data read from the cache, in bytes."""
-
+
     read_files_count: Optional[int] = None
     """Number of files read after pruning"""
-
+
     read_partitions_count: Optional[int] = None
     """Number of partitions read after pruning."""
-
+
     read_remote_bytes: Optional[int] = None
     """Size of persistent data read from cloud object storage on your cloud tenant, in bytes."""
-
+
     result_fetch_time_ms: Optional[int] = None
     """Time spent fetching the query results after the execution finished, in milliseconds."""
-
+
     result_from_cache: Optional[bool] = None
     """`true` if the query result was fetched from cache, `false` otherwise."""
-
+
     rows_produced_count: Optional[int] = None
     """Total number of rows returned by the query."""
-
+
     rows_read_count: Optional[int] = None
     """Total number of rows read by the query."""
-
+
     spill_to_disk_bytes: Optional[int] = None
     """Size of data temporarily written to disk while executing the query, in bytes."""
-
+
+    task_time_over_time_range: Optional[TaskTimeOverRange] = None
+    """Sum of task times completed in a range of wall clock time, approximated to a configurable number
+    of points aggregated over all stages and jobs in the query (based on task_total_time_ms)"""
+
     task_total_time_ms: Optional[int] = None
     """Sum of execution time for all of the query’s tasks, in milliseconds."""
-
+
     total_time_ms: Optional[int] = None
     """Total execution time of the query from the client’s point of view, in milliseconds."""
-
+
     write_remote_bytes: Optional[int] = None
     """Size of persistent data written to cloud object storage in your cloud tenant, in bytes."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.compilation_time_ms is not None:
-            body["compilation_time_ms"] = self.compilation_time_ms
-        if self.execution_time_ms is not None:
-            body["execution_time_ms"] = self.execution_time_ms
-        if self.network_sent_bytes is not None:
-            body["network_sent_bytes"] = self.network_sent_bytes
-        if self.overloading_queue_start_timestamp is not None:
-            body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp
-        if self.photon_total_time_ms is not None:
-            body["photon_total_time_ms"] = self.photon_total_time_ms
-        if self.provisioning_queue_start_timestamp is not None:
-            body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
-        if self.pruned_bytes is not None:
-            body["pruned_bytes"] = self.pruned_bytes
-        if self.pruned_files_count is not None:
-            body["pruned_files_count"] = self.pruned_files_count
-        if self.query_compilation_start_timestamp is not None:
-            body["query_compilation_start_timestamp"] = self.query_compilation_start_timestamp
-        if self.read_bytes is not None:
-            body["read_bytes"] = self.read_bytes
-        if self.read_cache_bytes is not None:
-            body["read_cache_bytes"] = self.read_cache_bytes
-        if self.read_files_count is not None:
-            body["read_files_count"] = self.read_files_count
-        if self.read_partitions_count is not None:
-            body["read_partitions_count"] = self.read_partitions_count
-        if self.read_remote_bytes is not None:
-            body["read_remote_bytes"] = self.read_remote_bytes
-        if self.result_fetch_time_ms is not None:
-            body["result_fetch_time_ms"] = self.result_fetch_time_ms
-        if self.result_from_cache is not None:
-            body["result_from_cache"] = self.result_from_cache
-        if self.rows_produced_count is not None:
-            body["rows_produced_count"] = self.rows_produced_count
-        if self.rows_read_count is not None:
-            body["rows_read_count"] = self.rows_read_count
-        if self.spill_to_disk_bytes is not None:
-            body["spill_to_disk_bytes"] = self.spill_to_disk_bytes
-        if self.task_total_time_ms is not None:
-            body["task_total_time_ms"] = self.task_total_time_ms
-        if self.total_time_ms is not None:
-            body["total_time_ms"] = self.total_time_ms
-        if self.write_remote_bytes is not None:
-            body["write_remote_bytes"] = self.write_remote_bytes
+        if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
+        if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
+        if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
+        if self.overloading_queue_start_timestamp is not None: body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
+        if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
+        if self.provisioning_queue_start_timestamp is not None: body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
+        if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
+        if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
+        if self.query_compilation_start_timestamp is not None: body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
+        if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
+        if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
+        if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
+        if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count
+        if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes
+        if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms
+        if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache
+        if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count
+        if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count
+        if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes
+        if self.task_time_over_time_range: body['task_time_over_time_range'] = self.task_time_over_time_range.as_dict()
+        if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms
+        if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms
+        if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueryMetrics into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.compilation_time_ms is not None:
-            body["compilation_time_ms"] = self.compilation_time_ms
-        if self.execution_time_ms is not None:
-            body["execution_time_ms"] = self.execution_time_ms
-        if self.network_sent_bytes is not None:
-            body["network_sent_bytes"] = self.network_sent_bytes
-        if self.overloading_queue_start_timestamp is not None:
-            body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp
-        if self.photon_total_time_ms is not None:
-            body["photon_total_time_ms"] = self.photon_total_time_ms
-        if self.provisioning_queue_start_timestamp is not None:
-            body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp
-        if self.pruned_bytes is not None:
-            body["pruned_bytes"] = self.pruned_bytes
-        if self.pruned_files_count is not None:
-            body["pruned_files_count"] = self.pruned_files_count
-        if self.query_compilation_start_timestamp is not None:
-            body["query_compilation_start_timestamp"] = self.query_compilation_start_timestamp
-        if self.read_bytes is not None:
-            body["read_bytes"] = self.read_bytes
-        if self.read_cache_bytes is not None:
-            body["read_cache_bytes"] = self.read_cache_bytes
-        if self.read_files_count is not None:
-            body["read_files_count"] = self.read_files_count
-        if self.read_partitions_count is not None:
-            body["read_partitions_count"] = self.read_partitions_count
-        if self.read_remote_bytes is not None:
-            body["read_remote_bytes"] = self.read_remote_bytes
-        if self.result_fetch_time_ms is not None:
-            body["result_fetch_time_ms"] = self.result_fetch_time_ms
-        if self.result_from_cache is not None:
-            body["result_from_cache"] = self.result_from_cache
-        if self.rows_produced_count is not None:
-            body["rows_produced_count"] = self.rows_produced_count
-        if self.rows_read_count is not None:
-            body["rows_read_count"] = self.rows_read_count
-        if self.spill_to_disk_bytes is not None:
-            body["spill_to_disk_bytes"] = self.spill_to_disk_bytes
-        if self.task_total_time_ms is not None:
-            body["task_total_time_ms"] = self.task_total_time_ms
-        if self.total_time_ms is not None:
-            body["total_time_ms"] = self.total_time_ms
-        if self.write_remote_bytes is not None:
-            body["write_remote_bytes"] = self.write_remote_bytes
+        if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
+        if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
+        if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
+        if self.overloading_queue_start_timestamp is not None: body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
+        if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
+        if self.provisioning_queue_start_timestamp is not None: body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
+        if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
+        if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
+        if self.query_compilation_start_timestamp is not None: body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
+        if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
+        if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
+        if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
+        if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count
+        if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes
+        if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms
+        if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache
+        if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count
+        if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count
+        if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes
+        if self.task_time_over_time_range: body['task_time_over_time_range'] = self.task_time_over_time_range
+        if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms
+        if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms
+        if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueryMetrics:
         """Deserializes the QueryMetrics from a dictionary."""
-        return cls(
-            compilation_time_ms=d.get("compilation_time_ms", None),
-            execution_time_ms=d.get("execution_time_ms", None),
-            network_sent_bytes=d.get("network_sent_bytes", None),
-            overloading_queue_start_timestamp=d.get("overloading_queue_start_timestamp", None),
-            photon_total_time_ms=d.get("photon_total_time_ms", None),
-            provisioning_queue_start_timestamp=d.get("provisioning_queue_start_timestamp", None),
-            pruned_bytes=d.get("pruned_bytes", None),
-            pruned_files_count=d.get("pruned_files_count", None),
-            query_compilation_start_timestamp=d.get("query_compilation_start_timestamp", None),
-            read_bytes=d.get("read_bytes", None),
-            read_cache_bytes=d.get("read_cache_bytes", None),
-            read_files_count=d.get("read_files_count", None),
-            read_partitions_count=d.get("read_partitions_count", None),
-            read_remote_bytes=d.get("read_remote_bytes", None),
-            result_fetch_time_ms=d.get("result_fetch_time_ms", None),
-            result_from_cache=d.get("result_from_cache", None),
-            rows_produced_count=d.get("rows_produced_count", None),
-            rows_read_count=d.get("rows_read_count", None),
-            spill_to_disk_bytes=d.get("spill_to_disk_bytes", None),
-            task_total_time_ms=d.get("task_total_time_ms", None),
-            total_time_ms=d.get("total_time_ms", None),
-            write_remote_bytes=d.get("write_remote_bytes", None),
-        )
+        return cls(compilation_time_ms=d.get('compilation_time_ms', None), execution_time_ms=d.get('execution_time_ms', None), network_sent_bytes=d.get('network_sent_bytes', None), overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None), photon_total_time_ms=d.get('photon_total_time_ms', None), provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None), pruned_bytes=d.get('pruned_bytes', None), pruned_files_count=d.get('pruned_files_count', None), query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None), read_bytes=d.get('read_bytes', None), read_cache_bytes=d.get('read_cache_bytes', None), read_files_count=d.get('read_files_count', None), read_partitions_count=d.get('read_partitions_count', None), read_remote_bytes=d.get('read_remote_bytes', None), result_fetch_time_ms=d.get('result_fetch_time_ms', None), result_from_cache=d.get('result_from_cache', None), rows_produced_count=d.get('rows_produced_count', None), rows_read_count=d.get('rows_read_count', None), spill_to_disk_bytes=d.get('spill_to_disk_bytes', None), task_time_over_time_range=_from_dict(d, 'task_time_over_time_range', TaskTimeOverRange), task_total_time_ms=d.get('task_total_time_ms', None), total_time_ms=d.get('total_time_ms', None),
write_remote_bytes=d.get('write_remote_bytes', None)) + + @dataclass class QueryOptions: catalog: Optional[str] = None """The name of the catalog to execute this query in.""" - + moved_to_trash_at: Optional[str] = None """The timestamp when this query was moved to trash. Only present when the `is_archived` property is `true`. Trashed items are deleted after thirty days.""" - + parameters: Optional[List[Parameter]] = None - + schema: Optional[str] = None """The name of the schema to execute this query in.""" - + def as_dict(self) -> dict: """Serializes the QueryOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog is not None: - body["catalog"] = self.catalog - if self.moved_to_trash_at is not None: - body["moved_to_trash_at"] = self.moved_to_trash_at - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.schema is not None: - body["schema"] = self.schema + if self.catalog is not None: body['catalog'] = self.catalog + if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.schema is not None: body['schema'] = self.schema return body def as_shallow_dict(self) -> dict: """Serializes the QueryOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog is not None: - body["catalog"] = self.catalog - if self.moved_to_trash_at is not None: - body["moved_to_trash_at"] = self.moved_to_trash_at - if self.parameters: - body["parameters"] = self.parameters - if self.schema is not None: - body["schema"] = self.schema + if self.catalog is not None: body['catalog'] = self.catalog + if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at + if self.parameters: body['parameters'] = self.parameters + if self.schema is not None: body['schema'] = self.schema return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryOptions: """Deserializes the QueryOptions from a dictionary.""" - return cls( - catalog=d.get("catalog", None), - moved_to_trash_at=d.get("moved_to_trash_at", None), - parameters=_repeated_dict(d, "parameters", Parameter), - schema=d.get("schema", None), - ) + return cls(catalog=d.get('catalog', None), moved_to_trash_at=d.get('moved_to_trash_at', None), parameters=_repeated_dict(d, 'parameters', Parameter), schema=d.get('schema', None)) + + @dataclass @@ -6239,83 +4959,60 @@ class QueryParameter: date_range_value: Optional[DateRangeValue] = None """Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or `date_range_value`.""" - + date_value: Optional[DateValue] = None """Date query parameter value. 
Can only specify one of `dynamic_date_value` or `date_value`.""" - + enum_value: Optional[EnumValue] = None """Dropdown query parameter value.""" - + name: Optional[str] = None """Literal parameter marker that appears between double curly braces in the query text.""" - + numeric_value: Optional[NumericValue] = None """Numeric query parameter value.""" - + query_backed_value: Optional[QueryBackedValue] = None """Query-based dropdown query parameter value.""" - + text_value: Optional[TextValue] = None """Text query parameter value.""" - + title: Optional[str] = None """Text displayed in the user-facing parameter widget in the UI.""" - + def as_dict(self) -> dict: """Serializes the QueryParameter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.date_range_value: - body["date_range_value"] = self.date_range_value.as_dict() - if self.date_value: - body["date_value"] = self.date_value.as_dict() - if self.enum_value: - body["enum_value"] = self.enum_value.as_dict() - if self.name is not None: - body["name"] = self.name - if self.numeric_value: - body["numeric_value"] = self.numeric_value.as_dict() - if self.query_backed_value: - body["query_backed_value"] = self.query_backed_value.as_dict() - if self.text_value: - body["text_value"] = self.text_value.as_dict() - if self.title is not None: - body["title"] = self.title + if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() + if self.date_value: body['date_value'] = self.date_value.as_dict() + if self.enum_value: body['enum_value'] = self.enum_value.as_dict() + if self.name is not None: body['name'] = self.name + if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict() + if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict() + if self.text_value: body['text_value'] = self.text_value.as_dict() + if self.title is not None: body['title'] = self.title return body def as_shallow_dict(self) -> dict: """Serializes the QueryParameter into a shallow dictionary of its immediate attributes.""" body = {} - if self.date_range_value: - body["date_range_value"] = self.date_range_value - if self.date_value: - body["date_value"] = self.date_value - if self.enum_value: - body["enum_value"] = self.enum_value - if self.name is not None: - body["name"] = self.name - if self.numeric_value: - body["numeric_value"] = self.numeric_value - if self.query_backed_value: - body["query_backed_value"] = self.query_backed_value - if self.text_value: - body["text_value"] = self.text_value - if self.title is not None: - body["title"] = self.title + if self.date_range_value: body['date_range_value'] = self.date_range_value + if self.date_value: body['date_value'] = self.date_value + if self.enum_value: body['enum_value'] = self.enum_value + if self.name is not None: body['name'] = self.name + if self.numeric_value: body['numeric_value'] = self.numeric_value + if self.query_backed_value: body['query_backed_value'] = self.query_backed_value + if self.text_value: body['text_value'] = self.text_value + if self.title is not None: body['title'] = self.title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryParameter: """Deserializes the QueryParameter from a dictionary.""" - return cls( - date_range_value=_from_dict(d, "date_range_value", DateRangeValue), - date_value=_from_dict(d, "date_value", DateValue), - enum_value=_from_dict(d, "enum_value", EnumValue), - name=d.get("name", None), - numeric_value=_from_dict(d, "numeric_value", NumericValue), - query_backed_value=_from_dict(d, "query_backed_value", QueryBackedValue), - text_value=_from_dict(d, "text_value", TextValue), - title=d.get("title", None), - ) + return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue), date_value=_from_dict(d, 'date_value', DateValue), enum_value=_from_dict(d, 'enum_value', EnumValue), name=d.get('name', None), numeric_value=_from_dict(d, 'numeric_value', NumericValue), query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue), text_value=_from_dict(d, 'text_value', TextValue), title=d.get('title', None)) + +
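# --- Editor's illustrative sketch (not part of the generated patch): how the
# QueryParameter shape above serializes. Assumes TextValue is the companion
# dataclass in this same module carrying a single `value` field, as the
# from_dict wiring suggests.
from databricks.sdk.service.sql import QueryParameter, TextValue

param = QueryParameter(
    name="region",                       # marker that appears as {{ region }} in the query text
    title="Region",                      # label shown on the UI widget
    text_value=TextValue(value="emea"),  # set exactly one of the *_value fields
)
body = param.as_dict()   # nested values are serialized via their own as_dict()
assert body == {"name": "region", "text_value": {"value": "emea"}, "title": "Region"}
assert QueryParameter.from_dict(body).text_value.value == "emea"
# --- end sketch ---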
@dataclass @@ -6325,158 +5022,135 @@ class QueryPostContent: warehouse ID. [Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + name: Optional[str] = None """The title of this query that appears in list views, widget headings, and on the query page.""" - + options: Optional[Any] = None """Exclusively used for storing a list of parameter definitions. A parameter is an object with `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It can be overridden at runtime.""" - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + query: Optional[str] = None """The text of the query to be run.""" - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the QueryPostContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query is not None: - body["query"] = self.query - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.query is not None: body['query'] = self.query + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.data_source_id is not
None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.parent is not None: body['parent'] = self.parent + if self.query is not None: body['query'] = self.query + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role + if self.tags: body['tags'] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryPostContent: """Deserializes the QueryPostContent from a dictionary.""" - return cls( - data_source_id=d.get("data_source_id", None), - description=d.get("description", None), - name=d.get("name", None), - options=d.get("options", None), - parent=d.get("parent", None), - query=d.get("query", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) - + return cls(data_source_id=d.get('data_source_id', None), description=d.get('description', None), name=d.get('name', None), options=d.get('options', None), parent=d.get('parent', None), query=d.get('query', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None)) + -class QueryStatementType(Enum): - ALTER = "ALTER" - ANALYZE = "ANALYZE" - COPY = "COPY" - CREATE = "CREATE" - DELETE = "DELETE" - DESCRIBE = "DESCRIBE" - DROP = "DROP" - EXPLAIN = "EXPLAIN" - GRANT = "GRANT" - INSERT = "INSERT" - MERGE = "MERGE" - OPTIMIZE = "OPTIMIZE" - OTHER = "OTHER" - REFRESH = "REFRESH" - REPLACE = "REPLACE" - REVOKE = "REVOKE" - SELECT = "SELECT" - SET = "SET" - SHOW = "SHOW" - TRUNCATE = "TRUNCATE" - UPDATE = "UPDATE" - USE = "USE" +class QueryStatementType(Enum): + + + ALTER = 'ALTER' + ANALYZE = 'ANALYZE' + COPY = 'COPY' + CREATE = 'CREATE' + DELETE = 'DELETE' + DESCRIBE = 'DESCRIBE' + DROP = 'DROP' + EXPLAIN = 'EXPLAIN' + GRANT = 'GRANT' + INSERT = 'INSERT' + MERGE = 'MERGE' + OPTIMIZE = 'OPTIMIZE' + OTHER = 'OTHER' + REFRESH = 'REFRESH' + REPLACE = 'REPLACE' + REVOKE = 'REVOKE' + SELECT = 'SELECT' + SET = 'SET' + SHOW = 'SHOW' + TRUNCATE = 'TRUNCATE' + UPDATE = 'UPDATE' + USE = 'USE' class QueryStatus(Enum): """Statuses which are also used by OperationStatus in runtime""" - - CANCELED = "CANCELED" - COMPILED = "COMPILED" - COMPILING = "COMPILING" - FAILED = "FAILED" - FINISHED = "FINISHED" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - STARTED = "STARTED" - + + CANCELED = 'CANCELED' + COMPILED = 'COMPILED' + COMPILING = 'COMPILING' + FAILED = 'FAILED' + FINISHED = 'FINISHED' + QUEUED = 'QUEUED' + RUNNING = 'RUNNING' + STARTED = 'STARTED' @dataclass class RepeatedEndpointConfPairs: config_pair: Optional[List[EndpointConfPair]] = None """Deprecated: Use configuration_pairs""" - + configuration_pairs: Optional[List[EndpointConfPair]] = None - + def as_dict(self) -> dict: """Serializes the RepeatedEndpointConfPairs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config_pair: - body["config_pair"] = [v.as_dict() for v in self.config_pair] - if self.configuration_pairs: - body["configuration_pairs"] = [v.as_dict() for v in self.configuration_pairs] + if self.config_pair: body['config_pair'] = [v.as_dict() for v in self.config_pair] + if self.configuration_pairs: body['configuration_pairs'] = [v.as_dict() for v in self.configuration_pairs] return body def as_shallow_dict(self) -> dict: """Serializes the RepeatedEndpointConfPairs into a shallow dictionary of its immediate attributes.""" body = {} - if self.config_pair: - body["config_pair"] = 
self.config_pair - if self.configuration_pairs: - body["configuration_pairs"] = self.configuration_pairs + if self.config_pair: body['config_pair'] = self.config_pair + if self.configuration_pairs: body['configuration_pairs'] = self.configuration_pairs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepeatedEndpointConfPairs: """Deserializes the RepeatedEndpointConfPairs from a dictionary.""" - return cls( - config_pair=_repeated_dict(d, "config_pair", EndpointConfPair), - configuration_pairs=_repeated_dict(d, "configuration_pairs", EndpointConfPair), - ) + return cls(config_pair=_repeated_dict(d, 'config_pair', EndpointConfPair), configuration_pairs=_repeated_dict(d, 'configuration_pairs', EndpointConfPair)) + + + + + + + + @dataclass @@ -6495,6 +5169,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestoreResponse: """Deserializes the RestoreResponse from a dictionary.""" return cls() + + @dataclass @@ -6502,412 +5178,365 @@ class ResultData: byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" - + chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" - + data_array: Optional[List[List[str]]] = None """The `JSON_ARRAY` format is an array of arrays of values, where each non-null value is formatted as a string. Null values are encoded as JSON `null`.""" - + external_links: Optional[List[ExternalLink]] = None - + next_chunk_index: Optional[int] = None """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no more chunks. The next chunk can be fetched with a :method:statementexecution/getStatementResultChunkN request.""" - + next_chunk_internal_link: Optional[str] = None """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no more chunks. This link is an absolute `path` to be joined with your `$DATABRICKS_HOST`, and should be treated as an opaque link. 
This is an alternative to using `next_chunk_index`.""" - + row_count: Optional[int] = None """The number of rows within the result chunk.""" - + row_offset: Optional[int] = None """The starting row offset within the result set.""" - + def as_dict(self) -> dict: """Serializes the ResultData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.byte_count is not None: - body["byte_count"] = self.byte_count - if self.chunk_index is not None: - body["chunk_index"] = self.chunk_index - if self.data_array: - body["data_array"] = [v for v in self.data_array] - if self.external_links: - body["external_links"] = [v.as_dict() for v in self.external_links] - if self.next_chunk_index is not None: - body["next_chunk_index"] = self.next_chunk_index - if self.next_chunk_internal_link is not None: - body["next_chunk_internal_link"] = self.next_chunk_internal_link - if self.row_count is not None: - body["row_count"] = self.row_count - if self.row_offset is not None: - body["row_offset"] = self.row_offset + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.data_array: body['data_array'] = [v for v in self.data_array] + if self.external_links: body['external_links'] = [v.as_dict() for v in self.external_links] + if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index + if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset return body def as_shallow_dict(self) -> dict: """Serializes the ResultData into a shallow dictionary of its immediate attributes.""" body = {} - if self.byte_count is not None: - body["byte_count"] = self.byte_count - if self.chunk_index is not None: - body["chunk_index"] = self.chunk_index - if self.data_array: - body["data_array"] = self.data_array - if self.external_links: - body["external_links"] = self.external_links - if self.next_chunk_index is not None: - body["next_chunk_index"] = self.next_chunk_index - if self.next_chunk_internal_link is not None: - body["next_chunk_internal_link"] = self.next_chunk_internal_link - if self.row_count is not None: - body["row_count"] = self.row_count - if self.row_offset is not None: - body["row_offset"] = self.row_offset + if self.byte_count is not None: body['byte_count'] = self.byte_count + if self.chunk_index is not None: body['chunk_index'] = self.chunk_index + if self.data_array: body['data_array'] = self.data_array + if self.external_links: body['external_links'] = self.external_links + if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index + if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link + if self.row_count is not None: body['row_count'] = self.row_count + if self.row_offset is not None: body['row_offset'] = self.row_offset return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultData: """Deserializes the ResultData from a dictionary.""" - return cls( - byte_count=d.get("byte_count", None), - chunk_index=d.get("chunk_index", None), - data_array=d.get("data_array", None), - external_links=_repeated_dict(d, "external_links", ExternalLink), - next_chunk_index=d.get("next_chunk_index", None), - next_chunk_internal_link=d.get("next_chunk_internal_link", None), - row_count=d.get("row_count", None), - row_offset=d.get("row_offset", None), - ) + return cls(byte_count=d.get('byte_count', None), chunk_index=d.get('chunk_index', None), data_array=d.get('data_array', None), external_links=_repeated_dict(d, 'external_links', ExternalLink), next_chunk_index=d.get('next_chunk_index', None), next_chunk_internal_link=d.get('next_chunk_internal_link', None), row_count=d.get('row_count', None), row_offset=d.get('row_offset', None)) + +
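# --- Editor's illustrative sketch (not part of the generated patch): walking a
# paginated result set with the ResultData fields above. Assumes an existing
# WorkspaceClient `w` and a `statement_id` from a prior execute call; the
# method name mirrors :method:statementexecution/getStatementResultChunkN.
rows = []
chunk = w.statement_execution.get_statement(statement_id).result
while chunk is not None:
    rows.extend(chunk.data_array or [])  # JSON_ARRAY rows; None under other dispositions
    if chunk.next_chunk_index is None:   # absent => this was the last chunk
        break
    chunk = w.statement_execution.get_statement_result_chunk_n(statement_id, chunk.next_chunk_index)
# --- end sketch ---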
@dataclass class ResultManifest: """The result manifest provides schema and metadata for the result set.""" - + chunks: Optional[List[BaseChunkInfo]] = None """Array of result set chunk metadata.""" - + format: Optional[Format] = None - + schema: Optional[ResultSchema] = None """The schema is an ordered list of column descriptions.""" - + total_byte_count: Optional[int] = None """The total number of bytes in the result set. This field is not available when using `INLINE` disposition.""" - + total_chunk_count: Optional[int] = None """The total number of chunks that the result set has been divided into.""" - + total_row_count: Optional[int] = None """The total number of rows in the result set.""" - + truncated: Optional[bool] = None """Indicates whether the result is truncated due to `row_limit` or `byte_limit`.""" - + def as_dict(self) -> dict: """Serializes the ResultManifest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.chunks: - body["chunks"] = [v.as_dict() for v in self.chunks] - if self.format is not None: - body["format"] = self.format.value - if self.schema: - body["schema"] = self.schema.as_dict() - if self.total_byte_count is not None: - body["total_byte_count"] = self.total_byte_count - if self.total_chunk_count is not None: - body["total_chunk_count"] = self.total_chunk_count - if self.total_row_count is not None: - body["total_row_count"] = self.total_row_count - if self.truncated is not None: - body["truncated"] = self.truncated + if self.chunks: body['chunks'] = [v.as_dict() for v in self.chunks] + if self.format is not None: body['format'] = self.format.value + if self.schema: body['schema'] = self.schema.as_dict() + if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count + if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count + if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + if self.truncated is not None: body['truncated'] = self.truncated return body def as_shallow_dict(self) -> dict: """Serializes the ResultManifest into a shallow dictionary of its immediate attributes.""" body = {} - if self.chunks: - body["chunks"] = self.chunks - if self.format is not None: - body["format"] = self.format - if self.schema: - body["schema"] = self.schema - if self.total_byte_count is not None: - body["total_byte_count"] = self.total_byte_count - if self.total_chunk_count is not None: - body["total_chunk_count"] = self.total_chunk_count - if self.total_row_count is not None: - body["total_row_count"] =
self.total_row_count + if self.truncated is not None: body['truncated'] = self.truncated return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultManifest: """Deserializes the ResultManifest from a dictionary.""" - return cls( - chunks=_repeated_dict(d, "chunks", BaseChunkInfo), - format=_enum(d, "format", Format), - schema=_from_dict(d, "schema", ResultSchema), - total_byte_count=d.get("total_byte_count", None), - total_chunk_count=d.get("total_chunk_count", None), - total_row_count=d.get("total_row_count", None), - truncated=d.get("truncated", None), - ) + return cls(chunks=_repeated_dict(d, 'chunks', BaseChunkInfo), format=_enum(d, 'format', Format), schema=_from_dict(d, 'schema', ResultSchema), total_byte_count=d.get('total_byte_count', None), total_chunk_count=d.get('total_chunk_count', None), total_row_count=d.get('total_row_count', None), truncated=d.get('truncated', None)) + + @dataclass class ResultSchema: """The schema is an ordered list of column descriptions.""" - + column_count: Optional[int] = None - + columns: Optional[List[ColumnInfo]] = None - + def as_dict(self) -> dict: """Serializes the ResultSchema into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column_count is not None: - body["column_count"] = self.column_count - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] + if self.column_count is not None: body['column_count'] = self.column_count + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the ResultSchema into a shallow dictionary of its immediate attributes.""" body = {} - if self.column_count is not None: - body["column_count"] = self.column_count - if self.columns: - body["columns"] = self.columns + if self.column_count is not None: body['column_count'] = self.column_count + if self.columns: body['columns'] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultSchema: """Deserializes the ResultSchema from a dictionary.""" - return cls(column_count=d.get("column_count", None), columns=_repeated_dict(d, "columns", ColumnInfo)) - + return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo)) + -class RunAsMode(Enum): - OWNER = "OWNER" - VIEWER = "VIEWER" +class RunAsMode(Enum): + + + OWNER = 'OWNER' + VIEWER = 'VIEWER' class RunAsRole(Enum): """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - OWNER = "owner" - VIEWER = "viewer" - + + OWNER = 'owner' + VIEWER = 'viewer' class SchedulePauseStatus(Enum): - - PAUSED = "PAUSED" - UNPAUSED = "UNPAUSED" - + + + PAUSED = 'PAUSED' + UNPAUSED = 'UNPAUSED' @dataclass class ServiceError: error_code: Optional[ServiceErrorCode] = None - + message: Optional[str] = None """A brief summary of the error condition.""" - + def as_dict(self) -> dict: """Serializes the ServiceError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error_code is not None: - body["error_code"] = self.error_code.value - if self.message is not None: - body["message"] = self.message + if self.error_code is not None: body['error_code'] = self.error_code.value + if self.message is not None: body['message'] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the ServiceError into a shallow dictionary of its immediate attributes.""" body = {} - if self.error_code is not None: - body["error_code"] = self.error_code - if self.message is not None: - body["message"] = self.message + if self.error_code is not None: body['error_code'] = self.error_code + if self.message is not None: body['message'] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServiceError: """Deserializes the ServiceError from a dictionary.""" - return cls(error_code=_enum(d, "error_code", ServiceErrorCode), message=d.get("message", None)) + return cls(error_code=_enum(d, 'error_code', ServiceErrorCode), message=d.get('message', None)) + + class ServiceErrorCode(Enum): + + + ABORTED = 'ABORTED' + ALREADY_EXISTS = 'ALREADY_EXISTS' + BAD_REQUEST = 'BAD_REQUEST' + CANCELLED = 'CANCELLED' + DEADLINE_EXCEEDED = 'DEADLINE_EXCEEDED' + INTERNAL_ERROR = 'INTERNAL_ERROR' + IO_ERROR = 'IO_ERROR' + NOT_FOUND = 'NOT_FOUND' + RESOURCE_EXHAUSTED = 'RESOURCE_EXHAUSTED' + SERVICE_UNDER_MAINTENANCE = 'SERVICE_UNDER_MAINTENANCE' + TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE' + UNAUTHENTICATED = 'UNAUTHENTICATED' + UNKNOWN = 'UNKNOWN' + WORKSPACE_TEMPORARILY_UNAVAILABLE = 'WORKSPACE_TEMPORARILY_UNAVAILABLE' + +@dataclass +class SetRequest: + """Set object ACL""" + + access_control_list: Optional[List[AccessControl]] = None + + object_id: Optional[str] = None + """Object ID. 
The ACL for the object with this UUID is overwritten by this request's POST content.""" + + object_type: Optional[ObjectTypePlural] = None + """The type of object permission to set.""" + + def as_dict(self) -> dict: + """Serializes the SetRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['objectId'] = self.object_id + if self.object_type is not None: body['objectType'] = self.object_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the SetRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['objectId'] = self.object_id + if self.object_type is not None: body['objectType'] = self.object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> SetRequest: + """Deserializes the SetRequest from a dictionary.""" + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl), object_id=d.get('objectId', None), object_type=_enum(d, 'objectType', ObjectTypePlural)) + - ABORTED = "ABORTED" - ALREADY_EXISTS = "ALREADY_EXISTS" - BAD_REQUEST = "BAD_REQUEST" - CANCELLED = "CANCELLED" - DEADLINE_EXCEEDED = "DEADLINE_EXCEEDED" - INTERNAL_ERROR = "INTERNAL_ERROR" - IO_ERROR = "IO_ERROR" - NOT_FOUND = "NOT_FOUND" - RESOURCE_EXHAUSTED = "RESOURCE_EXHAUSTED" - SERVICE_UNDER_MAINTENANCE = "SERVICE_UNDER_MAINTENANCE" - TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" - UNAUTHENTICATED = "UNAUTHENTICATED" - UNKNOWN = "UNKNOWN" - WORKSPACE_TEMPORARILY_UNAVAILABLE = "WORKSPACE_TEMPORARILY_UNAVAILABLE" @dataclass class SetResponse: access_control_list: Optional[List[AccessControl]] = None - + object_id: Optional[str] = None """An object's type and UUID, separated by a forward slash (/) character.""" - + object_type: Optional[ObjectType] = None """A singular noun object type.""" - + def as_dict(self) -> dict: """Serializes the SetResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type.value + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type.value return body def as_shallow_dict(self) -> dict: """Serializes the SetResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetResponse: """Deserializes the SetResponse from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, 
"access_control_list", AccessControl), - object_id=d.get("object_id", None), - object_type=_enum(d, "object_type", ObjectType), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl), object_id=d.get('object_id', None), object_type=_enum(d, 'object_type', ObjectType)) + + @dataclass class SetWorkspaceWarehouseConfigRequest: channel: Optional[Channel] = None """Optional: Channel selection details""" - + config_param: Optional[RepeatedEndpointConfPairs] = None """Deprecated: Use sql_configuration_parameters""" - + data_access_config: Optional[List[EndpointConfPair]] = None """Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K""" - + enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to be converted to another type. Used by frontend to save specific type availability in the warehouse create and edit form UI.""" - + global_param: Optional[RepeatedEndpointConfPairs] = None """Deprecated: Use sql_configuration_parameters""" - + google_service_account: Optional[str] = None """GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage""" - + instance_profile_arn: Optional[str] = None """AWS Only: Instance profile used to pass IAM role to the cluster""" - + security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None """Security policy for warehouses""" - + sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None """SQL configuration parameters""" - + def as_dict(self) -> dict: """Serializes the SetWorkspaceWarehouseConfigRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.channel: - body["channel"] = self.channel.as_dict() - if self.config_param: - body["config_param"] = self.config_param.as_dict() - if self.data_access_config: - body["data_access_config"] = [v.as_dict() for v in self.data_access_config] - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types] - if self.global_param: - body["global_param"] = self.global_param.as_dict() - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy.value - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict() + if self.channel: body['channel'] = self.channel.as_dict() + if self.config_param: body['config_param'] = self.config_param.as_dict() + if self.data_access_config: body['data_access_config'] = [v.as_dict() for v in self.data_access_config] + if self.enabled_warehouse_types: body['enabled_warehouse_types'] = [v.as_dict() for v in self.enabled_warehouse_types] + if self.global_param: body['global_param'] = self.global_param.as_dict() + if self.google_service_account is not None: body['google_service_account'] = self.google_service_account + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.security_policy is not None: 
body['security_policy'] = self.security_policy.value + if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.channel: - body["channel"] = self.channel - if self.config_param: - body["config_param"] = self.config_param - if self.data_access_config: - body["data_access_config"] = self.data_access_config - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = self.enabled_warehouse_types - if self.global_param: - body["global_param"] = self.global_param - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = self.sql_configuration_parameters + if self.channel: body['channel'] = self.channel + if self.config_param: body['config_param'] = self.config_param + if self.data_access_config: body['data_access_config'] = self.data_access_config + if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types + if self.global_param: body['global_param'] = self.global_param + if self.google_service_account is not None: body['google_service_account'] = self.google_service_account + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.security_policy is not None: body['security_policy'] = self.security_policy + if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigRequest: """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary.""" - return cls( - channel=_from_dict(d, "channel", Channel), - config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs), - data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair), - enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair), - global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs), - google_service_account=d.get("google_service_account", None), - instance_profile_arn=d.get("instance_profile_arn", None), - security_policy=_enum(d, "security_policy", SetWorkspaceWarehouseConfigRequestSecurityPolicy), - sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", RepeatedEndpointConfPairs), - ) + return cls(channel=_from_dict(d, 'channel', Channel), config_param=_from_dict(d, 'config_param', RepeatedEndpointConfPairs), data_access_config=_repeated_dict(d, 'data_access_config', EndpointConfPair), enabled_warehouse_types=_repeated_dict(d, 'enabled_warehouse_types', WarehouseTypePair), global_param=_from_dict(d, 'global_param', RepeatedEndpointConfPairs), google_service_account=d.get('google_service_account', None), instance_profile_arn=d.get('instance_profile_arn', None), security_policy=_enum(d, 'security_policy', SetWorkspaceWarehouseConfigRequestSecurityPolicy), sql_configuration_parameters=_from_dict(d, 'sql_configuration_parameters', RepeatedEndpointConfPairs)) + +
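# --- Editor's illustrative sketch (not part of the generated patch): building
# the request above. Per the field docstrings, `config_param` and `global_param`
# are deprecated, so SQL parameters go in `sql_configuration_parameters`; the
# ANSI_MODE pair is just a hypothetical example value.
req = SetWorkspaceWarehouseConfigRequest(
    security_policy=SetWorkspaceWarehouseConfigRequestSecurityPolicy.DATA_ACCESS_CONTROL,
    sql_configuration_parameters=RepeatedEndpointConfPairs(
        configuration_pairs=[EndpointConfPair(key="ANSI_MODE", value="true")]
    ),
)
# Nested dataclasses serialize through their own as_dict():
assert req.as_dict()["sql_configuration_parameters"] == {
    "configuration_pairs": [{"key": "ANSI_MODE", "value": "true"}]
}
# --- end sketch ---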
warehouses""" - - DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL" - NONE = "NONE" - PASSTHROUGH = "PASSTHROUGH" - + + DATA_ACCESS_CONTROL = 'DATA_ACCESS_CONTROL' + NONE = 'NONE' + PASSTHROUGH = 'PASSTHROUGH' @dataclass class SetWorkspaceWarehouseConfigResponse: @@ -6925,14 +5554,18 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigResponse: """Deserializes the SetWorkspaceWarehouseConfigResponse from a dictionary.""" return cls() + + class SpotInstancePolicy(Enum): """Configurations whether the warehouse should use spot instances.""" + + COST_OPTIMIZED = 'COST_OPTIMIZED' + POLICY_UNSPECIFIED = 'POLICY_UNSPECIFIED' + RELIABILITY_OPTIMIZED = 'RELIABILITY_OPTIMIZED' + - COST_OPTIMIZED = "COST_OPTIMIZED" - POLICY_UNSPECIFIED = "POLICY_UNSPECIFIED" - RELIABILITY_OPTIMIZED = "RELIABILITY_OPTIMIZED" @dataclass @@ -6951,24 +5584,25 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> StartWarehouseResponse: """Deserializes the StartWarehouseResponse from a dictionary.""" return cls() + + class State(Enum): """State of the warehouse""" - - DELETED = "DELETED" - DELETING = "DELETING" - RUNNING = "RUNNING" - STARTING = "STARTING" - STOPPED = "STOPPED" - STOPPING = "STOPPING" - + + DELETED = 'DELETED' + DELETING = 'DELETING' + RUNNING = 'RUNNING' + STARTING = 'STARTING' + STOPPED = 'STOPPED' + STOPPING = 'STOPPING' @dataclass class StatementParameterListItem: name: str """The name of a parameter marker to be substituted in the statement.""" - + type: Optional[str] = None """The data type, given as a string. For example: `INT`, `STRING`, `DECIMAL(10,2)`. If no type is given the type is assumed to be `STRING`. Complex types, such as `ARRAY`, `MAP`, and `STRUCT` @@ -6976,87 +5610,72 @@ class StatementParameterListItem: reference. [Data types]: https://docs.databricks.com/sql/language-manual/functions/cast.html""" - + value: Optional[str] = None """The value to substitute, represented as a string. 
If omitted, the value is interpreted as NULL.""" - + def as_dict(self) -> dict: """Serializes the StatementParameterListItem into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.type is not None: - body["type"] = self.type - if self.value is not None: - body["value"] = self.value + if self.name is not None: body['name'] = self.name + if self.type is not None: body['type'] = self.type + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the StatementParameterListItem into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.type is not None: - body["type"] = self.type - if self.value is not None: - body["value"] = self.value + if self.name is not None: body['name'] = self.name + if self.type is not None: body['type'] = self.type + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StatementParameterListItem: """Deserializes the StatementParameterListItem from a dictionary.""" - return cls(name=d.get("name", None), type=d.get("type", None), value=d.get("value", None)) + return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None)) + +
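# --- Editor's illustrative sketch (not part of the generated patch): named
# parameter substitution as the StatementParameterListItem docstrings describe.
# Assumes a WorkspaceClient `w` and a `warehouse_id`; marker names in the
# statement text are matched by `name`.
resp = w.statement_execution.execute_statement(
    warehouse_id=warehouse_id,
    statement="SELECT * FROM sales WHERE region = :region AND amount > :min_amount",
    parameters=[
        StatementParameterListItem(name="region", value="emea"),  # type defaults to STRING
        StatementParameterListItem(name="min_amount", value="100", type="INT"),
    ],
)
# --- end sketch ---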
@dataclass class StatementResponse: manifest: Optional[ResultManifest] = None """The result manifest provides schema and metadata for the result set.""" - + result: Optional[ResultData] = None - + statement_id: Optional[str] = None """The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls.""" - + status: Optional[StatementStatus] = None """The status response includes execution state and if relevant, error information.""" - + def as_dict(self) -> dict: """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.manifest: - body["manifest"] = self.manifest.as_dict() - if self.result: - body["result"] = self.result.as_dict() - if self.statement_id is not None: - body["statement_id"] = self.statement_id - if self.status: - body["status"] = self.status.as_dict() + if self.manifest: body['manifest'] = self.manifest.as_dict() + if self.result: body['result'] = self.result.as_dict() + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.status: body['status'] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the StatementResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.manifest: - body["manifest"] = self.manifest - if self.result: - body["result"] = self.result - if self.statement_id is not None: - body["statement_id"] = self.statement_id - if self.status: - body["status"] = self.status + if self.manifest: body['manifest'] = self.manifest + if self.result: body['result'] = self.result + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.status: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StatementResponse: """Deserializes the StatementResponse from a dictionary.""" - return cls( - manifest=_from_dict(d, "manifest", ResultManifest), - result=_from_dict(d, "result", ResultData), - statement_id=d.get("statement_id", None), - status=_from_dict(d, "status", StatementStatus), - ) + return cls(manifest=_from_dict(d, 'manifest', ResultManifest), result=_from_dict(d, 'result', ResultData), statement_id=d.get('statement_id', None), status=_from_dict(d, 'status', StatementStatus)) + + class StatementState(Enum): @@ -7065,59 +5684,58 @@ class StatementState(Enum): failed; reason for failure described in accompanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch""" - - CANCELED = "CANCELED" - CLOSED = "CLOSED" - FAILED = "FAILED" - PENDING = "PENDING" - RUNNING = "RUNNING" - SUCCEEDED = "SUCCEEDED" - + + CANCELED = 'CANCELED' + CLOSED = 'CLOSED' + FAILED = 'FAILED' + PENDING = 'PENDING' + RUNNING = 'RUNNING' + SUCCEEDED = 'SUCCEEDED' @dataclass class StatementStatus: """The status response includes execution state and if relevant, error information.""" - + error: Optional[ServiceError] = None - + state: Optional[StatementState] = None """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution failed; reason for failure described in accompanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch""" - + def as_dict(self) -> dict: """Serializes the StatementStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error: - body["error"] = self.error.as_dict() - if self.state is not None: - body["state"] = self.state.value + if self.error: body['error'] = self.error.as_dict() + if self.state is not None: body['state'] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the StatementStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.error: - body["error"] = self.error - if self.state is not None: - body["state"] = self.state + if self.error: body['error'] = self.error + if self.state is not None: body['state'] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StatementStatus: """Deserializes the StatementStatus from a dictionary.""" - return cls(error=_from_dict(d, "error", ServiceError), state=_enum(d, "state", StatementState)) + return cls(error=_from_dict(d, 'error', ServiceError), state=_enum(d, 'state', StatementState)) + +
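# --- Editor's illustrative sketch (not part of the generated patch): polling
# the state machine the StatementState docstring describes, continuing the
# execute_statement sketch above.
import time

TERMINAL = {StatementState.SUCCEEDED, StatementState.FAILED,
            StatementState.CANCELED, StatementState.CLOSED}
status = w.statement_execution.get_statement(resp.statement_id).status
while status.state not in TERMINAL:   # i.e. still PENDING or RUNNING
    time.sleep(2)
    status = w.statement_execution.get_statement(resp.statement_id).status
if status.state is StatementState.FAILED and status.error is not None:
    raise RuntimeError(status.error.message)  # ServiceError carries the brief summary
# --- end sketch ---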
class Status(Enum): """Health status of the warehouse.""" + + DEGRADED = 'DEGRADED' + FAILED = 'FAILED' + HEALTHY = 'HEALTHY' + STATUS_UNSPECIFIED = 'STATUS_UNSPECIFIED' + - DEGRADED = "DEGRADED" - FAILED = "FAILED" - HEALTHY = "HEALTHY" - STATUS_UNSPECIFIED = "STATUS_UNSPECIFIED" @dataclass @@ -7136,252 +5754,343 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> StopWarehouseResponse: """Deserializes the StopWarehouseResponse from a dictionary.""" return cls() + + @dataclass class Success: message: Optional[SuccessMessage] = None - + def as_dict(self) -> dict: """Serializes the Success into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message.value + if self.message is not None: body['message'] = self.message.value return body def as_shallow_dict(self) -> dict: """Serializes the Success into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Success: """Deserializes the Success from a dictionary.""" - return cls(message=_enum(d, "message", SuccessMessage)) + return cls(message=_enum(d, 'message', SuccessMessage)) + + class SuccessMessage(Enum): + + + SUCCESS = 'Success' + +@dataclass +class TaskTimeOverRange: + entries: Optional[List[TaskTimeOverRangeEntry]] = None + + interval: Optional[int] = None + """Interval length for all entries (the difference between the start and end time of an entry range); the + same for all entries. The start time of the first interval is query_start_time_ms.""" + + def as_dict(self) -> dict: + """Serializes the TaskTimeOverRange into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.entries: body['entries'] = [v.as_dict() for v in self.entries] + if self.interval is not None: body['interval'] = self.interval + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TaskTimeOverRange into a shallow dictionary of its immediate attributes.""" + body = {} + if self.entries: body['entries'] = self.entries + if self.interval is not None: body['interval'] = self.interval + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TaskTimeOverRange: + """Deserializes the TaskTimeOverRange from a dictionary.""" + return cls(entries=_repeated_dict(d, 'entries', TaskTimeOverRangeEntry), interval=d.get('interval', None)) + + + + +@dataclass +class TaskTimeOverRangeEntry: + task_completed_time_ms: Optional[int] = None + """total task completion time in this time range, aggregated over all stages and jobs in the query""" + + def as_dict(self) -> dict: + """Serializes the TaskTimeOverRangeEntry into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.task_completed_time_ms is not None: body['task_completed_time_ms'] = self.task_completed_time_ms + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TaskTimeOverRangeEntry into a shallow dictionary of its immediate attributes.""" + body = {} + if self.task_completed_time_ms is not None: body['task_completed_time_ms'] = self.task_completed_time_ms + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TaskTimeOverRangeEntry: + """Deserializes the TaskTimeOverRangeEntry from a dictionary.""" + return cls(task_completed_time_ms=d.get('task_completed_time_ms', None)) +
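# --- Editor's illustrative sketch (not part of the generated patch): reading
# the new task-time histogram. Per the docstrings, every entry covers the same
# `interval`, and the first interval starts at the query's start time.
ttr = TaskTimeOverRange.from_dict({
    "interval": 1000,  # ms per entry
    "entries": [{"task_completed_time_ms": 250}, {"task_completed_time_ms": 750}],
})
for i, entry in enumerate(ttr.entries or []):
    offset = i * ttr.interval  # offset from query_start_time_ms
    print(f"[{offset}ms, {offset + ttr.interval}ms): {entry.task_completed_time_ms}ms of task time")
# --- end sketch ---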
body["code"] = self.code - if self.parameters: - body["parameters"] = self.parameters - if self.type is not None: - body["type"] = self.type + if self.code is not None: body['code'] = self.code + if self.parameters: body['parameters'] = self.parameters + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TerminationReason: """Deserializes the TerminationReason from a dictionary.""" - return cls( - code=_enum(d, "code", TerminationReasonCode), - parameters=d.get("parameters", None), - type=_enum(d, "type", TerminationReasonType), - ) + return cls(code=_enum(d, 'code', TerminationReasonCode), parameters=d.get('parameters', None), type=_enum(d, 'type', TerminationReasonType)) + + class TerminationReasonCode(Enum): """status code indicating why the cluster was terminated""" - - ABUSE_DETECTED = "ABUSE_DETECTED" - ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE" - AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE" - AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" - AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" - AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" - AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED" - AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE" - AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE" - AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE" - AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE" - AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" - AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION" - AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING" - AZURE_RESOURCE_PROVIDER_THROTTLING = "AZURE_RESOURCE_PROVIDER_THROTTLING" - AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE" - AZURE_VM_EXTENSION_FAILURE = "AZURE_VM_EXTENSION_FAILURE" - AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE" - BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT" - BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" - CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE" - CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE" - CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT" - CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN" - COMMUNICATION_LOST = "COMMUNICATION_LOST" - CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE" - CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE" - DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" - DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" - DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" - DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE" - DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE" - EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY" - GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED" - GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED" - GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE" - HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE" - IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED" - INACTIVITY = "INACTIVITY" - INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE" - INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE" - INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE" - INTERNAL_ERROR 
= "INTERNAL_ERROR" - INVALID_ARGUMENT = "INVALID_ARGUMENT" - INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE" - IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE" - JOB_FINISHED = "JOB_FINISHED" - K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE" - K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" - METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY" - NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT" - NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" - NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" - NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" - NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE" - REQUEST_REJECTED = "REQUEST_REJECTED" - REQUEST_THROTTLED = "REQUEST_THROTTLED" - SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" - SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" - SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" - SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES" - SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD" - SPARK_ERROR = "SPARK_ERROR" - SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE" - SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE" - SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION" - STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE" - STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE" - SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE" - TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" - TRIAL_EXPIRED = "TRIAL_EXPIRED" - UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE" - UNKNOWN = "UNKNOWN" - UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE" - UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE" - USER_REQUEST = "USER_REQUEST" - WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE" - WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR" - WORKSPACE_CONFIGURATION_ERROR = "WORKSPACE_CONFIGURATION_ERROR" - + + ABUSE_DETECTED = 'ABUSE_DETECTED' + ATTACH_PROJECT_FAILURE = 'ATTACH_PROJECT_FAILURE' + AWS_AUTHORIZATION_FAILURE = 'AWS_AUTHORIZATION_FAILURE' + AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = 'AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE' + AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = 'AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE' + AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = 'AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE' + AWS_REQUEST_LIMIT_EXCEEDED = 'AWS_REQUEST_LIMIT_EXCEEDED' + AWS_UNSUPPORTED_FAILURE = 'AWS_UNSUPPORTED_FAILURE' + AZURE_BYOK_KEY_PERMISSION_FAILURE = 'AZURE_BYOK_KEY_PERMISSION_FAILURE' + AZURE_EPHEMERAL_DISK_FAILURE = 'AZURE_EPHEMERAL_DISK_FAILURE' + AZURE_INVALID_DEPLOYMENT_TEMPLATE = 'AZURE_INVALID_DEPLOYMENT_TEMPLATE' + AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = 'AZURE_OPERATION_NOT_ALLOWED_EXCEPTION' + AZURE_QUOTA_EXCEEDED_EXCEPTION = 'AZURE_QUOTA_EXCEEDED_EXCEPTION' + AZURE_RESOURCE_MANAGER_THROTTLING = 'AZURE_RESOURCE_MANAGER_THROTTLING' + AZURE_RESOURCE_PROVIDER_THROTTLING = 'AZURE_RESOURCE_PROVIDER_THROTTLING' + AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = 'AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE' + AZURE_VM_EXTENSION_FAILURE = 'AZURE_VM_EXTENSION_FAILURE' + AZURE_VNET_CONFIGURATION_FAILURE = 'AZURE_VNET_CONFIGURATION_FAILURE' + BOOTSTRAP_TIMEOUT = 'BOOTSTRAP_TIMEOUT' + BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = 'BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION' + CLOUD_PROVIDER_DISK_SETUP_FAILURE = 'CLOUD_PROVIDER_DISK_SETUP_FAILURE' + CLOUD_PROVIDER_LAUNCH_FAILURE = 'CLOUD_PROVIDER_LAUNCH_FAILURE' + CLOUD_PROVIDER_RESOURCE_STOCKOUT = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT' + 
CLOUD_PROVIDER_SHUTDOWN = 'CLOUD_PROVIDER_SHUTDOWN' + COMMUNICATION_LOST = 'COMMUNICATION_LOST' + CONTAINER_LAUNCH_FAILURE = 'CONTAINER_LAUNCH_FAILURE' + CONTROL_PLANE_REQUEST_FAILURE = 'CONTROL_PLANE_REQUEST_FAILURE' + DATABASE_CONNECTION_FAILURE = 'DATABASE_CONNECTION_FAILURE' + DBFS_COMPONENT_UNHEALTHY = 'DBFS_COMPONENT_UNHEALTHY' + DOCKER_IMAGE_PULL_FAILURE = 'DOCKER_IMAGE_PULL_FAILURE' + DRIVER_UNREACHABLE = 'DRIVER_UNREACHABLE' + DRIVER_UNRESPONSIVE = 'DRIVER_UNRESPONSIVE' + EXECUTION_COMPONENT_UNHEALTHY = 'EXECUTION_COMPONENT_UNHEALTHY' + GCP_QUOTA_EXCEEDED = 'GCP_QUOTA_EXCEEDED' + GCP_SERVICE_ACCOUNT_DELETED = 'GCP_SERVICE_ACCOUNT_DELETED' + GLOBAL_INIT_SCRIPT_FAILURE = 'GLOBAL_INIT_SCRIPT_FAILURE' + HIVE_METASTORE_PROVISIONING_FAILURE = 'HIVE_METASTORE_PROVISIONING_FAILURE' + IMAGE_PULL_PERMISSION_DENIED = 'IMAGE_PULL_PERMISSION_DENIED' + INACTIVITY = 'INACTIVITY' + INIT_SCRIPT_FAILURE = 'INIT_SCRIPT_FAILURE' + INSTANCE_POOL_CLUSTER_FAILURE = 'INSTANCE_POOL_CLUSTER_FAILURE' + INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE' + INTERNAL_ERROR = 'INTERNAL_ERROR' + INVALID_ARGUMENT = 'INVALID_ARGUMENT' + INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE' + IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE' + JOB_FINISHED = 'JOB_FINISHED' + K8S_AUTOSCALING_FAILURE = 'K8S_AUTOSCALING_FAILURE' + K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = 'K8S_DBR_CLUSTER_LAUNCH_TIMEOUT' + METASTORE_COMPONENT_UNHEALTHY = 'METASTORE_COMPONENT_UNHEALTHY' + NEPHOS_RESOURCE_MANAGEMENT = 'NEPHOS_RESOURCE_MANAGEMENT' + NETWORK_CONFIGURATION_FAILURE = 'NETWORK_CONFIGURATION_FAILURE' + NFS_MOUNT_FAILURE = 'NFS_MOUNT_FAILURE' + NPIP_TUNNEL_SETUP_FAILURE = 'NPIP_TUNNEL_SETUP_FAILURE' + NPIP_TUNNEL_TOKEN_FAILURE = 'NPIP_TUNNEL_TOKEN_FAILURE' + REQUEST_REJECTED = 'REQUEST_REJECTED' + REQUEST_THROTTLED = 'REQUEST_THROTTLED' + SECRET_RESOLUTION_ERROR = 'SECRET_RESOLUTION_ERROR' + SECURITY_DAEMON_REGISTRATION_EXCEPTION = 'SECURITY_DAEMON_REGISTRATION_EXCEPTION' + SELF_BOOTSTRAP_FAILURE = 'SELF_BOOTSTRAP_FAILURE' + SKIPPED_SLOW_NODES = 'SKIPPED_SLOW_NODES' + SLOW_IMAGE_DOWNLOAD = 'SLOW_IMAGE_DOWNLOAD' + SPARK_ERROR = 'SPARK_ERROR' + SPARK_IMAGE_DOWNLOAD_FAILURE = 'SPARK_IMAGE_DOWNLOAD_FAILURE' + SPARK_STARTUP_FAILURE = 'SPARK_STARTUP_FAILURE' + SPOT_INSTANCE_TERMINATION = 'SPOT_INSTANCE_TERMINATION' + STORAGE_DOWNLOAD_FAILURE = 'STORAGE_DOWNLOAD_FAILURE' + STS_CLIENT_SETUP_FAILURE = 'STS_CLIENT_SETUP_FAILURE' + SUBNET_EXHAUSTED_FAILURE = 'SUBNET_EXHAUSTED_FAILURE' + TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE' + TRIAL_EXPIRED = 'TRIAL_EXPIRED' + UNEXPECTED_LAUNCH_FAILURE = 'UNEXPECTED_LAUNCH_FAILURE' + UNKNOWN = 'UNKNOWN' + UNSUPPORTED_INSTANCE_TYPE = 'UNSUPPORTED_INSTANCE_TYPE' + UPDATE_INSTANCE_PROFILE_FAILURE = 'UPDATE_INSTANCE_PROFILE_FAILURE' + USER_REQUEST = 'USER_REQUEST' + WORKER_SETUP_FAILURE = 'WORKER_SETUP_FAILURE' + WORKSPACE_CANCELLED_ERROR = 'WORKSPACE_CANCELLED_ERROR' + WORKSPACE_CONFIGURATION_ERROR = 'WORKSPACE_CONFIGURATION_ERROR' class TerminationReasonType(Enum): """type of the termination""" - - CLIENT_ERROR = "CLIENT_ERROR" - CLOUD_FAILURE = "CLOUD_FAILURE" - SERVICE_FAULT = "SERVICE_FAULT" - SUCCESS = "SUCCESS" - + + CLIENT_ERROR = 'CLIENT_ERROR' + CLOUD_FAILURE = 'CLOUD_FAILURE' + SERVICE_FAULT = 'SERVICE_FAULT' + SUCCESS = 'SUCCESS' @dataclass class TextValue: value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the TextValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: - body["value"] = self.value + if self.value is not None: 
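The TerminationReasonType taxonomy below is what callers usually branch on when deciding whether a failed cluster launch is worth retrying. An illustrative policy only, not one the SDK prescribes:

# Cloud hiccups and service-side faults are plausibly transient; client errors
# (bad arguments, missing permissions) and normal SUCCESS terminations are not.
_RETRYABLE_TYPES = {TerminationReasonType.CLOUD_FAILURE, TerminationReasonType.SERVICE_FAULT}

def should_retry(reason: TerminationReason) -> bool:
    return reason.type in _RETRYABLE_TYPES  # None (unknown type) is treated as not retryable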
body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the TextValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.value is not None: - body["value"] = self.value + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TextValue: """Deserializes the TextValue from a dictionary.""" - return cls(value=d.get("value", None)) + return cls(value=d.get('value', None)) + + @dataclass class TimeRange: end_time_ms: Optional[int] = None """The end time in milliseconds.""" - + start_time_ms: Optional[int] = None """The start time in milliseconds.""" - + def as_dict(self) -> dict: """Serializes the TimeRange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time_ms is not None: - body["end_time_ms"] = self.end_time_ms - if self.start_time_ms is not None: - body["start_time_ms"] = self.start_time_ms + if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms + if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms return body def as_shallow_dict(self) -> dict: """Serializes the TimeRange into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time_ms is not None: - body["end_time_ms"] = self.end_time_ms - if self.start_time_ms is not None: - body["start_time_ms"] = self.start_time_ms + if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms + if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TimeRange: """Deserializes the TimeRange from a dictionary.""" - return cls(end_time_ms=d.get("end_time_ms", None), start_time_ms=d.get("start_time_ms", None)) + return cls(end_time_ms=d.get('end_time_ms', None), start_time_ms=d.get('start_time_ms', None)) + + @dataclass class TransferOwnershipObjectId: new_owner: Optional[str] = None """Email address for the new owner, who must exist in the workspace.""" - + def as_dict(self) -> dict: """Serializes the TransferOwnershipObjectId into a dictionary suitable for use as a JSON request body.""" body = {} - if self.new_owner is not None: - body["new_owner"] = self.new_owner + if self.new_owner is not None: body['new_owner'] = self.new_owner return body def as_shallow_dict(self) -> dict: """Serializes the TransferOwnershipObjectId into a shallow dictionary of its immediate attributes.""" body = {} - if self.new_owner is not None: - body["new_owner"] = self.new_owner + if self.new_owner is not None: body['new_owner'] = self.new_owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipObjectId: """Deserializes the TransferOwnershipObjectId from a dictionary.""" - return cls(new_owner=d.get("new_owner", None)) + return cls(new_owner=d.get('new_owner', None)) + + + + +@dataclass +class TransferOwnershipRequest: + """Transfer object ownership""" + + new_owner: Optional[str] = None + """Email address for the new owner, who must exist in the workspace.""" + + object_id: Optional[TransferOwnershipObjectId] = None + """The ID of the object on which to change ownership.""" + + object_type: Optional[OwnableObjectType] = None + """The type of object on which to change ownership.""" + + def as_dict(self) -> dict: + """Serializes the TransferOwnershipRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.new_owner is not None: body['new_owner'] = self.new_owner + if self.object_id: 
body['objectId'] = self.object_id.as_dict() + if self.object_type is not None: body['objectType'] = self.object_type.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the TransferOwnershipRequest into a shallow dictionary of its immediate attributes.""" + body = {} + if self.new_owner is not None: body['new_owner'] = self.new_owner + if self.object_id: body['objectId'] = self.object_id + if self.object_type is not None: body['objectType'] = self.object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipRequest: + """Deserializes the TransferOwnershipRequest from a dictionary.""" + return cls(new_owner=d.get('new_owner', None), object_id=_from_dict(d, 'objectId', TransferOwnershipObjectId), object_type=_enum(d, 'objectType', OwnableObjectType)) + + + + + + + + + + + @dataclass @@ -7396,139 +6105,108 @@ class UpdateAlertRequest: A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + alert: Optional[UpdateAlertRequestAlert] = None - + auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name.""" - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert: - body["alert"] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask + if self.alert: body['alert'] = self.alert.as_dict() + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert: - body["alert"] = self.alert - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask + if self.alert: body['alert'] = self.alert + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequest: """Deserializes the UpdateAlertRequest from a dictionary.""" - return cls( - alert=_from_dict(d, "alert", UpdateAlertRequestAlert), - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - id=d.get("id", None), - update_mask=d.get("update_mask", None), - ) + return cls(alert=_from_dict(d, 'alert', UpdateAlertRequestAlert), auto_resolve_display_name=d.get('auto_resolve_display_name', None), id=d.get('id', None), update_mask=d.get('update_mask', None)) + + @dataclass class UpdateAlertRequestAlert: condition: Optional[AlertCondition] = None """Trigger conditions 
of the alert.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when the alert returns to normal.""" - + owner_user_name: Optional[str] = None """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - + def as_dict(self) -> dict: """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition: - body["condition"] = self.condition.as_dict() - if self.custom_body is not None: - body["custom_body"] = self.custom_body - if self.custom_subject is not None: - body["custom_subject"] = self.custom_subject - if self.display_name is not None: - body["display_name"] = self.display_name - if self.notify_on_ok is not None: - body["notify_on_ok"] = self.notify_on_ok - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.query_id is not None: - body["query_id"] = self.query_id - if self.seconds_to_retrigger is not None: - body["seconds_to_retrigger"] = self.seconds_to_retrigger + if self.condition: body['condition'] = self.condition.as_dict() + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAlertRequestAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition: body['condition'] = self.condition + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequestAlert: """Deserializes the UpdateAlertRequestAlert from a dictionary.""" - return cls( - condition=_from_dict(d, "condition", AlertCondition), - custom_body=d.get("custom_body", None), - custom_subject=d.get("custom_subject", None), - display_name=d.get("display_name", None), - notify_on_ok=d.get("notify_on_ok", None), - owner_user_name=d.get("owner_user_name", None), - query_id=d.get("query_id", None), - seconds_to_retrigger=d.get("seconds_to_retrigger", None), - ) + return cls(condition=_from_dict(d, 'condition', AlertCondition), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), display_name=d.get('display_name', None), notify_on_ok=d.get('notify_on_ok', None), owner_user_name=d.get('owner_user_name', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None)) + + + + +
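The update_mask contract used by the update request types (an explicit field list, or `*` for full replacement) is easiest to see in a concrete partial update. A sketch; the id value is a placeholder, and the comma-separated mask syntax is the conventional field-mask form:

req = UpdateAlertRequest(
    id="<alert-id>",  # placeholder
    update_mask="display_name,seconds_to_retrigger",  # only these two fields change
    alert=UpdateAlertRequestAlert(display_name="Nightly load latency", seconds_to_retrigger=600),
)
body = req.as_dict()  # JSON-ready; fields left as None are simply omitted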
@dataclass @@ -7543,156 +6221,113 @@ class UpdateQueryRequest: A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve query display name conflicts. Otherwise, fail the request if the query's display name conflicts with an existing query's display name.""" - + id: Optional[str] = None - + query: Optional[UpdateQueryRequestQuery] = None - + def as_dict(self) -> dict: """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.query: - body["query"] = self.query.as_dict() - if self.update_mask is not None: - body["update_mask"] = self.update_mask + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.id is not None: body['id'] = self.id + if self.query: body['query'] = self.query.as_dict() + if self.update_mask is not None: body['update_mask'] = self.update_mask return body def as_shallow_dict(self) -> dict: """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.query: - body["query"] = self.query - if self.update_mask is not None: - body["update_mask"] = self.update_mask + if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.id is not None: body['id'] = self.id + if self.query: body['query'] = self.query + if self.update_mask is not None: body['update_mask'] = self.update_mask return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateQueryRequest: """Deserializes the UpdateQueryRequest from a dictionary.""" - return cls( - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - id=d.get("id", None), - query=_from_dict(d, "query", UpdateQueryRequestQuery), - update_mask=d.get("update_mask", None), - ) + return cls(auto_resolve_display_name=d.get('auto_resolve_display_name', None), id=d.get('id', None), query=_from_dict(d, 'query', UpdateQueryRequestQuery), update_mask=d.get('update_mask', None)) + +
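One pattern worth noting, since every class in this module carries the same pair of serializers: as_dict recurses (nested messages become plain dicts, enums become their values), while as_shallow_dict keeps the original Python objects one level deep. A sketch using the request type above and the query payload type defined just below; the id is a placeholder:

req = UpdateQueryRequest(
    id="<query-id>",  # placeholder
    update_mask="query_text",
    query=UpdateQueryRequestQuery(query_text="SELECT 1"),
)
assert isinstance(req.as_dict()["query"], dict)                        # deep: JSON-ready
assert isinstance(req.as_shallow_dict()["query"], UpdateQueryRequestQuery)  # shallow: object kept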
@dataclass class UpdateQueryRequestQuery: apply_auto_limit: Optional[bool] = None """Whether to apply a 1000-row limit to the query result.""" - + catalog: Optional[str] = None """Name of the catalog where this query will be executed.""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + display_name: Optional[str] = None """Display name of the query that appears in list views, widget headings, and on the query page.""" - + owner_user_name: Optional[str] = None """Username of the user that owns the query.""" - + parameters: Optional[List[QueryParameter]] = None """List of query parameter definitions.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_mode: Optional[RunAsMode] = None """Sets the "Run as" role for the object.""" - + schema: Optional[str] = None """Name of the schema where this query will be executed.""" - + tags: Optional[List[str]] = None - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the query.""" - + def as_dict(self) -> dict: """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_auto_limit is not None: - body["apply_auto_limit"] = self.apply_auto_limit - if self.catalog is not None: -
body["catalog"] = self.catalog - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_mode is not None: - body["run_as_mode"] = self.run_as_mode.value - if self.schema is not None: - body["schema"] = self.schema - if self.tags: - body["tags"] = [v for v in self.tags] - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateQueryRequestQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_auto_limit is not None: - body["apply_auto_limit"] = self.apply_auto_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.description is not None: - body["description"] = self.description - if self.display_name is not None: - body["display_name"] = self.display_name - if self.owner_user_name is not None: - body["owner_user_name"] = self.owner_user_name - if self.parameters: - body["parameters"] = self.parameters - if self.query_text is not None: - body["query_text"] = self.query_text - if self.run_as_mode is not None: - body["run_as_mode"] = self.run_as_mode - if self.schema is not None: - body["schema"] = self.schema - if self.tags: - body["tags"] = self.tags - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = self.parameters + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = self.tags + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateQueryRequestQuery: """Deserializes the UpdateQueryRequestQuery from a dictionary.""" - return cls( - apply_auto_limit=d.get("apply_auto_limit", None), - catalog=d.get("catalog", None), - description=d.get("description", None), - 
display_name=d.get("display_name", None), - owner_user_name=d.get("owner_user_name", None), - parameters=_repeated_dict(d, "parameters", QueryParameter), - query_text=d.get("query_text", None), - run_as_mode=_enum(d, "run_as_mode", RunAsMode), - schema=d.get("schema", None), - tags=d.get("tags", None), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), description=d.get('description', None), display_name=d.get('display_name', None), owner_user_name=d.get('owner_user_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), warehouse_id=d.get('warehouse_id', None)) + + @dataclass @@ -7711,6 +6346,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() + + @dataclass @@ -7725,484 +6362,386 @@ class UpdateVisualizationRequest: A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + id: Optional[str] = None - + visualization: Optional[UpdateVisualizationRequestVisualization] = None - + def as_dict(self) -> dict: """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - if self.visualization: - body["visualization"] = self.visualization.as_dict() + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.visualization: body['visualization'] = self.visualization.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - if self.visualization: - body["visualization"] = self.visualization + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.visualization: body['visualization'] = self.visualization return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequest: """Deserializes the UpdateVisualizationRequest from a dictionary.""" - return cls( - id=d.get("id", None), - update_mask=d.get("update_mask", None), - visualization=_from_dict(d, "visualization", UpdateVisualizationRequestVisualization), - ) + return cls(id=d.get('id', None), update_mask=d.get('update_mask', None), visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization)) + + @dataclass class UpdateVisualizationRequestVisualization: display_name: Optional[str] = None """The display name of the visualization.""" - + serialized_options: Optional[str] = None """The visualization options varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization options directly.""" - + serialized_query_plan: Optional[str] = None """The visualization query plan varies widely from one visualization type to the next and is unsupported. 
Databricks does not recommend modifying the visualization query plan directly.""" - + type: Optional[str] = None """The type of visualization: counter, table, funnel, and so on.""" - + def as_dict(self) -> dict: """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.serialized_options is not None: - body["serialized_options"] = self.serialized_options - if self.serialized_query_plan is not None: - body["serialized_query_plan"] = self.serialized_query_plan - if self.type is not None: - body["type"] = self.type + if self.display_name is not None: body['display_name'] = self.display_name + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the UpdateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.serialized_options is not None: - body["serialized_options"] = self.serialized_options - if self.serialized_query_plan is not None: - body["serialized_query_plan"] = self.serialized_query_plan - if self.type is not None: - body["type"] = self.type + if self.display_name is not None: body['display_name'] = self.display_name + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequestVisualization: """Deserializes the UpdateVisualizationRequestVisualization from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - serialized_options=d.get("serialized_options", None), - serialized_query_plan=d.get("serialized_query_plan", None), - type=d.get("type", None), - ) + return cls(display_name=d.get('display_name', None), serialized_options=d.get('serialized_options', None), serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None)) + + @dataclass class User: email: Optional[str] = None - + id: Optional[int] = None - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the User into a dictionary suitable for use as a JSON request body.""" body = {} - if self.email is not None: - body["email"] = self.email - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name + if self.email is not None: body['email'] = self.email + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the User into a shallow dictionary of its immediate attributes.""" body = {} - if self.email is not None: - body["email"] = self.email - if self.id is not None: - body["id"] = self.id - if self.name is not None: - body["name"] = self.name + if self.email is not None: body['email'] = self.email + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> User: """Deserializes the User 
from a dictionary.""" - return cls(email=d.get("email", None), id=d.get("id", None), name=d.get("name", None)) + return cls(email=d.get('email', None), id=d.get('id', None), name=d.get('name', None)) + + @dataclass class Visualization: create_time: Optional[str] = None """The timestamp indicating when the visualization was created.""" - + display_name: Optional[str] = None """The display name of the visualization.""" - + id: Optional[str] = None """UUID identifying the visualization.""" - + query_id: Optional[str] = None """UUID of the query that the visualization is attached to.""" - + serialized_options: Optional[str] = None """The visualization options vary widely from one visualization type to the next and are unsupported. Databricks does not recommend modifying visualization options directly.""" - + serialized_query_plan: Optional[str] = None """The visualization query plan varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying the visualization query plan directly.""" - + type: Optional[str] = None """The type of visualization: counter, table, funnel, and so on.""" - + update_time: Optional[str] = None """The timestamp indicating when the visualization was updated.""" - + def as_dict(self) -> dict: """Serializes the Visualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.query_id is not None: - body["query_id"] = self.query_id - if self.serialized_options is not None: - body["serialized_options"] = self.serialized_options - if self.serialized_query_plan is not None: - body["serialized_query_plan"] = self.serialized_query_plan - if self.type is not None: - body["type"] = self.type - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + if self.update_time is not None: body['update_time'] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the Visualization into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: - body["create_time"] = self.create_time - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - if self.query_id is not None: - body["query_id"] = self.query_id - if self.serialized_options is not None: - body["serialized_options"] = self.serialized_options - if self.serialized_query_plan is not None: - body["serialized_query_plan"] = self.serialized_query_plan - if self.type is not None: - body["type"] = self.type - if self.update_time is not None: - body["update_time"] = self.update_time + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.query_id is 
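from_dict and as_dict are designed to round-trip: keys absent from the payload come back as None, and as_dict omits unset fields rather than writing nulls. A sketch with made-up payload values:

payload = {"id": "abc123", "type": "table", "display_name": "Rows by day"}
viz = Visualization.from_dict(payload)
assert viz.query_id is None                            # absent from the payload
assert Visualization.from_dict(viz.as_dict()) == viz   # dataclass equality round-trips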
not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Visualization: """Deserializes the Visualization from a dictionary.""" - return cls( - create_time=d.get("create_time", None), - display_name=d.get("display_name", None), - id=d.get("id", None), - query_id=d.get("query_id", None), - serialized_options=d.get("serialized_options", None), - serialized_query_plan=d.get("serialized_query_plan", None), - type=d.get("type", None), - update_time=d.get("update_time", None), - ) + return cls(create_time=d.get('create_time', None), display_name=d.get('display_name', None), id=d.get('id', None), query_id=d.get('query_id', None), serialized_options=d.get('serialized_options', None), serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None), update_time=d.get('update_time', None)) + + @dataclass class WarehouseAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[WarehousePermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the WarehouseAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the WarehouseAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehouseAccessControlRequest: """Deserializes the WarehouseAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", WarehousePermissionLevel), - 
service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', WarehousePermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class WarehouseAccessControlResponse: all_permissions: Optional[List[WarehousePermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the WarehouseAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the WarehouseAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehouseAccessControlResponse: """Deserializes the WarehouseAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", WarehousePermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', WarehousePermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class WarehousePermission: inherited: Optional[bool] = None - + inherited_from_object: 
Optional[List[str]] = None - + permission_level: Optional[WarehousePermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the WarehousePermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermission: """Deserializes the WarehousePermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", WarehousePermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', WarehousePermissionLevel)) + + class WarehousePermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = "CAN_MANAGE" - CAN_MONITOR = "CAN_MONITOR" - CAN_USE = "CAN_USE" - CAN_VIEW = "CAN_VIEW" - IS_OWNER = "IS_OWNER" - + + CAN_MANAGE = 'CAN_MANAGE' + CAN_MONITOR = 'CAN_MONITOR' + CAN_USE = 'CAN_USE' + CAN_VIEW = 'CAN_VIEW' + IS_OWNER = 'IS_OWNER' @dataclass class WarehousePermissions: access_control_list: Optional[List[WarehouseAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the WarehousePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if 
self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissions: """Deserializes the WarehousePermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', WarehouseAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class WarehousePermissionsDescription: description: Optional[str] = None - + permission_level: Optional[WarehousePermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the WarehousePermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsDescription: """Deserializes the WarehousePermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", WarehousePermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', WarehousePermissionLevel)) + + @dataclass class WarehousePermissionsRequest: access_control_list: Optional[List[WarehouseAccessControlRequest]] = None - + warehouse_id: Optional[str] = None """The SQL warehouse for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the WarehousePermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.warehouse_id is not None: 
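Tying the permission types together, a request granting a group CAN_USE on a warehouse might be assembled as follows; the warehouse id and group name are placeholders:

req = WarehousePermissionsRequest(
    warehouse_id="<warehouse-id>",  # placeholder
    access_control_list=[
        WarehouseAccessControlRequest(
            group_name="data-analysts",  # placeholder group
            permission_level=WarehousePermissionLevel.CAN_USE,
        )
    ],
)
body = req.as_dict()  # serializes each access-control entry via its own as_dict()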
body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsRequest: """Deserializes the WarehousePermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlRequest), - warehouse_id=d.get("warehouse_id", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', WarehouseAccessControlRequest), warehouse_id=d.get('warehouse_id', None)) + + @dataclass @@ -8210,234 +6749,186 @@ class WarehouseTypePair: enabled: Optional[bool] = None """If set to false, the specific warehouse type will not be allowed as a value for warehouse_type in CreateWarehouse and EditWarehouse""" - + warehouse_type: Optional[WarehouseTypePairWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`.""" - + def as_dict(self) -> dict: """Serializes the WarehouseTypePair into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value + if self.enabled is not None: body['enabled'] = self.enabled + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the WarehouseTypePair into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehouseTypePair: """Deserializes the WarehouseTypePair from a dictionary.""" - return cls( - enabled=d.get("enabled", None), warehouse_type=_enum(d, "warehouse_type", WarehouseTypePairWarehouseType) - ) + return cls(enabled=d.get('enabled', None), warehouse_type=_enum(d, 'warehouse_type', WarehouseTypePairWarehouseType)) + + class WarehouseTypePairWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`.""" - - CLASSIC = "CLASSIC" - PRO = "PRO" - TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" - + + CLASSIC = 'CLASSIC' + PRO = 'PRO' + TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' @dataclass class Widget: id: Optional[str] = None """The unique ID for this widget.""" - + options: Optional[WidgetOptions] = None - + visualization: Optional[LegacyVisualization] = None """The visualization description API changes frequently and is unsupported. You can duplicate a visualization by copying description objects received _from the API_ and then using them to create a new one with a POST request to the same endpoint. 
 
     Databricks does not recommend constructing ad-hoc visualizations entirely in JSON."""
-
+    
     width: Optional[int] = None
     """Unused field."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the Widget into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
-        if self.options:
-            body["options"] = self.options.as_dict()
-        if self.visualization:
-            body["visualization"] = self.visualization.as_dict()
-        if self.width is not None:
-            body["width"] = self.width
+        if self.id is not None: body['id'] = self.id
+        if self.options: body['options'] = self.options.as_dict()
+        if self.visualization: body['visualization'] = self.visualization.as_dict()
+        if self.width is not None: body['width'] = self.width
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Widget into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.id is not None:
-            body["id"] = self.id
-        if self.options:
-            body["options"] = self.options
-        if self.visualization:
-            body["visualization"] = self.visualization
-        if self.width is not None:
-            body["width"] = self.width
+        if self.id is not None: body['id'] = self.id
+        if self.options: body['options'] = self.options
+        if self.visualization: body['visualization'] = self.visualization
+        if self.width is not None: body['width'] = self.width
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Widget:
         """Deserializes the Widget from a dictionary."""
-        return cls(
-            id=d.get("id", None),
-            options=_from_dict(d, "options", WidgetOptions),
-            visualization=_from_dict(d, "visualization", LegacyVisualization),
-            width=d.get("width", None),
-        )
+        return cls(id=d.get('id', None), options=_from_dict(d, 'options', WidgetOptions), visualization=_from_dict(d, 'visualization', LegacyVisualization), width=d.get('width', None))
+
+

 @dataclass
 class WidgetOptions:
     created_at: Optional[str] = None
     """Timestamp when this object was created"""
-
+    
     description: Optional[str] = None
     """Custom description of the widget"""
-
+    
     is_hidden: Optional[bool] = None
     """Whether this widget is hidden on the dashboard."""
-
+    
     parameter_mappings: Optional[Any] = None
     """How parameters used by the visualization in this widget relate to other widgets on the
     dashboard. Databricks does not recommend modifying this definition in JSON."""
-
+    
     position: Optional[WidgetPosition] = None
     """Coordinates of this widget on a dashboard.
 
     This portion of the API changes frequently and is unsupported."""
-
+    
     title: Optional[str] = None
     """Custom title of the widget"""
-
+    
     updated_at: Optional[str] = None
     """Timestamp of the last time this object was updated."""
-
+    
     def as_dict(self) -> dict:
         """Serializes the WidgetOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.description is not None:
-            body["description"] = self.description
-        if self.is_hidden is not None:
-            body["isHidden"] = self.is_hidden
-        if self.parameter_mappings:
-            body["parameterMappings"] = self.parameter_mappings
-        if self.position:
-            body["position"] = self.position.as_dict()
-        if self.title is not None:
-            body["title"] = self.title
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.description is not None: body['description'] = self.description
+        if self.is_hidden is not None: body['isHidden'] = self.is_hidden
+        if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings
+        if self.position: body['position'] = self.position.as_dict()
+        if self.title is not None: body['title'] = self.title
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the WidgetOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None:
-            body["created_at"] = self.created_at
-        if self.description is not None:
-            body["description"] = self.description
-        if self.is_hidden is not None:
-            body["isHidden"] = self.is_hidden
-        if self.parameter_mappings:
-            body["parameterMappings"] = self.parameter_mappings
-        if self.position:
-            body["position"] = self.position
-        if self.title is not None:
-            body["title"] = self.title
-        if self.updated_at is not None:
-            body["updated_at"] = self.updated_at
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.description is not None: body['description'] = self.description
+        if self.is_hidden is not None: body['isHidden'] = self.is_hidden
+        if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings
+        if self.position: body['position'] = self.position
+        if self.title is not None: body['title'] = self.title
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> WidgetOptions:
         """Deserializes the WidgetOptions from a dictionary."""
-        return cls(
-            created_at=d.get("created_at", None),
-            description=d.get("description", None),
-            is_hidden=d.get("isHidden", None),
-            parameter_mappings=d.get("parameterMappings", None),
-            position=_from_dict(d, "position", WidgetPosition),
-            title=d.get("title", None),
-            updated_at=d.get("updated_at", None),
-        )
+        return cls(created_at=d.get('created_at', None), description=d.get('description', None), is_hidden=d.get('isHidden', None), parameter_mappings=d.get('parameterMappings', None), position=_from_dict(d, 'position', WidgetPosition), title=d.get('title', None), updated_at=d.get('updated_at', None))
+
+

 @dataclass
 class WidgetPosition:
     """Coordinates of this widget on a dashboard. This portion of the API changes frequently and is
     unsupported."""
-
+    
     auto_height: Optional[bool] = None
     """reserved for internal use"""
-
+    
     col: Optional[int] = None
     """column in the dashboard grid.
 Values start with 0"""
-
+    
     row: Optional[int] = None
     """row in the dashboard grid. Values start with 0"""
-
+    
     size_x: Optional[int] = None
     """width of the widget measured in dashboard grid cells"""
-
+    
     size_y: Optional[int] = None
     """height of the widget measured in dashboard grid cells"""
-
+    
     def as_dict(self) -> dict:
         """Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_height is not None:
-            body["autoHeight"] = self.auto_height
-        if self.col is not None:
-            body["col"] = self.col
-        if self.row is not None:
-            body["row"] = self.row
-        if self.size_x is not None:
-            body["sizeX"] = self.size_x
-        if self.size_y is not None:
-            body["sizeY"] = self.size_y
+        if self.auto_height is not None: body['autoHeight'] = self.auto_height
+        if self.col is not None: body['col'] = self.col
+        if self.row is not None: body['row'] = self.row
+        if self.size_x is not None: body['sizeX'] = self.size_x
+        if self.size_y is not None: body['sizeY'] = self.size_y
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the WidgetPosition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_height is not None:
-            body["autoHeight"] = self.auto_height
-        if self.col is not None:
-            body["col"] = self.col
-        if self.row is not None:
-            body["row"] = self.row
-        if self.size_x is not None:
-            body["sizeX"] = self.size_x
-        if self.size_y is not None:
-            body["sizeY"] = self.size_y
+        if self.auto_height is not None: body['autoHeight'] = self.auto_height
+        if self.col is not None: body['col'] = self.col
+        if self.row is not None: body['row'] = self.row
+        if self.size_x is not None: body['sizeX'] = self.size_x
+        if self.size_y is not None: body['sizeY'] = self.size_y
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> WidgetPosition:
         """Deserializes the WidgetPosition from a dictionary."""
-        return cls(
-            auto_height=d.get("autoHeight", None),
-            col=d.get("col", None),
-            row=d.get("row", None),
-            size_x=d.get("sizeX", None),
-            size_y=d.get("sizeY", None),
-        )
+        return cls(auto_height=d.get('autoHeight', None), col=d.get('col', None), row=d.get('row', None), size_x=d.get('sizeX', None), size_y=d.get('sizeY', None))
+
+
+
+
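Reviewer note: every generated dataclass in this file follows the same serialization contract — `as_dict` emits a JSON-ready dict (enums via `.value`, nested objects via their own `as_dict`, camel-cased wire names like `sizeX` handled in the mapping), `as_shallow_dict` keeps nested objects as-is, and `from_dict` reverses the mapping with helpers such as `_enum` and `_from_dict`. A minimal round-trip sketch against the classes above (field values are illustrative):

    from databricks.sdk.service.sql import (WarehouseTypePair,
                                            WarehouseTypePairWarehouseType,
                                            WidgetPosition)

    # Enum-valued fields serialize to their string value...
    pair = WarehouseTypePair(enabled=False,
                             warehouse_type=WarehouseTypePairWarehouseType.CLASSIC)
    assert pair.as_dict() == {'enabled': False, 'warehouse_type': 'CLASSIC'}
    # ...and from_dict restores the enum member.
    assert WarehouseTypePair.from_dict(pair.as_dict()) == pair

    # Camel-cased wire names (autoHeight, sizeX, sizeY) round-trip, too.
    pos = WidgetPosition(auto_height=False, col=0, row=0, size_x=3, size_y=2)
    assert WidgetPosition.from_dict(pos.as_dict()) == pos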
 class AlertsAPI:
@@ -8445,116 +6936,145 @@ class AlertsAPI:
     periodically runs a query, evaluates a condition of its result, and notifies one or more users
     and/or notification destinations if the condition was met. Alerts can be scheduled using the
     `sql_task` type of the Jobs API, e.g. :method:jobs/create."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def create(
-        self, *, alert: Optional[CreateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None
-    ) -> Alert:
-        """Create an alert.
+

-        Creates an alert.
+
+
+
+
+    def create(self
+
+               , *
+               , alert: Optional[CreateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None) -> Alert:
+        """Create an alert.
+
+        Creates an alert.
+
         :param alert: :class:`CreateAlertRequestAlert` (optional)
         :param auto_resolve_display_name: bool (optional)
           If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the
          alert's display name conflicts with an existing alert's display name.
-
+        
         :returns: :class:`Alert`
         """
         body = {}
-        if alert is not None:
-            body["alert"] = alert.as_dict()
-        if auto_resolve_display_name is not None:
-            body["auto_resolve_display_name"] = auto_resolve_display_name
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/sql/alerts", body=body, headers=headers)
+        if alert is not None: body['alert'] = alert.as_dict()
+        if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST','/api/2.0/sql/alerts', body=body
+
+                           , headers=headers
+                           )
         return Alert.from_dict(res)

-    def delete(self, id: str):
-        """Delete an alert.
+
+
+
+    def delete(self
+               , id: str
+               ):
+        """Delete an alert.
+
         Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views,
         and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is
         permanently deleted after 30 days.
-
+        
         :param id: str
-
-
+        
+        
         """
+
+        headers = {'Accept': 'application/json',}
+
+        self._api.do('DELETE',f'/api/2.0/sql/alerts/{id}'
+
+                     , headers=headers
+                     )
+

-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("DELETE", f"/api/2.0/sql/alerts/{id}", headers=headers)
+
+

-    def get(self, id: str) -> Alert:
+    def get(self
+            , id: str
+            ) -> Alert:
         """Get an alert.
-
+        
         Gets an alert.
-
+        
         :param id: str
-
+        
         :returns: :class:`Alert`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/sql/alerts/{id}", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/sql/alerts/{id}'
+
+                           , headers=headers
+                           )
         return Alert.from_dict(res)

-    def list(
-        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[ListAlertsResponseAlert]:
-        """List alerts.
+
+
+
+    def list(self
+
+             , *
+             , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]:
+        """List alerts.
+
         Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this
         API concurrently 10 or more times could result in throttling, service degradation, or a temporary
         ban.
-
+        
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`ListAlertsResponseAlert`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+
+
         while True:
-            json = self._api.do("GET", "/api/2.0/sql/alerts", query=query, headers=headers)
-            if "results" in json:
-                for v in json["results"]:
-                    yield ListAlertsResponseAlert.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def update(
-        self,
-        id: str,
-        update_mask: str,
-        *,
-        alert: Optional[UpdateAlertRequestAlert] = None,
-        auto_resolve_display_name: Optional[bool] = None,
-    ) -> Alert:
-        """Update an alert.
+            json = self._api.do('GET','/api/2.0/sql/alerts', query=query
+
+                                , headers=headers
+                                )
+            if 'results' in json:
+                for v in json['results']:
+                    yield ListAlertsResponseAlert.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+

-        Updates an alert.
+
+
+
+    def update(self
+               , id: str, update_mask: str
+               , *
+               , alert: Optional[UpdateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None) -> Alert:
+        """Update an alert.
+
+        Updates an alert.
+
         :param id: str
         :param update_mask: str
           The field mask must be a single string, with multiple fields separated by commas (no spaces). The
@@ -8562,7 +7082,7 @@ def update(
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-
+        
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
@@ -8570,58 +7090,59 @@ def update(
         :param auto_resolve_display_name: bool (optional)
           If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the
           alert's display name conflicts with an existing alert's display name.
-
+        
         :returns: :class:`Alert`
         """
         body = {}
-        if alert is not None:
-            body["alert"] = alert.as_dict()
-        if auto_resolve_display_name is not None:
-            body["auto_resolve_display_name"] = auto_resolve_display_name
-        if update_mask is not None:
-            body["update_mask"] = update_mask
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("PATCH", f"/api/2.0/sql/alerts/{id}", body=body, headers=headers)
+        if alert is not None: body['alert'] = alert.as_dict()
+        if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('PATCH',f'/api/2.0/sql/alerts/{id}', body=body
+
+                           , headers=headers
+                           )
         return Alert.from_dict(res)
-
+
+
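Reviewer note: a sketch of how the regenerated `AlertsAPI` surface composes, assuming a configured `WorkspaceClient` exposing it as `w.alerts` and that `UpdateAlertRequestAlert` and the list items carry a `display_name` field (not shown in this hunk; names illustrative):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # list() yields ListAlertsResponseAlert items and follows next_page_token internally.
    for a in w.alerts.list(page_size=100):
        print(a.id, a.display_name)

    # update() is a PATCH; update_mask names exactly the fields to replace.
    w.alerts.update(
        id='<alert-id>',
        update_mask='display_name',  # per the docstring, prefer explicit fields over '*'
        alert=sql.UpdateAlertRequestAlert(display_name='Nightly revenue check'),
    )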
 class AlertsLegacyAPI:
     """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL
     object that periodically runs a query, evaluates a condition of its result, and notifies one or
     more users and/or notification destinations if the condition was met. Alerts can be scheduled
     using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
-
+    
     **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
     [Learn more]
-
+    
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def create(
-        self,
-        name: str,
-        options: AlertOptions,
-        query_id: str,
-        *,
-        parent: Optional[str] = None,
-        rearm: Optional[int] = None,
-    ) -> LegacyAlert:
-        """Create an alert.
+
+
+
+
+
+
+    def create(self
+               , name: str, options: AlertOptions, query_id: str
+               , *
+               , parent: Optional[str] = None, rearm: Optional[int] = None) -> LegacyAlert:
+        """Create an alert.
+
         Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
         condition of its result, and notifies users or notification destinations if the condition was met.
-
+        
         **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
         instead. [Learn more]
-
+        
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
+        
         :param name: str
           Name of the alert.
         :param options: :class:`AlertOptions`
@@ -8633,102 +7154,123 @@ def create(
         :param rearm: int (optional)
           Number of seconds after being triggered before the alert rearms itself and can be triggered again.
           If `null`, alert will never be triggered again.
[Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`LegacyAlert` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/preview/sql/alerts", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/preview/sql/alerts' + , headers=headers + ) return [LegacyAlert.from_dict(v) for v in res] - def update(self, alert_id: str, name: str, options: AlertOptions, query_id: str, *, rearm: Optional[int] = None): - """Update an alert. + + + + def update(self + , alert_id: str, name: str, options: AlertOptions, query_id: str + , * + , rearm: Optional[int] = None): + """Update an alert. + Updates an alert. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str :param name: str Name of the alert. @@ -8739,119 +7281,158 @@ def update(self, alert_id: str, name: str, options: AlertOptions, query_id: str, :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - - + + """ body = {} - if name is not None: - body["name"] = name - if options is not None: - body["options"] = options.as_dict() - if query_id is not None: - body["query_id"] = query_id - if rearm is not None: - body["rearm"] = rearm - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", f"/api/2.0/preview/sql/alerts/{alert_id}", body=body, headers=headers) - + if name is not None: body['name'] = name + if options is not None: body['options'] = options.as_dict() + if query_id is not None: body['query_id'] = query_id + if rearm is not None: body['rearm'] = rearm + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PUT',f'/api/2.0/preview/sql/alerts/{alert_id}', body=body + + , headers=headers + ) + + + class AlertsV2API: - """TODO: Add description""" - + """New version of SQL Alerts""" + def __init__(self, api_client): self._api = api_client + - def create_alert(self, alert: AlertV2) -> AlertV2: - """Create an alert. + - Create Alert + - :param alert: :class:`AlertV2` + + + def create_alert(self + , alert: AlertV2 + ) -> AlertV2: + """Create an alert. + + Create Alert + + :param alert: :class:`AlertV2` + :returns: :class:`AlertV2` """ body = alert.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/alerts", body=body, headers=headers) + query = {} + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/alerts', body=body + + , headers=headers + ) return AlertV2.from_dict(res) - def get_alert(self, id: str) -> AlertV2: - """Get an alert. + + + + def get_alert(self + , id: str + ) -> AlertV2: + """Get an alert. + Gets an alert. 
 class AlertsV2API:
-    """TODO: Add description"""
+    """New version of SQL Alerts"""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def create_alert(self, alert: AlertV2) -> AlertV2:
-        """Create an alert.
+

-        Create Alert
+

-        :param alert: :class:`AlertV2`
+
+
+    def create_alert(self
+                     , alert: AlertV2
+                     ) -> AlertV2:
+        """Create an alert.
+
+        Create Alert
+
+        :param alert: :class:`AlertV2`
+        
         :returns: :class:`AlertV2`
         """
         body = alert.as_dict()
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/alerts", body=body, headers=headers)
+        query = {}
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST','/api/2.0/alerts', body=body
+
+                           , headers=headers
+                           )
         return AlertV2.from_dict(res)

-    def get_alert(self, id: str) -> AlertV2:
-        """Get an alert.
+
+
+
+    def get_alert(self
+                  , id: str
+                  ) -> AlertV2:
+        """Create an alert.
+
+        Gets an alert.
-
+        
         :param id: str
-
+        
         :returns: :class:`AlertV2`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/alerts/{id}", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/alerts/{id}'
+
+                           , headers=headers
+                           )
         return AlertV2.from_dict(res)

-    def list_alerts(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AlertV2]:
-        """List alerts.
+
+
+
+    def list_alerts(self
+
+                    , *
+                    , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AlertV2]:
+        """List alerts.
+
         Gets a list of alerts accessible to the user, ordered by creation time.
-
+        
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`AlertV2`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+
+
         while True:
-            json = self._api.do("GET", "/api/2.0/alerts", query=query, headers=headers)
-            if "results" in json:
-                for v in json["results"]:
-                    yield AlertV2.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def trash_alert(self, id: str):
-        """Delete an alert.
+            json = self._api.do('GET','/api/2.0/alerts', query=query
+
+                                , headers=headers
+                                )
+            if 'results' in json:
+                for v in json['results']:
+                    yield AlertV2.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
+
+
+
+    def trash_alert(self
+                    , id: str
+                    ):
+        """Delete an alert.
+
         Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no
         longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
         deleted after 30 days.
-
+        
         :param id: str
-
-
+        
+        
         """
+
+        headers = {'Accept': 'application/json',}
+
+        self._api.do('DELETE',f'/api/2.0/alerts/{id}'
+
+                     , headers=headers
+                     )
+

-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("DELETE", f"/api/2.0/alerts/{id}", headers=headers)
+
+

-    def update_alert(self, id: str, alert: AlertV2, update_mask: str) -> AlertV2:
+    def update_alert(self
+                     , id: str, alert: AlertV2, update_mask: str
+                     ) -> AlertV2:
         """Update an alert.
-
+        
         Update alert
-
+        
         :param id: str
           UUID identifying the alert.
         :param alert: :class:`AlertV2`
@@ -8861,44 +7442,47 @@ def update_alert(self, id: str, alert: AlertV2, update_mask: str) -> AlertV2:
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-
+        
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
-
+        
         :returns: :class:`AlertV2`
         """
         body = alert.as_dict()
         query = {}
-        if update_mask is not None:
-            query["update_mask"] = update_mask
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("PATCH", f"/api/2.0/alerts/{id}", query=query, body=body, headers=headers)
+        if update_mask is not None: query['update_mask'] = update_mask
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('PATCH',f'/api/2.0/alerts/{id}', query=query, body=body
+
+                           , headers=headers
+                           )
         return AlertV2.from_dict(res)
-
+
+
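Reviewer note: `update_alert` in the v2 surface sends the full `AlertV2` body but only applies the fields named in `update_mask`, which travels as a query parameter here (unlike the v1 PATCH, where it sits in the body). A sketch, assuming the accessor is `w.alerts_v2` and that `AlertV2` carries `id` and `display_name` fields (not shown in this hunk):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    alert = w.alerts_v2.get_alert(id='<alert-uuid>')
    alert.display_name = 'p95 latency breach'
    # Only display_name is replaced; other fields in the body are left untouched.
    w.alerts_v2.update_alert(id=alert.id, alert=alert, update_mask='display_name')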
 class DashboardWidgetsAPI:
     """This is an evolving API that facilitates the addition and removal of widgets from existing
     dashboards within the Databricks Workspace. Data structures may change over time."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def create(
-        self,
-        dashboard_id: str,
-        options: WidgetOptions,
-        width: int,
-        *,
-        text: Optional[str] = None,
-        visualization_id: Optional[str] = None,
-    ) -> Widget:
-        """Add widget to a dashboard.
+
+
+
+
+
+
+    def create(self
+               , dashboard_id: str, options: WidgetOptions, width: int
+               , *
+               , text: Optional[str] = None, visualization_id: Optional[str] = None) -> Widget:
+        """Add widget to a dashboard.
+
         :param dashboard_id: str
           Dashboard ID returned by :method:dashboards/create.
         :param options: :class:`WidgetOptions`
          contains a visualization in the `visualization` field.
         :param visualization_id: str (optional)
           Query Visualization ID returned by :method:queryvisualizations/create.
-
+        
         :returns: :class:`Widget`
         """
         body = {}
-        if dashboard_id is not None:
-            body["dashboard_id"] = dashboard_id
-        if options is not None:
-            body["options"] = options.as_dict()
-        if text is not None:
-            body["text"] = text
-        if visualization_id is not None:
-            body["visualization_id"] = visualization_id
-        if width is not None:
-            body["width"] = width
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/preview/sql/widgets", body=body, headers=headers)
+        if dashboard_id is not None: body['dashboard_id'] = dashboard_id
+        if options is not None: body['options'] = options.as_dict()
+        if text is not None: body['text'] = text
+        if visualization_id is not None: body['visualization_id'] = visualization_id
+        if width is not None: body['width'] = width
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST','/api/2.0/preview/sql/widgets', body=body
+
+                           , headers=headers
+                           )
         return Widget.from_dict(res)

-    def delete(self, id: str):
-        """Remove widget.
+
+
+
+    def delete(self
+               , id: str
+               ):
+        """Remove widget.
+
         :param id: str
           Widget ID returned by :method:dashboardwidgets/create
-
-
+        
+        
         """
+
+        headers = {'Accept': 'application/json',}
+
+        self._api.do('DELETE',f'/api/2.0/preview/sql/widgets/{id}'
+
+                     , headers=headers
+                     )
+

-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("DELETE", f"/api/2.0/preview/sql/widgets/{id}", headers=headers)
-
-    def update(
-        self,
-        id: str,
-        dashboard_id: str,
-        options: WidgetOptions,
-        width: int,
-        *,
-        text: Optional[str] = None,
-        visualization_id: Optional[str] = None,
-    ) -> Widget:
-        """Update existing widget.
+
+
+
+    def update(self
+               , id: str, dashboard_id: str, options: WidgetOptions, width: int
+               , *
+               , text: Optional[str] = None, visualization_id: Optional[str] = None) -> Widget:
+        """Update existing widget.
+
         :param id: str
           Widget ID returned by :method:dashboardwidgets/create
         :param dashboard_id: str
@@ -8970,51 +7555,49 @@ def update(
           contains a visualization in the `visualization` field.
         :param visualization_id: str (optional)
           Query Visualization ID returned by :method:queryvisualizations/create.
-
+        
         :returns: :class:`Widget`
         """
         body = {}
-        if dashboard_id is not None:
-            body["dashboard_id"] = dashboard_id
-        if options is not None:
-            body["options"] = options.as_dict()
-        if text is not None:
-            body["text"] = text
-        if visualization_id is not None:
-            body["visualization_id"] = visualization_id
-        if width is not None:
-            body["width"] = width
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", f"/api/2.0/preview/sql/widgets/{id}", body=body, headers=headers)
+        if dashboard_id is not None: body['dashboard_id'] = dashboard_id
+        if options is not None: body['options'] = options.as_dict()
+        if text is not None: body['text'] = text
+        if visualization_id is not None: body['visualization_id'] = visualization_id
+        if width is not None: body['width'] = width
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST',f'/api/2.0/preview/sql/widgets/{id}', body=body
+
+                           , headers=headers
+                           )
         return Widget.from_dict(res)
-
+
+
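Reviewer note: per the docstrings above, a widget is either a textbox (`text` set) or a reference to a query visualization (`visualization_id` set), and `options.position` places it on the dashboard grid; `Widget.width` is documented as unused. A sketch with illustrative IDs:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    widget = w.dashboard_widgets.create(
        dashboard_id='<dashboard-id>',      # from :method:dashboards/create
        options=sql.WidgetOptions(
            title='Weekly actives',
            position=sql.WidgetPosition(col=0, row=0, size_x=3, size_y=2),
        ),
        width=1,                            # legacy field; grid size lives in position
        visualization_id='<viz-id>',        # or text='...' for a textbox widget
    )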
 class DashboardsAPI:
     """In general, there is little need to modify dashboards using the API. However, it can be useful to
     use dashboard objects to look-up a collection of related query IDs. The API can also be used to
     duplicate multiple dashboards at once since you can get a dashboard definition with a GET request
     and then POST it to create a new one. Dashboards can be scheduled using the `sql_task` type of the
     Jobs API, e.g. :method:jobs/create."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def create(
-        self,
-        name: str,
-        *,
-        dashboard_filters_enabled: Optional[bool] = None,
-        is_favorite: Optional[bool] = None,
-        parent: Optional[str] = None,
-        run_as_role: Optional[RunAsRole] = None,
-        tags: Optional[List[str]] = None,
-    ) -> Dashboard:
-        """Create a dashboard object.
+
+
+
+
+
+
+    def create(self
+               , name: str
+               , *
+               , dashboard_filters_enabled: Optional[bool] = None, is_favorite: Optional[bool] = None, parent: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> Dashboard:
+        """Create a dashboard object.
+
         :param name: str
           The title of this dashboard that appears in list views and at the top of the dashboard page.
         :param dashboard_filters_enabled: bool (optional)
@@ -9027,79 +7610,88 @@ def create(
           Sets the **Run as** role for the object.
 Must be set to one of `"viewer"` (signifying "run as
           viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
         :param tags: List[str] (optional)
-
+        
         :returns: :class:`Dashboard`
         """
         body = {}
-        if dashboard_filters_enabled is not None:
-            body["dashboard_filters_enabled"] = dashboard_filters_enabled
-        if is_favorite is not None:
-            body["is_favorite"] = is_favorite
-        if name is not None:
-            body["name"] = name
-        if parent is not None:
-            body["parent"] = parent
-        if run_as_role is not None:
-            body["run_as_role"] = run_as_role.value
-        if tags is not None:
-            body["tags"] = [v for v in tags]
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/preview/sql/dashboards", body=body, headers=headers)
+        if dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = dashboard_filters_enabled
+        if is_favorite is not None: body['is_favorite'] = is_favorite
+        if name is not None: body['name'] = name
+        if parent is not None: body['parent'] = parent
+        if run_as_role is not None: body['run_as_role'] = run_as_role.value
+        if tags is not None: body['tags'] = [v for v in tags]
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST','/api/2.0/preview/sql/dashboards', body=body
+
+                           , headers=headers
+                           )
         return Dashboard.from_dict(res)

-    def delete(self, dashboard_id: str):
-        """Remove a dashboard.
+
+
+
+    def delete(self
+               , dashboard_id: str
+               ):
+        """Remove a dashboard.
+
         Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and
         cannot be shared.
-
+        
         :param dashboard_id: str
-
-
+        
+        
         """
+
+        headers = {'Accept': 'application/json',}
+
+        self._api.do('DELETE',f'/api/2.0/preview/sql/dashboards/{dashboard_id}'
+
+                     , headers=headers
+                     )
+

-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("DELETE", f"/api/2.0/preview/sql/dashboards/{dashboard_id}", headers=headers)
+
+

-    def get(self, dashboard_id: str) -> Dashboard:
+    def get(self
+            , dashboard_id: str
+            ) -> Dashboard:
         """Retrieve a definition.
-
+        
         Returns a JSON representation of a dashboard object, including its visualization and query objects.
-
+        
         :param dashboard_id: str
-
+        
         :returns: :class:`Dashboard`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/preview/sql/dashboards/{dashboard_id}", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/preview/sql/dashboards/{dashboard_id}'
+
+                           , headers=headers
+                           )
         return Dashboard.from_dict(res)

-    def list(
-        self,
-        *,
-        order: Optional[ListOrder] = None,
-        page: Optional[int] = None,
-        page_size: Optional[int] = None,
-        q: Optional[str] = None,
-    ) -> Iterator[Dashboard]:
-        """Get dashboard objects.
+
+
+
+    def list(self
+
+             , *
+             , order: Optional[ListOrder] = None, page: Optional[int] = None, page_size: Optional[int] = None, q: Optional[str] = None) -> Iterator[Dashboard]:
+        """Get dashboard objects.
+
         Fetch a paginated list of dashboard objects.
-
+        
         **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
         degradation, or a temporary ban.
-
+        
         :param order: :class:`ListOrder` (optional)
           Name of dashboard attribute to order by.
         :param page: int (optional)
@@ -9108,70 +7700,77 @@ def list(
           Number of dashboards to return per page.
         :param q: str (optional)
           Full text search term.
-
+        
         :returns: Iterator over :class:`Dashboard`
         """
-
+        
         query = {}
-        if order is not None:
-            query["order"] = order.value
-        if page is not None:
-            query["page"] = page
-        if page_size is not None:
-            query["page_size"] = page_size
-        if q is not None:
-            query["q"] = q
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if order is not None: query['order'] = order.value
+        if page is not None: query['page'] = page
+        if page_size is not None: query['page_size'] = page_size
+        if q is not None: query['q'] = q
+        headers = {'Accept': 'application/json',}
+
+
         # deduplicate items that may have been added during iteration
         seen = set()
-        query["page"] = 1
+        query['page'] =1
         while True:
-            json = self._api.do("GET", "/api/2.0/preview/sql/dashboards", query=query, headers=headers)
-            if "results" in json:
-                for v in json["results"]:
-                    i = v["id"]
-                    if i in seen:
-                        continue
-                    seen.add(i)
-                    yield Dashboard.from_dict(v)
-            if "results" not in json or not json["results"]:
-                return
-            query["page"] += 1
-
-    def restore(self, dashboard_id: str):
-        """Restore a dashboard.
+            json = self._api.do('GET','/api/2.0/preview/sql/dashboards', query=query
+
+                                , headers=headers
+                                )
+            if 'results' in json:
+                for v in json['results']:
+                    i = v['id']
+                    if i in seen:
+                        continue
+                    seen.add(i)
+                    yield Dashboard.from_dict(v)
+            if 'results' not in json or not json['results']:
+                return
+            query['page'] += 1
+

-        A restored dashboard appears in list views and searches and can be shared.
+
+
+
+    def restore(self
+                , dashboard_id: str
+                ):
+        """Restore a dashboard.
+
+        A restored dashboard appears in list views and searches and can be shared.
+
         :param dashboard_id: str
-
-
+        
+        
         """
+
+        headers = {'Accept': 'application/json',}
+
+        self._api.do('POST',f'/api/2.0/preview/sql/dashboards/trash/{dashboard_id}'
+
+                     , headers=headers
+                     )
+

-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("POST", f"/api/2.0/preview/sql/dashboards/trash/{dashboard_id}", headers=headers)
+
+

-    def update(
-        self,
-        dashboard_id: str,
-        *,
-        name: Optional[str] = None,
-        run_as_role: Optional[RunAsRole] = None,
-        tags: Optional[List[str]] = None,
-    ) -> Dashboard:
+    def update(self
+               , dashboard_id: str
+               , *
+               , name: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> Dashboard:
         """Change a dashboard definition.
-
+        
         Modify this dashboard definition. This operation only affects attributes of the dashboard object. It
         does not add, modify, or remove widgets.
-
+        
         **Note**: You cannot undo this operation.
-
+        
         :param dashboard_id: str
         :param name: str (optional)
           The title of this dashboard that appears in list views and at the top of the dashboard page.
@@ -9179,331 +7778,383 @@
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`Dashboard` """ body = {} - if name is not None: - body["name"] = name - if run_as_role is not None: - body["run_as_role"] = run_as_role.value - if tags is not None: - body["tags"] = [v for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/preview/sql/dashboards/{dashboard_id}", body=body, headers=headers) + if name is not None: body['name'] = name + if run_as_role is not None: body['run_as_role'] = run_as_role.value + if tags is not None: body['tags'] = [v for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/preview/sql/dashboards/{dashboard_id}', body=body + + , headers=headers + ) return Dashboard.from_dict(res) - + + class DataSourcesAPI: """This API is provided to assist you in making new query objects. When creating a query object, you may optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't already know the `data_source_id` for your desired SQL warehouse, this API will help you find it. - + This API does not support searches. It returns the full list of SQL warehouses in your workspace. We advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" - + def __init__(self, api_client): self._api = api_client + + + + + + + + def list(self) -> Iterator[DataSource]: """Get a list of SQL warehouses. - + Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`DataSource` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/preview/sql/data_sources", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/preview/sql/data_sources' + , headers=headers + ) return [DataSource.from_dict(v) for v in res] - + + class DbsqlPermissionsAPI: """The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this exposes only one endpoint, which gets the Access Control List for a given object. You cannot modify any permissions using this API. - + There are three levels of permission: - + - `CAN_VIEW`: Allows read-only access - + - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`) - + - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" - + def __init__(self, api_client): self._api = api_client + - def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse: - """Get object ACL. 
 class DataSourcesAPI:
     """This API is provided to assist you in making new query objects. When creating a query object, you
     may optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you
     don't already know the `data_source_id` for your desired SQL warehouse, this API will help you find
     it.
-
+    
     This API does not support searches. It returns the full list of SQL warehouses in your workspace.
     We advise you to use any text editor, REST client, or `grep` to search the response from this API
     for the name of your SQL warehouse as it appears in Databricks SQL.
-
+    
     **Note**: A new version of the Databricks SQL API is now available. [Learn more]
-
+    
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+
+
+
+
+
+
+
     def list(self) -> Iterator[DataSource]:
         """Get a list of SQL warehouses.
-
+        
         Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this
         API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
         queries against it.
-
+        
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:warehouses/list instead. [Learn more]
-
+        
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
+        
         :returns: Iterator over :class:`DataSource`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", "/api/2.0/preview/sql/data_sources", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET','/api/2.0/preview/sql/data_sources'
+                           , headers=headers
+                           )
         return [DataSource.from_dict(v) for v in res]
-
+
+
 class DbsqlPermissionsAPI:
     """The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this
     exposes only one endpoint, which gets the Access Control List for a given object. You cannot modify
     any permissions using this API.
-
+    
     There are three levels of permission:
-
+    
     - `CAN_VIEW`: Allows read-only access
-
+    
     - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
-
+    
     - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of
     `CAN_RUN`)
-
+    
     **Note**: A new version of the Databricks SQL API is now available. [Learn more]
-
+    
     [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse:
-        """Get object ACL.
+

-        Gets a JSON representation of the access control list (ACL) for a specified object.
+
+
+
+
+    def get(self
+            , object_type: ObjectTypePlural, object_id: str
+            ) -> GetResponse:
+        """Get object ACL.
+
+        Gets a JSON representation of the access control list (ACL) for a specified object.
+
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:workspace/getpermissions instead. [Learn more]
-
+        
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
+        
         :param object_type: :class:`ObjectTypePlural`
           The type of object permissions to check.
         :param object_id: str
           Object ID. An ACL is returned for the object with this UUID.
-
+        
         :returns: :class:`GetResponse`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}'
+
+                           , headers=headers
+                           )
         return GetResponse.from_dict(res)

-    def set(
-        self,
-        object_type: ObjectTypePlural,
-        object_id: str,
-        *,
-        access_control_list: Optional[List[AccessControl]] = None,
-    ) -> SetResponse:
-        """Set object ACL.
+
+
+
+    def set(self
+            , object_type: ObjectTypePlural, object_id: str
+            , *
+            , access_control_list: Optional[List[AccessControl]] = None) -> SetResponse:
+        """Set object ACL.
+
         Sets the access control list (ACL) for a specified object. This operation will completely rewrite
         the ACL.
-
+        
         **Note**: A new version of the Databricks SQL API is now available. Please use
         :method:workspace/setpermissions instead. [Learn more]
-
+        
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
+        
         :param object_type: :class:`ObjectTypePlural`
           The type of object permission to set.
         :param object_id: str
           Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.
         :param access_control_list: List[:class:`AccessControl`] (optional)
-
+        
         :returns: :class:`SetResponse`
         """
         body = {}
-        if access_control_list is not None:
-            body["access_control_list"] = [v.as_dict() for v in access_control_list]
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "POST", f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}", body=body, headers=headers
-        )
+        if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST',f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}', body=body
+
+                           , headers=headers
+                           )
         return SetResponse.from_dict(res)
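Reviewer note: `set` replaces the whole ACL rather than merging, so every principal that should keep access must be listed. A sketch, assuming `AccessControl` takes `user_name`/`permission_level`, `PermissionLevel` has a `CAN_RUN` member, and `ObjectTypePlural` has a `QUERIES` member (none of which appear in this hunk):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # Full overwrite: omitting an existing principal revokes their access.
    w.dbsql_permissions.set(
        object_type=sql.ObjectTypePlural.QUERIES,
        object_id='<query-uuid>',
        access_control_list=[
            sql.AccessControl(user_name='someone@example.com',
                              permission_level=sql.PermissionLevel.CAN_RUN),
        ],
    )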

-    def transfer_ownership(
-        self, object_type: OwnableObjectType, object_id: TransferOwnershipObjectId, *, new_owner: Optional[str] = None
-    ) -> Success:
-        """Transfer object ownership.
+
+
+
+    def transfer_ownership(self
+                           , object_type: OwnableObjectType, object_id: TransferOwnershipObjectId
+                           , *
+                           , new_owner: Optional[str] = None) -> Success:
+        """Transfer object ownership.
+
         Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
-
+        
         **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please
         use :method:queries/update and :method:alerts/update respectively instead.
 [Learn more]
-
+        
         [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
+        
         :param object_type: :class:`OwnableObjectType`
           The type of object on which to change ownership.
         :param object_id: :class:`TransferOwnershipObjectId`
           The ID of the object on which to change ownership.
         :param new_owner: str (optional)
           Email address for the new owner, who must exist in the workspace.
-
+        
         :returns: :class:`Success`
         """
         body = {}
-        if new_owner is not None:
-            body["new_owner"] = new_owner
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do(
-            "POST",
-            f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}/transfer",
-            body=body,
-            headers=headers,
-        )
+        if new_owner is not None: body['new_owner'] = new_owner
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST',f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}/transfer', body=body
+
+                           , headers=headers
+                           )
        return Success.from_dict(res)
-
+
+
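Reviewer note: ownership transfer needs admin rights and an active workspace user, and the object ID travels in the URL path. A sketch, assuming `OwnableObjectType.QUERY` exists and `TransferOwnershipObjectId` takes a `new_id` field (both assumptions; neither appears in this hunk):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    w.dbsql_permissions.transfer_ownership(
        object_type=sql.OwnableObjectType.QUERY,
        object_id=sql.TransferOwnershipObjectId(new_id='<query-uuid>'),
        new_owner='new.owner@example.com',
    )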
 class QueriesAPI:
     """The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL
     object that includes the target SQL warehouse, query text, name, description, tags, and parameters.
     Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create."""
-
+    
     def __init__(self, api_client):
         self._api = api_client
+

-    def create(
-        self, *, auto_resolve_display_name: Optional[bool] = None, query: Optional[CreateQueryRequestQuery] = None
-    ) -> Query:
-        """Create a query.
+

-        Creates a query.
+
+
+
+
+    def create(self
+
+               , *
+               , auto_resolve_display_name: Optional[bool] = None, query: Optional[CreateQueryRequestQuery] = None) -> Query:
+        """Create a query.
+
+        Creates a query.
+
         :param auto_resolve_display_name: bool (optional)
           If true, automatically resolve query display name conflicts. Otherwise, fail the request if the
           query's display name conflicts with an existing query's display name.
         :param query: :class:`CreateQueryRequestQuery` (optional)
-
+        
         :returns: :class:`Query`
         """
         body = {}
-        if auto_resolve_display_name is not None:
-            body["auto_resolve_display_name"] = auto_resolve_display_name
-        if query is not None:
-            body["query"] = query.as_dict()
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("POST", "/api/2.0/sql/queries", body=body, headers=headers)
+        if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name
+        if query is not None: body['query'] = query.as_dict()
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('POST','/api/2.0/sql/queries', body=body
+
+                           , headers=headers
+                           )
         return Query.from_dict(res)

-    def delete(self, id: str):
-        """Delete a query.
+
+
+
+    def delete(self
+               , id: str
+               ):
+        """Delete a query.
+
         Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
         cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
         permanently deleted after 30 days.
-
+        
         :param id: str
-
-
+        
+        
         """
+
+        headers = {'Accept': 'application/json',}
+
+        self._api.do('DELETE',f'/api/2.0/sql/queries/{id}'
+
+                     , headers=headers
+                     )
+

-        headers = {
-            "Accept": "application/json",
-        }
-
-        self._api.do("DELETE", f"/api/2.0/sql/queries/{id}", headers=headers)
+
+

-    def get(self, id: str) -> Query:
+    def get(self
+            , id: str
+            ) -> Query:
         """Get a query.
-
+        
         Gets a query.
-
+        
         :param id: str
-
+        
         :returns: :class:`Query`
         """
-
-        headers = {
-            "Accept": "application/json",
-        }
-
-        res = self._api.do("GET", f"/api/2.0/sql/queries/{id}", headers=headers)
+
+        headers = {'Accept': 'application/json',}
+
+        res = self._api.do('GET',f'/api/2.0/sql/queries/{id}'
+
+                           , headers=headers
+                           )
         return Query.from_dict(res)

-    def list(
-        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[ListQueryObjectsResponseQuery]:
-        """List queries.
+
+
+
+    def list(self
+
+             , *
+             , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]:
+        """List queries.
+
         Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this
         API concurrently 10 or more times could result in throttling, service degradation, or a temporary
         ban.
-
+        
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`ListQueryObjectsResponseQuery`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+
+
         while True:
-            json = self._api.do("GET", "/api/2.0/sql/queries", query=query, headers=headers)
-            if "results" in json:
-                for v in json["results"]:
-                    yield ListQueryObjectsResponseQuery.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def list_visualizations(
-        self, id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
-    ) -> Iterator[Visualization]:
-        """List visualizations on a query.
+            json = self._api.do('GET','/api/2.0/sql/queries', query=query
+
+                                , headers=headers
+                                )
+            if 'results' in json:
+                for v in json['results']:
+                    yield ListQueryObjectsResponseQuery.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+

-        Gets a list of visualizations on a query.
+
+
+
+    def list_visualizations(self
+                            , id: str
+                            , *
+                            , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Visualization]:
+        """List visualizations on a query.
+
+        Gets a list of visualizations on a query.
+
         :param id: str
         :param page_size: int (optional)
         :param page_token: str (optional)
-
+        
         :returns: Iterator over :class:`Visualization`
         """
-
+        
         query = {}
-        if page_size is not None:
-            query["page_size"] = page_size
-        if page_token is not None:
-            query["page_token"] = page_token
-        headers = {
-            "Accept": "application/json",
-        }
-
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json',}
+
+
         while True:
-            json = self._api.do("GET", f"/api/2.0/sql/queries/{id}/visualizations", query=query, headers=headers)
-            if "results" in json:
-                for v in json["results"]:
-                    yield Visualization.from_dict(v)
-            if "next_page_token" not in json or not json["next_page_token"]:
-                return
-            query["page_token"] = json["next_page_token"]
-
-    def update(
-        self,
-        id: str,
-        update_mask: str,
-        *,
-        auto_resolve_display_name: Optional[bool] = None,
-        query: Optional[UpdateQueryRequestQuery] = None,
-    ) -> Query:
-        """Update a query.
+            json = self._api.do('GET',f'/api/2.0/sql/queries/{id}/visualizations', query=query
+
+                                , headers=headers
+                                )
+            if 'results' in json:
+                for v in json['results']:
+                    yield Visualization.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+

-        Updates a query.
+
+
+
+    def update(self
+               , id: str, update_mask: str
+               , *
+               , auto_resolve_display_name: Optional[bool] = None, query: Optional[UpdateQueryRequestQuery] = None) -> Query:
+        """Update a query.
+
+        Updates a query.
+
         :param id: str
         :param update_mask: str
           The field mask must be a single string, with multiple fields separated by commas (no spaces). The
@@ -9511,7 +8162,7 @@ def update(
           `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
           the entire collection field can be specified. Field names must exactly match the resource field
           names.
-
+        
           A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
           fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
           changes in the future.
@@ -9519,70 +8170,68 @@ def update(
           If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the
           alert's display name conflicts with an existing alert's display name.
         :param query: :class:`UpdateQueryRequestQuery` (optional)
-
+        
         :returns: :class:`Query`
         """
         body = {}
-        if auto_resolve_display_name is not None:
-            body["auto_resolve_display_name"] = auto_resolve_display_name
-        if query is not None:
-            body["query"] = query.as_dict()
-        if update_mask is not None:
-            body["update_mask"] = update_mask
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/json",
-        }
-
-        res = self._api.do("PATCH", f"/api/2.0/sql/queries/{id}", body=body, headers=headers)
+        if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name
+        if query is not None: body['query'] = query.as_dict()
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+
+        res = self._api.do('PATCH',f'/api/2.0/sql/queries/{id}', body=body
+
+                           , headers=headers
+                           )
         return Query.from_dict(res)
-
+
+
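Reviewer note: the current queries surface mirrors the alerts one — create/update take a nested request object plus `auto_resolve_display_name`, and listings page by token. A sketch, assuming `CreateQueryRequestQuery` accepts `display_name`, `query_text`, and `warehouse_id` (field names not shown in this hunk):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    q = w.queries.create(
        query=sql.CreateQueryRequestQuery(
            display_name='Daily orders',
            query_text='SELECT count(*) FROM orders',
            warehouse_id='<warehouse-id>',
        ),
        auto_resolve_display_name=True,  # rename instead of failing on a name clash
    )

    # Visualizations attached to the query are listed with the same token paging.
    for viz in w.queries.list_visualizations(id=q.id):
        print(viz.id)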
+ + + + + + + def create(self + + , * + , data_source_id: Optional[str] = None, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, parent: Optional[str] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> LegacyQuery: + """Create a new query definition. + Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. - + The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the `data_source_id` from an existing query. - + **Note**: You cannot add a visualization until you create the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] - + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list :param description: str (optional) General description that conveys additional information about this query such as usage notes. @@ -9600,112 +8249,119 @@ def create( Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`LegacyQuery` """ body = {} - if data_source_id is not None: - body["data_source_id"] = data_source_id - if description is not None: - body["description"] = description - if name is not None: - body["name"] = name - if options is not None: - body["options"] = options - if parent is not None: - body["parent"] = parent - if query is not None: - body["query"] = query - if run_as_role is not None: - body["run_as_role"] = run_as_role.value - if tags is not None: - body["tags"] = [v for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/sql/queries", body=body, headers=headers) + if data_source_id is not None: body['data_source_id'] = data_source_id + if description is not None: body['description'] = description + if name is not None: body['name'] = name + if options is not None: body['options'] = options + if parent is not None: body['parent'] = parent + if query is not None: body['query'] = query + if run_as_role is not None: body['run_as_role'] = run_as_role.value + if tags is not None: body['tags'] = [v for v in tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/preview/sql/queries', body=body + + , headers=headers + ) return LegacyQuery.from_dict(res) - def delete(self, query_id: str): - """Delete a query. + + + + def delete(self + , query_id: str + ): + """Delete a query. + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete instead. 
[Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/preview/sql/queries/{query_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/preview/sql/queries/{query_id}", headers=headers) + + + - def get(self, query_id: str) -> LegacyQuery: + def get(self + , query_id: str + ) -> LegacyQuery: """Get a query definition. - + Retrieve a query object definition along with contextual permissions information about the currently authenticated user. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - + :returns: :class:`LegacyQuery` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/preview/sql/queries/{query_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/preview/sql/queries/{query_id}' + + , headers=headers + ) return LegacyQuery.from_dict(res) - def list( - self, - *, - order: Optional[str] = None, - page: Optional[int] = None, - page_size: Optional[int] = None, - q: Optional[str] = None, - ) -> Iterator[LegacyQuery]: - """Get a list of queries. + + + + def list(self + + , * + , order: Optional[str] = None, page: Optional[int] = None, page_size: Optional[int] = None, q: Optional[str] = None) -> Iterator[LegacyQuery]: + """Get a list of queries. + Gets a list of queries. Optionally, this list can be filtered by a search term. - + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order descending instead. - + - `name`: The name of the query. - + - `created_at`: The timestamp the query was created. - + - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank value is treated as the highest value for sorting. - + - `executed_at`: The timestamp when the query was last run. - + - `created_by`: The user name of the user that created the query. :param page: int (optional) Page number to retrieve. @@ -9713,89 +8369,92 @@ def list( Number of queries to return per page. 
:param q: str (optional)
 Full text search term
- 
+ 
 :returns: Iterator over :class:`LegacyQuery`
 """
- 
+ 
 query = {}
- if order is not None:
- query["order"] = order
- if page is not None:
- query["page"] = page
- if page_size is not None:
- query["page_size"] = page_size
- if q is not None:
- query["q"] = q
- headers = {
- "Accept": "application/json",
- }
- 
+ if order is not None: query['order'] = order
+ if page is not None: query['page'] = page
+ if page_size is not None: query['page_size'] = page_size
+ if q is not None: query['q'] = q
+ headers = {'Accept': 'application/json',}
+ 
+ 
 # deduplicate items that may have been added during iteration
 seen = set()
- query["page"] = 1
+ query['page'] = 1
 while True:
- json = self._api.do("GET", "/api/2.0/preview/sql/queries", query=query, headers=headers)
- if "results" in json:
- for v in json["results"]:
- i = v["id"]
- if i in seen:
- continue
- seen.add(i)
- yield LegacyQuery.from_dict(v)
- if "results" not in json or not json["results"]:
- return
- query["page"] += 1
-
- def restore(self, query_id: str):
- """Restore a query.
+ json = self._api.do('GET','/api/2.0/preview/sql/queries', query=query
+ 
+ , headers=headers
+ )
+ if 'results' in json:
+ for v in json['results']:
+ i = v['id']
+ if i in seen:
+ continue
+ seen.add(i)
+ yield LegacyQuery.from_dict(v)
+ if 'results' not in json or not json['results']:
+ return
+ query['page'] += 1
+ 
+ 
+ 
+ 
+ def restore(self
+ , query_id: str
+ ):
+ """Restore a query.
+ 
 Restore a query that has been moved to the trash. A restored query appears in list views and
 searches. You can use restored queries for alerts.
- 
+ 
 **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
 [Learn more]
- 
+ 
 [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
- 
+ 
 :param query_id: str
- 
- 
+ 
+ 
 """
+ 
+ headers = {'Accept': 'application/json',}
+ 
+ self._api.do('POST',f'/api/2.0/preview/sql/queries/trash/{query_id}'
+ 
+ , headers=headers
+ )
+ 
- headers = {
- "Accept": "application/json",
- }
-
- self._api.do("POST", f"/api/2.0/preview/sql/queries/trash/{query_id}", headers=headers)
-
- def update(
- self,
- query_id: str,
- *,
- data_source_id: Optional[str] = None,
- description: Optional[str] = None,
- name: Optional[str] = None,
- options: Optional[Any] = None,
- query: Optional[str] = None,
- run_as_role: Optional[RunAsRole] = None,
- tags: Optional[List[str]] = None,
- ) -> LegacyQuery:
- """Change a query definition.
+ 
+ 
+ 
+ def update(self
+ , query_id: str
+ , *
+ , data_source_id: Optional[str] = None, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> LegacyQuery:
+ """Change a query definition.
+ 
 Modify this query definition.
- 
+ 
 **Note**: You cannot undo this operation.
- 
+ 
 **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
 instead. [Learn more]
- 
+ 
 [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
- 
+ 
 :param query_id: str
 :param data_source_id: str (optional)
 Data source ID maps to the ID of the data source used by the resource and is distinct from the
 warehouse ID. [Learn more]
- 
+ 
 [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
 :param description: str (optional)
 General description that conveys additional information about this query such as usage notes.
@@ -9811,56 +8470,54 @@ def update(
 Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
 viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
 :param tags: List[str] (optional)
- 
+ 
 :returns: :class:`LegacyQuery`
 """
 body = {}
- if data_source_id is not None:
- body["data_source_id"] = data_source_id
- if description is not None:
- body["description"] = description
- if name is not None:
- body["name"] = name
- if options is not None:
- body["options"] = options
- if query is not None:
- body["query"] = query
- if run_as_role is not None:
- body["run_as_role"] = run_as_role.value
- if tags is not None:
- body["tags"] = [v for v in tags]
- headers = {
- "Accept": "application/json",
- "Content-Type": "application/json",
- }
-
- res = self._api.do("POST", f"/api/2.0/preview/sql/queries/{query_id}", body=body, headers=headers)
+ if data_source_id is not None: body['data_source_id'] = data_source_id
+ if description is not None: body['description'] = description
+ if name is not None: body['name'] = name
+ if options is not None: body['options'] = options
+ if query is not None: body['query'] = query
+ if run_as_role is not None: body['run_as_role'] = run_as_role.value
+ if tags is not None: body['tags'] = [v for v in tags]
+ headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+ 
+ res = self._api.do('POST',f'/api/2.0/preview/sql/queries/{query_id}', body=body
+ 
+ , headers=headers
+ )
 return LegacyQuery.from_dict(res)
- 
+ 
+ 
 class QueryHistoryAPI:
 """A service responsible for storing and retrieving the list of queries run against SQL endpoints and
 serverless compute."""
- 
 def __init__(self, api_client):
 self._api = api_client
+ 
- def list(
- self,
- *,
- filter_by: Optional[QueryFilter] = None,
- include_metrics: Optional[bool] = None,
- max_results: Optional[int] = None,
- page_token: Optional[str] = None,
- ) -> ListQueriesResponse:
- """List Queries.
+ 
- List the history of queries through SQL warehouses, and serverless compute.
+ 
+ 
+ 
+ 
+ def list(self
+ 
+ , *
+ , filter_by: Optional[QueryFilter] = None, include_metrics: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> ListQueriesResponse:
+ """List Queries.
+ 
+ List the history of queries through SQL warehouses, and serverless compute.
+ 
 You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
 returned first (up to max_results in request). The pagination token returned in response can be
 used to list subsequent query statuses.
- 
+ 
 :param filter_by: :class:`QueryFilter` (optional)
 A filter to limit query history results. This field is optional.
 :param include_metrics: bool (optional)
@@ -9872,77 +8529,98 @@ def list(
 A token that can be used to get the next page of results. The token can contain characters that
 need to be encoded before using it in a URL. For example, the character '+' needs to be replaced
 by %2B. This field is optional.
- + :returns: :class:`ListQueriesResponse` """ - + query = {} - if filter_by is not None: - query["filter_by"] = filter_by.as_dict() - if include_metrics is not None: - query["include_metrics"] = include_metrics - if max_results is not None: - query["max_results"] = max_results - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/sql/history/queries", query=query, headers=headers) + if filter_by is not None: query['filter_by'] = filter_by.as_dict() + if include_metrics is not None: query['include_metrics'] = include_metrics + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/sql/history/queries', query=query + + , headers=headers + ) return ListQueriesResponse.from_dict(res) - + + class QueryVisualizationsAPI: """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace. Data structures can change over time.""" - + def __init__(self, api_client): self._api = api_client + - def create(self, *, visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization: - """Add a visualization to a query. + - Adds a visualization to a query. + - :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) + + + def create(self + + , * + , visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization: + """Add a visualization to a query. + + Adds a visualization to a query. + + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) + :returns: :class:`Visualization` """ body = {} - if visualization is not None: - body["visualization"] = visualization.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/sql/visualizations", body=body, headers=headers) + if visualization is not None: body['visualization'] = visualization.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/sql/visualizations', body=body + + , headers=headers + ) return Visualization.from_dict(res) - def delete(self, id: str): - """Remove a visualization. + + + + def delete(self + , id: str + ): + """Remove a visualization. + Removes a visualization. - + :param id: str - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/sql/visualizations/{id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/sql/visualizations/{id}", headers=headers) + + + - def update( - self, id: str, update_mask: str, *, visualization: Optional[UpdateVisualizationRequestVisualization] = None - ) -> Visualization: + def update(self + , id: str, update_mask: str + , * + , visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization: """Update a visualization. - + Updates a visualization. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -9950,52 +8628,60 @@ def update( `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. 
Field names must exactly match the resource field names.
- 
+ 
 A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
 fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
 changes in the future.
 :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
- 
+ 
 :returns: :class:`Visualization`
 """
 body = {}
- if update_mask is not None:
- body["update_mask"] = update_mask
- if visualization is not None:
- body["visualization"] = visualization.as_dict()
- headers = {
- "Accept": "application/json",
- "Content-Type": "application/json",
- }
-
- res = self._api.do("PATCH", f"/api/2.0/sql/visualizations/{id}", body=body, headers=headers)
+ if update_mask is not None: body['update_mask'] = update_mask
+ if visualization is not None: body['visualization'] = visualization.as_dict()
+ headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+ 
+ res = self._api.do('PATCH',f'/api/2.0/sql/visualizations/{id}', body=body
+ 
+ , headers=headers
+ )
 return Visualization.from_dict(res)
- 
+ 
+ 
 class QueryVisualizationsLegacyAPI:
 """This is an evolving API that facilitates the addition and removal of visualizations from existing
 queries within the Databricks Workspace. Data structures may change over time.
- 
+ 
 **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
 [Learn more]
- 
+ 
 [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html"""
- 
 def __init__(self, api_client):
 self._api = api_client
+ 
- def create(
- self, query_id: str, type: str, options: Any, *, description: Optional[str] = None, name: Optional[str] = None
- ) -> LegacyVisualization:
- """Add visualization to a query.
+ 
- Creates visualization in the query.
+ 
+ 
+ 
+ 
+ def create(self
+ , query_id: str, type: str, options: Any
+ , *
+ , description: Optional[str] = None, name: Optional[str] = None) -> LegacyVisualization:
+ """Add visualization to a query.
+ 
+ Creates visualization in the query.
+ 
 **Note**: A new version of the Databricks SQL API is now available. Please use
 :method:queryvisualizations/create instead. [Learn more]
- 
+ 
 [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
- 
+ 
 :param query_id: str
 The identifier returned by :method:queries/create
 :param type: str
@@ -10007,71 +8693,70 @@ def create(
 A short description of this visualization. This is not displayed in the UI.
 :param name: str (optional)
 The name of the visualization that appears on dashboards and the query screen.
- 
+ 
 :returns: :class:`LegacyVisualization`
 """
 body = {}
- if description is not None:
- body["description"] = description
- if name is not None:
- body["name"] = name
- if options is not None:
- body["options"] = options
- if query_id is not None:
- body["query_id"] = query_id
- if type is not None:
- body["type"] = type
- headers = {
- "Accept": "application/json",
- "Content-Type": "application/json",
- }
-
- res = self._api.do("POST", "/api/2.0/preview/sql/visualizations", body=body, headers=headers)
+ if description is not None: body['description'] = description
+ if name is not None: body['name'] = name
+ if options is not None: body['options'] = options
+ if query_id is not None: body['query_id'] = query_id
+ if type is not None: body['type'] = type
+ headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+ 
+ res = self._api.do('POST','/api/2.0/preview/sql/visualizations', body=body
+ 
+ , headers=headers
+ )
 return LegacyVisualization.from_dict(res)
- def delete(self, id: str):
- """Remove visualization.
+ 
+ 
+ 
+ def delete(self
+ , id: str
+ ):
+ """Remove visualization.
+ 
 Removes a visualization from the query.
- 
+ 
 **Note**: A new version of the Databricks SQL API is now available. Please use
 :method:queryvisualizations/delete instead. [Learn more]
- 
+ 
 [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
- 
+ 
 :param id: str
 Widget ID returned by :method:queryvisualizations/create
- 
- 
+ 
+ 
 """
+ 
+ headers = {'Accept': 'application/json',}
+ 
+ self._api.do('DELETE',f'/api/2.0/preview/sql/visualizations/{id}'
+ 
+ , headers=headers
+ )
+ 
- headers = {
- "Accept": "application/json",
- }
-
- self._api.do("DELETE", f"/api/2.0/preview/sql/visualizations/{id}", headers=headers)
-
- def update(
- self,
- id: str,
- *,
- created_at: Optional[str] = None,
- description: Optional[str] = None,
- name: Optional[str] = None,
- options: Optional[Any] = None,
- query: Optional[LegacyQuery] = None,
- type: Optional[str] = None,
- updated_at: Optional[str] = None,
- ) -> LegacyVisualization:
- """Edit existing visualization.
+ 
+ 
+ 
+ def update(self
+ , id: str
+ , *
+ , created_at: Optional[str] = None, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[LegacyQuery] = None, type: Optional[str] = None, updated_at: Optional[str] = None) -> LegacyVisualization:
+ """Edit existing visualization.
+ 
 Updates visualization in the query.
- 
+ 
 **Note**: A new version of the Databricks SQL API is now available. Please use
 :method:queryvisualizations/update instead. [Learn more]
- 
+ 
 [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
- 
+ 
 :param id: str
 The UUID for this visualization.
 :param created_at: str (optional)
@@ -10086,63 +8771,66 @@ def update(
 :param type: str (optional)
 The type of visualization: chart, table, pivot table, and so on.
:param updated_at: str (optional)
- 
+ 
 :returns: :class:`LegacyVisualization`
 """
 body = {}
- if created_at is not None:
- body["created_at"] = created_at
- if description is not None:
- body["description"] = description
- if name is not None:
- body["name"] = name
- if options is not None:
- body["options"] = options
- if query is not None:
- body["query"] = query.as_dict()
- if type is not None:
- body["type"] = type
- if updated_at is not None:
- body["updated_at"] = updated_at
- headers = {
- "Accept": "application/json",
- "Content-Type": "application/json",
- }
-
- res = self._api.do("POST", f"/api/2.0/preview/sql/visualizations/{id}", body=body, headers=headers)
+ if created_at is not None: body['created_at'] = created_at
+ if description is not None: body['description'] = description
+ if name is not None: body['name'] = name
+ if options is not None: body['options'] = options
+ if query is not None: body['query'] = query.as_dict()
+ if type is not None: body['type'] = type
+ if updated_at is not None: body['updated_at'] = updated_at
+ headers = {'Accept': 'application/json','Content-Type': 'application/json',}
+ 
+ res = self._api.do('POST',f'/api/2.0/preview/sql/visualizations/{id}', body=body
+ 
+ , headers=headers
+ )
 return LegacyVisualization.from_dict(res)
- 
+ 
+ 
 class RedashConfigAPI:
 """Redash V2 service for workspace configurations (internal)"""
- 
+ 
 def __init__(self, api_client):
 self._api = api_client
+ 
+ 
+ 
+ 
+ 
+ 
+ 
+ 
 def get_config(self) -> ClientConfig:
 """Read workspace configuration for Redash-v2.
- 
+ 
 :returns: :class:`ClientConfig`
 """
- 
- headers = {
- "Accept": "application/json",
- }
-
- res = self._api.do("GET", "/api/2.0/redash-v2/config", headers=headers)
+ 
+ headers = {'Accept': 'application/json',}
+ 
+ res = self._api.do('GET','/api/2.0/redash-v2/config'
+ , headers=headers
+ )
 return ClientConfig.from_dict(res)
- 
+ 
+ 
 class StatementExecutionAPI:
 """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and
 fetch the result.
- 
+ 
 **Getting started**
- 
+ 
 We suggest beginning with the [Databricks SQL Statement Execution API tutorial].
- 
+ 
 **Overview of statement execution and result fetching**
- 
+ 
 Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid
 SQL statement and warehouse ID, along with optional parameters such as the data catalog and output
 format. If no other parameters are specified, the server will wait for up to 10s before returning a
 response. If @@ -10150,7 +8838,7 @@ class StatementExecutionAPI:
 array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response
 will provide the statement ID that can be used to poll for results by using a
 :method:statementexecution/getStatement request.
- 
+ 
 You can specify whether the call should behave synchronously, asynchronously or start synchronously with
 a fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout`
 settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to
 @@ -10158,7 +8846,7 @@ class StatementExecutionAPI:
 statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached
 while the statement execution has not yet finished. This can be set to either `CONTINUE`, to fall back
 to asynchronous mode, or it can be set to `CANCEL`, which cancels the statement.
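As an illustration, a minimal sketch of a synchronous call with asynchronous fallback (this assumes an authenticated `WorkspaceClient` named `w` and an illustrative `warehouse_id`; neither name is part of this module):

```
# Wait up to 30s for the result; fall back to polling if the statement is still running.
resp = w.statement_execution.execute_statement(
    statement="SELECT 1",
    warehouse_id=warehouse_id,  # hypothetical: ID of an existing SQL warehouse
    wait_timeout="30s",
    on_wait_timeout=ExecuteStatementRequestOnWaitTimeout.CONTINUE,
)
while resp.status.state in (StatementState.PENDING, StatementState.RUNNING):
    time.sleep(5)  # poll until a terminal state is reached
    resp = w.statement_execution.get_statement(resp.statement_id)
```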
- + In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30 seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns @@ -10170,38 +8858,38 @@ class StatementExecutionAPI: seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can be used to fetch status and results in the same way as in the asynchronous mode. - + Depending on the size, the result can be split into multiple chunks. If the statement execution is successful, the statement response contains a manifest and the first chunk of the result. The manifest contains schema information and provides metadata for each chunk in the result. Result chunks can be retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a `next_chunk_index` and `next_chunk_internal_link` that point to the next chunk. - + A statement can be canceled with :method:statementexecution/cancelExecution. - + **Fetching result data: format and disposition** - + To specify the format of the result data, use the `format` field, which can be set to one of the following options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`. - + There are two ways to receive statement results, controlled by the `disposition` setting, which can be either `INLINE` or `EXTERNAL_LINKS`: - + - `INLINE`: In this mode, the result data is directly included in the response. It's best suited for smaller results. This mode can only be used with the `JSON_ARRAY` format. - + - `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`. - + By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`. - + **Limits and limitations** - + Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly match the byte count of the actual payload. - + - Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. - Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum @@ -10214,53 +8902,59 @@ class StatementExecutionAPI: once every 15 minutes. - The results are only available for one hour after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it. 
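For larger results, a minimal sketch of sequential chunk iteration under `disposition=EXTERNAL_LINKS` (same assumptions as above; it also assumes the statement finished within the default wait and reached `SUCCEEDED`):

```
resp = w.statement_execution.execute_statement(
    statement="SELECT * FROM range(1000000)",
    warehouse_id=warehouse_id,
    disposition=Disposition.EXTERNAL_LINKS,
    format=Format.ARROW_STREAM,
)
chunk = resp.result  # first chunk; assumes the statement already SUCCEEDED
while chunk is not None:
    for link in chunk.external_links or []:
        ...  # each link.external_link is a URL from which one result chunk can be downloaded
    if chunk.next_chunk_index is None:
        break
    chunk = w.statement_execution.get_statement_result_chunk_n(
        resp.statement_id, chunk.next_chunk_index)
```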
- 
+ 
 [Apache Arrow Columnar]: https://arrow.apache.org/overview/
- [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
- """
- 
+ [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html"""
+ 
 def __init__(self, api_client):
 self._api = api_client
+ 
- def cancel_execution(self, statement_id: str):
- """Cancel statement execution.
+ 
+ 
+ 
+ 
+ 
+ 
+ def cancel_execution(self
+ , statement_id: str
+ ):
+ """Cancel statement execution.
+ 
 Requests that an executing statement be canceled. Callers must poll for status to see the terminal
 state.
- 
+ 
 :param statement_id: str
 The statement ID is returned upon successfully submitting a SQL statement, and is a required
 reference for all subsequent calls.
- 
- 
+ 
+ 
 """
- 
 headers = {}
+ 
+ self._api.do('POST',f'/api/2.0/sql/statements/{statement_id}/cancel'
+ 
+ , headers=headers
+ )
+ 
- self._api.do("POST", f"/api/2.0/sql/statements/{statement_id}/cancel", headers=headers)
-
- def execute_statement(
- self,
- statement: str,
- warehouse_id: str,
- *,
- byte_limit: Optional[int] = None,
- catalog: Optional[str] = None,
- disposition: Optional[Disposition] = None,
- format: Optional[Format] = None,
- on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None,
- parameters: Optional[List[StatementParameterListItem]] = None,
- row_limit: Optional[int] = None,
- schema: Optional[str] = None,
- wait_timeout: Optional[str] = None,
- ) -> StatementResponse:
- """Execute a SQL statement.
+ 
+ 
+ 
+ def execute_statement(self
+ , statement: str, warehouse_id: str
+ , *
+ , byte_limit: Optional[int] = None, catalog: Optional[str] = None, disposition: Optional[Disposition] = None, format: Optional[Format] = None, on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None, parameters: Optional[List[StatementParameterListItem]] = None, row_limit: Optional[int] = None, schema: Optional[str] = None, wait_timeout: Optional[str] = None) -> StatementResponse:
+ """Execute a SQL statement.
+ 
 :param statement: str
 The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
 :param warehouse_id: str
 Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
- 
+ 
 [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
 :param byte_limit: int (optional)
 Applies the given byte limit to the statement's result size. Byte counts are based on internal data
 @@ -10270,37 +8964,37 @@ def execute_statement(
 explicitly set.
 :param catalog: str (optional)
 Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.
- 
+ 
 [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
 :param disposition: :class:`Disposition` (optional)
 :param format: :class:`Format` (optional)
 Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
 `CSV`.
- 
+ 
 Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS`
 disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition.
- 
+ 
 When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values,
 where each value is either the *string representation* of a value, or `null`.
For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would look like this: - + ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ``` - + When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result contains compact JSON with no indentation or extra whitespace. - + When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format]. - + When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a CSV according to [RFC 4180] standard. All the columns values will have *string representation* similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first chunk in the result would contain a header row with column names. For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this: - + ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ``` - + [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180 :param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional) @@ -10315,27 +9009,27 @@ def execute_statement( of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a string. - + If the type is given, parameters will be checked for type correctness according to the given type. A value is correct if the provided string can be converted to the requested type using the `cast` function. The exact semantics are described in the section [`cast` function] of the SQL language reference. - + For example, the following statement contains two parameters, `my_name` and `my_date`: - + SELECT * FROM my_table WHERE name = :my_name AND date = :my_date - + The parameters can be passed in the request body as follows: - + { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": "2020-01-01", "type": "DATE" } ] } - + Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL Statement Execution API. - + Also see the section [Parameter markers] of the SQL language reference. - + [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html :param row_limit: int (optional) @@ -10344,204 +9038,196 @@ def execute_statement( the limit or not. :param schema: str (optional) Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. - + [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html :param wait_timeout: str (optional) The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set to 0 or to a value between 5 and 50. - + When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID which can be used for polling with :method:statementexecution/getStatement. 
- + When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait for the statement execution to finish. If the execution finishes within this time, the call returns immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached. - + :returns: :class:`StatementResponse` """ body = {} - if byte_limit is not None: - body["byte_limit"] = byte_limit - if catalog is not None: - body["catalog"] = catalog - if disposition is not None: - body["disposition"] = disposition.value - if format is not None: - body["format"] = format.value - if on_wait_timeout is not None: - body["on_wait_timeout"] = on_wait_timeout.value - if parameters is not None: - body["parameters"] = [v.as_dict() for v in parameters] - if row_limit is not None: - body["row_limit"] = row_limit - if schema is not None: - body["schema"] = schema - if statement is not None: - body["statement"] = statement - if wait_timeout is not None: - body["wait_timeout"] = wait_timeout - if warehouse_id is not None: - body["warehouse_id"] = warehouse_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/sql/statements/", body=body, headers=headers) + if byte_limit is not None: body['byte_limit'] = byte_limit + if catalog is not None: body['catalog'] = catalog + if disposition is not None: body['disposition'] = disposition.value + if format is not None: body['format'] = format.value + if on_wait_timeout is not None: body['on_wait_timeout'] = on_wait_timeout.value + if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters] + if row_limit is not None: body['row_limit'] = row_limit + if schema is not None: body['schema'] = schema + if statement is not None: body['statement'] = statement + if wait_timeout is not None: body['wait_timeout'] = wait_timeout + if warehouse_id is not None: body['warehouse_id'] = warehouse_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/sql/statements/', body=body + + , headers=headers + ) return StatementResponse.from_dict(res) - def get_statement(self, statement_id: str) -> StatementResponse: - """Get status, manifest, and result first chunk. + + + + def get_statement(self + , statement_id: str + ) -> StatementResponse: + """Get status, manifest, and result first chunk. + This request can be used to poll for the statement's status. When the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and further calls will receive an HTTP 404 response. - + **NOTE** This call currently might take up to 5 seconds to get the latest status and result. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. 
- + :returns: :class:`StatementResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/sql/statements/{statement_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/sql/statements/{statement_id}' + + , headers=headers + ) return StatementResponse.from_dict(res) - def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData: - """Get result chunk by index. + + + + def get_statement_result_chunk_n(self + , statement_id: str, chunk_index: int + ) -> ResultData: + """Get result chunk by index. + After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index. Whereas the first chunk with `chunk_index=0` is typically fetched with :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request can be used to fetch subsequent chunks. The response structure is identical to the nested `result` element described in the :method:statementexecution/getStatement request, and similarly includes the `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. :param chunk_index: int - + :returns: :class:`ResultData` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}' + + , headers=headers + ) return ResultData.from_dict(res) - + + class WarehousesAPI: """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.""" - + def __init__(self, api_client): self._api = api_client + - def wait_get_warehouse_running( - self, id: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None - ) -> GetWarehouseResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (State.RUNNING,) - failure_states = ( - State.STOPPED, - State.DELETED, - ) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get(id=id) - status = poll.state - status_message = f"current status: {status}" - if poll.health: - status_message = poll.health.summary - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach RUNNING, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"id={id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def wait_get_warehouse_stopped( - self, id: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None - ) -> GetWarehouseResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (State.STOPPED,) - status_message = "polling..." 
- attempt = 1 - while time.time() < deadline: - poll = self.get(id=id) - status = poll.state - status_message = f"current status: {status}" - if poll.health: - status_message = poll.health.summary - if status in target_states: - return poll - if callback: - callback(poll) - prefix = f"id={id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def create( - self, - *, - auto_stop_mins: Optional[int] = None, - channel: Optional[Channel] = None, - cluster_size: Optional[str] = None, - creator_name: Optional[str] = None, - enable_photon: Optional[bool] = None, - enable_serverless_compute: Optional[bool] = None, - instance_profile_arn: Optional[str] = None, - max_num_clusters: Optional[int] = None, - min_num_clusters: Optional[int] = None, - name: Optional[str] = None, - spot_instance_policy: Optional[SpotInstancePolicy] = None, - tags: Optional[EndpointTags] = None, - warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None, - ) -> Wait[GetWarehouseResponse]: - """Create a warehouse. + - Creates a new SQL warehouse. + + def wait_get_warehouse_running(self, id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None) -> GetWarehouseResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (State.RUNNING, ) + failure_states = (State.STOPPED, State.DELETED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get(id=id) + status = poll.state + status_message = f'current status: {status}' + if poll.health: + status_message = poll.health.summary + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach RUNNING, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"id={id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_warehouse_stopped(self, id: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None) -> GetWarehouseResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (State.STOPPED, ) + status_message = 'polling...' 
+ attempt = 1
+ while time.time() < deadline:
+ poll = self.get(id=id)
+ status = poll.state
+ status_message = f'current status: {status}'
+ if poll.health:
+ status_message = poll.health.summary
+ if status in target_states:
+ return poll
+ if callback:
+ callback(poll)
+ prefix = f"id={id}"
+ sleep = attempt
+ if sleep > 10:
+ # sleep 10s max per attempt
+ sleep = 10
+ _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+ time.sleep(sleep + random.random())
+ attempt += 1
+ raise TimeoutError(f'timed out after {timeout}: {status_message}')
+ 
+ 
+ 
+ 
+ def create(self
+ 
+ , *
+ , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None) -> Wait[GetWarehouseResponse]:
+ """Create a warehouse.
+ 
+ Creates a new SQL warehouse.
+ 
 :param auto_stop_mins: int (optional)
 The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before
 it is automatically stopped.
- 
+ 
 Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
 non-serverless warehouses - 0 indicates no autostop.
- 
+ 
 Defaults to 120 mins
 :param channel: :class:`Channel` (optional)
 Channel Details
@@ -10549,14 +9235,14 @@ def create(
 Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows
 you to run larger queries on it. If you want to increase the number of concurrent queries, please
 tune max_num_clusters.
- 
+ 
 Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
 4X-Large
 :param creator_name: str (optional)
 warehouse creator name
 :param enable_photon: bool (optional)
 Configures whether the warehouse should use Photon optimized clusters.
- 
+ 
 Defaults to false.
 :param enable_serverless_compute: bool (optional)
 Configures whether the warehouse should use serverless compute
@@ -10564,157 +9250,113 @@ def create(
 Deprecated. Instance profile used to pass IAM role to the cluster
 :param max_num_clusters: int (optional)
 Maximum number of clusters that the autoscaler will create to handle concurrent queries.
- 
+ 
 Supported values: - Must be >= min_num_clusters - Must be <= 30.
- 
+ 
 Defaults to min_clusters if unset.
 :param min_num_clusters: int (optional)
 Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing
 this will ensure that a larger number of clusters are always running and therefore may reduce the
 cold start time for new queries. This is similar to reserved vs. revocable cores in a resource
 manager.
- 
+ 
 Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
- 
+ 
 Defaults to 1
 :param name: str (optional)
 Logical name for the cluster.
- 
+ 
 Supported values: - Must be unique within an org. - Must be less than 100 characters.
 :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
 Configures whether the warehouse should use spot instances.
:param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. - + Supported values: - Number of tags < 45. :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional) Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. """ body = {} - if auto_stop_mins is not None: - body["auto_stop_mins"] = auto_stop_mins - if channel is not None: - body["channel"] = channel.as_dict() - if cluster_size is not None: - body["cluster_size"] = cluster_size - if creator_name is not None: - body["creator_name"] = creator_name - if enable_photon is not None: - body["enable_photon"] = enable_photon - if enable_serverless_compute is not None: - body["enable_serverless_compute"] = enable_serverless_compute - if instance_profile_arn is not None: - body["instance_profile_arn"] = instance_profile_arn - if max_num_clusters is not None: - body["max_num_clusters"] = max_num_clusters - if min_num_clusters is not None: - body["min_num_clusters"] = min_num_clusters - if name is not None: - body["name"] = name - if spot_instance_policy is not None: - body["spot_instance_policy"] = spot_instance_policy.value - if tags is not None: - body["tags"] = tags.as_dict() - if warehouse_type is not None: - body["warehouse_type"] = warehouse_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.0/sql/warehouses", body=body, headers=headers) - return Wait( - self.wait_get_warehouse_running, - response=CreateWarehouseResponse.from_dict(op_response), - id=op_response["id"], - ) - - def create_and_wait( - self, - *, - auto_stop_mins: Optional[int] = None, - channel: Optional[Channel] = None, - cluster_size: Optional[str] = None, - creator_name: Optional[str] = None, - enable_photon: Optional[bool] = None, - enable_serverless_compute: Optional[bool] = None, - instance_profile_arn: Optional[str] = None, - max_num_clusters: Optional[int] = None, - min_num_clusters: Optional[int] = None, - name: Optional[str] = None, - spot_instance_policy: Optional[SpotInstancePolicy] = None, - tags: Optional[EndpointTags] = None, - warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None, - timeout=timedelta(minutes=20), - ) -> GetWarehouseResponse: - return self.create( - auto_stop_mins=auto_stop_mins, - channel=channel, - cluster_size=cluster_size, - creator_name=creator_name, - enable_photon=enable_photon, - enable_serverless_compute=enable_serverless_compute, - instance_profile_arn=instance_profile_arn, - max_num_clusters=max_num_clusters, - min_num_clusters=min_num_clusters, - name=name, - spot_instance_policy=spot_instance_policy, - tags=tags, - warehouse_type=warehouse_type, - ).result(timeout=timeout) - - def delete(self, id: str): - """Delete a warehouse. 
+ if auto_stop_mins is not None: body['auto_stop_mins'] = auto_stop_mins + if channel is not None: body['channel'] = channel.as_dict() + if cluster_size is not None: body['cluster_size'] = cluster_size + if creator_name is not None: body['creator_name'] = creator_name + if enable_photon is not None: body['enable_photon'] = enable_photon + if enable_serverless_compute is not None: body['enable_serverless_compute'] = enable_serverless_compute + if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn + if max_num_clusters is not None: body['max_num_clusters'] = max_num_clusters + if min_num_clusters is not None: body['min_num_clusters'] = min_num_clusters + if name is not None: body['name'] = name + if spot_instance_policy is not None: body['spot_instance_policy'] = spot_instance_policy.value + if tags is not None: body['tags'] = tags.as_dict() + if warehouse_type is not None: body['warehouse_type'] = warehouse_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.0/sql/warehouses', body=body + + , headers=headers + ) + return Wait(self.wait_get_warehouse_running + , response = CreateWarehouseResponse.from_dict(op_response) + , id=op_response['id']) - Deletes a SQL warehouse. + + def create_and_wait(self + + , * + , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None, + timeout=timedelta(minutes=20)) -> GetWarehouseResponse: + return self.create(auto_stop_mins=auto_stop_mins, channel=channel, cluster_size=cluster_size, creator_name=creator_name, enable_photon=enable_photon, enable_serverless_compute=enable_serverless_compute, instance_profile_arn=instance_profile_arn, max_num_clusters=max_num_clusters, min_num_clusters=min_num_clusters, name=name, spot_instance_policy=spot_instance_policy, tags=tags, warehouse_type=warehouse_type).result(timeout=timeout) + + + + def delete(self + , id: str + ): + """Delete a warehouse. + + Deletes a SQL warehouse. + :param id: str Required. Id of the SQL warehouse. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/sql/warehouses/{id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/sql/warehouses/{id}", headers=headers) - - def edit( - self, - id: str, - *, - auto_stop_mins: Optional[int] = None, - channel: Optional[Channel] = None, - cluster_size: Optional[str] = None, - creator_name: Optional[str] = None, - enable_photon: Optional[bool] = None, - enable_serverless_compute: Optional[bool] = None, - instance_profile_arn: Optional[str] = None, - max_num_clusters: Optional[int] = None, - min_num_clusters: Optional[int] = None, - name: Optional[str] = None, - spot_instance_policy: Optional[SpotInstancePolicy] = None, - tags: Optional[EndpointTags] = None, - warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None, - ) -> Wait[GetWarehouseResponse]: - """Update a warehouse. 
+ 
+ 
+ 
+ def edit(self
+ , id: str
+ , *
+ , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None) -> Wait[GetWarehouseResponse]:
+ """Update a warehouse.
+ 
 Updates the configuration for a SQL warehouse.
- 
+ 
 :param id: str
 Required. Id of the warehouse to configure.
 :param auto_stop_mins: int (optional)
 The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before
 it is automatically stopped.
- 
+ 
 Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
- 
+ 
 Defaults to 120 mins
 :param channel: :class:`Channel` (optional)
 Channel Details
@@ -10722,14 +9364,14 @@ def edit(
 Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows
 you to run larger queries on it. If you want to increase the number of concurrent queries, please
 tune max_num_clusters.
- 
+ 
 Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
 4X-Large
 :param creator_name: str (optional)
 warehouse creator name
 :param enable_photon: bool (optional)
 Configures whether the warehouse should use Photon optimized clusters.
- 
+ 
 Defaults to false.
 :param enable_serverless_compute: bool (optional)
 Configures whether the warehouse should use serverless compute.
@@ -10737,243 +9379,235 @@ def edit(
 Deprecated. Instance profile used to pass IAM role to the cluster
 :param max_num_clusters: int (optional)
 Maximum number of clusters that the autoscaler will create to handle concurrent queries.
- 
+ 
 Supported values: - Must be >= min_num_clusters - Must be <= 30.
- 
+ 
 Defaults to min_clusters if unset.
 :param min_num_clusters: int (optional)
 Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing
 this will ensure that a larger number of clusters are always running and therefore may reduce the
 cold start time for new queries. This is similar to reserved vs. revocable cores in a resource
 manager.
- 
+ 
 Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
- 
+ 
 Defaults to 1
 :param name: str (optional)
 Logical name for the cluster.
- 
+ 
 Supported values: - Must be unique within an org. - Must be less than 100 characters.
 :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
 Configures whether the warehouse should use spot instances.
 :param tags: :class:`EndpointTags` (optional)
 A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
 associated with this SQL warehouse.
- 
+ 
 Supported values: - Number of tags < 45.
 :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional)
 Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO`
 and also set the field `enable_serverless_compute` to `true`.
- 
+ 
 :returns: Long-running operation waiter for :class:`GetWarehouseResponse`.
 See :method:wait_get_warehouse_running for more details.
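As an illustration, a minimal sketch that resizes a warehouse and blocks until it reports running again, via the `edit_and_wait` convenience wrapper defined below (assuming an authenticated `WorkspaceClient` named `w` and an illustrative `warehouse_id`):

```
updated = w.warehouses.edit_and_wait(
    id=warehouse_id,      # hypothetical: ID of an existing SQL warehouse
    cluster_size="Small",
    auto_stop_mins=30,
)
print(updated.state)      # the waiter polls until the warehouse reports RUNNING
```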
""" body = {} - if auto_stop_mins is not None: - body["auto_stop_mins"] = auto_stop_mins - if channel is not None: - body["channel"] = channel.as_dict() - if cluster_size is not None: - body["cluster_size"] = cluster_size - if creator_name is not None: - body["creator_name"] = creator_name - if enable_photon is not None: - body["enable_photon"] = enable_photon - if enable_serverless_compute is not None: - body["enable_serverless_compute"] = enable_serverless_compute - if instance_profile_arn is not None: - body["instance_profile_arn"] = instance_profile_arn - if max_num_clusters is not None: - body["max_num_clusters"] = max_num_clusters - if min_num_clusters is not None: - body["min_num_clusters"] = min_num_clusters - if name is not None: - body["name"] = name - if spot_instance_policy is not None: - body["spot_instance_policy"] = spot_instance_policy.value - if tags is not None: - body["tags"] = tags.as_dict() - if warehouse_type is not None: - body["warehouse_type"] = warehouse_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/edit", body=body, headers=headers) - return Wait(self.wait_get_warehouse_running, response=EditWarehouseResponse.from_dict(op_response), id=id) - - def edit_and_wait( - self, - id: str, - *, - auto_stop_mins: Optional[int] = None, - channel: Optional[Channel] = None, - cluster_size: Optional[str] = None, - creator_name: Optional[str] = None, - enable_photon: Optional[bool] = None, - enable_serverless_compute: Optional[bool] = None, - instance_profile_arn: Optional[str] = None, - max_num_clusters: Optional[int] = None, - min_num_clusters: Optional[int] = None, - name: Optional[str] = None, - spot_instance_policy: Optional[SpotInstancePolicy] = None, - tags: Optional[EndpointTags] = None, - warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None, - timeout=timedelta(minutes=20), - ) -> GetWarehouseResponse: - return self.edit( - auto_stop_mins=auto_stop_mins, - channel=channel, - cluster_size=cluster_size, - creator_name=creator_name, - enable_photon=enable_photon, - enable_serverless_compute=enable_serverless_compute, - id=id, - instance_profile_arn=instance_profile_arn, - max_num_clusters=max_num_clusters, - min_num_clusters=min_num_clusters, - name=name, - spot_instance_policy=spot_instance_policy, - tags=tags, - warehouse_type=warehouse_type, - ).result(timeout=timeout) - - def get(self, id: str) -> GetWarehouseResponse: - """Get warehouse info. 
+ if auto_stop_mins is not None: body['auto_stop_mins'] = auto_stop_mins + if channel is not None: body['channel'] = channel.as_dict() + if cluster_size is not None: body['cluster_size'] = cluster_size + if creator_name is not None: body['creator_name'] = creator_name + if enable_photon is not None: body['enable_photon'] = enable_photon + if enable_serverless_compute is not None: body['enable_serverless_compute'] = enable_serverless_compute + if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn + if max_num_clusters is not None: body['max_num_clusters'] = max_num_clusters + if min_num_clusters is not None: body['min_num_clusters'] = min_num_clusters + if name is not None: body['name'] = name + if spot_instance_policy is not None: body['spot_instance_policy'] = spot_instance_policy.value + if tags is not None: body['tags'] = tags.as_dict() + if warehouse_type is not None: body['warehouse_type'] = warehouse_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/sql/warehouses/{id}/edit', body=body + + , headers=headers + ) + return Wait(self.wait_get_warehouse_running + , response = EditWarehouseResponse.from_dict(op_response) + , id=id) - Gets the information for a single SQL warehouse. + + def edit_and_wait(self + , id: str + , * + , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None, + timeout=timedelta(minutes=20)) -> GetWarehouseResponse: + return self.edit(auto_stop_mins=auto_stop_mins, channel=channel, cluster_size=cluster_size, creator_name=creator_name, enable_photon=enable_photon, enable_serverless_compute=enable_serverless_compute, id=id, instance_profile_arn=instance_profile_arn, max_num_clusters=max_num_clusters, min_num_clusters=min_num_clusters, name=name, spot_instance_policy=spot_instance_policy, tags=tags, warehouse_type=warehouse_type).result(timeout=timeout) + + + + def get(self + , id: str + ) -> GetWarehouseResponse: + """Get warehouse info. + + Gets the information for a single SQL warehouse. + :param id: str Required. Id of the SQL warehouse. - + :returns: :class:`GetWarehouseResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/sql/warehouses/{id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/sql/warehouses/{id}' + + , headers=headers + ) return GetWarehouseResponse.from_dict(res) - def get_permission_levels(self, warehouse_id: str) -> GetWarehousePermissionLevelsResponse: - """Get SQL warehouse permission levels. + + + + def get_permission_levels(self + , warehouse_id: str + ) -> GetWarehousePermissionLevelsResponse: + """Get SQL warehouse permission levels. + Gets the permission levels that a user can have on an object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. 
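As a follow-on sketch under the same `w` client assumption, `list` yields EndpointInfo records lazily and `get` resolves a single warehouse by id:

# Enumerate warehouses the caller can manage, then fetch one in full.
for summary in w.warehouses.list():
    print(summary.id, summary.name)

info = w.warehouses.get(id="<warehouse-id>")  # hypothetical placeholder id
print(info.cluster_size, info.state)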
- + :returns: :class:`GetWarehousePermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/warehouses/{warehouse_id}/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/warehouses/{warehouse_id}/permissionLevels' + + , headers=headers + ) return GetWarehousePermissionLevelsResponse.from_dict(res) - def get_permissions(self, warehouse_id: str) -> WarehousePermissions: - """Get SQL warehouse permissions. + + + + def get_permissions(self + , warehouse_id: str + ) -> WarehousePermissions: + """Get SQL warehouse permissions. + Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. - + :returns: :class:`WarehousePermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/warehouses/{warehouse_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/warehouses/{warehouse_id}' + + , headers=headers + ) return WarehousePermissions.from_dict(res) + + + + def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse: """Get the workspace configuration. - + Gets the workspace level configuration that is shared by all SQL warehouses in a workspace. - + :returns: :class:`GetWorkspaceWarehouseConfigResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/sql/config/warehouses", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/sql/config/warehouses' + , headers=headers + ) return GetWorkspaceWarehouseConfigResponse.from_dict(res) - def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]: - """List warehouses. + + + + def list(self + + , * + , run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]: + """List warehouses. + Lists all SQL warehouses that a user has manager permissions on. - + :param run_as_user_id: int (optional) Service Principal which will be used to fetch the list of warehouses. If not specified, the user from the session header is used. - + :returns: Iterator over :class:`EndpointInfo` """ - + query = {} - if run_as_user_id is not None: - query["run_as_user_id"] = run_as_user_id - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/sql/warehouses", query=query, headers=headers) + if run_as_user_id is not None: query['run_as_user_id'] = run_as_user_id + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/sql/warehouses', query=query + + , headers=headers + ) parsed = ListWarehousesResponse.from_dict(json).warehouses return parsed if parsed is not None else [] + - def set_permissions( - self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None - ) -> WarehousePermissions: - """Set SQL warehouse permissions. + + + + def set_permissions(self + , warehouse_id: str + , * + , access_control_list: Optional[List[WarehouseAccessControlRequest]] = None) -> WarehousePermissions: + """Set SQL warehouse permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. 
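Both permission read calls take the warehouse id as the object identifier; a sketch, again assuming the `w` client from above:

# Discover which permission levels this object type supports...
levels = w.warehouses.get_permission_levels(warehouse_id="<warehouse-id>")
for description in levels.permission_levels or []:
    print(description.permission_level)

# ...then inspect the grants currently in effect (direct and inherited).
perms = w.warehouses.get_permissions(warehouse_id="<warehouse-id>")
for acl in perms.access_control_list or []:
    print(acl.user_name or acl.group_name, acl.all_permissions)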
- + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/warehouses/{warehouse_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/warehouses/{warehouse_id}', body=body + + , headers=headers + ) return WarehousePermissions.from_dict(res) - def set_workspace_warehouse_config( - self, - *, - channel: Optional[Channel] = None, - config_param: Optional[RepeatedEndpointConfPairs] = None, - data_access_config: Optional[List[EndpointConfPair]] = None, - enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None, - global_param: Optional[RepeatedEndpointConfPairs] = None, - google_service_account: Optional[str] = None, - instance_profile_arn: Optional[str] = None, - security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None, - sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None, - ): - """Set the workspace configuration. + + + + def set_workspace_warehouse_config(self + + , * + , channel: Optional[Channel] = None, config_param: Optional[RepeatedEndpointConfPairs] = None, data_access_config: Optional[List[EndpointConfPair]] = None, enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None, global_param: Optional[RepeatedEndpointConfPairs] = None, google_service_account: Optional[str] = None, instance_profile_arn: Optional[str] = None, security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None, sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None): + """Set the workspace configuration. + Sets the workspace level configuration that is shared by all SQL warehouses in a workspace. 
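Because `set_permissions` replaces all direct grants, the usual pattern is read-modify-write; a sketch of a full replacement, assuming the request and enum types below are importable from the same `sql` module:

from databricks.sdk.service.sql import (WarehouseAccessControlRequest,
                                        WarehousePermissionLevel)

w.warehouses.set_permissions(
    warehouse_id="<warehouse-id>",  # hypothetical placeholder id
    access_control_list=[
        WarehouseAccessControlRequest(
            user_name="someone@example.com",  # hypothetical principal
            permission_level=WarehousePermissionLevel.CAN_USE,
        )
    ],
)

`update_permissions` (the PATCH variant further below) is the non-destructive alternative when only a single grant should change.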
- + :param channel: :class:`Channel` (optional) Optional: Channel selection details :param config_param: :class:`RepeatedEndpointConfPairs` (optional) @@ -10996,102 +9630,125 @@ def set_workspace_warehouse_config( Security policy for warehouses :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional) SQL configuration parameters - - + + """ body = {} - if channel is not None: - body["channel"] = channel.as_dict() - if config_param is not None: - body["config_param"] = config_param.as_dict() - if data_access_config is not None: - body["data_access_config"] = [v.as_dict() for v in data_access_config] - if enabled_warehouse_types is not None: - body["enabled_warehouse_types"] = [v.as_dict() for v in enabled_warehouse_types] - if global_param is not None: - body["global_param"] = global_param.as_dict() - if google_service_account is not None: - body["google_service_account"] = google_service_account - if instance_profile_arn is not None: - body["instance_profile_arn"] = instance_profile_arn - if security_policy is not None: - body["security_policy"] = security_policy.value - if sql_configuration_parameters is not None: - body["sql_configuration_parameters"] = sql_configuration_parameters.as_dict() - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PUT", "/api/2.0/sql/config/warehouses", body=body, headers=headers) - - def start(self, id: str) -> Wait[GetWarehouseResponse]: - """Start a warehouse. + if channel is not None: body['channel'] = channel.as_dict() + if config_param is not None: body['config_param'] = config_param.as_dict() + if data_access_config is not None: body['data_access_config'] = [v.as_dict() for v in data_access_config] + if enabled_warehouse_types is not None: body['enabled_warehouse_types'] = [v.as_dict() for v in enabled_warehouse_types] + if global_param is not None: body['global_param'] = global_param.as_dict() + if google_service_account is not None: body['google_service_account'] = google_service_account + if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn + if security_policy is not None: body['security_policy'] = security_policy.value + if sql_configuration_parameters is not None: body['sql_configuration_parameters'] = sql_configuration_parameters.as_dict() + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PUT','/api/2.0/sql/config/warehouses', body=body + + , headers=headers + ) + - Starts a SQL warehouse. + + + + def start(self + , id: str + ) -> Wait[GetWarehouseResponse]: + """Start a warehouse. + + Starts a SQL warehouse. + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. 
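The start/stop pair follows the same waiter pattern as `edit`; a sketch, assuming the warehouse exists and the caller may manage it:

from datetime import timedelta

# Kick off the start and poll until the warehouse reaches RUNNING.
running = w.warehouses.start(id="<warehouse-id>").result(timeout=timedelta(minutes=20))

# One-shot equivalents that hide the Wait object:
w.warehouses.stop_and_wait(id="<warehouse-id>")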
""" + + headers = {'Accept': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/sql/warehouses/{id}/start' + + , headers=headers + ) + return Wait(self.wait_get_warehouse_running + , response = StartWarehouseResponse.from_dict(op_response) + , id=id) - headers = { - "Accept": "application/json", - } - - op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/start", headers=headers) - return Wait(self.wait_get_warehouse_running, response=StartWarehouseResponse.from_dict(op_response), id=id) - - def start_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: + + def start_and_wait(self + , id: str + , + timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.start(id=id).result(timeout=timeout) + + + - def stop(self, id: str) -> Wait[GetWarehouseResponse]: + def stop(self + , id: str + ) -> Wait[GetWarehouseResponse]: """Stop a warehouse. - + Stops a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_stopped for more details. """ + + headers = {'Accept': 'application/json',} + + op_response = self._api.do('POST',f'/api/2.0/sql/warehouses/{id}/stop' + + , headers=headers + ) + return Wait(self.wait_get_warehouse_stopped + , response = StopWarehouseResponse.from_dict(op_response) + , id=id) - headers = { - "Accept": "application/json", - } - - op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/stop", headers=headers) - return Wait(self.wait_get_warehouse_stopped, response=StopWarehouseResponse.from_dict(op_response), id=id) - - def stop_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: + + def stop_and_wait(self + , id: str + , + timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.stop(id=id).result(timeout=timeout) + + + - def update_permissions( - self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None - ) -> WarehousePermissions: + def update_permissions(self + , warehouse_id: str + , * + , access_control_list: Optional[List[WarehouseAccessControlRequest]] = None) -> WarehousePermissions: """Update SQL warehouse permissions. - + Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. 
:param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/permissions/warehouses/{warehouse_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/warehouses/{warehouse_id}', body=body + + , headers=headers + ) return WarehousePermissions.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index 4a2a7100a..5b6f77864 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -1,266 +1,241 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations - -import logging -import random -import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Any, Callable, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ..errors import OperationFailed -from ._internal import Wait, _enum, _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class ColumnInfo: name: Optional[str] = None """Name of the column.""" - + def as_dict(self) -> dict: """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" - return cls(name=d.get("name", None)) + return cls(name=d.get('name', None)) + + @dataclass class CreateEndpoint: name: str """Name of the vector search endpoint""" - + endpoint_type: EndpointType """Type of endpoint""" - + budget_policy_id: Optional[str] = None """The budget policy id to be applied""" - + def as_dict(self) -> dict: """Serializes the CreateEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type.value - if self.name is not None: - body["name"] = self.name + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.endpoint_type is 
not None: body['endpoint_type'] = self.endpoint_type.value + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type - if self.name is not None: - body["name"] = self.name + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateEndpoint: """Deserializes the CreateEndpoint from a dictionary.""" - return cls( - budget_policy_id=d.get("budget_policy_id", None), - endpoint_type=_enum(d, "endpoint_type", EndpointType), - name=d.get("name", None), - ) + return cls(budget_policy_id=d.get('budget_policy_id', None), endpoint_type=_enum(d, 'endpoint_type', EndpointType), name=d.get('name', None)) + + @dataclass class CreateVectorIndexRequest: name: str """Name of the index""" - + endpoint_name: str """Name of the endpoint to be used for serving the index""" - + primary_key: str """Primary key of the index""" - + index_type: VectorIndexType """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates.""" - + delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None """Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.""" - + direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None """Specification for Direct Vector Access Index. 
Required if `index_type` is `DIRECT_ACCESS`.""" - + def as_dict(self) -> dict: """Serializes the CreateVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict() - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict() - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type.value - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key + if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec.as_dict() + if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec.as_dict() + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type.value + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key return body def as_shallow_dict(self) -> dict: """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key + if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec + if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateVectorIndexRequest: """Deserializes the CreateVectorIndexRequest from a dictionary.""" - return cls( - delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecRequest), - direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec), - endpoint_name=d.get("endpoint_name", None), - index_type=_enum(d, "index_type", VectorIndexType), - name=d.get("name", None), - primary_key=d.get("primary_key", None), - ) + return cls(delta_sync_index_spec=_from_dict(d, 'delta_sync_index_spec', DeltaSyncVectorIndexSpecRequest), direct_access_index_spec=_from_dict(d, 'direct_access_index_spec', DirectAccessVectorIndexSpec), endpoint_name=d.get('endpoint_name', None), index_type=_enum(d, 'index_type', VectorIndexType), name=d.get('name', None), primary_key=d.get('primary_key', None)) + + @dataclass class CustomTag: key: str """Key field for a vector search endpoint tag.""" - + value: Optional[str] = None """[Optional] Value field for a vector search endpoint tag.""" - + def as_dict(self) -> dict: """Serializes the CustomTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key 
is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the CustomTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomTag: """Deserializes the CustomTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + @dataclass class DeleteDataResult: failed_primary_keys: Optional[List[str]] = None """List of primary keys for rows that failed to process.""" - + success_row_count: Optional[int] = None """Count of successfully processed rows.""" - + def as_dict(self) -> dict: """Serializes the DeleteDataResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.failed_primary_keys: - body["failed_primary_keys"] = [v for v in self.failed_primary_keys] - if self.success_row_count is not None: - body["success_row_count"] = self.success_row_count + if self.failed_primary_keys: body['failed_primary_keys'] = [v for v in self.failed_primary_keys] + if self.success_row_count is not None: body['success_row_count'] = self.success_row_count return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDataResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.failed_primary_keys: - body["failed_primary_keys"] = self.failed_primary_keys - if self.success_row_count is not None: - body["success_row_count"] = self.success_row_count + if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys + if self.success_row_count is not None: body['success_row_count'] = self.success_row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDataResult: """Deserializes the DeleteDataResult from a dictionary.""" - return cls( - failed_primary_keys=d.get("failed_primary_keys", None), success_row_count=d.get("success_row_count", None) - ) + return cls(failed_primary_keys=d.get('failed_primary_keys', None), success_row_count=d.get('success_row_count', None)) + + class DeleteDataStatus(Enum): + + + FAILURE = 'FAILURE' + PARTIAL_SUCCESS = 'PARTIAL_SUCCESS' + SUCCESS = 'SUCCESS' + - FAILURE = "FAILURE" - PARTIAL_SUCCESS = "PARTIAL_SUCCESS" - SUCCESS = "SUCCESS" @dataclass class DeleteDataVectorIndexResponse: result: Optional[DeleteDataResult] = None """Result of the upsert or delete operation.""" - + status: Optional[DeleteDataStatus] = None """Status of the delete operation.""" - + def as_dict(self) -> dict: """Serializes the DeleteDataVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.result: - body["result"] = self.result.as_dict() - if self.status is not None: - body["status"] = self.status.value + if self.result: body['result'] = self.result.as_dict() + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDataVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.result: - body["result"] = self.result - if self.status is not None: - 
body["status"] = self.status + if self.result: body['result'] = self.result + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDataVectorIndexResponse: """Deserializes the DeleteDataVectorIndexResponse from a dictionary.""" - return cls(result=_from_dict(d, "result", DeleteDataResult), status=_enum(d, "status", DeleteDataStatus)) + return cls(result=_from_dict(d, 'result', DeleteDataResult), status=_enum(d, 'status', DeleteDataStatus)) + + + + + @dataclass @@ -279,6 +254,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteEndpointResponse: """Deserializes the DeleteEndpointResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -297,6 +277,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteIndexResponse: """Deserializes the DeleteIndexResponse from a dictionary.""" return cls() + + @dataclass @@ -305,466 +287,392 @@ class DeltaSyncVectorIndexSpecRequest: """[Optional] Select the columns to sync with the vector index. If you leave this field blank, all columns from the source table are synced with the index. The primary key column and embedding source column or embedding vector column are always synced.""" - + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" - + embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None """The columns that contain the embedding vectors.""" - + embedding_writeback_table: Optional[str] = None """[Optional] Name of the Delta table to sync the vector index contents and computed embeddings to.""" - + pipeline_type: Optional[PipelineType] = None """Pipeline execution mode. - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing after successfully refreshing the source table in the pipeline once, ensuring the table is updated based on the data available when the update started. 
- `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep vector index fresh.""" - + source_table: Optional[str] = None """The name of the source table.""" - + def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns_to_sync: - body["columns_to_sync"] = [v for v in self.columns_to_sync] - if self.embedding_source_columns: - body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] - if self.embedding_vector_columns: - body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns] - if self.embedding_writeback_table is not None: - body["embedding_writeback_table"] = self.embedding_writeback_table - if self.pipeline_type is not None: - body["pipeline_type"] = self.pipeline_type.value - if self.source_table is not None: - body["source_table"] = self.source_table + if self.columns_to_sync: body['columns_to_sync'] = [v for v in self.columns_to_sync] + if self.embedding_source_columns: body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns] + if self.embedding_vector_columns: body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns] + if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table + if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value + if self.source_table is not None: body['source_table'] = self.source_table return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns_to_sync: - body["columns_to_sync"] = self.columns_to_sync - if self.embedding_source_columns: - body["embedding_source_columns"] = self.embedding_source_columns - if self.embedding_vector_columns: - body["embedding_vector_columns"] = self.embedding_vector_columns - if self.embedding_writeback_table is not None: - body["embedding_writeback_table"] = self.embedding_writeback_table - if self.pipeline_type is not None: - body["pipeline_type"] = self.pipeline_type - if self.source_table is not None: - body["source_table"] = self.source_table + if self.columns_to_sync: body['columns_to_sync'] = self.columns_to_sync + if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns + if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns + if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table + if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type + if self.source_table is not None: body['source_table'] = self.source_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary.""" - return cls( - columns_to_sync=d.get("columns_to_sync", None), - embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), - embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), - embedding_writeback_table=d.get("embedding_writeback_table", None), - pipeline_type=_enum(d, "pipeline_type", PipelineType), - source_table=d.get("source_table", None), - ) + return 
cls(columns_to_sync=d.get('columns_to_sync', None), embedding_source_columns=_repeated_dict(d, 'embedding_source_columns', EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns', EmbeddingVectorColumn), embedding_writeback_table=d.get('embedding_writeback_table', None), pipeline_type=_enum(d, 'pipeline_type', PipelineType), source_table=d.get('source_table', None)) + + @dataclass class DeltaSyncVectorIndexSpecResponse: embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" - + embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None """The columns that contain the embedding vectors.""" - + embedding_writeback_table: Optional[str] = None """[Optional] Name of the Delta table to sync the vector index contents and computed embeddings to.""" - + pipeline_id: Optional[str] = None """The ID of the pipeline that is used to sync the index.""" - + pipeline_type: Optional[PipelineType] = None """Pipeline execution mode. - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing after successfully refreshing the source table in the pipeline once, ensuring the table is updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep vector index fresh.""" - + source_table: Optional[str] = None """The name of the source table.""" - + def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_source_columns: - body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] - if self.embedding_vector_columns: - body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns] - if self.embedding_writeback_table is not None: - body["embedding_writeback_table"] = self.embedding_writeback_table - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.pipeline_type is not None: - body["pipeline_type"] = self.pipeline_type.value - if self.source_table is not None: - body["source_table"] = self.source_table + if self.embedding_source_columns: body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns] + if self.embedding_vector_columns: body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns] + if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value + if self.source_table is not None: body['source_table'] = self.source_table return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_source_columns: - body["embedding_source_columns"] = self.embedding_source_columns - if self.embedding_vector_columns: - body["embedding_vector_columns"] = self.embedding_vector_columns - if self.embedding_writeback_table is not None: - body["embedding_writeback_table"] = self.embedding_writeback_table - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.pipeline_type is not None: - body["pipeline_type"] = self.pipeline_type - if 
self.source_table is not None: - body["source_table"] = self.source_table + if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns + if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns + if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type + if self.source_table is not None: body['source_table'] = self.source_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" - return cls( - embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), - embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), - embedding_writeback_table=d.get("embedding_writeback_table", None), - pipeline_id=d.get("pipeline_id", None), - pipeline_type=_enum(d, "pipeline_type", PipelineType), - source_table=d.get("source_table", None), - ) + return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns', EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns', EmbeddingVectorColumn), embedding_writeback_table=d.get('embedding_writeback_table', None), pipeline_id=d.get('pipeline_id', None), pipeline_type=_enum(d, 'pipeline_type', PipelineType), source_table=d.get('source_table', None)) + + @dataclass class DirectAccessVectorIndexSpec: embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source. The format should be array[double].""" - + embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None """The columns that contain the embedding vectors. The format should be array[double].""" - + schema_json: Optional[str] = None """The schema of the index in JSON format. Supported types are `integer`, `long`, `float`, `double`, `boolean`, `string`, `date`, `timestamp`. 
Supported types for vector column: `array`, `array`,`.""" - + def as_dict(self) -> dict: """Serializes the DirectAccessVectorIndexSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_source_columns: - body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] - if self.embedding_vector_columns: - body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns] - if self.schema_json is not None: - body["schema_json"] = self.schema_json + if self.embedding_source_columns: body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns] + if self.embedding_vector_columns: body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns] + if self.schema_json is not None: body['schema_json'] = self.schema_json return body def as_shallow_dict(self) -> dict: """Serializes the DirectAccessVectorIndexSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_source_columns: - body["embedding_source_columns"] = self.embedding_source_columns - if self.embedding_vector_columns: - body["embedding_vector_columns"] = self.embedding_vector_columns - if self.schema_json is not None: - body["schema_json"] = self.schema_json + if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns + if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns + if self.schema_json is not None: body['schema_json'] = self.schema_json return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DirectAccessVectorIndexSpec: """Deserializes the DirectAccessVectorIndexSpec from a dictionary.""" - return cls( - embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), - embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), - schema_json=d.get("schema_json", None), - ) + return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns', EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns', EmbeddingVectorColumn), schema_json=d.get('schema_json', None)) + + @dataclass class EmbeddingSourceColumn: embedding_model_endpoint_name: Optional[str] = None """Name of the embedding model endpoint""" - + name: Optional[str] = None """Name of the column""" - + def as_dict(self) -> dict: """Serializes the EmbeddingSourceColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_model_endpoint_name is not None: - body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name - if self.name is not None: - body["name"] = self.name + if self.embedding_model_endpoint_name is not None: body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the EmbeddingSourceColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_model_endpoint_name is not None: - body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name - if self.name is not None: - body["name"] = self.name + if self.embedding_model_endpoint_name is not None: body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingSourceColumn: 
"""Deserializes the EmbeddingSourceColumn from a dictionary.""" - return cls(embedding_model_endpoint_name=d.get("embedding_model_endpoint_name", None), name=d.get("name", None)) + return cls(embedding_model_endpoint_name=d.get('embedding_model_endpoint_name', None), name=d.get('name', None)) + + @dataclass class EmbeddingVectorColumn: embedding_dimension: Optional[int] = None """Dimension of the embedding vector""" - + name: Optional[str] = None """Name of the column""" - + def as_dict(self) -> dict: """Serializes the EmbeddingVectorColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_dimension is not None: - body["embedding_dimension"] = self.embedding_dimension - if self.name is not None: - body["name"] = self.name + if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the EmbeddingVectorColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_dimension is not None: - body["embedding_dimension"] = self.embedding_dimension - if self.name is not None: - body["name"] = self.name + if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingVectorColumn: """Deserializes the EmbeddingVectorColumn from a dictionary.""" - return cls(embedding_dimension=d.get("embedding_dimension", None), name=d.get("name", None)) + return cls(embedding_dimension=d.get('embedding_dimension', None), name=d.get('name', None)) + + @dataclass class EndpointInfo: creation_timestamp: Optional[int] = None """Timestamp of endpoint creation""" - + creator: Optional[str] = None """Creator of the endpoint""" - + custom_tags: Optional[List[CustomTag]] = None """The custom tags assigned to the endpoint""" - + effective_budget_policy_id: Optional[str] = None """The budget policy id applied to the endpoint""" - + endpoint_status: Optional[EndpointStatus] = None """Current status of the endpoint""" - + endpoint_type: Optional[EndpointType] = None """Type of endpoint""" - + id: Optional[str] = None """Unique identifier of the endpoint""" - + last_updated_timestamp: Optional[int] = None """Timestamp of last update to the endpoint""" - + last_updated_user: Optional[str] = None """User who last updated the endpoint""" - + name: Optional[str] = None """Name of the vector search endpoint""" - + num_indexes: Optional[int] = None """Number of indexes on the endpoint""" - + def as_dict(self) -> dict: """Serializes the EndpointInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.custom_tags: - body["custom_tags"] = [v.as_dict() for v in self.custom_tags] - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.endpoint_status: - body["endpoint_status"] = self.endpoint_status.as_dict() - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type.value - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.last_updated_user is not None: - 
body["last_updated_user"] = self.last_updated_user - if self.name is not None: - body["name"] = self.name - if self.num_indexes is not None: - body["num_indexes"] = self.num_indexes + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.endpoint_status: body['endpoint_status'] = self.endpoint_status.as_dict() + if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type.value + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user + if self.name is not None: body['name'] = self.name + if self.num_indexes is not None: body['num_indexes'] = self.num_indexes return body def as_shallow_dict(self) -> dict: """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: - body["creation_timestamp"] = self.creation_timestamp - if self.creator is not None: - body["creator"] = self.creator - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id - if self.endpoint_status: - body["endpoint_status"] = self.endpoint_status - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type - if self.id is not None: - body["id"] = self.id - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp - if self.last_updated_user is not None: - body["last_updated_user"] = self.last_updated_user - if self.name is not None: - body["name"] = self.name - if self.num_indexes is not None: - body["num_indexes"] = self.num_indexes + if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp + if self.creator is not None: body['creator'] = self.creator + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.endpoint_status: body['endpoint_status'] = self.endpoint_status + if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp + if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user + if self.name is not None: body['name'] = self.name + if self.num_indexes is not None: body['num_indexes'] = self.num_indexes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointInfo: """Deserializes the EndpointInfo from a dictionary.""" - return cls( - creation_timestamp=d.get("creation_timestamp", None), - creator=d.get("creator", None), - custom_tags=_repeated_dict(d, "custom_tags", CustomTag), - effective_budget_policy_id=d.get("effective_budget_policy_id", None), - endpoint_status=_from_dict(d, "endpoint_status", EndpointStatus), - endpoint_type=_enum(d, "endpoint_type", EndpointType), - id=d.get("id", None), - 
last_updated_timestamp=d.get("last_updated_timestamp", None), - last_updated_user=d.get("last_updated_user", None), - name=d.get("name", None), - num_indexes=d.get("num_indexes", None), - ) + return cls(creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), custom_tags=_repeated_dict(d, 'custom_tags', CustomTag), effective_budget_policy_id=d.get('effective_budget_policy_id', None), endpoint_status=_from_dict(d, 'endpoint_status', EndpointStatus), endpoint_type=_enum(d, 'endpoint_type', EndpointType), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), last_updated_user=d.get('last_updated_user', None), name=d.get('name', None), num_indexes=d.get('num_indexes', None)) + + @dataclass class EndpointStatus: """Status information of an endpoint""" - + message: Optional[str] = None """Additional status message""" - + state: Optional[EndpointStatusState] = None """Current state of the endpoint""" - + def as_dict(self) -> dict: """Serializes the EndpointStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state.value + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.state is not None: - body["state"] = self.state + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointStatus: """Deserializes the EndpointStatus from a dictionary.""" - return cls(message=d.get("message", None), state=_enum(d, "state", EndpointStatusState)) + return cls(message=d.get('message', None), state=_enum(d, 'state', EndpointStatusState)) + + class EndpointStatusState(Enum): """Current state of the endpoint""" - - OFFLINE = "OFFLINE" - ONLINE = "ONLINE" - PROVISIONING = "PROVISIONING" - + + OFFLINE = 'OFFLINE' + ONLINE = 'ONLINE' + PROVISIONING = 'PROVISIONING' class EndpointType(Enum): """Type of endpoint.""" + + STANDARD = 'STANDARD' + + + + - STANDARD = "STANDARD" @dataclass class ListEndpointResponse: endpoints: Optional[List[EndpointInfo]] = None """An array of Endpoint objects""" - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If not present, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the ListEndpointResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoints: - body["endpoints"] = [v.as_dict() for v in self.endpoints] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListEndpointResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoints: - body["endpoints"] = self.endpoints - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.endpoints: body['endpoints'] = self.endpoints + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListEndpointResponse: """Deserializes the ListEndpointResponse from a dictionary.""" - return cls( - endpoints=_repeated_dict(d, "endpoints", EndpointInfo), next_page_token=d.get("next_page_token", None) - ) + return cls(endpoints=_repeated_dict(d, 'endpoints', EndpointInfo), next_page_token=d.get('next_page_token', None)) + + + + + + + + @dataclass class ListValue: values: Optional[List[Value]] = None """Repeated field of dynamically typed values.""" - + def as_dict(self) -> dict: """Serializes the ListValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.values: - body["values"] = [v.as_dict() for v in self.values] + if self.values: body['values'] = [v.as_dict() for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the ListValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.values: - body["values"] = self.values + if self.values: body['values'] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListValue: """Deserializes the ListValue from a dictionary.""" - return cls(values=_repeated_dict(d, "values", Value)) + return cls(values=_repeated_dict(d, 'values', Value)) + + @dataclass @@ -772,188 +680,165 @@ class ListVectorIndexesResponse: next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If not present, there are no more results to show.""" - + vector_indexes: Optional[List[MiniVectorIndex]] = None - + def as_dict(self) -> dict: """Serializes the ListVectorIndexesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.vector_indexes: - body["vector_indexes"] = [v.as_dict() for v in self.vector_indexes] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.vector_indexes: body['vector_indexes'] = [v.as_dict() for v in self.vector_indexes] return body def as_shallow_dict(self) -> dict: """Serializes the ListVectorIndexesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.vector_indexes: - body["vector_indexes"] = self.vector_indexes + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.vector_indexes: body['vector_indexes'] = self.vector_indexes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListVectorIndexesResponse: """Deserializes the ListVectorIndexesResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - vector_indexes=_repeated_dict(d, "vector_indexes", MiniVectorIndex), - ) + return cls(next_page_token=d.get('next_page_token', None), vector_indexes=_repeated_dict(d, 'vector_indexes', MiniVectorIndex)) + + @dataclass class MapStringValueEntry: """Key-value pair.""" - + key: Optional[str] = None """Column name.""" - + value: Optional[Value] = None """Column value, nullable.""" - + def as_dict(self) -> dict: """Serializes the MapStringValueEntry into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value: - body["value"] = self.value.as_dict() + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the MapStringValueEntry into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MapStringValueEntry: """Deserializes the MapStringValueEntry from a dictionary.""" - return cls(key=d.get("key", None), value=_from_dict(d, "value", Value)) + return cls(key=d.get('key', None), value=_from_dict(d, 'value', Value)) + + @dataclass class MiniVectorIndex: creator: Optional[str] = None """The user who created the index.""" - + endpoint_name: Optional[str] = None """Name of the endpoint associated with the index""" - + index_type: Optional[VectorIndexType] = None """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. 
With this model, the user manages index updates.""" - + name: Optional[str] = None """Name of the index""" - + primary_key: Optional[str] = None """Primary key of the index""" - + def as_dict(self) -> dict: """Serializes the MiniVectorIndex into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creator is not None: - body["creator"] = self.creator - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type.value - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key + if self.creator is not None: body['creator'] = self.creator + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type.value + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key return body def as_shallow_dict(self) -> dict: """Serializes the MiniVectorIndex into a shallow dictionary of its immediate attributes.""" body = {} - if self.creator is not None: - body["creator"] = self.creator - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key + if self.creator is not None: body['creator'] = self.creator + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MiniVectorIndex: """Deserializes the MiniVectorIndex from a dictionary.""" - return cls( - creator=d.get("creator", None), - endpoint_name=d.get("endpoint_name", None), - index_type=_enum(d, "index_type", VectorIndexType), - name=d.get("name", None), - primary_key=d.get("primary_key", None), - ) + return cls(creator=d.get('creator', None), endpoint_name=d.get('endpoint_name', None), index_type=_enum(d, 'index_type', VectorIndexType), name=d.get('name', None), primary_key=d.get('primary_key', None)) + + @dataclass class PatchEndpointBudgetPolicyRequest: budget_policy_id: str """The budget policy id to be applied""" - + endpoint_name: Optional[str] = None """Name of the vector search endpoint""" - + def as_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name return body def as_shallow_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name + if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id + if 
self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointBudgetPolicyRequest: """Deserializes the PatchEndpointBudgetPolicyRequest from a dictionary.""" - return cls(budget_policy_id=d.get("budget_policy_id", None), endpoint_name=d.get("endpoint_name", None)) + return cls(budget_policy_id=d.get('budget_policy_id', None), endpoint_name=d.get('endpoint_name', None)) + + @dataclass class PatchEndpointBudgetPolicyResponse: effective_budget_policy_id: Optional[str] = None """The budget policy applied to the vector search endpoint.""" - + def as_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id return body def as_shallow_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.effective_budget_policy_id is not None: - body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointBudgetPolicyResponse: """Deserializes the PatchEndpointBudgetPolicyResponse from a dictionary.""" - return cls(effective_budget_policy_id=d.get("effective_budget_policy_id", None)) + return cls(effective_budget_policy_id=d.get('effective_budget_policy_id', None)) + + class PipelineType(Enum): @@ -962,64 +847,55 @@ class PipelineType(Enum): ensuring the table is updated based on the data available when the update started. 
- `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep vector index fresh.""" - - CONTINUOUS = "CONTINUOUS" - TRIGGERED = "TRIGGERED" - + + CONTINUOUS = 'CONTINUOUS' + TRIGGERED = 'TRIGGERED' @dataclass class QueryVectorIndexNextPageRequest: """Request payload for getting next page of results.""" - + endpoint_name: Optional[str] = None """Name of the endpoint.""" - + index_name: Optional[str] = None """Name of the vector index to query.""" - + page_token: Optional[str] = None """Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.""" - + def as_dict(self) -> dict: """Serializes the QueryVectorIndexNextPageRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_name is not None: - body["index_name"] = self.index_name - if self.page_token is not None: - body["page_token"] = self.page_token + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_name is not None: body['index_name'] = self.index_name + if self.page_token is not None: body['page_token'] = self.page_token return body def as_shallow_dict(self) -> dict: """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_name is not None: - body["index_name"] = self.index_name - if self.page_token is not None: - body["page_token"] = self.page_token + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_name is not None: body['index_name'] = self.index_name + if self.page_token is not None: body['page_token'] = self.page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexNextPageRequest: """Deserializes the QueryVectorIndexNextPageRequest from a dictionary.""" - return cls( - endpoint_name=d.get("endpoint_name", None), - index_name=d.get("index_name", None), - page_token=d.get("page_token", None), - ) + return cls(endpoint_name=d.get('endpoint_name', None), index_name=d.get('index_name', None), page_token=d.get('page_token', None)) + + @dataclass class QueryVectorIndexRequest: columns: List[str] """List of column names to include in the response.""" - + columns_to_rerank: Optional[List[str]] = None """Column names used to retrieve data to send to the reranker.""" - + filters_json: Optional[str] = None """JSON string representing query filters. @@ -1028,301 +904,256 @@ class QueryVectorIndexRequest: - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5.""" - + index_name: Optional[str] = None """Name of the vector index to query.""" - + num_results: Optional[int] = None """Number of results to return. Defaults to 10.""" - + query_text: Optional[str] = None """Query text. Required for Delta Sync Index using model endpoint.""" - + query_type: Optional[str] = None """The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.""" - + query_vector: Optional[List[float]] = None """Query vector. 
Required for Direct Vector Access Index and Delta Sync Index using self-managed vectors.""" - + score_threshold: Optional[float] = None """Threshold for the approximate nearest neighbor search. Defaults to 0.0.""" - + def as_dict(self) -> dict: """Serializes the QueryVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v for v in self.columns] - if self.columns_to_rerank: - body["columns_to_rerank"] = [v for v in self.columns_to_rerank] - if self.filters_json is not None: - body["filters_json"] = self.filters_json - if self.index_name is not None: - body["index_name"] = self.index_name - if self.num_results is not None: - body["num_results"] = self.num_results - if self.query_text is not None: - body["query_text"] = self.query_text - if self.query_type is not None: - body["query_type"] = self.query_type - if self.query_vector: - body["query_vector"] = [v for v in self.query_vector] - if self.score_threshold is not None: - body["score_threshold"] = self.score_threshold + if self.columns: body['columns'] = [v for v in self.columns] + if self.columns_to_rerank: body['columns_to_rerank'] = [v for v in self.columns_to_rerank] + if self.filters_json is not None: body['filters_json'] = self.filters_json + if self.index_name is not None: body['index_name'] = self.index_name + if self.num_results is not None: body['num_results'] = self.num_results + if self.query_text is not None: body['query_text'] = self.query_text + if self.query_type is not None: body['query_type'] = self.query_type + if self.query_vector: body['query_vector'] = [v for v in self.query_vector] + if self.score_threshold is not None: body['score_threshold'] = self.score_threshold return body def as_shallow_dict(self) -> dict: """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns - if self.columns_to_rerank: - body["columns_to_rerank"] = self.columns_to_rerank - if self.filters_json is not None: - body["filters_json"] = self.filters_json - if self.index_name is not None: - body["index_name"] = self.index_name - if self.num_results is not None: - body["num_results"] = self.num_results - if self.query_text is not None: - body["query_text"] = self.query_text - if self.query_type is not None: - body["query_type"] = self.query_type - if self.query_vector: - body["query_vector"] = self.query_vector - if self.score_threshold is not None: - body["score_threshold"] = self.score_threshold + if self.columns: body['columns'] = self.columns + if self.columns_to_rerank: body['columns_to_rerank'] = self.columns_to_rerank + if self.filters_json is not None: body['filters_json'] = self.filters_json + if self.index_name is not None: body['index_name'] = self.index_name + if self.num_results is not None: body['num_results'] = self.num_results + if self.query_text is not None: body['query_text'] = self.query_text + if self.query_type is not None: body['query_type'] = self.query_type + if self.query_vector: body['query_vector'] = self.query_vector + if self.score_threshold is not None: body['score_threshold'] = self.score_threshold return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexRequest: """Deserializes the QueryVectorIndexRequest from a dictionary.""" - return cls( - columns=d.get("columns", None), - columns_to_rerank=d.get("columns_to_rerank", None), - filters_json=d.get("filters_json", None), - index_name=d.get("index_name", None), - 
num_results=d.get("num_results", None), - query_text=d.get("query_text", None), - query_type=d.get("query_type", None), - query_vector=d.get("query_vector", None), - score_threshold=d.get("score_threshold", None), - ) + return cls(columns=d.get('columns', None), columns_to_rerank=d.get('columns_to_rerank', None), filters_json=d.get('filters_json', None), index_name=d.get('index_name', None), num_results=d.get('num_results', None), query_text=d.get('query_text', None), query_type=d.get('query_type', None), query_vector=d.get('query_vector', None), score_threshold=d.get('score_threshold', None)) + + @dataclass class QueryVectorIndexResponse: manifest: Optional[ResultManifest] = None """Metadata about the result set.""" - + next_page_token: Optional[str] = None """[Optional] Token that can be used in `QueryVectorIndexNextPage` API to get next page of results. If more than 1000 results satisfy the query, they are returned in groups of 1000. Empty value means no more results. The maximum number of results that can be returned is 10,000.""" - + result: Optional[ResultData] = None """Data returned in the query result.""" - + def as_dict(self) -> dict: """Serializes the QueryVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.manifest: - body["manifest"] = self.manifest.as_dict() - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.result: - body["result"] = self.result.as_dict() + if self.manifest: body['manifest'] = self.manifest.as_dict() + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.result: body['result'] = self.result.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the QueryVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.manifest: - body["manifest"] = self.manifest - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.result: - body["result"] = self.result + if self.manifest: body['manifest'] = self.manifest + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.result: body['result'] = self.result return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexResponse: """Deserializes the QueryVectorIndexResponse from a dictionary.""" - return cls( - manifest=_from_dict(d, "manifest", ResultManifest), - next_page_token=d.get("next_page_token", None), - result=_from_dict(d, "result", ResultData), - ) + return cls(manifest=_from_dict(d, 'manifest', ResultManifest), next_page_token=d.get('next_page_token', None), result=_from_dict(d, 'result', ResultData)) + + @dataclass class ResultData: """Data returned in the query result.""" - + data_array: Optional[List[List[str]]] = None """Data rows returned in the query.""" - + row_count: Optional[int] = None """Number of rows in the result set.""" - + def as_dict(self) -> dict: """Serializes the ResultData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data_array: - body["data_array"] = [v for v in self.data_array] - if self.row_count is not None: - body["row_count"] = self.row_count + if self.data_array: body['data_array'] = [v for v in self.data_array] + if self.row_count is not None: body['row_count'] = self.row_count return body def as_shallow_dict(self) -> dict: """Serializes the ResultData into a shallow dictionary of its immediate attributes.""" body = {} - if self.data_array: - 
body["data_array"] = self.data_array - if self.row_count is not None: - body["row_count"] = self.row_count + if self.data_array: body['data_array'] = self.data_array + if self.row_count is not None: body['row_count'] = self.row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultData: """Deserializes the ResultData from a dictionary.""" - return cls(data_array=d.get("data_array", None), row_count=d.get("row_count", None)) + return cls(data_array=d.get('data_array', None), row_count=d.get('row_count', None)) + + @dataclass class ResultManifest: """Metadata about the result set.""" - + column_count: Optional[int] = None """Number of columns in the result set.""" - + columns: Optional[List[ColumnInfo]] = None """Information about each column in the result set.""" - + def as_dict(self) -> dict: """Serializes the ResultManifest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column_count is not None: - body["column_count"] = self.column_count - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] + if self.column_count is not None: body['column_count'] = self.column_count + if self.columns: body['columns'] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the ResultManifest into a shallow dictionary of its immediate attributes.""" body = {} - if self.column_count is not None: - body["column_count"] = self.column_count - if self.columns: - body["columns"] = self.columns + if self.column_count is not None: body['column_count'] = self.column_count + if self.columns: body['columns'] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultManifest: """Deserializes the ResultManifest from a dictionary.""" - return cls(column_count=d.get("column_count", None), columns=_repeated_dict(d, "columns", ColumnInfo)) + return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo)) + + @dataclass class ScanVectorIndexRequest: index_name: Optional[str] = None """Name of the vector index to scan.""" - + last_primary_key: Optional[str] = None """Primary key of the last entry returned in the previous scan.""" - + num_results: Optional[int] = None """Number of results to return. 
Defaults to 10.""" - + def as_dict(self) -> dict: """Serializes the ScanVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key - if self.num_results is not None: - body["num_results"] = self.num_results + if self.index_name is not None: body['index_name'] = self.index_name + if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key + if self.num_results is not None: body['num_results'] = self.num_results return body def as_shallow_dict(self) -> dict: """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key - if self.num_results is not None: - body["num_results"] = self.num_results + if self.index_name is not None: body['index_name'] = self.index_name + if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key + if self.num_results is not None: body['num_results'] = self.num_results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ScanVectorIndexRequest: """Deserializes the ScanVectorIndexRequest from a dictionary.""" - return cls( - index_name=d.get("index_name", None), - last_primary_key=d.get("last_primary_key", None), - num_results=d.get("num_results", None), - ) + return cls(index_name=d.get('index_name', None), last_primary_key=d.get('last_primary_key', None), num_results=d.get('num_results', None)) + + @dataclass class ScanVectorIndexResponse: """Response to a scan vector index request.""" - + data: Optional[List[Struct]] = None """List of data entries""" - + last_primary_key: Optional[str] = None """Primary key of the last entry.""" - + def as_dict(self) -> dict: """Serializes the ScanVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data: - body["data"] = [v.as_dict() for v in self.data] - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key + if self.data: body['data'] = [v.as_dict() for v in self.data] + if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key return body def as_shallow_dict(self) -> dict: """Serializes the ScanVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.data: - body["data"] = self.data - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key + if self.data: body['data'] = self.data + if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ScanVectorIndexResponse: """Deserializes the ScanVectorIndexResponse from a dictionary.""" - return cls(data=_repeated_dict(d, "data", Struct), last_primary_key=d.get("last_primary_key", None)) + return cls(data=_repeated_dict(d, 'data', Struct), last_primary_key=d.get('last_primary_key', None)) + + @dataclass class Struct: fields: Optional[List[MapStringValueEntry]] = None """Data entry, corresponding to a row in a vector index.""" - + def as_dict(self) -> dict: """Serializes the Struct into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fields: - body["fields"] = [v.as_dict() for v in self.fields] + if self.fields: 
body['fields'] = [v.as_dict() for v in self.fields] return body def as_shallow_dict(self) -> dict: """Serializes the Struct into a shallow dictionary of its immediate attributes.""" body = {} - if self.fields: - body["fields"] = self.fields + if self.fields: body['fields'] = self.fields return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Struct: """Deserializes the Struct from a dictionary.""" - return cls(fields=_repeated_dict(d, "fields", MapStringValueEntry)) + return cls(fields=_repeated_dict(d, 'fields', MapStringValueEntry)) + + + + + @dataclass @@ -1341,364 +1172,306 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SyncIndexResponse: """Deserializes the SyncIndexResponse from a dictionary.""" return cls() + + @dataclass class UpdateEndpointCustomTagsRequest: custom_tags: List[CustomTag] """The new custom tags for the vector search endpoint""" - + endpoint_name: Optional[str] = None """Name of the vector search endpoint""" - + def as_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: - body["custom_tags"] = [v.as_dict() for v in self.custom_tags] - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name + if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsRequest: """Deserializes the UpdateEndpointCustomTagsRequest from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), endpoint_name=d.get("endpoint_name", None)) + return cls(custom_tags=_repeated_dict(d, 'custom_tags', CustomTag), endpoint_name=d.get('endpoint_name', None)) + + @dataclass class UpdateEndpointCustomTagsResponse: custom_tags: Optional[List[CustomTag]] = None """All the custom tags that are applied to the vector search endpoint.""" - + name: Optional[str] = None """The name of the vector search endpoint whose custom tags were updated.""" - + def as_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: - body["custom_tags"] = [v.as_dict() for v in self.custom_tags] - if self.name is not None: - body["name"] = self.name + if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.name is not None: - body["name"] = self.name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsResponse: 
"""Deserializes the UpdateEndpointCustomTagsResponse from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), name=d.get("name", None)) + return cls(custom_tags=_repeated_dict(d, 'custom_tags', CustomTag), name=d.get('name', None)) + + @dataclass class UpsertDataResult: failed_primary_keys: Optional[List[str]] = None """List of primary keys for rows that failed to process.""" - + success_row_count: Optional[int] = None """Count of successfully processed rows.""" - + def as_dict(self) -> dict: """Serializes the UpsertDataResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.failed_primary_keys: - body["failed_primary_keys"] = [v for v in self.failed_primary_keys] - if self.success_row_count is not None: - body["success_row_count"] = self.success_row_count + if self.failed_primary_keys: body['failed_primary_keys'] = [v for v in self.failed_primary_keys] + if self.success_row_count is not None: body['success_row_count'] = self.success_row_count return body def as_shallow_dict(self) -> dict: """Serializes the UpsertDataResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.failed_primary_keys: - body["failed_primary_keys"] = self.failed_primary_keys - if self.success_row_count is not None: - body["success_row_count"] = self.success_row_count + if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys + if self.success_row_count is not None: body['success_row_count'] = self.success_row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpsertDataResult: """Deserializes the UpsertDataResult from a dictionary.""" - return cls( - failed_primary_keys=d.get("failed_primary_keys", None), success_row_count=d.get("success_row_count", None) - ) - + return cls(failed_primary_keys=d.get('failed_primary_keys', None), success_row_count=d.get('success_row_count', None)) + -class UpsertDataStatus(Enum): - FAILURE = "FAILURE" - PARTIAL_SUCCESS = "PARTIAL_SUCCESS" - SUCCESS = "SUCCESS" +class UpsertDataStatus(Enum): + + + FAILURE = 'FAILURE' + PARTIAL_SUCCESS = 'PARTIAL_SUCCESS' + SUCCESS = 'SUCCESS' @dataclass class UpsertDataVectorIndexRequest: inputs_json: str """JSON string representing the data to be upserted.""" - + index_name: Optional[str] = None """Name of the vector index where data is to be upserted. 
Must be a Direct Vector Access Index.""" - + def as_dict(self) -> dict: """Serializes the UpsertDataVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.inputs_json is not None: - body["inputs_json"] = self.inputs_json + if self.index_name is not None: body['index_name'] = self.index_name + if self.inputs_json is not None: body['inputs_json'] = self.inputs_json return body def as_shallow_dict(self) -> dict: """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.inputs_json is not None: - body["inputs_json"] = self.inputs_json + if self.index_name is not None: body['index_name'] = self.index_name + if self.inputs_json is not None: body['inputs_json'] = self.inputs_json return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpsertDataVectorIndexRequest: """Deserializes the UpsertDataVectorIndexRequest from a dictionary.""" - return cls(index_name=d.get("index_name", None), inputs_json=d.get("inputs_json", None)) + return cls(index_name=d.get('index_name', None), inputs_json=d.get('inputs_json', None)) + + @dataclass class UpsertDataVectorIndexResponse: result: Optional[UpsertDataResult] = None """Result of the upsert or delete operation.""" - + status: Optional[UpsertDataStatus] = None """Status of the upsert operation.""" - + def as_dict(self) -> dict: """Serializes the UpsertDataVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.result: - body["result"] = self.result.as_dict() - if self.status is not None: - body["status"] = self.status.value + if self.result: body['result'] = self.result.as_dict() + if self.status is not None: body['status'] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpsertDataVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.result: - body["result"] = self.result - if self.status is not None: - body["status"] = self.status + if self.result: body['result'] = self.result + if self.status is not None: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpsertDataVectorIndexResponse: """Deserializes the UpsertDataVectorIndexResponse from a dictionary.""" - return cls(result=_from_dict(d, "result", UpsertDataResult), status=_enum(d, "status", UpsertDataStatus)) + return cls(result=_from_dict(d, 'result', UpsertDataResult), status=_enum(d, 'status', UpsertDataStatus)) + + @dataclass class Value: bool_value: Optional[bool] = None - + list_value: Optional[ListValue] = None - + number_value: Optional[float] = None - + string_value: Optional[str] = None - + struct_value: Optional[Struct] = None - + def as_dict(self) -> dict: """Serializes the Value into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bool_value is not None: - body["bool_value"] = self.bool_value - if self.list_value: - body["list_value"] = self.list_value.as_dict() - if self.number_value is not None: - body["number_value"] = self.number_value - if self.string_value is not None: - body["string_value"] = self.string_value - if self.struct_value: - body["struct_value"] = self.struct_value.as_dict() + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.list_value: body['list_value'] = self.list_value.as_dict() + 
if self.number_value is not None: body['number_value'] = self.number_value + if self.string_value is not None: body['string_value'] = self.string_value + if self.struct_value: body['struct_value'] = self.struct_value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Value into a shallow dictionary of its immediate attributes.""" body = {} - if self.bool_value is not None: - body["bool_value"] = self.bool_value - if self.list_value: - body["list_value"] = self.list_value - if self.number_value is not None: - body["number_value"] = self.number_value - if self.string_value is not None: - body["string_value"] = self.string_value - if self.struct_value: - body["struct_value"] = self.struct_value + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.list_value: body['list_value'] = self.list_value + if self.number_value is not None: body['number_value'] = self.number_value + if self.string_value is not None: body['string_value'] = self.string_value + if self.struct_value: body['struct_value'] = self.struct_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Value: """Deserializes the Value from a dictionary.""" - return cls( - bool_value=d.get("bool_value", None), - list_value=_from_dict(d, "list_value", ListValue), - number_value=d.get("number_value", None), - string_value=d.get("string_value", None), - struct_value=_from_dict(d, "struct_value", Struct), - ) + return cls(bool_value=d.get('bool_value', None), list_value=_from_dict(d, 'list_value', ListValue), number_value=d.get('number_value', None), string_value=d.get('string_value', None), struct_value=_from_dict(d, 'struct_value', Struct)) + + @dataclass class VectorIndex: creator: Optional[str] = None """The user who created the index.""" - + delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecResponse] = None - + direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None - + endpoint_name: Optional[str] = None """Name of the endpoint associated with the index""" - + index_type: Optional[VectorIndexType] = None """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. 
With this model, the user manages index updates.""" - + name: Optional[str] = None """Name of the index""" - + primary_key: Optional[str] = None """Primary key of the index""" - + status: Optional[VectorIndexStatus] = None - + def as_dict(self) -> dict: """Serializes the VectorIndex into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creator is not None: - body["creator"] = self.creator - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict() - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict() - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type.value - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key - if self.status: - body["status"] = self.status.as_dict() + if self.creator is not None: body['creator'] = self.creator + if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec.as_dict() + if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec.as_dict() + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type.value + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key + if self.status: body['status'] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the VectorIndex into a shallow dictionary of its immediate attributes.""" body = {} - if self.creator is not None: - body["creator"] = self.creator - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key - if self.status: - body["status"] = self.status + if self.creator is not None: body['creator'] = self.creator + if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec + if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec + if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.index_type is not None: body['index_type'] = self.index_type + if self.name is not None: body['name'] = self.name + if self.primary_key is not None: body['primary_key'] = self.primary_key + if self.status: body['status'] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VectorIndex: """Deserializes the VectorIndex from a dictionary.""" - return cls( - creator=d.get("creator", None), - delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecResponse), - direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec), - endpoint_name=d.get("endpoint_name", None), - index_type=_enum(d, "index_type", VectorIndexType), - name=d.get("name", None), - primary_key=d.get("primary_key", None), - status=_from_dict(d, "status", VectorIndexStatus), - ) + return 
cls(creator=d.get('creator', None), delta_sync_index_spec=_from_dict(d, 'delta_sync_index_spec', DeltaSyncVectorIndexSpecResponse), direct_access_index_spec=_from_dict(d, 'direct_access_index_spec', DirectAccessVectorIndexSpec), endpoint_name=d.get('endpoint_name', None), index_type=_enum(d, 'index_type', VectorIndexType), name=d.get('name', None), primary_key=d.get('primary_key', None), status=_from_dict(d, 'status', VectorIndexStatus)) + + @dataclass class VectorIndexStatus: index_url: Optional[str] = None """Index API Url to be used to perform operations on the index""" - + indexed_row_count: Optional[int] = None """Number of rows indexed""" - + message: Optional[str] = None """Message associated with the index status""" - + ready: Optional[bool] = None """Whether the index is ready for search""" - + def as_dict(self) -> dict: """Serializes the VectorIndexStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.index_url is not None: - body["index_url"] = self.index_url - if self.indexed_row_count is not None: - body["indexed_row_count"] = self.indexed_row_count - if self.message is not None: - body["message"] = self.message - if self.ready is not None: - body["ready"] = self.ready + if self.index_url is not None: body['index_url'] = self.index_url + if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count + if self.message is not None: body['message'] = self.message + if self.ready is not None: body['ready'] = self.ready return body def as_shallow_dict(self) -> dict: """Serializes the VectorIndexStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.index_url is not None: - body["index_url"] = self.index_url - if self.indexed_row_count is not None: - body["indexed_row_count"] = self.indexed_row_count - if self.message is not None: - body["message"] = self.message - if self.ready is not None: - body["ready"] = self.ready + if self.index_url is not None: body['index_url'] = self.index_url + if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count + if self.message is not None: body['message'] = self.message + if self.ready is not None: body['ready'] = self.ready return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VectorIndexStatus: """Deserializes the VectorIndexStatus from a dictionary.""" - return cls( - index_url=d.get("index_url", None), - indexed_row_count=d.get("indexed_row_count", None), - message=d.get("message", None), - ready=d.get("ready", None), - ) + return cls(index_url=d.get('index_url', None), indexed_row_count=d.get('indexed_row_count', None), message=d.get('message', None), ready=d.get('ready', None)) + + class VectorIndexType(Enum): @@ -1707,241 +1480,267 @@ class VectorIndexType(Enum): data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. 
With this model, the user manages index updates.""" + + DELTA_SYNC = 'DELTA_SYNC' + DIRECT_ACCESS = 'DIRECT_ACCESS' - DELTA_SYNC = "DELTA_SYNC" - DIRECT_ACCESS = "DIRECT_ACCESS" class VectorSearchEndpointsAPI: """**Endpoint**: Represents the compute resources to host vector search indexes.""" - + def __init__(self, api_client): self._api = api_client + - def wait_get_endpoint_vector_search_endpoint_online( - self, - endpoint_name: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[EndpointInfo], None]] = None, - ) -> EndpointInfo: - deadline = time.time() + timeout.total_seconds() - target_states = (EndpointStatusState.ONLINE,) - failure_states = (EndpointStatusState.OFFLINE,) - status_message = "polling..." - attempt = 1 - while time.time() < deadline: - poll = self.get_endpoint(endpoint_name=endpoint_name) - status = poll.endpoint_status.state - status_message = f"current status: {status}" - if poll.endpoint_status: - status_message = poll.endpoint_status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f"failed to reach ONLINE, got {status}: {status_message}" - raise OperationFailed(msg) - prefix = f"endpoint_name={endpoint_name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f"timed out after {timeout}: {status_message}") - - def create_endpoint( - self, name: str, endpoint_type: EndpointType, *, budget_policy_id: Optional[str] = None - ) -> Wait[EndpointInfo]: - """Create an endpoint. + - Create a new endpoint. + + def wait_get_endpoint_vector_search_endpoint_online(self, endpoint_name: str, + timeout=timedelta(minutes=20), callback: Optional[Callable[[EndpointInfo], None]] = None) -> EndpointInfo: + deadline = time.time() + timeout.total_seconds() + target_states = (EndpointStatusState.ONLINE, ) + failure_states = (EndpointStatusState.OFFLINE, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get_endpoint(endpoint_name=endpoint_name) + status = poll.endpoint_status.state + status_message = f'current status: {status}' + if poll.endpoint_status: + status_message = poll.endpoint_status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach ONLINE, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"endpoint_name={endpoint_name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + + + + def create_endpoint(self + , name: str, endpoint_type: EndpointType + , * + , budget_policy_id: Optional[str] = None) -> Wait[EndpointInfo]: + """Create an endpoint. + + Create a new endpoint. + :param name: str Name of the vector search endpoint :param endpoint_type: :class:`EndpointType` Type of endpoint :param budget_policy_id: str (optional) The budget policy id to be applied - + :returns: Long-running operation waiter for :class:`EndpointInfo`. See :method:wait_get_endpoint_vector_search_endpoint_online for more details. 
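        Example: a minimal usage sketch (assumes a configured :class:`WorkspaceClient`
        exposing this service as ``w.vector_search_endpoints``, and that
        ``EndpointType.STANDARD`` is an available endpoint type)::

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service.vectorsearch import EndpointType

            w = WorkspaceClient()
            # create_endpoint returns a Wait; .result() blocks until the endpoint
            # reaches ONLINE (or raises OperationFailed / TimeoutError).
            endpoint = w.vector_search_endpoints.create_endpoint(
                name='my-endpoint',
                endpoint_type=EndpointType.STANDARD,
            ).result()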
""" body = {} - if budget_policy_id is not None: - body["budget_policy_id"] = budget_policy_id - if endpoint_type is not None: - body["endpoint_type"] = endpoint_type.value - if name is not None: - body["name"] = name - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - op_response = self._api.do("POST", "/api/2.0/vector-search/endpoints", body=body, headers=headers) - return Wait( - self.wait_get_endpoint_vector_search_endpoint_online, - response=EndpointInfo.from_dict(op_response), - endpoint_name=op_response["name"], - ) - - def create_endpoint_and_wait( - self, - name: str, - endpoint_type: EndpointType, - *, - budget_policy_id: Optional[str] = None, - timeout=timedelta(minutes=20), - ) -> EndpointInfo: - return self.create_endpoint(budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name).result( - timeout=timeout - ) - - def delete_endpoint(self, endpoint_name: str): - """Delete an endpoint. + if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id + if endpoint_type is not None: body['endpoint_type'] = endpoint_type.value + if name is not None: body['name'] = name + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + op_response = self._api.do('POST','/api/2.0/vector-search/endpoints', body=body + + , headers=headers + ) + return Wait(self.wait_get_endpoint_vector_search_endpoint_online + , response = EndpointInfo.from_dict(op_response) + , endpoint_name=op_response['name']) - Delete a vector search endpoint. + + def create_endpoint_and_wait(self + , name: str, endpoint_type: EndpointType + , * + , budget_policy_id: Optional[str] = None, + timeout=timedelta(minutes=20)) -> EndpointInfo: + return self.create_endpoint(budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name).result(timeout=timeout) + + + + def delete_endpoint(self + , endpoint_name: str + ): + """Delete an endpoint. + + Delete a vector search endpoint. + :param endpoint_name: str Name of the vector search endpoint - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/vector-search/endpoints/{endpoint_name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/vector-search/endpoints/{endpoint_name}", headers=headers) + + + - def get_endpoint(self, endpoint_name: str) -> EndpointInfo: + def get_endpoint(self + , endpoint_name: str + ) -> EndpointInfo: """Get an endpoint. - + Get details for a single vector search endpoint. - + :param endpoint_name: str Name of the endpoint - + :returns: :class:`EndpointInfo` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/vector-search/endpoints/{endpoint_name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/vector-search/endpoints/{endpoint_name}' + + , headers=headers + ) return EndpointInfo.from_dict(res) - def list_endpoints(self, *, page_token: Optional[str] = None) -> Iterator[EndpointInfo]: - """List all endpoints. + + + + def list_endpoints(self + + , * + , page_token: Optional[str] = None) -> Iterator[EndpointInfo]: + """List all endpoints. + List all vector search endpoints in the workspace. 
- + :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`EndpointInfo` """ - + query = {} - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/vector-search/endpoints", query=query, headers=headers) - if "endpoints" in json: - for v in json["endpoints"]: - yield EndpointInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def update_endpoint_budget_policy( - self, endpoint_name: str, budget_policy_id: str - ) -> PatchEndpointBudgetPolicyResponse: - """Update the budget policy of an endpoint. + json = self._api.do('GET','/api/2.0/vector-search/endpoints', query=query + + , headers=headers + ) + if 'endpoints' in json: + for v in json['endpoints']: + yield EndpointInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Update the budget policy of an endpoint + + + + def update_endpoint_budget_policy(self + , endpoint_name: str, budget_policy_id: str + ) -> PatchEndpointBudgetPolicyResponse: + """Update the budget policy of an endpoint. + + Update the budget policy of an endpoint + :param endpoint_name: str Name of the vector search endpoint :param budget_policy_id: str The budget policy id to be applied - + :returns: :class:`PatchEndpointBudgetPolicyResponse` """ body = {} - if budget_policy_id is not None: - body["budget_policy_id"] = budget_policy_id - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/vector-search/endpoints/{endpoint_name}/budget-policy", body=body, headers=headers - ) + if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/vector-search/endpoints/{endpoint_name}/budget-policy', body=body + + , headers=headers + ) return PatchEndpointBudgetPolicyResponse.from_dict(res) - def update_endpoint_custom_tags( - self, endpoint_name: str, custom_tags: List[CustomTag] - ) -> UpdateEndpointCustomTagsResponse: - """Update the custom tags of an endpoint. + + + + def update_endpoint_custom_tags(self + , endpoint_name: str, custom_tags: List[CustomTag] + ) -> UpdateEndpointCustomTagsResponse: + """Update the custom tags of an endpoint. 
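        An illustrative sketch (the ``key``/``value`` fields on ``CustomTag`` are an
        assumption, not confirmed here)::

            from databricks.sdk.service.vectorsearch import CustomTag

            w.vector_search_endpoints.update_endpoint_custom_tags(
                endpoint_name='my-endpoint',
                custom_tags=[CustomTag(key='cost-center', value='search')],
            )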
+ :param endpoint_name: str Name of the vector search endpoint :param custom_tags: List[:class:`CustomTag`] The new custom tags for the vector search endpoint - + :returns: :class:`UpdateEndpointCustomTagsResponse` """ body = {} - if custom_tags is not None: - body["custom_tags"] = [v.as_dict() for v in custom_tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/vector-search/endpoints/{endpoint_name}/tags", body=body, headers=headers - ) + if custom_tags is not None: body['custom_tags'] = [v.as_dict() for v in custom_tags] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/vector-search/endpoints/{endpoint_name}/tags', body=body + + , headers=headers + ) return UpdateEndpointCustomTagsResponse.from_dict(res) - + + class VectorSearchIndexesAPI: """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries. - + There are 2 types of Vector Search indexes: - **Delta Sync Index**: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - **Direct Vector Access Index**: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates.""" - + def __init__(self, api_client): self._api = api_client + - def create_index( - self, - name: str, - endpoint_name: str, - primary_key: str, - index_type: VectorIndexType, - *, - delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None, - direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None, - ) -> VectorIndex: - """Create an index. + - Create a new index. + + + + + def create_index(self + , name: str, endpoint_name: str, primary_key: str, index_type: VectorIndexType + , * + , delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None, direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None) -> VectorIndex: + """Create an index. + + Create a new index. + :param name: str Name of the index :param endpoint_name: str @@ -1957,138 +1756,155 @@ def create_index( Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`. :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional) Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`. 
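        Example: a Delta Sync index sketch (``source_table`` and ``pipeline_type`` on the
        spec are assumptions inferred from this module)::

            from databricks.sdk.service.vectorsearch import (
                DeltaSyncVectorIndexSpecRequest, PipelineType, VectorIndexType)

            index = w.vector_search_indexes.create_index(
                name='main.default.my_index',
                endpoint_name='my-endpoint',
                primary_key='id',
                index_type=VectorIndexType.DELTA_SYNC,
                delta_sync_index_spec=DeltaSyncVectorIndexSpecRequest(
                    source_table='main.default.docs',
                    pipeline_type=PipelineType.TRIGGERED,
                ),
            )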
- + :returns: :class:`VectorIndex` """ body = {} - if delta_sync_index_spec is not None: - body["delta_sync_index_spec"] = delta_sync_index_spec.as_dict() - if direct_access_index_spec is not None: - body["direct_access_index_spec"] = direct_access_index_spec.as_dict() - if endpoint_name is not None: - body["endpoint_name"] = endpoint_name - if index_type is not None: - body["index_type"] = index_type.value - if name is not None: - body["name"] = name - if primary_key is not None: - body["primary_key"] = primary_key - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/vector-search/indexes", body=body, headers=headers) + if delta_sync_index_spec is not None: body['delta_sync_index_spec'] = delta_sync_index_spec.as_dict() + if direct_access_index_spec is not None: body['direct_access_index_spec'] = direct_access_index_spec.as_dict() + if endpoint_name is not None: body['endpoint_name'] = endpoint_name + if index_type is not None: body['index_type'] = index_type.value + if name is not None: body['name'] = name + if primary_key is not None: body['primary_key'] = primary_key + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/vector-search/indexes', body=body + + , headers=headers + ) return VectorIndex.from_dict(res) - def delete_data_vector_index(self, index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse: - """Delete data from index. + + + + def delete_data_vector_index(self + , index_name: str, primary_keys: List[str] + ) -> DeleteDataVectorIndexResponse: + """Delete data from index. + Handles the deletion of data from a specified vector index. - + :param index_name: str Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. :param primary_keys: List[str] List of primary keys for the data to be deleted. - + :returns: :class:`DeleteDataVectorIndexResponse` """ - + query = {} - if primary_keys is not None: - query["primary_keys"] = [v for v in primary_keys] - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "DELETE", f"/api/2.0/vector-search/indexes/{index_name}/delete-data", query=query, headers=headers - ) + if primary_keys is not None: query['primary_keys'] = [v for v in primary_keys] + headers = {'Accept': 'application/json',} + + res = self._api.do('DELETE',f'/api/2.0/vector-search/indexes/{index_name}/delete-data', query=query + + , headers=headers + ) return DeleteDataVectorIndexResponse.from_dict(res) - def delete_index(self, index_name: str): - """Delete an index. + + + + def delete_index(self + , index_name: str + ): + """Delete an index. + Delete an index. - + :param index_name: str Name of the index - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/vector-search/indexes/{index_name}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/vector-search/indexes/{index_name}", headers=headers) + + + - def get_index(self, index_name: str) -> VectorIndex: + def get_index(self + , index_name: str + ) -> VectorIndex: """Get an index. - + Get an index. 
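        For example, fetching an index and checking readiness (sketch, assuming a
        configured client ``w``)::

            index = w.vector_search_indexes.get_index('main.default.my_index')
            if index.status and index.status.ready:
                print('index is ready for search')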
- + :param index_name: str Name of the index - + :returns: :class:`VectorIndex` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/vector-search/indexes/{index_name}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/vector-search/indexes/{index_name}' + + , headers=headers + ) return VectorIndex.from_dict(res) - def list_indexes(self, endpoint_name: str, *, page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]: - """List indexes. + + + + def list_indexes(self + , endpoint_name: str + , * + , page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]: + """List indexes. + List all indexes in the given endpoint. - + :param endpoint_name: str Name of the endpoint :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`MiniVectorIndex` """ - + query = {} - if endpoint_name is not None: - query["endpoint_name"] = endpoint_name - if page_token is not None: - query["page_token"] = page_token - headers = { - "Accept": "application/json", - } - + if endpoint_name is not None: query['endpoint_name'] = endpoint_name + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/vector-search/indexes", query=query, headers=headers) - if "vector_indexes" in json: - for v in json["vector_indexes"]: - yield MiniVectorIndex.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["page_token"] = json["next_page_token"] - - def query_index( - self, - index_name: str, - columns: List[str], - *, - columns_to_rerank: Optional[List[str]] = None, - filters_json: Optional[str] = None, - num_results: Optional[int] = None, - query_text: Optional[str] = None, - query_type: Optional[str] = None, - query_vector: Optional[List[float]] = None, - score_threshold: Optional[float] = None, - ) -> QueryVectorIndexResponse: - """Query an index. + json = self._api.do('GET','/api/2.0/vector-search/indexes', query=query + + , headers=headers + ) + if 'vector_indexes' in json: + for v in json['vector_indexes']: + yield MiniVectorIndex.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + - Query the specified vector index. + + + + def query_index(self + , index_name: str, columns: List[str] + , * + , columns_to_rerank: Optional[List[str]] = None, filters_json: Optional[str] = None, num_results: Optional[int] = None, query_text: Optional[str] = None, query_type: Optional[str] = None, query_vector: Optional[List[float]] = None, score_threshold: Optional[float] = None) -> QueryVectorIndexResponse: + """Query an index. + + Query the specified vector index. + :param index_name: str Name of the vector index to query. :param columns: List[str] @@ -2097,9 +1913,9 @@ def query_index( Column names used to retrieve data to send to the reranker. :param filters_json: str (optional) JSON string representing query filters. - + Example filters: - + - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5. @@ -2114,134 +1930,144 @@ def query_index( vectors. :param score_threshold: float (optional) Threshold for the approximate nearest neighbor search. Defaults to 0.0. 
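        Example: an ANN query sketch (assumes the index exposes ``id`` and ``text``
        columns; all parameters shown are documented above)::

            resp = w.vector_search_indexes.query_index(
                index_name='main.default.my_index',
                columns=['id', 'text'],
                query_text='how do I create an endpoint?',
                filters_json='{"id >": 5}',
                num_results=5,
            )
            if resp.result and resp.result.data_array:
                for row in resp.result.data_array:
                    print(row)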
- + :returns: :class:`QueryVectorIndexResponse` """ body = {} - if columns is not None: - body["columns"] = [v for v in columns] - if columns_to_rerank is not None: - body["columns_to_rerank"] = [v for v in columns_to_rerank] - if filters_json is not None: - body["filters_json"] = filters_json - if num_results is not None: - body["num_results"] = num_results - if query_text is not None: - body["query_text"] = query_text - if query_type is not None: - body["query_type"] = query_type - if query_vector is not None: - body["query_vector"] = [v for v in query_vector] - if score_threshold is not None: - body["score_threshold"] = score_threshold - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/query", body=body, headers=headers) + if columns is not None: body['columns'] = [v for v in columns] + if columns_to_rerank is not None: body['columns_to_rerank'] = [v for v in columns_to_rerank] + if filters_json is not None: body['filters_json'] = filters_json + if num_results is not None: body['num_results'] = num_results + if query_text is not None: body['query_text'] = query_text + if query_type is not None: body['query_type'] = query_type + if query_vector is not None: body['query_vector'] = [v for v in query_vector] + if score_threshold is not None: body['score_threshold'] = score_threshold + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/query', body=body + + , headers=headers + ) return QueryVectorIndexResponse.from_dict(res) - def query_next_page( - self, index_name: str, *, endpoint_name: Optional[str] = None, page_token: Optional[str] = None - ) -> QueryVectorIndexResponse: - """Query next page. + + + + def query_next_page(self + , index_name: str + , * + , endpoint_name: Optional[str] = None, page_token: Optional[str] = None) -> QueryVectorIndexResponse: + """Query next page. + Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request to fetch next page of results. - + :param index_name: str Name of the vector index to query. :param endpoint_name: str (optional) Name of the endpoint. :param page_token: str (optional) Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API. - + :returns: :class:`QueryVectorIndexResponse` """ body = {} - if endpoint_name is not None: - body["endpoint_name"] = endpoint_name - if page_token is not None: - body["page_token"] = page_token - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/vector-search/indexes/{index_name}/query-next-page", body=body, headers=headers - ) + if endpoint_name is not None: body['endpoint_name'] = endpoint_name + if page_token is not None: body['page_token'] = page_token + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/query-next-page', body=body + + , headers=headers + ) return QueryVectorIndexResponse.from_dict(res) - def scan_index( - self, index_name: str, *, last_primary_key: Optional[str] = None, num_results: Optional[int] = None - ) -> ScanVectorIndexResponse: - """Scan an index. 
+ + + + def scan_index(self + , index_name: str + , * + , last_primary_key: Optional[str] = None, num_results: Optional[int] = None) -> ScanVectorIndexResponse: + """Scan an index. + Scan the specified vector index and return the first `num_results` entries after the exclusive `primary_key`. - + :param index_name: str Name of the vector index to scan. :param last_primary_key: str (optional) Primary key of the last entry returned in the previous scan. :param num_results: int (optional) Number of results to return. Defaults to 10. - + :returns: :class:`ScanVectorIndexResponse` """ body = {} - if last_primary_key is not None: - body["last_primary_key"] = last_primary_key - if num_results is not None: - body["num_results"] = num_results - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/scan", body=body, headers=headers) + if last_primary_key is not None: body['last_primary_key'] = last_primary_key + if num_results is not None: body['num_results'] = num_results + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/scan', body=body + + , headers=headers + ) return ScanVectorIndexResponse.from_dict(res) - def sync_index(self, index_name: str): - """Synchronize an index. + + + + def sync_index(self + , index_name: str + ): + """Synchronize an index. + Triggers a synchronization process for a specified vector index. - + :param index_name: str Name of the vector index to synchronize. Must be a Delta Sync Index. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/sync' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/sync", headers=headers) + + + - def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse: + def upsert_data_vector_index(self + , index_name: str, inputs_json: str + ) -> UpsertDataVectorIndexResponse: """Upsert data into an index. - + Handles the upserting of data into a specified vector index. - + :param index_name: str Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index. :param inputs_json: str JSON string representing the data to be upserted. - + :returns: :class:`UpsertDataVectorIndexResponse` """ body = {} - if inputs_json is not None: - body["inputs_json"] = inputs_json - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "POST", f"/api/2.0/vector-search/indexes/{index_name}/upsert-data", body=body, headers=headers - ) + if inputs_json is not None: body['inputs_json'] = inputs_json + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/upsert-data', body=body + + , headers=headers + ) return UpsertDataVectorIndexResponse.from_dict(res) + + + \ No newline at end of file diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index 6753ad880..db0902331 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -1,89 +1,91 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
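The vector-search hunks above regenerate the whole index client (create, query, scan, sync, upsert, delete) with identical request shapes on both sides of the diff; only the generated formatting changes. As a minimal sketch of the surface those hunks describe — assuming a configured `WorkspaceClient` and an existing index, with the index name, filter, and row shape below being placeholders rather than anything taken from this patch:

import json

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
index = "main.default.docs_idx"  # placeholder: an existing vector index

# query_index takes index_name/columns first and everything else keyword-only;
# filters_json uses the JSON operators documented in the docstring above
# ("id <", "id >=", ...).
resp = w.vector_search_indexes.query_index(
    index_name=index,
    columns=["id", "text"],
    query_text="rotate credentials",
    filters_json=json.dumps({"id >=": 5}),
    num_results=10,
)
rows = (resp.result.data_array or []) if resp.result else []
for row in rows:
    print(row)

# Pagination: feed next_page_token back through query_next_page.
if resp.next_page_token:
    more = w.vector_search_indexes.query_next_page(
        index_name=index,
        page_token=resp.next_page_token,
    )

# Data-plane writes require a Direct Vector Access index; the payload is a
# JSON string (the row shape here is assumed for illustration).
w.vector_search_indexes.upsert_data_vector_index(
    index_name=index,
    inputs_json=json.dumps([{"id": 5, "text": "updated row"}]),
)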
from __future__ import annotations - -import logging from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Any, Dict, Iterator, List, Optional +from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO +import time +import random +import logging +import requests +import threading -from ._internal import _enum, _from_dict, _repeated_dict +from ..errors import OperationTimeout, OperationFailed +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token -_LOG = logging.getLogger("databricks.sdk") +_LOG = logging.getLogger('databricks.sdk') -# all definitions in this file are in alphabetical order +# all definitions in this file are in alphabetical order @dataclass class AclItem: principal: str """The principal in which the permission is applied.""" - + permission: AclPermission """The permission level applied to the principal.""" - + def as_dict(self) -> dict: """Serializes the AclItem into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission is not None: - body["permission"] = self.permission.value - if self.principal is not None: - body["principal"] = self.principal + if self.permission is not None: body['permission'] = self.permission.value + if self.principal is not None: body['principal'] = self.principal return body def as_shallow_dict(self) -> dict: """Serializes the AclItem into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission is not None: - body["permission"] = self.permission - if self.principal is not None: - body["principal"] = self.principal + if self.permission is not None: body['permission'] = self.permission + if self.principal is not None: body['principal'] = self.principal return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AclItem: """Deserializes the AclItem from a dictionary.""" - return cls(permission=_enum(d, "permission", AclPermission), principal=d.get("principal", None)) - + return cls(permission=_enum(d, 'permission', AclPermission), principal=d.get('principal', None)) + -class AclPermission(Enum): - MANAGE = "MANAGE" - READ = "READ" - WRITE = "WRITE" +class AclPermission(Enum): + + + MANAGE = 'MANAGE' + READ = 'READ' + WRITE = 'WRITE' @dataclass class AzureKeyVaultSecretScopeMetadata: resource_id: str """The resource id of the azure KeyVault that user wants to associate the scope with.""" - + dns_name: str """The DNS of the KeyVault""" - + def as_dict(self) -> dict: """Serializes the AzureKeyVaultSecretScopeMetadata into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dns_name is not None: - body["dns_name"] = self.dns_name - if self.resource_id is not None: - body["resource_id"] = self.resource_id + if self.dns_name is not None: body['dns_name'] = self.dns_name + if self.resource_id is not None: body['resource_id'] = self.resource_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureKeyVaultSecretScopeMetadata into a shallow dictionary of its immediate attributes.""" body = {} - if self.dns_name is not None: - body["dns_name"] = self.dns_name - if self.resource_id is not None: - body["resource_id"] = self.resource_id + if self.dns_name is not None: body['dns_name'] = self.dns_name + if self.resource_id is not None: body['resource_id'] = self.resource_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureKeyVaultSecretScopeMetadata: """Deserializes the 
AzureKeyVaultSecretScopeMetadata from a dictionary.""" - return cls(dns_name=d.get("dns_name", None), resource_id=d.get("resource_id", None)) + return cls(dns_name=d.get('dns_name', None), resource_id=d.get('resource_id', None)) + + @dataclass @@ -92,275 +94,217 @@ class CreateCredentialsRequest: """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account, depending on which provider you are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported.""" - + personal_access_token: Optional[str] = None """The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - + def as_dict(self) -> dict: """Serializes the CreateCredentialsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsRequest: """Deserializes the CreateCredentialsRequest from a dictionary.""" - return cls( - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - personal_access_token=d.get("personal_access_token", None), - ) + return cls(git_provider=d.get('git_provider', None), git_username=d.get('git_username', None), personal_access_token=d.get('personal_access_token', None)) + + @dataclass class CreateCredentialsResponse: credential_id: int """ID of the credential object in the workspace.""" - + git_provider: str """The Git provider associated with the credential.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account and associated with the credential.""" - + def 
as_dict(self) -> dict: """Serializes the CreateCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsResponse: """Deserializes the CreateCredentialsResponse from a dictionary.""" - return cls( - credential_id=d.get("credential_id", None), - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - ) + return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None)) + + @dataclass class CreateRepoRequest: url: str """URL of the Git repository to be linked.""" - + provider: str """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - + path: Optional[str] = None """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.""" - + sparse_checkout: Optional[SparseCheckout] = None """If specified, the repo will be created with sparse checkout enabled. 
You cannot enable/disable sparse checkout after the repo is created.""" - + def as_dict(self) -> dict: """Serializes the CreateRepoRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.url is not None: - body["url"] = self.url + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.url is not None: - body["url"] = self.url + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRepoRequest: """Deserializes the CreateRepoRequest from a dictionary.""" - return cls( - path=d.get("path", None), - provider=d.get("provider", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), - url=d.get("url", None), - ) + return cls(path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None)) + + @dataclass class CreateRepoResponse: branch: Optional[str] = None """Branch that the Git folder (repo) is checked out to.""" - + head_commit_id: Optional[str] = None """SHA-1 hash representing the commit ID of the current HEAD of the Git folder (repo).""" - + id: Optional[int] = None """ID of the Git folder (repo) object in the workspace.""" - + path: Optional[str] = None """Path of the Git folder (repo) in the workspace.""" - + provider: Optional[str] = None """Git provider of the linked Git repository.""" - + sparse_checkout: Optional[SparseCheckout] = None """Sparse checkout settings for the Git folder (repo).""" - + url: Optional[str] = None """URL of the linked Git repository.""" - + def as_dict(self) -> dict: """Serializes the CreateRepoResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.head_commit_id is not None: - body["head_commit_id"] = self.head_commit_id - if self.id is not None: - body["id"] = self.id - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.url is not None: - body["url"] = self.url + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.url is not 
None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the CreateRepoResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.head_commit_id is not None: - body["head_commit_id"] = self.head_commit_id - if self.id is not None: - body["id"] = self.id - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.url is not None: - body["url"] = self.url + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRepoResponse: """Deserializes the CreateRepoResponse from a dictionary.""" - return cls( - branch=d.get("branch", None), - head_commit_id=d.get("head_commit_id", None), - id=d.get("id", None), - path=d.get("path", None), - provider=d.get("provider", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), - url=d.get("url", None), - ) + return cls(branch=d.get('branch', None), head_commit_id=d.get('head_commit_id', None), id=d.get('id', None), path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None)) + + @dataclass class CreateScope: scope: str """Scope name requested by the user. Scope names are unique.""" - + backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None """The metadata for the secret scope if the type is `AZURE_KEYVAULT`""" - + initial_manage_principal: Optional[str] = None """The principal that is initially granted `MANAGE` permission to the created scope.""" - + scope_backend_type: Optional[ScopeBackendType] = None """The backend type the scope will be created with. 
If not specified, will default to `DATABRICKS`""" - + def as_dict(self) -> dict: """Serializes the CreateScope into a dictionary suitable for use as a JSON request body.""" body = {} - if self.backend_azure_keyvault: - body["backend_azure_keyvault"] = self.backend_azure_keyvault.as_dict() - if self.initial_manage_principal is not None: - body["initial_manage_principal"] = self.initial_manage_principal - if self.scope is not None: - body["scope"] = self.scope - if self.scope_backend_type is not None: - body["scope_backend_type"] = self.scope_backend_type.value + if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault.as_dict() + if self.initial_manage_principal is not None: body['initial_manage_principal'] = self.initial_manage_principal + if self.scope is not None: body['scope'] = self.scope + if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateScope into a shallow dictionary of its immediate attributes.""" body = {} - if self.backend_azure_keyvault: - body["backend_azure_keyvault"] = self.backend_azure_keyvault - if self.initial_manage_principal is not None: - body["initial_manage_principal"] = self.initial_manage_principal - if self.scope is not None: - body["scope"] = self.scope - if self.scope_backend_type is not None: - body["scope_backend_type"] = self.scope_backend_type + if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault + if self.initial_manage_principal is not None: body['initial_manage_principal'] = self.initial_manage_principal + if self.scope is not None: body['scope'] = self.scope + if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateScope: """Deserializes the CreateScope from a dictionary.""" - return cls( - backend_azure_keyvault=_from_dict(d, "backend_azure_keyvault", AzureKeyVaultSecretScopeMetadata), - initial_manage_principal=d.get("initial_manage_principal", None), - scope=d.get("scope", None), - scope_backend_type=_enum(d, "scope_backend_type", ScopeBackendType), - ) + return cls(backend_azure_keyvault=_from_dict(d, 'backend_azure_keyvault', AzureKeyVaultSecretScopeMetadata), initial_manage_principal=d.get('initial_manage_principal', None), scope=d.get('scope', None), scope_backend_type=_enum(d, 'scope_backend_type', ScopeBackendType)) + + @dataclass @@ -379,116 +323,106 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CreateScopeResponse: """Deserializes the CreateScopeResponse from a dictionary.""" return cls() + + @dataclass class CredentialInfo: credential_id: int """ID of the credential object in the workspace.""" - + git_provider: Optional[str] = None """The Git provider associated with the credential.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account and associated with the credential.""" - + def as_dict(self) -> dict: """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: 
body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username return body def as_shallow_dict(self) -> dict: """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: """Deserializes the CredentialInfo from a dictionary.""" - return cls( - credential_id=d.get("credential_id", None), - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - ) + return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None)) + + @dataclass class Delete: path: str """The absolute path of the notebook or directory.""" - + recursive: Optional[bool] = None """The flag that specifies whether to delete the object recursively. It is `false` by default. Please note this deleting directory is not atomic. If it fails in the middle, some of objects under this directory may be deleted and cannot be undone.""" - + def as_dict(self) -> dict: """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive + if self.path is not None: body['path'] = self.path + if self.recursive is not None: body['recursive'] = self.recursive return body def as_shallow_dict(self) -> dict: """Serializes the Delete into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive + if self.path is not None: body['path'] = self.path + if self.recursive is not None: body['recursive'] = self.recursive return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Delete: """Deserializes the Delete from a dictionary.""" - return cls(path=d.get("path", None), recursive=d.get("recursive", None)) + return cls(path=d.get('path', None), recursive=d.get('recursive', None)) + + @dataclass class DeleteAcl: scope: str """The name of the scope to remove permissions from.""" - + principal: str """The principal to remove an existing ACL from.""" - + def as_dict(self) -> dict: """Serializes the DeleteAcl into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope + if self.principal is not None: body['principal'] = self.principal + if self.scope is not None: body['scope'] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope + if self.principal is not None: body['principal'] = self.principal + if self.scope is not None: body['scope'] = self.scope return body 
@classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAcl: """Deserializes the DeleteAcl from a dictionary.""" - return cls(principal=d.get("principal", None), scope=d.get("scope", None)) + return cls(principal=d.get('principal', None), scope=d.get('scope', None)) + + @dataclass @@ -507,6 +441,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteAclResponse: """Deserializes the DeleteAclResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -525,6 +464,11 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialsResponse: """Deserializes the DeleteCredentialsResponse from a dictionary.""" return cls() + + + + + @dataclass @@ -543,6 +487,8 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteRepoResponse: """Deserializes the DeleteRepoResponse from a dictionary.""" return cls() + + @dataclass @@ -561,31 +507,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() + + @dataclass class DeleteScope: scope: str """Name of the scope to delete.""" - + def as_dict(self) -> dict: """Serializes the DeleteScope into a dictionary suitable for use as a JSON request body.""" body = {} - if self.scope is not None: - body["scope"] = self.scope + if self.scope is not None: body['scope'] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the DeleteScope into a shallow dictionary of its immediate attributes.""" body = {} - if self.scope is not None: - body["scope"] = self.scope + if self.scope is not None: body['scope'] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteScope: """Deserializes the DeleteScope from a dictionary.""" - return cls(scope=d.get("scope", None)) + return cls(scope=d.get('scope', None)) + + @dataclass @@ -604,38 +552,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteScopeResponse: """Deserializes the DeleteScopeResponse from a dictionary.""" return cls() + + @dataclass class DeleteSecret: scope: str """The name of the scope that contains the secret to delete.""" - + key: str """Name of the secret to delete.""" - + def as_dict(self) -> dict: """Serializes the DeleteSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope + if self.key is not None: body['key'] = self.key + if self.scope is not None: body['scope'] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope + if self.key is not None: body['key'] = self.key + if self.scope is not None: body['scope'] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteSecret: """Deserializes the DeleteSecret from a dictionary.""" - return cls(key=d.get("key", None), scope=d.get("scope", None)) + return cls(key=d.get('key', None), scope=d.get('scope', None)) + + @dataclass @@ -654,255 +602,254 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteSecretResponse: """Deserializes the DeleteSecretResponse from a dictionary.""" return cls() + + class ExportFormat(Enum): """The format for workspace import and export.""" + + 
AUTO = 'AUTO' + DBC = 'DBC' + HTML = 'HTML' + JUPYTER = 'JUPYTER' + RAW = 'RAW' + R_MARKDOWN = 'R_MARKDOWN' + SOURCE = 'SOURCE' + - AUTO = "AUTO" - DBC = "DBC" - HTML = "HTML" - JUPYTER = "JUPYTER" - RAW = "RAW" - R_MARKDOWN = "R_MARKDOWN" - SOURCE = "SOURCE" @dataclass class ExportResponse: """The request field `direct_download` determines whether a JSON response or binary contents are returned by this endpoint.""" - + content: Optional[str] = None """The base64-encoded content. If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.""" - + file_type: Optional[str] = None """The file type of the exported file.""" - + def as_dict(self) -> dict: """Serializes the ExportResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.file_type is not None: - body["file_type"] = self.file_type + if self.content is not None: body['content'] = self.content + if self.file_type is not None: body['file_type'] = self.file_type return body def as_shallow_dict(self) -> dict: """Serializes the ExportResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.file_type is not None: - body["file_type"] = self.file_type + if self.content is not None: body['content'] = self.content + if self.file_type is not None: body['file_type'] = self.file_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExportResponse: """Deserializes the ExportResponse from a dictionary.""" - return cls(content=d.get("content", None), file_type=d.get("file_type", None)) + return cls(content=d.get('content', None), file_type=d.get('file_type', None)) + + + + + + + + @dataclass class GetCredentialsResponse: credential_id: int """ID of the credential object in the workspace.""" - + git_provider: Optional[str] = None """The Git provider associated with the credential.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account and associated with the credential.""" - + def as_dict(self) -> dict: """Serializes the GetCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username return body def as_shallow_dict(self) -> dict: """Serializes the GetCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetCredentialsResponse: """Deserializes the GetCredentialsResponse from a dictionary.""" - 
return cls( - credential_id=d.get("credential_id", None), - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - ) + return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None)) + + + + + @dataclass class GetRepoPermissionLevelsResponse: permission_levels: Optional[List[RepoPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetRepoPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetRepoPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRepoPermissionLevelsResponse: """Deserializes the GetRepoPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", RepoPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', RepoPermissionsDescription)) + + + + + + + + @dataclass class GetRepoResponse: branch: Optional[str] = None """Branch that the local version of the repo is checked out to.""" - + head_commit_id: Optional[str] = None """SHA-1 hash representing the commit ID of the current HEAD of the repo.""" - + id: Optional[int] = None """ID of the Git folder (repo) object in the workspace.""" - + path: Optional[str] = None """Path of the Git folder (repo) in the workspace.""" - + provider: Optional[str] = None """Git provider of the linked Git repository.""" - + sparse_checkout: Optional[SparseCheckout] = None """Sparse checkout settings for the Git folder (repo).""" - + url: Optional[str] = None """URL of the linked Git repository.""" - + def as_dict(self) -> dict: """Serializes the GetRepoResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.head_commit_id is not None: - body["head_commit_id"] = self.head_commit_id - if self.id is not None: - body["id"] = self.id - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.url is not None: - body["url"] = self.url + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the GetRepoResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.head_commit_id is not None: - body["head_commit_id"] = self.head_commit_id - if self.id 
is not None: - body["id"] = self.id - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.url is not None: - body["url"] = self.url + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRepoResponse: """Deserializes the GetRepoResponse from a dictionary.""" - return cls( - branch=d.get("branch", None), - head_commit_id=d.get("head_commit_id", None), - id=d.get("id", None), - path=d.get("path", None), - provider=d.get("provider", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), - url=d.get("url", None), - ) + return cls(branch=d.get('branch', None), head_commit_id=d.get('head_commit_id', None), id=d.get('id', None), path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None)) + + + + + @dataclass class GetSecretResponse: key: Optional[str] = None """A unique name to identify the secret.""" - + value: Optional[str] = None """The value of the secret in its byte representation.""" - + def as_dict(self) -> dict: """Serializes the GetSecretResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the GetSecretResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.key is not None: body['key'] = self.key + if self.value is not None: body['value'] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetSecretResponse: """Deserializes the GetSecretResponse from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + return cls(key=d.get('key', None), value=d.get('value', None)) + + + + + + + + @dataclass class GetWorkspaceObjectPermissionLevelsResponse: permission_levels: Optional[List[WorkspaceObjectPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: - body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: - body["permission_levels"] = self.permission_levels + if self.permission_levels: body['permission_levels'] = self.permission_levels return body @classmethod def from_dict(cls, d: 
Dict[str, Any]) -> GetWorkspaceObjectPermissionLevelsResponse: """Deserializes the GetWorkspaceObjectPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, "permission_levels", WorkspaceObjectPermissionsDescription)) + return cls(permission_levels=_repeated_dict(d, 'permission_levels', WorkspaceObjectPermissionsDescription)) + + + + + @dataclass @@ -910,13 +857,13 @@ class Import: path: str """The absolute path of the object or directory. Importing a directory is only supported for the `DBC` and `SOURCE` formats.""" - + content: Optional[str] = None """The base64-encoded content. This has a limit of 10 MB. If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. This parameter might be absent, and instead a posted file is used.""" - + format: Optional[ImportFormat] = None """This specifies the format of the file to be imported. @@ -929,67 +876,52 @@ class Import: as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.""" - + language: Optional[Language] = None """The language of the object. This value is set only if the object type is `NOTEBOOK`.""" - + overwrite: Optional[bool] = None """The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC` format, `overwrite` is not supported since it may contain a directory.""" - + def as_dict(self) -> dict: """Serializes the Import into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.format is not None: - body["format"] = self.format.value - if self.language is not None: - body["language"] = self.language.value - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path + if self.content is not None: body['content'] = self.content + if self.format is not None: body['format'] = self.format.value + if self.language is not None: body['language'] = self.language.value + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the Import into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: - body["content"] = self.content - if self.format is not None: - body["format"] = self.format - if self.language is not None: - body["language"] = self.language - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path + if self.content is not None: body['content'] = self.content + if self.format is not None: body['format'] = self.format + if self.language is not None: body['language'] = self.language + if self.overwrite is not None: body['overwrite'] = self.overwrite + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Import: """Deserializes the Import from a dictionary.""" - return cls( - content=d.get("content", None), - format=_enum(d, "format", ImportFormat), - language=_enum(d, "language", Language), - overwrite=d.get("overwrite", None), - path=d.get("path", None), - ) + return cls(content=d.get('content', None), format=_enum(d, 'format', ImportFormat), language=_enum(d, 'language', Language), overwrite=d.get('overwrite', None), path=d.get('path', None)) 
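The `Import` dataclass above, together with the `ExportFormat`/`ImportFormat`/`Language` enums around it, carries the workspace import/export contract: `content` is base64-encoded with a 10 MB cap, `format` controls how the payload is parsed, and `overwrite` is not supported for `DBC` archives. A minimal round-trip sketch against that contract, assuming a configured `WorkspaceClient` and a placeholder target path (the generated method is spelled `import_` to avoid the Python keyword):

import base64

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import ExportFormat, ImportFormat, Language

w = WorkspaceClient()
path = "/Users/someone@example.com/demo"  # placeholder workspace path

# Import: content must be base64 text, mirroring Import.content above.
source = "print('hello from an imported notebook')"
w.workspace.import_(
    path=path,
    content=base64.b64encode(source.encode("utf-8")).decode("ascii"),
    format=ImportFormat.SOURCE,
    language=Language.PYTHON,
    overwrite=True,
)

# Export: the response carries base64 content back (ExportResponse.content).
exported = w.workspace.export(path, format=ExportFormat.SOURCE)
if exported.content is not None:
    print(base64.b64decode(exported.content).decode("utf-8"))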
+ + class ImportFormat(Enum): """The format for workspace import and export.""" - - AUTO = "AUTO" - DBC = "DBC" - HTML = "HTML" - JUPYTER = "JUPYTER" - RAW = "RAW" - R_MARKDOWN = "R_MARKDOWN" - SOURCE = "SOURCE" - + + AUTO = 'AUTO' + DBC = 'DBC' + HTML = 'HTML' + JUPYTER = 'JUPYTER' + RAW = 'RAW' + R_MARKDOWN = 'R_MARKDOWN' + SOURCE = 'SOURCE' @dataclass class ImportResponse: @@ -1007,65 +939,72 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ImportResponse: """Deserializes the ImportResponse from a dictionary.""" return cls() + + class Language(Enum): """The language of notebook.""" + + PYTHON = 'PYTHON' + R = 'R' + SCALA = 'SCALA' + SQL = 'SQL' + - PYTHON = "PYTHON" - R = "R" - SCALA = "SCALA" - SQL = "SQL" @dataclass class ListAclsResponse: items: Optional[List[AclItem]] = None """The associated ACLs rule applied to principals in the given scope.""" - + def as_dict(self) -> dict: """Serializes the ListAclsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items: - body["items"] = [v.as_dict() for v in self.items] + if self.items: body['items'] = [v.as_dict() for v in self.items] return body def as_shallow_dict(self) -> dict: """Serializes the ListAclsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items: - body["items"] = self.items + if self.items: body['items'] = self.items return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAclsResponse: """Deserializes the ListAclsResponse from a dictionary.""" - return cls(items=_repeated_dict(d, "items", AclItem)) + return cls(items=_repeated_dict(d, 'items', AclItem)) + + @dataclass class ListCredentialsResponse: credentials: Optional[List[CredentialInfo]] = None """List of credentials.""" - + def as_dict(self) -> dict: """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credentials: - body["credentials"] = [v.as_dict() for v in self.credentials] + if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials] return body def as_shallow_dict(self) -> dict: """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credentials: - body["credentials"] = self.credentials + if self.credentials: body['credentials'] = self.credentials return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse: """Deserializes the ListCredentialsResponse from a dictionary.""" - return cls(credentials=_repeated_dict(d, "credentials", CredentialInfo)) + return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo)) + + + + + @dataclass @@ -1073,107 +1012,111 @@ class ListReposResponse: next_page_token: Optional[str] = None """Token that can be specified as a query parameter to the `GET /repos` endpoint to retrieve the next page of results.""" - + repos: Optional[List[RepoInfo]] = None """List of Git folders (repos).""" - + def as_dict(self) -> dict: """Serializes the ListReposResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.repos: - body["repos"] = [v.as_dict() for v in self.repos] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.repos: body['repos'] = [v.as_dict() for v in self.repos] return body def as_shallow_dict(self) -> dict: """Serializes the ListReposResponse into a shallow 
dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.repos: - body["repos"] = self.repos + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.repos: body['repos'] = self.repos return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListReposResponse: """Deserializes the ListReposResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), repos=_repeated_dict(d, "repos", RepoInfo)) + return cls(next_page_token=d.get('next_page_token', None), repos=_repeated_dict(d, 'repos', RepoInfo)) + + @dataclass class ListResponse: objects: Optional[List[ObjectInfo]] = None """List of objects.""" - + def as_dict(self) -> dict: """Serializes the ListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.objects: - body["objects"] = [v.as_dict() for v in self.objects] + if self.objects: body['objects'] = [v.as_dict() for v in self.objects] return body def as_shallow_dict(self) -> dict: """Serializes the ListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.objects: - body["objects"] = self.objects + if self.objects: body['objects'] = self.objects return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListResponse: """Deserializes the ListResponse from a dictionary.""" - return cls(objects=_repeated_dict(d, "objects", ObjectInfo)) + return cls(objects=_repeated_dict(d, 'objects', ObjectInfo)) + + @dataclass class ListScopesResponse: scopes: Optional[List[SecretScope]] = None """The available secret scopes.""" - + def as_dict(self) -> dict: """Serializes the ListScopesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.scopes: - body["scopes"] = [v.as_dict() for v in self.scopes] + if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes] return body def as_shallow_dict(self) -> dict: """Serializes the ListScopesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.scopes: - body["scopes"] = self.scopes + if self.scopes: body['scopes'] = self.scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListScopesResponse: """Deserializes the ListScopesResponse from a dictionary.""" - return cls(scopes=_repeated_dict(d, "scopes", SecretScope)) + return cls(scopes=_repeated_dict(d, 'scopes', SecretScope)) + + + + + @dataclass class ListSecretsResponse: secrets: Optional[List[SecretMetadata]] = None """Metadata information of all secrets contained within the given scope.""" - + def as_dict(self) -> dict: """Serializes the ListSecretsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.secrets: - body["secrets"] = [v.as_dict() for v in self.secrets] + if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets] return body def as_shallow_dict(self) -> dict: """Serializes the ListSecretsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.secrets: - body["secrets"] = self.secrets + if self.secrets: body['secrets'] = self.secrets return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListSecretsResponse: """Deserializes the ListSecretsResponse from a dictionary.""" - return cls(secrets=_repeated_dict(d, "secrets", SecretMetadata)) + return cls(secrets=_repeated_dict(d, 'secrets', SecretMetadata)) + + + + + @dataclass @@ -1181,25 +1124,25 @@ class Mkdirs: path: str 
"""The absolute path of the directory. If the parent directories do not exist, it will also create them. If the directory already exists, this command will do nothing and succeed.""" - + def as_dict(self) -> dict: """Serializes the Mkdirs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: - body["path"] = self.path + if self.path is not None: body['path'] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the Mkdirs into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: - body["path"] = self.path + if self.path is not None: body['path'] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Mkdirs: """Deserializes the Mkdirs from a dictionary.""" - return cls(path=d.get("path", None)) + return cls(path=d.get('path', None)) + + @dataclass @@ -1218,149 +1161,119 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> MkdirsResponse: """Deserializes the MkdirsResponse from a dictionary.""" return cls() + + @dataclass class ObjectInfo: """The information of the object in workspace. It will be returned by ``list`` and ``get-status``.""" - + created_at: Optional[int] = None """Only applicable to files. The creation UTC timestamp.""" - + language: Optional[Language] = None """The language of the object. This value is set only if the object type is ``NOTEBOOK``.""" - + modified_at: Optional[int] = None """Only applicable to files, the last modified UTC timestamp.""" - + object_id: Optional[int] = None """Unique identifier for the object.""" - + object_type: Optional[ObjectType] = None """The type of the object in workspace. - `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. - `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`: Lakeview dashboard""" - + path: Optional[str] = None """The absolute path of the object.""" - + resource_id: Optional[str] = None """A unique identifier for the object that is consistent across all Databricks APIs.""" - + size: Optional[int] = None """Only applicable to files. 
The file size in bytes can be returned.""" - + def as_dict(self) -> dict: """Serializes the ObjectInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.language is not None: - body["language"] = self.language.value - if self.modified_at is not None: - body["modified_at"] = self.modified_at - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type.value - if self.path is not None: - body["path"] = self.path - if self.resource_id is not None: - body["resource_id"] = self.resource_id - if self.size is not None: - body["size"] = self.size + if self.created_at is not None: body['created_at'] = self.created_at + if self.language is not None: body['language'] = self.language.value + if self.modified_at is not None: body['modified_at'] = self.modified_at + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type.value + if self.path is not None: body['path'] = self.path + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.size is not None: body['size'] = self.size return body def as_shallow_dict(self) -> dict: """Serializes the ObjectInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: - body["created_at"] = self.created_at - if self.language is not None: - body["language"] = self.language - if self.modified_at is not None: - body["modified_at"] = self.modified_at - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type - if self.path is not None: - body["path"] = self.path - if self.resource_id is not None: - body["resource_id"] = self.resource_id - if self.size is not None: - body["size"] = self.size + if self.created_at is not None: body['created_at'] = self.created_at + if self.language is not None: body['language'] = self.language + if self.modified_at is not None: body['modified_at'] = self.modified_at + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + if self.path is not None: body['path'] = self.path + if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.size is not None: body['size'] = self.size return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ObjectInfo: """Deserializes the ObjectInfo from a dictionary.""" - return cls( - created_at=d.get("created_at", None), - language=_enum(d, "language", Language), - modified_at=d.get("modified_at", None), - object_id=d.get("object_id", None), - object_type=_enum(d, "object_type", ObjectType), - path=d.get("path", None), - resource_id=d.get("resource_id", None), - size=d.get("size", None), - ) + return cls(created_at=d.get('created_at', None), language=_enum(d, 'language', Language), modified_at=d.get('modified_at', None), object_id=d.get('object_id', None), object_type=_enum(d, 'object_type', ObjectType), path=d.get('path', None), resource_id=d.get('resource_id', None), size=d.get('size', None)) + + class ObjectType(Enum): """The type of the object in workspace.""" - - DASHBOARD = "DASHBOARD" - DIRECTORY = "DIRECTORY" - FILE = "FILE" - LIBRARY = "LIBRARY" - NOTEBOOK = "NOTEBOOK" - REPO = "REPO" - + + DASHBOARD = 'DASHBOARD' + DIRECTORY = 'DIRECTORY' + FILE = 'FILE' + 
LIBRARY = 'LIBRARY' + NOTEBOOK = 'NOTEBOOK' + REPO = 'REPO' @dataclass class PutAcl: scope: str """The name of the scope to apply permissions to.""" - + principal: str """The principal in which the permission is applied.""" - + permission: AclPermission """The permission level applied to the principal.""" - + def as_dict(self) -> dict: """Serializes the PutAcl into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission is not None: - body["permission"] = self.permission.value - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope + if self.permission is not None: body['permission'] = self.permission.value + if self.principal is not None: body['principal'] = self.principal + if self.scope is not None: body['scope'] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the PutAcl into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission is not None: - body["permission"] = self.permission - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope + if self.permission is not None: body['permission'] = self.permission + if self.principal is not None: body['principal'] = self.principal + if self.scope is not None: body['scope'] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutAcl: """Deserializes the PutAcl from a dictionary.""" - return cls( - permission=_enum(d, "permission", AclPermission), - principal=d.get("principal", None), - scope=d.get("scope", None), - ) + return cls(permission=_enum(d, 'permission', AclPermission), principal=d.get('principal', None), scope=d.get('scope', None)) + + @dataclass @@ -1379,57 +1292,48 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PutAclResponse: """Deserializes the PutAclResponse from a dictionary.""" return cls() + + @dataclass class PutSecret: scope: str """The name of the scope to which the secret will be associated with.""" - + key: str """A unique name to identify the secret.""" - + bytes_value: Optional[str] = None """If specified, value will be stored as bytes.""" - + string_value: Optional[str] = None """If specified, note that the value will be stored in UTF-8 (MB4) form.""" - + def as_dict(self) -> dict: """Serializes the PutSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bytes_value is not None: - body["bytes_value"] = self.bytes_value - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - if self.string_value is not None: - body["string_value"] = self.string_value + if self.bytes_value is not None: body['bytes_value'] = self.bytes_value + if self.key is not None: body['key'] = self.key + if self.scope is not None: body['scope'] = self.scope + if self.string_value is not None: body['string_value'] = self.string_value return body def as_shallow_dict(self) -> dict: """Serializes the PutSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.bytes_value is not None: - body["bytes_value"] = self.bytes_value - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - if self.string_value is not None: - body["string_value"] = self.string_value + if self.bytes_value is not None: body['bytes_value'] = self.bytes_value + if self.key is not None: body['key'] = self.key + if self.scope is not 
None: body['scope'] = self.scope + if self.string_value is not None: body['string_value'] = self.string_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutSecret: """Deserializes the PutSecret from a dictionary.""" - return cls( - bytes_value=d.get("bytes_value", None), - key=d.get("key", None), - scope=d.get("scope", None), - string_value=d.get("string_value", None), - ) + return cls(bytes_value=d.get('bytes_value', None), key=d.get('key', None), scope=d.get('scope', None), string_value=d.get('string_value', None)) + + @dataclass @@ -1448,489 +1352,410 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PutSecretResponse: """Deserializes the PutSecretResponse from a dictionary.""" return cls() + + @dataclass class RepoAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[RepoPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the RepoAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the RepoAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoAccessControlRequest: """Deserializes the RepoAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", RepoPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', RepoPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class RepoAccessControlResponse: all_permissions: Optional[List[RepoPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of 
the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the RepoAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the RepoAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoAccessControlResponse: """Deserializes the RepoAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", RepoPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', RepoPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class RepoInfo: """Git folder (repo) information.""" - + branch: Optional[str] = None """Name of the current git branch of the git folder (repo).""" - + head_commit_id: Optional[str] = None """Current git commit id of the git folder (repo).""" - + id: Optional[int] = None """Id of the git folder (repo) in the Workspace.""" - + path: Optional[str] = None """Root path of the git folder (repo) in the Workspace.""" - + provider: Optional[str] = None """Git provider of the remote git repository, e.g. 
`gitHub`.""" - + sparse_checkout: Optional[SparseCheckout] = None """Sparse checkout config for the git folder (repo).""" - + url: Optional[str] = None """URL of the remote git repository.""" - + def as_dict(self) -> dict: """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.head_commit_id is not None: - body["head_commit_id"] = self.head_commit_id - if self.id is not None: - body["id"] = self.id - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.url is not None: - body["url"] = self.url + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.url is not None: body['url'] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the RepoInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.head_commit_id is not None: - body["head_commit_id"] = self.head_commit_id - if self.id is not None: - body["id"] = self.id - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.url is not None: - body["url"] = self.url + if self.branch is not None: body['branch'] = self.branch + if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id + if self.id is not None: body['id'] = self.id + if self.path is not None: body['path'] = self.path + if self.provider is not None: body['provider'] = self.provider + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.url is not None: body['url'] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoInfo: """Deserializes the RepoInfo from a dictionary.""" - return cls( - branch=d.get("branch", None), - head_commit_id=d.get("head_commit_id", None), - id=d.get("id", None), - path=d.get("path", None), - provider=d.get("provider", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), - url=d.get("url", None), - ) + return cls(branch=d.get('branch', None), head_commit_id=d.get('head_commit_id', None), id=d.get('id', None), path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None)) + + @dataclass class RepoPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[RepoPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the RepoPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: 
body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the RepoPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoPermission: """Deserializes the RepoPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", RepoPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', RepoPermissionLevel)) + + class RepoPermissionLevel(Enum): """Permission level""" - - CAN_EDIT = "CAN_EDIT" - CAN_MANAGE = "CAN_MANAGE" - CAN_READ = "CAN_READ" - CAN_RUN = "CAN_RUN" - + + CAN_EDIT = 'CAN_EDIT' + CAN_MANAGE = 'CAN_MANAGE' + CAN_READ = 'CAN_READ' + CAN_RUN = 'CAN_RUN' @dataclass class RepoPermissions: access_control_list: Optional[List[RepoAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the RepoPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the RepoPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoPermissions: """Deserializes the RepoPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', RepoAccessControlResponse), object_id=d.get('object_id', None), 
object_type=d.get('object_type', None)) + + @dataclass class RepoPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[RepoPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the RepoPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the RepoPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsDescription: """Deserializes the RepoPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), permission_level=_enum(d, "permission_level", RepoPermissionLevel) - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', RepoPermissionLevel)) + + @dataclass class RepoPermissionsRequest: access_control_list: Optional[List[RepoAccessControlRequest]] = None - + repo_id: Optional[str] = None """The repo for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the RepoPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.repo_id is not None: - body["repo_id"] = self.repo_id + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.repo_id is not None: body['repo_id'] = self.repo_id return body def as_shallow_dict(self) -> dict: """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.repo_id is not None: - body["repo_id"] = self.repo_id + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.repo_id is not None: body['repo_id'] = self.repo_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsRequest: """Deserializes the RepoPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlRequest), - repo_id=d.get("repo_id", None), - ) - + return cls(access_control_list=_repeated_dict(d, 'access_control_list', RepoAccessControlRequest), repo_id=d.get('repo_id', None)) + -class ScopeBackendType(Enum): - AZURE_KEYVAULT = "AZURE_KEYVAULT" - DATABRICKS = "DATABRICKS" +class ScopeBackendType(Enum): + + + AZURE_KEYVAULT = 'AZURE_KEYVAULT' + DATABRICKS = 'DATABRICKS' @dataclass class SecretMetadata: key: Optional[str] = None """A unique name to identify the secret.""" - + last_updated_timestamp: Optional[int] = None """The last updated timestamp 
(in milliseconds) for the secret.""" - + def as_dict(self) -> dict: """Serializes the SecretMetadata into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp + if self.key is not None: body['key'] = self.key + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp return body def as_shallow_dict(self) -> dict: """Serializes the SecretMetadata into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.last_updated_timestamp is not None: - body["last_updated_timestamp"] = self.last_updated_timestamp + if self.key is not None: body['key'] = self.key + if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SecretMetadata: """Deserializes the SecretMetadata from a dictionary.""" - return cls(key=d.get("key", None), last_updated_timestamp=d.get("last_updated_timestamp", None)) + return cls(key=d.get('key', None), last_updated_timestamp=d.get('last_updated_timestamp', None)) + + @dataclass class SecretScope: backend_type: Optional[ScopeBackendType] = None """The type of secret scope backend.""" - + keyvault_metadata: Optional[AzureKeyVaultSecretScopeMetadata] = None """The metadata for the secret scope if the type is `AZURE_KEYVAULT`""" - + name: Optional[str] = None """A unique name to identify the secret scope.""" - + def as_dict(self) -> dict: """Serializes the SecretScope into a dictionary suitable for use as a JSON request body.""" body = {} - if self.backend_type is not None: - body["backend_type"] = self.backend_type.value - if self.keyvault_metadata: - body["keyvault_metadata"] = self.keyvault_metadata.as_dict() - if self.name is not None: - body["name"] = self.name + if self.backend_type is not None: body['backend_type'] = self.backend_type.value + if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata.as_dict() + if self.name is not None: body['name'] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the SecretScope into a shallow dictionary of its immediate attributes.""" body = {} - if self.backend_type is not None: - body["backend_type"] = self.backend_type - if self.keyvault_metadata: - body["keyvault_metadata"] = self.keyvault_metadata - if self.name is not None: - body["name"] = self.name + if self.backend_type is not None: body['backend_type'] = self.backend_type + if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata + if self.name is not None: body['name'] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SecretScope: """Deserializes the SecretScope from a dictionary.""" - return cls( - backend_type=_enum(d, "backend_type", ScopeBackendType), - keyvault_metadata=_from_dict(d, "keyvault_metadata", AzureKeyVaultSecretScopeMetadata), - name=d.get("name", None), - ) + return cls(backend_type=_enum(d, 'backend_type', ScopeBackendType), keyvault_metadata=_from_dict(d, 'keyvault_metadata', AzureKeyVaultSecretScopeMetadata), name=d.get('name', None)) + + @dataclass class SparseCheckout: """Sparse checkout configuration, it contains options like cone patterns.""" - + patterns: Optional[List[str]] = None """List of sparse checkout cone patterns, see [cone mode handling] for details. 
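Taken together, the hunks above show the serialization contract every generated dataclass in this module follows: as_dict() omits unset (None) fields and flattens enums and nested messages for the JSON wire format, as_shallow_dict() keeps enums and nested objects as live Python values, and from_dict() inverts as_dict(). A minimal sketch, assuming these classes import from `databricks.sdk.service.workspace` and using illustrative values:

from databricks.sdk.service.workspace import (
    ScopeBackendType,
    SecretScope,
    SparseCheckout,
)

# Unset fields are dropped and enums serialize via .value in as_dict() ...
scope = SecretScope(backend_type=ScopeBackendType.DATABRICKS, name="ml-team")
assert scope.as_dict() == {"backend_type": "DATABRICKS", "name": "ml-team"}

# ... while as_shallow_dict() keeps the enum object itself.
assert scope.as_shallow_dict()["backend_type"] is ScopeBackendType.DATABRICKS

# from_dict() is the inverse of as_dict(), so payloads round-trip.
assert SecretScope.from_dict(scope.as_dict()) == scope

checkout = SparseCheckout(patterns=["src", "notebooks/etl"])
assert SparseCheckout.from_dict(checkout.as_dict()) == checkout
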
[cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling""" - + def as_dict(self) -> dict: """Serializes the SparseCheckout into a dictionary suitable for use as a JSON request body.""" body = {} - if self.patterns: - body["patterns"] = [v for v in self.patterns] + if self.patterns: body['patterns'] = [v for v in self.patterns] return body def as_shallow_dict(self) -> dict: """Serializes the SparseCheckout into a shallow dictionary of its immediate attributes.""" body = {} - if self.patterns: - body["patterns"] = self.patterns + if self.patterns: body['patterns'] = self.patterns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparseCheckout: """Deserializes the SparseCheckout from a dictionary.""" - return cls(patterns=d.get("patterns", None)) + return cls(patterns=d.get('patterns', None)) + + @dataclass class SparseCheckoutUpdate: """Sparse checkout configuration, it contains options like cone patterns.""" - + patterns: Optional[List[str]] = None """List of sparse checkout cone patterns, see [cone mode handling] for details. [cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling""" - + def as_dict(self) -> dict: """Serializes the SparseCheckoutUpdate into a dictionary suitable for use as a JSON request body.""" body = {} - if self.patterns: - body["patterns"] = [v for v in self.patterns] + if self.patterns: body['patterns'] = [v for v in self.patterns] return body def as_shallow_dict(self) -> dict: """Serializes the SparseCheckoutUpdate into a shallow dictionary of its immediate attributes.""" body = {} - if self.patterns: - body["patterns"] = self.patterns + if self.patterns: body['patterns'] = self.patterns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparseCheckoutUpdate: """Deserializes the SparseCheckoutUpdate from a dictionary.""" - return cls(patterns=d.get("patterns", None)) + return cls(patterns=d.get('patterns', None)) + + @dataclass @@ -1939,58 +1764,47 @@ class UpdateCredentialsRequest: """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - + credential_id: Optional[int] = None """The ID for the corresponding credential to access.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account, depending on which provider you are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported.""" - + personal_access_token: Optional[str] = None """The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. 
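These request dataclasses mirror, field for field, the JSON bodies that the API-client methods later in this file assemble by hand. A short sketch with an illustrative ID and a placeholder token (never hard-code a real one):

from databricks.sdk.service.workspace import UpdateCredentialsRequest

req = UpdateCredentialsRequest(
    credential_id=123,                # illustrative ID
    git_provider="gitHub",
    git_username="octocat",           # illustrative username
    personal_access_token="ghp_...",  # placeholder, not a real token
)

# as_dict() yields exactly the body that GitCredentialsAPI.update()
# sends via PATCH /api/2.0/git-credentials/{credential_id} below.
assert req.as_dict() == {
    "credential_id": 123,
    "git_provider": "gitHub",
    "git_username": "octocat",
    "personal_access_token": "ghp_...",
}
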
[Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - + def as_dict(self) -> dict: """Serializes the UpdateCredentialsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token + if self.credential_id is not None: body['credential_id'] = self.credential_id + if self.git_provider is not None: body['git_provider'] = self.git_provider + if self.git_username is not None: body['git_username'] = self.git_username + if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialsRequest: """Deserializes the UpdateCredentialsRequest from a dictionary.""" - return cls( - credential_id=d.get("credential_id", None), - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - personal_access_token=d.get("personal_access_token", None), - ) + return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None), personal_access_token=d.get('personal_access_token', None)) + + @dataclass @@ -2009,60 +1823,51 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialsResponse: """Deserializes the UpdateCredentialsResponse from a dictionary.""" return cls() + + @dataclass class UpdateRepoRequest: branch: Optional[str] = None """Branch that the local version of the repo is checked out to.""" - + repo_id: Optional[int] = None """ID of the Git folder (repo) object in the workspace.""" - + sparse_checkout: Optional[SparseCheckoutUpdate] = None """If specified, update the sparse checkout settings. The update will fail if sparse checkout is not enabled for the repo.""" - + tag: Optional[str] = None """Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. 
Before committing new changes, you must update the repo to a branch instead of the detached HEAD.""" - + def as_dict(self) -> dict: """Serializes the UpdateRepoRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.repo_id is not None: - body["repo_id"] = self.repo_id - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.tag is not None: - body["tag"] = self.tag + if self.branch is not None: body['branch'] = self.branch + if self.repo_id is not None: body['repo_id'] = self.repo_id + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() + if self.tag is not None: body['tag'] = self.tag return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.repo_id is not None: - body["repo_id"] = self.repo_id - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.tag is not None: - body["tag"] = self.tag + if self.branch is not None: body['branch'] = self.branch + if self.repo_id is not None: body['repo_id'] = self.repo_id + if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout + if self.tag is not None: body['tag'] = self.tag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRepoRequest: """Deserializes the UpdateRepoRequest from a dictionary.""" - return cls( - branch=d.get("branch", None), - repo_id=d.get("repo_id", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckoutUpdate), - tag=d.get("tag", None), - ) + return cls(branch=d.get('branch', None), repo_id=d.get('repo_id', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckoutUpdate), tag=d.get('tag', None)) + + @dataclass @@ -2081,303 +1886,261 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateRepoResponse: """Deserializes the UpdateRepoResponse from a dictionary.""" return cls() + + @dataclass class WorkspaceObjectAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[WorkspaceObjectPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the WorkspaceObjectAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: - body["group_name"] = self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: - body["group_name"] = 
self.group_name - if self.permission_level is not None: - body["permission_level"] = self.permission_level - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectAccessControlRequest: """Deserializes the WorkspaceObjectAccessControlRequest from a dictionary.""" - return cls( - group_name=d.get("group_name", None), - permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class WorkspaceObjectAccessControlResponse: all_permissions: Optional[List[WorkspaceObjectPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the WorkspaceObjectAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: - body["all_permissions"] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: - body["all_permissions"] = self.all_permissions - if self.display_name is not None: - body["display_name"] = self.display_name - if self.group_name is not None: - body["group_name"] = self.group_name - if self.service_principal_name is not None: - body["service_principal_name"] = self.service_principal_name - if self.user_name is not None: - body["user_name"] = self.user_name + if self.all_permissions: body['all_permissions'] = self.all_permissions + if self.display_name is not None: body['display_name'] = self.display_name + if self.group_name is not None: body['group_name'] = self.group_name + if 
self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name + if self.user_name is not None: body['user_name'] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectAccessControlResponse: """Deserializes the WorkspaceObjectAccessControlResponse from a dictionary.""" - return cls( - all_permissions=_repeated_dict(d, "all_permissions", WorkspaceObjectPermission), - display_name=d.get("display_name", None), - group_name=d.get("group_name", None), - service_principal_name=d.get("service_principal_name", None), - user_name=d.get("user_name", None), - ) + return cls(all_permissions=_repeated_dict(d, 'all_permissions', WorkspaceObjectPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) + + @dataclass class WorkspaceObjectPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[WorkspaceObjectPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the WorkspaceObjectPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = [v for v in self.inherited_from_object] - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: - body["inherited"] = self.inherited - if self.inherited_from_object: - body["inherited_from_object"] = self.inherited_from_object - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.inherited is not None: body['inherited'] = self.inherited + if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermission: """Deserializes the WorkspaceObjectPermission from a dictionary.""" - return cls( - inherited=d.get("inherited", None), - inherited_from_object=d.get("inherited_from_object", None), - permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel), - ) + return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel)) + + class WorkspaceObjectPermissionLevel(Enum): """Permission level""" - - CAN_EDIT = "CAN_EDIT" - CAN_MANAGE = "CAN_MANAGE" - CAN_READ = "CAN_READ" - CAN_RUN = "CAN_RUN" - + + CAN_EDIT = 'CAN_EDIT' + CAN_MANAGE = 'CAN_MANAGE' + CAN_READ = 'CAN_READ' + CAN_RUN = 'CAN_RUN' @dataclass class WorkspaceObjectPermissions: access_control_list: Optional[List[WorkspaceObjectAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: 
"""Serializes the WorkspaceObjectPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissions: """Deserializes the WorkspaceObjectPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlResponse), - object_id=d.get("object_id", None), - object_type=d.get("object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', WorkspaceObjectAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) + + @dataclass class WorkspaceObjectPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[WorkspaceObjectPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level.value + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.permission_level is not None: - body["permission_level"] = self.permission_level + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissionsDescription: """Deserializes the WorkspaceObjectPermissionsDescription from a dictionary.""" - return cls( - description=d.get("description", None), - permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel), - ) + return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel)) + + @dataclass class WorkspaceObjectPermissionsRequest: access_control_list: 
Optional[List[WorkspaceObjectAccessControlRequest]] = None - + workspace_object_id: Optional[str] = None """The workspace object for which to get or manage permissions.""" - + workspace_object_type: Optional[str] = None """The workspace object type for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.workspace_object_id is not None: - body["workspace_object_id"] = self.workspace_object_id - if self.workspace_object_type is not None: - body["workspace_object_type"] = self.workspace_object_type + if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id + if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.workspace_object_id is not None: - body["workspace_object_id"] = self.workspace_object_id - if self.workspace_object_type is not None: - body["workspace_object_type"] = self.workspace_object_type + if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id + if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissionsRequest: """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlRequest), - workspace_object_id=d.get("workspace_object_id", None), - workspace_object_type=d.get("workspace_object_type", None), - ) + return cls(access_control_list=_repeated_dict(d, 'access_control_list', WorkspaceObjectAccessControlRequest), workspace_object_id=d.get('workspace_object_id', None), workspace_object_type=d.get('workspace_object_type', None)) + + + + class GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user. - + See [more info]. - + [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, git_provider: str, *, git_username: Optional[str] = None, personal_access_token: Optional[str] = None - ) -> CreateCredentialsResponse: - """Create a credential entry. + + + + + + + def create(self + , git_provider: str + , * + , git_username: Optional[str] = None, personal_access_token: Optional[str] = None) -> CreateCredentialsResponse: + """Create a credential entry. + Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. - + :param git_provider: str Git provider. This field is case-insensitive. 
The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, @@ -2391,89 +2154,106 @@ def create( :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - + :returns: :class:`CreateCredentialsResponse` """ body = {} - if git_provider is not None: - body["git_provider"] = git_provider - if git_username is not None: - body["git_username"] = git_username - if personal_access_token is not None: - body["personal_access_token"] = personal_access_token - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/git-credentials", body=body, headers=headers) + if git_provider is not None: body['git_provider'] = git_provider + if git_username is not None: body['git_username'] = git_username + if personal_access_token is not None: body['personal_access_token'] = personal_access_token + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/git-credentials', body=body + + , headers=headers + ) return CreateCredentialsResponse.from_dict(res) - def delete(self, credential_id: int): - """Delete a credential. + + + + def delete(self + , credential_id: int + ): + """Delete a credential. + Deletes the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/git-credentials/{credential_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/git-credentials/{credential_id}", headers=headers) + + + - def get(self, credential_id: int) -> GetCredentialsResponse: + def get(self + , credential_id: int + ) -> GetCredentialsResponse: """Get a credential entry. - + Gets the Git credential with the specified credential ID. - + :param credential_id: int The ID for the corresponding credential to access. - + :returns: :class:`GetCredentialsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/git-credentials/{credential_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/git-credentials/{credential_id}' + + , headers=headers + ) return GetCredentialsResponse.from_dict(res) + + + + def list(self) -> Iterator[CredentialInfo]: """Get Git credentials. - + Lists the calling user's Git credentials. One credential per user is supported. - + :returns: Iterator over :class:`CredentialInfo` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/git-credentials", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/git-credentials' + , headers=headers + ) parsed = ListCredentialsResponse.from_dict(json).credentials return parsed if parsed is not None else [] + - def update( - self, - credential_id: int, - git_provider: str, - *, - git_username: Optional[str] = None, - personal_access_token: Optional[str] = None, - ): - """Update a credential. 
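Since only one Git credential per user is supported, a robust caller checks list() before choosing between create() and update(). A hedged sketch, assuming the SDK's usual WorkspaceClient entry point exposes this class as `w.git_credentials` (the accessor name is not part of this patch) and that CredentialInfo carries a credential_id field as elsewhere in this file:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

existing = list(w.git_credentials.list())  # at most one entry per user
if not existing:
    w.git_credentials.create(git_provider="gitHub", git_username="octocat")
else:
    w.git_credentials.update(
        credential_id=existing[0].credential_id,
        git_provider="gitHub",
        git_username="octocat",
    )
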
+ + + + def update(self + , credential_id: int, git_provider: str + , * + , git_username: Optional[str] = None, personal_access_token: Optional[str] = None): + """Update a credential. + Updates the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. :param git_provider: str @@ -2489,48 +2269,56 @@ def update( :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - - + + """ body = {} - if git_provider is not None: - body["git_provider"] = git_provider - if git_username is not None: - body["git_username"] = git_username - if personal_access_token is not None: - body["personal_access_token"] = personal_access_token - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/git-credentials/{credential_id}", body=body, headers=headers) - + if git_provider is not None: body['git_provider'] = git_provider + if git_username is not None: body['git_username'] = git_username + if personal_access_token is not None: body['personal_access_token'] = personal_access_token + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/git-credentials/{credential_id}', body=body + + , headers=headers + ) + + + class ReposAPI: """The Repos API allows users to manage their git repos. Users can use the API to access all repos that they have manage permissions on. - + Databricks Repos is a visual Git client in Databricks. It supports common Git operations such a cloning a repository, committing and pushing, pulling, branch management, and visual comparison of diffs when committing. - + Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD.""" - + def __init__(self, api_client): self._api = api_client + - def create( - self, url: str, provider: str, *, path: Optional[str] = None, sparse_checkout: Optional[SparseCheckout] = None - ) -> CreateRepoResponse: - """Create a repo. + + + + + + + def create(self + , url: str, provider: str + , * + , path: Optional[str] = None, sparse_checkout: Optional[SparseCheckout] = None) -> CreateRepoResponse: + """Create a repo. + Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created programmatically must be linked to a remote Git repo, unlike repos created in the browser. - + :param url: str URL of the Git repository to be linked. :param provider: str @@ -2543,103 +2331,135 @@ def create( :param sparse_checkout: :class:`SparseCheckout` (optional) If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable sparse checkout after the repo is created. 
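The sparse-checkout restriction just stated makes create() the only place to opt in. A minimal sketch (the URL and workspace path are illustrative; the `w.repos` accessor is assumed, as above):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import SparseCheckout

w = WorkspaceClient()

repo = w.repos.create(
    url="https://github.com/org/project.git",
    provider="gitHub",
    path="/Workspace/Repos/someone@example.com/project",
    # Opt in now; sparse checkout cannot be toggled after creation.
    sparse_checkout=SparseCheckout(patterns=["src", "docs"]),
)
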
- + :returns: :class:`CreateRepoResponse` """ body = {} - if path is not None: - body["path"] = path - if provider is not None: - body["provider"] = provider - if sparse_checkout is not None: - body["sparse_checkout"] = sparse_checkout.as_dict() - if url is not None: - body["url"] = url - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/repos", body=body, headers=headers) + if path is not None: body['path'] = path + if provider is not None: body['provider'] = provider + if sparse_checkout is not None: body['sparse_checkout'] = sparse_checkout.as_dict() + if url is not None: body['url'] = url + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('POST','/api/2.0/repos', body=body + + , headers=headers + ) return CreateRepoResponse.from_dict(res) - def delete(self, repo_id: int): - """Delete a repo. + + + + def delete(self + , repo_id: int + ): + """Delete a repo. + Deletes the specified repo. - + :param repo_id: int The ID for the corresponding repo to delete. - - + + """ + + headers = {'Accept': 'application/json',} + + self._api.do('DELETE',f'/api/2.0/repos/{repo_id}' + + , headers=headers + ) + - headers = { - "Accept": "application/json", - } - - self._api.do("DELETE", f"/api/2.0/repos/{repo_id}", headers=headers) + + + - def get(self, repo_id: int) -> GetRepoResponse: + def get(self + , repo_id: int + ) -> GetRepoResponse: """Get a repo. - + Returns the repo with the given repo ID. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. - + :returns: :class:`GetRepoResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/repos/{repo_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/repos/{repo_id}' + + , headers=headers + ) return GetRepoResponse.from_dict(res) - def get_permission_levels(self, repo_id: str) -> GetRepoPermissionLevelsResponse: - """Get repo permission levels. + + + + def get_permission_levels(self + , repo_id: str + ) -> GetRepoPermissionLevelsResponse: + """Get repo permission levels. + Gets the permission levels that a user can have on an object. - + :param repo_id: str The repo for which to get or manage permissions. - + :returns: :class:`GetRepoPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/repos/{repo_id}/permissionLevels", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/repos/{repo_id}/permissionLevels' + + , headers=headers + ) return GetRepoPermissionLevelsResponse.from_dict(res) - def get_permissions(self, repo_id: str) -> RepoPermissions: - """Get repo permissions. + + + + def get_permissions(self + , repo_id: str + ) -> RepoPermissions: + """Get repo permissions. + Gets the permissions of a repo. Repos can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. 
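Note the type split visible in the signatures above: the repo CRUD methods take repo_id as an int, while the permissions endpoints take it as a str. A short sketch of reading back an ACL (IDs illustrative, `w.repos` assumed as above):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

levels = w.repos.get_permission_levels(repo_id="123")
perms = w.repos.get_permissions(repo_id="123")
for acl in perms.access_control_list or []:
    print(acl.display_name,
          [p.permission_level for p in acl.all_permissions or []])
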
- + :returns: :class:`RepoPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", f"/api/2.0/permissions/repos/{repo_id}", headers=headers) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/repos/{repo_id}' + + , headers=headers + ) return RepoPermissions.from_dict(res) - def list(self, *, next_page_token: Optional[str] = None, path_prefix: Optional[str] = None) -> Iterator[RepoInfo]: - """Get repos. + + + + def list(self + + , * + , next_page_token: Optional[str] = None, path_prefix: Optional[str] = None) -> Iterator[RepoInfo]: + """Get repos. + Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate through additional pages. - + :param next_page_token: str (optional) Token used to get the next page of results. If not specified, returns the first page of results as well as a next page token if there are more results. @@ -2647,66 +2467,72 @@ def list(self, *, next_page_token: Optional[str] = None, path_prefix: Optional[s Filters repos that have paths starting with the given path prefix. If not provided or when provided an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will be served. - + :returns: Iterator over :class:`RepoInfo` """ - + query = {} - if next_page_token is not None: - query["next_page_token"] = next_page_token - if path_prefix is not None: - query["path_prefix"] = path_prefix - headers = { - "Accept": "application/json", - } - + if next_page_token is not None: query['next_page_token'] = next_page_token + if path_prefix is not None: query['path_prefix'] = path_prefix + headers = {'Accept': 'application/json',} + + + while True: - json = self._api.do("GET", "/api/2.0/repos", query=query, headers=headers) - if "repos" in json: - for v in json["repos"]: - yield RepoInfo.from_dict(v) - if "next_page_token" not in json or not json["next_page_token"]: - return - query["next_page_token"] = json["next_page_token"] - - def set_permissions( - self, repo_id: str, *, access_control_list: Optional[List[RepoAccessControlRequest]] = None - ) -> RepoPermissions: - """Set repo permissions. + json = self._api.do('GET','/api/2.0/repos', query=query + + , headers=headers + ) + if 'repos' in json: + for v in json['repos']: + yield RepoInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['next_page_token'] = json['next_page_token'] + + + + + + def set_permissions(self + , repo_id: str + , * + , access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions: + """Set repo permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. 
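# Editor's note: the list method above hides the next_page_token loop behind an
# iterator, so callers never page manually. A minimal sketch (prefix hypothetical):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Each yielded item is a RepoInfo; pagination happens inside the generator.
for repo in w.repos.list(path_prefix="/Workspace/Repos/someone@example.com"):
    print(repo.id, repo.path, repo.branch)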
:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PUT", f"/api/2.0/permissions/repos/{repo_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/repos/{repo_id}', body=body + + , headers=headers + ) return RepoPermissions.from_dict(res) - def update( - self, - repo_id: int, - *, - branch: Optional[str] = None, - sparse_checkout: Optional[SparseCheckoutUpdate] = None, - tag: Optional[str] = None, - ): - """Update a repo. + + + + def update(self + , repo_id: int + , * + , branch: Optional[str] = None, sparse_checkout: Optional[SparseCheckoutUpdate] = None, tag: Optional[str] = None): + """Update a repo. + Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same branch. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. :param branch: str (optional) @@ -2718,75 +2544,82 @@ def update( Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD. - - + + """ body = {} - if branch is not None: - body["branch"] = branch - if sparse_checkout is not None: - body["sparse_checkout"] = sparse_checkout.as_dict() - if tag is not None: - body["tag"] = tag - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("PATCH", f"/api/2.0/repos/{repo_id}", body=body, headers=headers) - - def update_permissions( - self, repo_id: str, *, access_control_list: Optional[List[RepoAccessControlRequest]] = None - ) -> RepoPermissions: - """Update repo permissions. + if branch is not None: body['branch'] = branch + if sparse_checkout is not None: body['sparse_checkout'] = sparse_checkout.as_dict() + if tag is not None: body['tag'] = tag + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('PATCH',f'/api/2.0/repos/{repo_id}', body=body + + , headers=headers + ) + - Updates the permissions on a repo. Repos can inherit permissions from their root object. + + + + def update_permissions(self + , repo_id: str + , * + , access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions: + """Update repo permissions. + + Updates the permissions on a repo. Repos can inherit permissions from their root object. + :param repo_id: str The repo for which to get or manage permissions. 
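# Editor's note: a sketch of moving a repo between refs with the update call
# above, assuming an existing repo ID (hypothetical). Checking out a tag leaves
# the repo in a detached HEAD state, so the second call returns it to a branch
# before any new commits:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.repos.update(repo_id=12345, tag="v1.2.0")   # detached HEAD after this call
w.repos.update(repo_id=12345, branch="main")  # back on a branch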
:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("PATCH", f"/api/2.0/permissions/repos/{repo_id}", body=body, headers=headers) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/repos/{repo_id}', body=body + + , headers=headers + ) return RepoPermissions.from_dict(res) - + + class SecretsAPI: """The Secrets API allows you to manage secrets, secret scopes, and access permissions. - + Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of directly entering your credentials into a notebook, use Databricks secrets to store your credentials and reference them in notebooks and jobs. - + Administrators, secret creators, and users granted permission can read Databricks secrets. While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets.""" - + def __init__(self, api_client): self._api = api_client + - def create_scope( - self, - scope: str, - *, - backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None, - initial_manage_principal: Optional[str] = None, - scope_backend_type: Optional[ScopeBackendType] = None, - ): - """Create a new secret scope. + + + + + + + def create_scope(self + , scope: str + , * + , backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None, initial_manage_principal: Optional[str] = None, scope_backend_type: Optional[ScopeBackendType] = None): + """Create a new secret scope. + The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. - + :param scope: str Scope name requested by the user. Scope names are unique. :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional) @@ -2795,308 +2628,359 @@ def create_scope( The principal that is initially granted `MANAGE` permission to the created scope. :param scope_backend_type: :class:`ScopeBackendType` (optional) The backend type the scope will be created with. If not specified, will default to `DATABRICKS` - - + + """ body = {} - if backend_azure_keyvault is not None: - body["backend_azure_keyvault"] = backend_azure_keyvault.as_dict() - if initial_manage_principal is not None: - body["initial_manage_principal"] = initial_manage_principal - if scope is not None: - body["scope"] = scope - if scope_backend_type is not None: - body["scope_backend_type"] = scope_backend_type.value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/secrets/scopes/create", body=body, headers=headers) - - def delete_acl(self, scope: str, principal: str): - """Delete an ACL. 
+ if backend_azure_keyvault is not None: body['backend_azure_keyvault'] = backend_azure_keyvault.as_dict() + if initial_manage_principal is not None: body['initial_manage_principal'] = initial_manage_principal + if scope is not None: body['scope'] = scope + if scope_backend_type is not None: body['scope_backend_type'] = scope_backend_type.value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/secrets/scopes/create', body=body + + , headers=headers + ) + - Deletes the given ACL on the given scope. + + + + def delete_acl(self + , scope: str, principal: str + ): + """Delete an ACL. + + Deletes the given ACL on the given scope. + Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to remove permissions from. :param principal: str The principal to remove an existing ACL from. - - + + """ body = {} - if principal is not None: - body["principal"] = principal - if scope is not None: - body["scope"] = scope - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if principal is not None: body['principal'] = principal + if scope is not None: body['scope'] = scope + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/secrets/acls/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/secrets/acls/delete", body=body, headers=headers) + + + - def delete_scope(self, scope: str): + def delete_scope(self + , scope: str + ): """Delete a secret scope. - + Deletes a secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str Name of the scope to delete. - - + + """ body = {} - if scope is not None: - body["scope"] = scope - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if scope is not None: body['scope'] = scope + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/secrets/scopes/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/secrets/scopes/delete", body=body, headers=headers) + + + - def delete_secret(self, scope: str, key: str): + def delete_secret(self + , scope: str, key: str + ): """Delete a secret. - + Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope that contains the secret to delete. :param key: str Name of the secret to delete. 
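# Editor's note: a minimal scope lifecycle sketch for the calls above, assuming
# a configured WorkspaceClient; the scope name is hypothetical:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Scope names are limited to alphanumerics, dashes, underscores and periods,
# and may not exceed 128 characters.
w.secrets.create_scope(scope="my-scope", initial_manage_principal="users")
# ... write and read secrets here ...
w.secrets.delete_scope(scope="my-scope")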
- - + + """ body = {} - if key is not None: - body["key"] = key - if scope is not None: - body["scope"] = scope - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if key is not None: body['key'] = key + if scope is not None: body['scope'] = scope + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/secrets/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/secrets/delete", body=body, headers=headers) + + + - def get_acl(self, scope: str, principal: str) -> AclItem: + def get_acl(self + , scope: str, principal: str + ) -> AclItem: """Get secret ACL details. - + Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to fetch ACL information from. :param principal: str The principal to fetch ACL information for. - + :returns: :class:`AclItem` """ - + query = {} - if principal is not None: - query["principal"] = principal - if scope is not None: - query["scope"] = scope - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/secrets/acls/get", query=query, headers=headers) + if principal is not None: query['principal'] = principal + if scope is not None: query['scope'] = scope + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/secrets/acls/get', query=query + + , headers=headers + ) return AclItem.from_dict(res) - def get_secret(self, scope: str, key: str) -> GetSecretResponse: - """Get a secret. + + + + def get_secret(self + , scope: str, key: str + ) -> GetSecretResponse: + """Get a secret. + Gets the bytes representation of a secret value for the specified scope and key. - + Users need the READ permission to make this call. - + Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the caller in DBUtils and the type the data is decoded into. - + Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. - + :param scope: str The name of the scope to fetch secret information from. :param key: str The key to fetch secret for. - + :returns: :class:`GetSecretResponse` """ - + query = {} - if key is not None: - query["key"] = key - if scope is not None: - query["scope"] = scope - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/secrets/get", query=query, headers=headers) + if key is not None: query['key'] = key + if scope is not None: query['scope'] = scope + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/secrets/get', query=query + + , headers=headers + ) return GetSecretResponse.from_dict(res) - def list_acls(self, scope: str) -> Iterator[AclItem]: - """Lists ACLs. + + + + def list_acls(self + , scope: str + ) -> Iterator[AclItem]: + """Lists ACLs. + List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to fetch ACL information from. 
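# Editor's note: a sketch of reading a secret back with get_secret above,
# assuming the caller has READ permission and that the response's `value` field
# carries the payload base64-encoded (scope and key are hypothetical):

import base64

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.secrets.get_secret(scope="my-scope", key="db-password")
plaintext = base64.b64decode(resp.value).decode("utf-8")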
- + :returns: Iterator over :class:`AclItem` """ - + query = {} - if scope is not None: - query["scope"] = scope - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/secrets/acls/list", query=query, headers=headers) + if scope is not None: query['scope'] = scope + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/secrets/acls/list', query=query + + , headers=headers + ) parsed = ListAclsResponse.from_dict(json).items return parsed if parsed is not None else [] + + + + + def list_scopes(self) -> Iterator[SecretScope]: """List all scopes. - + Lists all secret scopes available in the workspace. - + Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :returns: Iterator over :class:`SecretScope` """ - - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/secrets/scopes/list", headers=headers) + + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/secrets/scopes/list' + , headers=headers + ) parsed = ListScopesResponse.from_dict(json).scopes return parsed if parsed is not None else [] + - def list_secrets(self, scope: str) -> Iterator[SecretMetadata]: - """List secret keys. + + + + def list_secrets(self + , scope: str + ) -> Iterator[SecretMetadata]: + """List secret keys. + Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. - + The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to list secrets within. - + :returns: Iterator over :class:`SecretMetadata` """ - + query = {} - if scope is not None: - query["scope"] = scope - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/secrets/list", query=query, headers=headers) + if scope is not None: query['scope'] = scope + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/secrets/list', query=query + + , headers=headers + ) parsed = ListSecretsResponse.from_dict(json).secrets return parsed if parsed is not None else [] + - def put_acl(self, scope: str, principal: str, permission: AclPermission): - """Create/update an ACL. + + + + def put_acl(self + , scope: str, principal: str, permission: AclPermission + ): + """Create/update an ACL. + Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. - + In general, a user or group will use the most powerful permission available to them, and permissions are ordered as follows: - + * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what secrets are available. - + Note that in general, secret values can only be read from within a command on a cluster (for example, through a notebook). There is no API to read the actual secret value material outside of a cluster. However, the user's permission will be applied based on who is executing the command, and they must have at least READ permission. - + Users must have the `MANAGE` permission to invoke this API. 
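# Editor's note: the list calls above return plain iterators with no paging
# tokens; a small sketch that walks every scope and its keys (metadata only,
# secret values are never returned by list_secrets):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for scope in w.secrets.list_scopes():
    for meta in w.secrets.list_secrets(scope=scope.name):
        # last_updated_timestamp is milliseconds since epoch.
        print(scope.name, meta.key, meta.last_updated_timestamp)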
- + The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to apply permissions to. :param principal: str The principal to which the permission is applied. :param permission: :class:`AclPermission` The permission level applied to the principal. - - + + """ body = {} - if permission is not None: - body["permission"] = permission.value - if principal is not None: - body["principal"] = principal - if scope is not None: - body["scope"] = scope - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/secrets/acls/put", body=body, headers=headers) - - def put_secret( - self, scope: str, key: str, *, bytes_value: Optional[str] = None, string_value: Optional[str] = None - ): - """Add a secret. + if permission is not None: body['permission'] = permission.value + if principal is not None: body['principal'] = principal + if scope is not None: body['scope'] = scope + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/secrets/acls/put', body=body + + , headers=headers + ) + + + + + + def put_secret(self + , scope: str, key: str + , * + , bytes_value: Optional[str] = None, string_value: Optional[str] = None): + """Add a secret. + Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using the secret scope's encryption settings before storing it. + You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. + The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine the value returned when the secret value is requested. Exactly one must be specified. + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope that the secret will be associated with. :param key: str @@ -3105,207 +2989,232 @@ def put_secret( If specified, value will be stored as bytes. :param string_value: str (optional) If specified, note that the value will be stored in UTF-8 (MB4) form.
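# Editor's note: a sketch of granting READ on a scope with put_acl above,
# assuming the caller holds MANAGE on the scope; the principal name is
# hypothetical:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import AclPermission

w = WorkspaceClient()
# Overwrites any existing ACL for this principal on the scope.
w.secrets.put_acl(scope="my-scope", principal="data-team",
                  permission=AclPermission.READ)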
- - + + """ body = {} - if bytes_value is not None: - body["bytes_value"] = bytes_value - if key is not None: - body["key"] = key - if scope is not None: - body["scope"] = scope - if string_value is not None: - body["string_value"] = string_value - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/secrets/put", body=body, headers=headers) - + if bytes_value is not None: body['bytes_value'] = bytes_value + if key is not None: body['key'] = key + if scope is not None: body['scope'] = scope + if string_value is not None: body['string_value'] = string_value + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/secrets/put', body=body + + , headers=headers + ) + + + class WorkspaceAPI: """The Workspace API allows you to list, import, export, and delete notebooks and folders. - + A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text.""" - + def __init__(self, api_client): self._api = api_client + - def delete(self, path: str, *, recursive: Optional[bool] = None): - """Delete a workspace object. + + + + + + + def delete(self + , path: str + , * + , recursive: Optional[bool] = None): + """Delete a workspace object. + Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. - + Object deletion cannot be undone and deleting a directory recursively is not atomic. - + :param path: str The absolute path of the notebook or directory. :param recursive: bool (optional) The flag that specifies whether to delete the object recursively. It is `false` by default. Please note that deleting a directory is not atomic. If it fails in the middle, some of the objects under this directory may be deleted and cannot be undone. - - + + """ body = {} - if path is not None: - body["path"] = path - if recursive is not None: - body["recursive"] = recursive - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } + if path is not None: body['path'] = path + if recursive is not None: body['recursive'] = recursive + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/workspace/delete', body=body + + , headers=headers + ) + - self._api.do("POST", "/api/2.0/workspace/delete", body=body, headers=headers) + + + - def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportResponse: + def export(self + , path: str + , * + , format: Optional[ExportFormat] = None) -> ExportResponse: """Export a workspace object. - + Exports an object or the contents of an entire directory. - + If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + If the exported data would exceed the size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`. Currently, this API does not support exporting a library. - + :param path: str The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`, `SOURCE`, and `AUTO` format. :param format: :class:`ExportFormat` (optional) This specifies the format of the exported file. By default, this is `SOURCE`. - + The value is case sensitive. - + - `SOURCE`: The notebook is exported as source code.
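# Editor's note: a sketch of writing secrets with put_secret above, following
# the rule that exactly one of string_value or bytes_value may be set. Values
# are hypothetical, and the assumption here is that bytes_value is passed
# base64-encoded:

import base64

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Store a UTF-8 string value...
w.secrets.put_secret(scope="my-scope", key="db-password", string_value="s3cr3t")
# ...or raw bytes, base64-encoded into the string-typed field.
w.secrets.put_secret(scope="my-scope", key="db-cert",
                     bytes_value=base64.b64encode(b"\x00\x01\x02").decode("ascii"))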
Directory exports will not include non-notebook entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format. Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type. Directory exports will include notebooks and workspace files. - + :returns: :class:`ExportResponse` """ - + query = {} - if format is not None: - query["format"] = format.value - if path is not None: - query["path"] = path - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/workspace/export", query=query, headers=headers) + if format is not None: query['format'] = format.value + if path is not None: query['path'] = path + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/workspace/export', query=query + + , headers=headers + ) return ExportResponse.from_dict(res) - def get_permission_levels( - self, workspace_object_type: str, workspace_object_id: str - ) -> GetWorkspaceObjectPermissionLevelsResponse: - """Get workspace object permission levels. + + + + def get_permission_levels(self + , workspace_object_type: str, workspace_object_id: str + ) -> GetWorkspaceObjectPermissionLevelsResponse: + """Get workspace object permission levels. + Gets the permission levels that a user can have on an object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`GetWorkspaceObjectPermissionLevelsResponse` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", - f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}/permissionLevels", - headers=headers, - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}/permissionLevels' + + , headers=headers + ) return GetWorkspaceObjectPermissionLevelsResponse.from_dict(res) - def get_permissions(self, workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions: - """Get workspace object permissions. + + + + def get_permissions(self + , workspace_object_type: str, workspace_object_id: str + ) -> WorkspaceObjectPermissions: + """Get workspace object permissions. + Gets the permissions of a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`WorkspaceObjectPermissions` """ - - headers = { - "Accept": "application/json", - } - - res = self._api.do( - "GET", f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}", headers=headers - ) + + headers = {'Accept': 'application/json',} + + res = self._api.do('GET',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}' + + , headers=headers + ) return WorkspaceObjectPermissions.from_dict(res) - def get_status(self, path: str) -> ObjectInfo: - """Get status. + + + + def get_status(self + , path: str + ) -> ObjectInfo: + """Get status. + Gets the status of an object or a directory. 
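# Editor's note: a sketch of exporting a notebook with the export call above,
# assuming ExportResponse.content holds the payload base64-encoded; the path is
# hypothetical:

import base64

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import ExportFormat

w = WorkspaceClient()
resp = w.workspace.export(path="/Users/someone@example.com/my-notebook",
                          format=ExportFormat.SOURCE)
source = base64.b64decode(resp.content).decode("utf-8")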
If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The absolute path of the notebook or directory. - + :returns: :class:`ObjectInfo` """ - + query = {} - if path is not None: - query["path"] = path - headers = { - "Accept": "application/json", - } - - res = self._api.do("GET", "/api/2.0/workspace/get-status", query=query, headers=headers) + if path is not None: query['path'] = path + headers = {'Accept': 'application/json',} + + res = self._api.do('GET','/api/2.0/workspace/get-status', query=query + + , headers=headers + ) return ObjectInfo.from_dict(res) - def import_( - self, - path: str, - *, - content: Optional[str] = None, - format: Optional[ImportFormat] = None, - language: Optional[Language] = None, - overwrite: Optional[bool] = None, - ): - """Import a workspace object. + + + + def import_(self + , path: str + , * + , content: Optional[str] = None, format: Optional[ImportFormat] = None, language: Optional[Language] = None, overwrite: Optional[bool] = None): + """Import a workspace object. + Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you must set the `language` field. - + :param path: str The absolute path of the object or directory. Importing a directory is only supported for the `DBC` and `SOURCE` formats. :param content: str (optional) The base64-encoded content. This has a limit of 10 MB. - + If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. This parameter might be absent, and instead a posted file is used. :param format: :class:`ImportFormat` (optional) This specifies the format of the file to be imported. - + The value is case sensitive. - + - `AUTO`: The item is imported depending on an analysis of the item's extension and the header content provided in the request. If the item is imported as a notebook, then the item's extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`: @@ -3317,143 +3226,151 @@ def import_( :param overwrite: bool (optional) The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC` format, `overwrite` is not supported since it may contain a directory. - - + + """ body = {} - if content is not None: - body["content"] = content - if format is not None: - body["format"] = format.value - if language is not None: - body["language"] = language.value - if overwrite is not None: - body["overwrite"] = overwrite - if path is not None: - body["path"] = path - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/workspace/import", body=body, headers=headers) - - def list(self, path: str, *, notebooks_modified_after: Optional[int] = None) -> Iterator[ObjectInfo]: - """List contents. 
+ if content is not None: body['content'] = content + if format is not None: body['format'] = format.value + if language is not None: body['language'] = language.value + if overwrite is not None: body['overwrite'] = overwrite + if path is not None: body['path'] = path + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/workspace/import', body=body + + , headers=headers + ) + + + + + + def list(self + , path: str + , * + , notebooks_modified_after: Optional[int] = None) -> Iterator[ObjectInfo]: + """List contents. + Lists the contents of a directory, or the object if it is not a directory. If the input path does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The absolute path of the notebook or directory. :param notebooks_modified_after: int (optional) UTC timestamp in milliseconds - + :returns: Iterator over :class:`ObjectInfo` """ - + query = {} - if notebooks_modified_after is not None: - query["notebooks_modified_after"] = notebooks_modified_after - if path is not None: - query["path"] = path - headers = { - "Accept": "application/json", - } - - json = self._api.do("GET", "/api/2.0/workspace/list", query=query, headers=headers) + if notebooks_modified_after is not None: query['notebooks_modified_after'] = notebooks_modified_after + if path is not None: query['path'] = path + headers = {'Accept': 'application/json',} + + json = self._api.do('GET','/api/2.0/workspace/list', query=query + + , headers=headers + ) parsed = ListResponse.from_dict(json).objects return parsed if parsed is not None else [] + - def mkdirs(self, path: str): - """Create a directory. + + + + def mkdirs(self + , path: str + ): + """Create a directory. + Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`. - + Note that if this operation fails it may have succeeded in creating some of the necessary parent directories. - + :param path: str The absolute path of the directory. If the parent directories do not exist, it will also create them. If the directory already exists, this command will do nothing and succeed. - - + + """ body = {} - if path is not None: - body["path"] = path - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - self._api.do("POST", "/api/2.0/workspace/mkdirs", body=body, headers=headers) - - def set_permissions( - self, - workspace_object_type: str, - workspace_object_id: str, - *, - access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None, - ) -> WorkspaceObjectPermissions: - """Set workspace object permissions. + if path is not None: body['path'] = path + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + self._api.do('POST','/api/2.0/workspace/mkdirs', body=body + + , headers=headers + ) + + + + + + def set_permissions(self + , workspace_object_type: str, workspace_object_id: str + , * + , access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None) -> WorkspaceObjectPermissions: + """Set workspace object permissions. + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. 
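# Editor's note: a round-trip sketch for mkdirs, import_ and list above: create
# a directory, import a one-line Python notebook as SOURCE, then list what is
# there (paths are hypothetical):

import base64

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import ImportFormat, Language

w = WorkspaceClient()
w.workspace.mkdirs(path="/Users/someone@example.com/demo")
w.workspace.import_(path="/Users/someone@example.com/demo/hello",
                    content=base64.b64encode(b"print('hello')").decode("ascii"),
                    format=ImportFormat.SOURCE,
                    language=Language.PYTHON,  # required for single-file SOURCE imports
                    overwrite=True)
for obj in w.workspace.list(path="/Users/someone@example.com/demo"):
    print(obj.path, obj.object_type)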
:param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PUT", f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PUT',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}', body=body + + , headers=headers + ) return WorkspaceObjectPermissions.from_dict(res) - def update_permissions( - self, - workspace_object_type: str, - workspace_object_id: str, - *, - access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None, - ) -> WorkspaceObjectPermissions: - """Update workspace object permissions. + + + + def update_permissions(self + , workspace_object_type: str, workspace_object_id: str + , * + , access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None) -> WorkspaceObjectPermissions: + """Update workspace object permissions. + Updates the permissions on a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions` """ body = {} - if access_control_list is not None: - body["access_control_list"] = [v.as_dict() for v in access_control_list] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do( - "PATCH", f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}", body=body, headers=headers - ) + if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json','Content-Type': 'application/json',} + + res = self._api.do('PATCH',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}', body=body + + , headers=headers + ) return WorkspaceObjectPermissions.from_dict(res) + + + \ No newline at end of file diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst index b3bda9c61..181b91cc3 100644 --- a/docs/account/billing/billable_usage.rst +++ b/docs/account/billing/billable_usage.rst @@ -21,16 +21,16 @@ resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09") Return billable usage logs. - + Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see [CSV file schema]. Note that this method might take multiple minutes to complete. - + **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. 
- + [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema - + :param start_month: str Format: `YYYY-MM`. First month to return billable usage logs for. This field is required. :param end_month: str @@ -39,6 +39,6 @@ Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators. Handle this information with care. Defaults to false. - + :returns: :class:`DownloadResponse` \ No newline at end of file diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst index d77eaa4a2..abf07b663 100644 --- a/docs/account/billing/budget_policy.rst +++ b/docs/account/billing/budget_policy.rst @@ -9,9 +9,9 @@ .. py:method:: create( [, policy: Optional[BudgetPolicy], request_id: Optional[str]]) -> BudgetPolicy Create a budget policy. - + Creates a new policy. - + :param policy: :class:`BudgetPolicy` (optional) The policy to create. `policy_id` needs to be empty as it will be generated. `policy_name` must be provided, custom_tags may need to be provided depending on the cloud provider. All other fields are @@ -19,40 +19,40 @@ :param request_id: str (optional) A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is recommended. This request is only idempotent if a `request_id` is provided. - + :returns: :class:`BudgetPolicy` .. py:method:: delete(policy_id: str) Delete a budget policy. - + Deletes a policy. - + :param policy_id: str The Id of the policy. - - + + .. py:method:: get(policy_id: str) -> BudgetPolicy Get a budget policy. - + Retrieves a policy by its ID. - + :param policy_id: str The Id of the policy. - + :returns: :class:`BudgetPolicy` .. py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy] List policies. - + Lists all policies. Policies are returned in the alphabetically ascending order of their names. - + :param filter_by: :class:`Filter` (optional) A filter to apply to the list of policies. :param page_size: int (optional) @@ -61,27 +61,27 @@ :param page_token: str (optional) A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the subsequent page. If unspecified, the first page will be returned. - + When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the call that provided the page token. :param sort_spec: :class:`SortSpec` (optional) The sort specification. - + :returns: Iterator over :class:`BudgetPolicy` .. py:method:: update(policy_id: str, policy: BudgetPolicy [, limit_config: Optional[LimitConfig]]) -> BudgetPolicy Update a budget policy. - + Updates a policy. - + :param policy_id: str The Id of the policy. This field is generated by Databricks and globally unique. :param policy: :class:`BudgetPolicy` Contains the BudgetPolicy details. :param limit_config: :class:`LimitConfig` (optional) DEPRECATED. This is a redundant field, as LimitConfig is part of the BudgetPolicy - + :returns: :class:`BudgetPolicy` \ No newline at end of file diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst index cf87d1424..6eba7d36e 100644 --- a/docs/account/billing/budgets.rst +++ b/docs/account/billing/budgets.rst @@ -57,27 +57,27 @@ a.budgets.delete(budget_id=created.budget.budget_configuration_id) Create new budget. - + Create a new budget configuration for an account.
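(Editor's note: a minimal usage sketch for the budget policy methods above, assuming the account client exposes them as `a.budget_policy` and that `billing.BudgetPolicy` accepts `policy_name`; the name is hypothetical.)

    from databricks.sdk import AccountClient
    from databricks.sdk.service import billing

    a = AccountClient()
    created = a.budget_policy.create(policy=billing.BudgetPolicy(policy_name="team-budget"))
    fetched = a.budget_policy.get(policy_id=created.policy_id)
    a.budget_policy.delete(policy_id=created.policy_id)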
For full details, see https://docs.databricks.com/en/admin/account-settings/budgets.html. - + :param budget: :class:`CreateBudgetConfigurationBudget` Properties of the new budget configuration. - + :returns: :class:`CreateBudgetConfigurationResponse` .. py:method:: delete(budget_id: str) Delete budget. - + Deletes a budget configuration for an account. Both account and budget configuration are specified by ID. This cannot be undone. - + :param budget_id: str The Databricks budget configuration ID. - - + + .. py:method:: get(budget_id: str) -> GetBudgetConfigurationResponse @@ -131,12 +131,12 @@ a.budgets.delete(budget_id=created.budget.budget_configuration_id) Get budget. - + Gets a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The budget configuration ID - + :returns: :class:`GetBudgetConfigurationResponse` @@ -155,13 +155,13 @@ all = a.budgets.list(billing.ListBudgetConfigurationsRequest()) Get all budgets. - + Gets all budgets associated with this account. - + :param page_token: str (optional) A page token received from a previous get all budget configurations call. This token can be used to retrieve the subsequent page. Requests first page if absent. - + :returns: Iterator over :class:`BudgetConfiguration` @@ -243,14 +243,14 @@ a.budgets.delete(budget_id=created.budget.budget_configuration_id) Modify budget. - + Updates a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The Databricks budget configuration ID. :param budget: :class:`UpdateBudgetConfigurationBudget` The updated budget. This will overwrite the budget specified by the budget ID. - + :returns: :class:`UpdateBudgetConfigurationResponse` \ No newline at end of file diff --git a/docs/account/billing/log_delivery.rst b/docs/account/billing/log_delivery.rst index ec5e0a945..93868765d 100644 --- a/docs/account/billing/log_delivery.rst +++ b/docs/account/billing/log_delivery.rst @@ -4,54 +4,10 @@ .. py:class:: LogDeliveryAPI - These APIs manage log delivery configurations for this account. The two supported log types for this API - are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all - account ID types. - - Log delivery works with all account types. However, if your account is on the E2 version of the platform - or on a select custom plan that allows multiple workspaces per account, you can optionally configure - different storage destinations for each workspace. Log delivery status is also provided to know the latest - status of log delivery attempts. The high-level flow of billable usage delivery: - - 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using - Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create) - that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For - full details, including the required IAM role policies and trust relationship, see [Billable usage log - delivery]. Using Databricks APIs, call the Account API to create a [credential configuration - object](:method:Credentials/Create) that uses the IAM role"s ARN. 3. **Create log delivery - configuration**: Using Databricks APIs, call the Account API to [create a log delivery - configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from - previous steps. 
You can specify if the logs should include all events of that log type in your account - (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery). - Account level log delivery applies to all current and future workspaces plus account level logs, while - workspace level log delivery solely delivers logs related to the specified workspaces. You can create - multiple types of delivery configurations per account. - - For billable usage delivery: * For more information about billable usage logs, see [Billable usage log - delivery]. For the CSV schema, see the [Usage page]. * The delivery location is - `//billable-usage/csv/`, where `` is the name of the optional delivery path - prefix you set up during log delivery configuration. Files are named - `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to specific - workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an - _account level_ delivery configuration that delivers logs for all current and future workspaces in your - account. * The files are delivered daily by overwriting the month's CSV file for each workspace. - - For audit log delivery: * For more information about about audit log delivery, see [Audit log delivery], - which includes information about the used JSON schema. * The delivery location is - `//workspaceId=/date=/auditlogs_.json`. - Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the - audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for - those workspaces are delivered. If the log delivery configuration applies to the entire account (_account - level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all - workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. * - Auditable events are typically available in logs within 15 minutes. - - [Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html - [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html - - .. py:method:: create( [, log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams]]) -> WrappedLogDeliveryConfiguration + These APIs manage Log delivery configurations for this account. Log delivery configs enable you to + configure the delivery of the specified type of logs to your storage account. + + .. py:method:: create(log_delivery_configuration: CreateLogDeliveryConfigurationParams) -> WrappedLogDeliveryConfiguration Usage: @@ -97,34 +53,35 @@ ) Create a new log delivery configuration. - + Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket). - + For full details, including the required IAM role policies and bucket policies, see [Deliver and access billable usage logs] or [Configure audit logging]. 
- + **Note**: There is a limit on the number of log delivery configurations available per account (each limit applies separately to each log type including billable usage and audit logs). You can create a maximum of two enabled account-level delivery configurations (configurations without a workspace filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per workspace for each log type, which means that the same workspace ID can occur in the workspace filter for no more than two delivery configurations per log type. - + You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery configuration](:method:LogDelivery/PatchStatus)). - + [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - - :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` (optional) - + + :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` + The log delivery configuration. + :returns: :class:`WrappedLogDeliveryConfiguration` - .. py:method:: get(log_delivery_configuration_id: str) -> WrappedLogDeliveryConfiguration + .. py:method:: get(log_delivery_configuration_id: str) -> GetLogDeliveryConfigurationResponse Usage: @@ -172,16 +129,16 @@ ) Get log delivery configuration. - + Gets a Databricks log delivery configuration object for an account, both specified by ID. - + :param log_delivery_configuration_id: str - Databricks log delivery configuration ID - - :returns: :class:`WrappedLogDeliveryConfiguration` + The log delivery configuration ID of the customer. + + :returns: :class:`GetLogDeliveryConfigurationResponse` - .. py:method:: list( [, credentials_id: Optional[str], status: Optional[LogDeliveryConfigStatus], storage_configuration_id: Optional[str]]) -> Iterator[LogDeliveryConfiguration] + .. py:method:: list( [, credentials_id: Optional[str], page_token: Optional[str], status: Optional[LogDeliveryConfigStatus], storage_configuration_id: Optional[str]]) -> Iterator[LogDeliveryConfiguration] Usage: @@ -196,35 +153,38 @@ all = a.log_delivery.list(billing.ListLogDeliveryRequest()) Get all log delivery configurations. - + Gets all Databricks log delivery configurations associated with an account specified by ID. - + :param credentials_id: str (optional) - Filter by credential configuration ID. + The credentials ID to filter the search results with. + :param page_token: str (optional) + A page token received from a previous get all log delivery configurations call. This token can be used to + retrieve the subsequent page. Requests first page if absent. :param status: :class:`LogDeliveryConfigStatus` (optional) - Filter by status `ENABLED` or `DISABLED`. + The log delivery status to filter the search results with. :param storage_configuration_id: str (optional) - Filter by storage configuration ID. - + The storage configuration ID to filter the search results with. + :returns: Iterator over :class:`LogDeliveryConfiguration` .. py:method:: patch_status(log_delivery_configuration_id: str, status: LogDeliveryConfigStatus) Enable or disable log delivery configuration. - + Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed.
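(Editor's note: a sketch of enumerating configurations with the list method above, assuming a configured `AccountClient`; all filters are optional and the `page_token` loop is handled inside the returned iterator.)

    from databricks.sdk import AccountClient
    from databricks.sdk.service import billing

    a = AccountClient()
    for cfg in a.log_delivery.list(status=billing.LogDeliveryConfigStatus.ENABLED):
        print(cfg.config_id, cfg.log_type, cfg.status)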
Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create). - + :param log_delivery_configuration_id: str - Databricks log delivery configuration ID + The log delivery configuration ID of the customer. :param status: :class:`LogDeliveryConfigStatus` Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. - - + + \ No newline at end of file diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst index a316bf232..350ef1f08 100644 --- a/docs/account/billing/usage_dashboards.rst +++ b/docs/account/billing/usage_dashboards.rst @@ -11,29 +11,29 @@ .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse Create new usage dashboard. - + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. - + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. - + :returns: :class:`CreateBillingUsageDashboardResponse` .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse Get usage dashboard. - + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. - + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. - + :returns: :class:`GetBillingUsageDashboardResponse` \ No newline at end of file diff --git a/docs/account/catalog/metastore_assignments.rst b/docs/account/catalog/metastore_assignments.rst index 1bfeedca0..f5b00c6b3 100644 --- a/docs/account/catalog/metastore_assignments.rst +++ b/docs/account/catalog/metastore_assignments.rst @@ -9,43 +9,43 @@ .. py:method:: create(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[CreateMetastoreAssignment]]) Assigns a workspace to a metastore. - + Creates an assignment to a metastore for a workspace. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - - + + .. py:method:: delete(workspace_id: int, metastore_id: str) Delete a metastore assignment. - + Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID - - + + .. py:method:: get(workspace_id: int) -> AccountsMetastoreAssignment Gets the metastore assignment for a workspace. - + Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mapping will be returned.
If no metastore is assigned to the workspace, the assignment will not be found and a 404 is returned. - + :param workspace_id: int Workspace ID. - + :returns: :class:`AccountsMetastoreAssignment` @@ -65,27 +65,27 @@ ws = a.metastore_assignments.list(metastore_id=os.environ["TEST_METASTORE_ID"]) Get all workspaces assigned to a metastore. - + Gets a list of all Databricks workspace IDs that have been assigned to a given metastore. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: Iterator over int .. py:method:: update(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[UpdateMetastoreAssignment]]) Updates a metastore assignment to a workspace. - + Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional) - - + + \ No newline at end of file diff --git a/docs/account/catalog/metastores.rst b/docs/account/catalog/metastores.rst index 36df616ea..15f39060d 100644 --- a/docs/account/catalog/metastores.rst +++ b/docs/account/catalog/metastores.rst @@ -10,58 +10,58 @@ .. py:method:: create( [, metastore_info: Optional[CreateMetastore]]) -> AccountsMetastoreInfo Create metastore. - + Creates a Unity Catalog metastore. - + :param metastore_info: :class:`CreateMetastore` (optional) - + :returns: :class:`AccountsMetastoreInfo` .. py:method:: delete(metastore_id: str [, force: Optional[bool]]) Delete a metastore. - + Deletes a Unity Catalog metastore for an account, both specified by ID. - + :param metastore_id: str Unity Catalog metastore ID :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - + + .. py:method:: get(metastore_id: str) -> AccountsMetastoreInfo Get a metastore. - + Gets a Unity Catalog metastore from an account, both specified by ID. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: :class:`AccountsMetastoreInfo` .. py:method:: list() -> Iterator[MetastoreInfo] Get all metastores associated with an account. - + Gets all Unity Catalog metastores associated with an account specified by ID. - + :returns: Iterator over :class:`MetastoreInfo` .. py:method:: update(metastore_id: str [, metastore_info: Optional[UpdateMetastore]]) -> AccountsMetastoreInfo Update a metastore. - + Updates an existing Unity Catalog metastore. - + :param metastore_id: str Unity Catalog metastore ID :param metastore_info: :class:`UpdateMetastore` (optional) - + :returns: :class:`AccountsMetastoreInfo` \ No newline at end of file diff --git a/docs/account/catalog/storage_credentials.rst b/docs/account/catalog/storage_credentials.rst index 0b9948015..453b3a1eb 100644 --- a/docs/account/catalog/storage_credentials.rst +++ b/docs/account/catalog/storage_credentials.rst @@ -9,78 +9,78 @@ .. py:method:: create(metastore_id: str [, credential_info: Optional[CreateStorageCredential]]) -> AccountsStorageCredentialInfo Create a storage credential. - + Creates a new storage credential. The request object is specific to the cloud: - + * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * **GcpServiceAccountKey** for GCP credentials. - + The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.
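A minimal sketch of the AWS case, assuming the metastore ID and IAM role ARN come from the hypothetical environment variables `TEST_METASTORE_ID` and `TEST_STORAGE_ROLE_ARN`:

.. code-block::

    import os
    import time

    from databricks.sdk import AccountClient
    from databricks.sdk.service import catalog

    a = AccountClient()

    # Create an account-level storage credential backed by an AWS IAM role.
    created = a.storage_credentials.create(
        metastore_id=os.environ["TEST_METASTORE_ID"],
        credential_info=catalog.CreateStorageCredential(
            name=f"sdk-{time.time_ns()}",
            aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_STORAGE_ROLE_ARN"]),
        ),
    )

    # cleanup
    a.storage_credentials.delete(
        metastore_id=os.environ["TEST_METASTORE_ID"],
        storage_credential_name=created.credential_info.name,
    )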
- + :param metastore_id: str Unity Catalog metastore ID :param credential_info: :class:`CreateStorageCredential` (optional) - + :returns: :class:`AccountsStorageCredentialInfo` .. py:method:: delete(metastore_id: str, storage_credential_name: str [, force: Optional[bool]]) Delete a storage credential. - + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. :param force: bool (optional) Force deletion even if the Storage Credential is not empty. Default is false. - - + + .. py:method:: get(metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo Gets the named storage credential. - + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some level of privilege on the storage credential. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. - + :returns: :class:`AccountsStorageCredentialInfo` .. py:method:: list(metastore_id: str) -> Iterator[StorageCredentialInfo] Get all storage credentials assigned to a metastore. - + Gets a list of all storage credentials that have been assigned to a given metastore. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: Iterator over :class:`StorageCredentialInfo` .. py:method:: update(metastore_id: str, storage_credential_name: str [, credential_info: Optional[UpdateStorageCredential]]) -> AccountsStorageCredentialInfo Updates a storage credential. - + Updates a storage credential on the metastore. The caller must be the owner of the storage credential. If the caller is a metastore admin, only the __owner__ credential can be changed. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. :param credential_info: :class:`UpdateStorageCredential` (optional) - + :returns: :class:`AccountsStorageCredentialInfo` \ No newline at end of file diff --git a/docs/account/iam/access_control.rst b/docs/account/iam/access_control.rst index 475d28c07..bd3c05ec2 100644 --- a/docs/account/iam/access_control.rst +++ b/docs/account/iam/access_control.rst @@ -11,31 +11,31 @@ .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse Get assignable roles for a resource. - + Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse` .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse Get a rule set. - + Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account.
`name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -48,24 +48,24 @@ modify -> write pattern to perform rule set updates in order to avoid race conditions: that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoding a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse` .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse Update a rule set. - + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set. :param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse` \ No newline at end of file diff --git a/docs/account/iam/groups.rst b/docs/account/iam/groups.rst index d005f7930..5400ede93 100644 --- a/docs/account/iam/groups.rst +++ b/docs/account/iam/groups.rst @@ -6,7 +6,7 @@ Groups simplify identity management, making it easier to assign access to the Databricks account, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks account identities can be assigned as members of groups, and members inherit permissions that are assigned to their group. @@ -14,15 +14,15 @@ .. py:method:: create( [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) -> Group Create a new group. - + Creates a group in the Databricks account with a unique name, using the supplied group details. - + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -35,40 +35,40 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - + :returns: :class:`Group` .. py:method:: delete(id: str) Delete a group. - + Deletes a group from the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - - + + .. py:method:: get(id: str) -> Group Get group details. - + Gets the information for a specific group in the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - + :returns: :class:`Group` .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group] List group details. - + Gets all details of the groups associated with the Databricks account.
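A minimal sketch combining the SCIM query parameters documented below; the filter value and attribute list are illustrative:

.. code-block::

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    # List groups whose display name starts with "admin", sorted ascending,
    # returning only the id and displayName attributes.
    for g in a.groups.list(
        filter='displayName sw "admin"',
        attributes="id,displayName",
        sort_by="displayName",
        sort_order=iam.ListSortOrder.ASCENDING,
        count=10,
    ):
        print(g.id, g.display_name)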
- + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -80,7 +80,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -88,31 +88,31 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`Group` .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]]) Update group details. - + Partially updates the details of a group. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) Replace a group. - + Updates the details of a group by replacing the entire group entity. - + :param id: str Databricks group ID :param display_name: str (optional) @@ -120,7 +120,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -131,6 +131,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - - + + \ No newline at end of file diff --git a/docs/account/iam/service_principals.rst b/docs/account/iam/service_principals.rst index e0fd8577a..5075673ed 100644 --- a/docs/account/iam/service_principals.rst +++ b/docs/account/iam/service_principals.rst @@ -23,15 +23,12 @@ a = AccountClient() - sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}") - - # cleanup - a.service_principals.delete(id=sp_create.id) + spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}") Create a service principal. - + Creates a new service principal in the Databricks account. - + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -41,7 +38,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -51,20 +48,20 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - + :returns: :class:`ServicePrincipal` .. py:method:: delete(id: str) Delete a service principal. 
- + Delete a single service principal in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account. - - + + .. py:method:: get(id: str) -> ServicePrincipal @@ -88,12 +85,12 @@ a.service_principals.delete(id=sp_create.id) Get service principal details. - + Gets the details for a single service principal defined in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account. - + :returns: :class:`ServicePrincipal` @@ -120,9 +117,9 @@ a.service_principals.delete(id=sp_create.id) List service principals. - + Gets the set of service principals associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -134,7 +131,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -142,7 +139,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`ServicePrincipal` @@ -174,16 +171,16 @@ a.service_principals.delete(id=sp_create.id) Update service principal details. - + Partially updates the details of a single service principal in the Databricks account. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) @@ -209,11 +206,11 @@ a.service_principals.delete(id=sp_create.id) Replace service principal. - + Updates the details of a single service principal. - + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -225,7 +222,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -233,6 +230,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - - + + \ No newline at end of file diff --git a/docs/account/iam/users.rst b/docs/account/iam/users.rst index 7e527ec45..ebfc8baa3 100644 --- a/docs/account/iam/users.rst +++ b/docs/account/iam/users.rst @@ -5,7 +5,7 @@ .. py:class:: AccountUsersAPI User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks account.
SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in the Databricks account and give them the proper level of @@ -36,23 +36,23 @@ a.users.delete(id=user.id) Create a new user. - + Creates a new user in the Databricks account. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -66,7 +66,7 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` @@ -91,14 +91,14 @@ a.users.delete(id=user.id) Delete a user. - + Deletes a user. Deleting a user from a Databricks account also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks account. - - + + .. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User @@ -125,9 +125,9 @@ a.users.delete(id=user.id) Get user details. - + Gets information for a specific user in the Databricks account. - + :param id: str Unique ID for a user in the Databricks account. :param attributes: str (optional) @@ -141,7 +141,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -150,16 +150,16 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User` .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[User] List users. - + Gets details for all the users associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -171,7 +171,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions.
- + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -180,7 +180,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`User` @@ -218,24 +218,24 @@ a.users.delete(id=user.id) Update user details. - + Partially updates a user resource by applying the supplied operations on specific user attributes. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]]) Replace a user. - + Replaces a user's information with the data supplied in the request. - + :param id: str Databricks user ID. :param active: bool (optional) @@ -244,13 +244,13 @@ String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -262,6 +262,6 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - - + + \ No newline at end of file diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 33df20178..a9ec2383c 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -10,27 +10,27 @@ .. py:method:: delete(workspace_id: int, principal_id: int) Delete permissions assignment. - + Deletes the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - - + + .. py:method:: get(workspace_id: int) -> WorkspacePermissions List workspace permissions. - + Get an array of workspace permissions for the specified account and workspace. - + :param workspace_id: int The workspace ID. - + :returns: :class:`WorkspacePermissions` @@ -52,12 +52,12 @@ all = a.workspace_assignment.list(workspace_id=workspace_id) Get permission assignments. - + Get the permission assignments for the specified Databricks account and Databricks workspace. - + :param workspace_id: int The workspace ID for the account.
- + :returns: Iterator over :class:`PermissionAssignment` @@ -80,19 +80,19 @@ spn_id = spn.id - workspace_id = os.environ["DUMMY_WORKSPACE_ID"] + workspace_id = os.environ["TEST_WORKSPACE_ID"] - _ = a.workspace_assignment.update( + a.workspace_assignment.update( workspace_id=workspace_id, principal_id=spn_id, permissions=[iam.WorkspacePermission.USER], ) Create or update permissions assignment. - + Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID. :param principal_id: int @@ -103,6 +103,6 @@ will be ignored. Note that excluding this field, or providing unsupported values, will have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal. - + :returns: :class:`PermissionAssignment` \ No newline at end of file diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst index 5110e70ad..7043a343b 100644 --- a/docs/account/oauth2/custom_app_integration.rst +++ b/docs/account/oauth2/custom_app_integration.rst @@ -10,11 +10,11 @@ .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) -> CreateCustomAppIntegrationOutput Create Custom OAuth App Integration. - + Create Custom OAuth App Integration. - + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param confidential: bool (optional) This field indicates whether an OAuth client secret is required to authenticate this client. :param name: str (optional) @@ -29,54 +29,54 @@ :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented to by the end user to mint the access token. If the user does not authorize, the access token will not be minted. Must be a subset of scopes. - + :returns: :class:`CreateCustomAppIntegrationOutput` .. py:method:: delete(integration_id: str) Delete Custom OAuth App Integration. - + Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param integration_id: str - - + + .. py:method:: get(integration_id: str) -> GetCustomAppIntegrationOutput Get OAuth Custom App Integration. - + Gets the Custom OAuth App Integration for the given integration id. - + :param integration_id: str The OAuth app integration ID. - + :returns: :class:`GetCustomAppIntegrationOutput` .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput] Get custom oauth app integrations. - + Get the list of custom OAuth app integrations for the specified Databricks account - + :param include_creator_username: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetCustomAppIntegrationOutput` .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) Updates Custom OAuth App Integration. - + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
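A minimal sketch of such an update; the integration ID, redirect URL, and token TTLs below are illustrative placeholders:

.. code-block::

    from databricks.sdk import AccountClient
    from databricks.sdk.service import oauth2

    a = AccountClient()

    # Replace the redirect URLs and tighten token lifetimes on an existing integration.
    a.custom_app_integration.update(
        integration_id="<integration-id>",
        redirect_urls=["https://example.com/callback"],
        token_access_policy=oauth2.TokenAccessPolicy(
            access_token_ttl_in_minutes=60,
            refresh_token_ttl_in_minutes=1440,
        ),
    )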
- + :param integration_id: str :param redirect_urls: List[str] (optional) List of OAuth redirect urls to be updated in the custom OAuth app integration @@ -88,6 +88,6 @@ :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented to by the end user to mint the access token. If the user does not authorize, the access token will not be minted. Must be a subset of scopes. - - + + \ No newline at end of file diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst index a8957e5f2..9eed9396b 100644 --- a/docs/account/oauth2/federation_policy.rst +++ b/docs/account/oauth2/federation_policy.rst @@ -5,20 +5,20 @@ .. py:class:: AccountFederationPolicyAPI These APIs manage account federation policies. - + Account federation policies allow users and service principals in your Databricks account to securely access Databricks APIs using tokens from your trusted identity providers (IdPs). - + With token federation, your users and service principals can exchange tokens from your IdP for Databricks OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage Databricks secrets, and allows you to centralize management of token issuance policies in your IdP. Databricks token federation is typically used in combination with [SCIM], so users in your IdP are synchronized into your Databricks account. - + Token federation is configured in your Databricks account using an account federation policy. An account federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from * how to determine which Databricks user, or subject, a token is issued for - + To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to @@ -29,68 +29,68 @@ public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys. - + An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"] subject_claim: "sub" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks as user `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub": "username@mycompany.com" } ``` - + You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if your users do not already have the ability to generate tokens that are compatible with your federation policy. - + You do not need to configure an OAuth application in Databricks to use token federation. - + [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html .. py:method:: create(policy: FederationPolicy [, policy_id: Optional[str]]) -> FederationPolicy Create account federation policy. - + :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` ..
py:method:: delete(policy_id: str) Delete account federation policy. - + :param policy_id: str The identifier for the federation policy. - - + + .. py:method:: get(policy_id: str) -> FederationPolicy Get account federation policy. - + :param policy_id: str The identifier for the federation policy. - + :returns: :class:`FederationPolicy` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] List account federation policies. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` .. py:method:: update(policy_id: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy Update account federation policy. - + :param policy_id: str The identifier for the federation policy. :param policy: :class:`FederationPolicy` @@ -100,6 +100,6 @@ should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. - + :returns: :class:`FederationPolicy` \ No newline at end of file diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst index 873d8a650..18c07c326 100644 --- a/docs/account/oauth2/o_auth_published_apps.rst +++ b/docs/account/oauth2/o_auth_published_apps.rst @@ -11,13 +11,13 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PublishedAppOutput] Get all the published OAuth apps. - + Get all the available published OAuth apps in Databricks. - + :param page_size: int (optional) The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. - + :returns: Iterator over :class:`PublishedAppOutput` \ No newline at end of file diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst index fd61c58fa..f59f2c4aa 100644 --- a/docs/account/oauth2/published_app_integration.rst +++ b/docs/account/oauth2/published_app_integration.rst @@ -10,64 +10,64 @@ .. py:method:: create( [, app_id: Optional[str], token_access_policy: Optional[TokenAccessPolicy]]) -> CreatePublishedAppIntegrationOutput Create Published OAuth App Integration. - + Create Published OAuth App Integration. - + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. - + :param app_id: str (optional) App id of the OAuth published app integration. For example power-bi, tableau-desktop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy - + :returns: :class:`CreatePublishedAppIntegrationOutput` .. py:method:: delete(integration_id: str) Delete Published OAuth App Integration. - + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. - + :param integration_id: str - - + + .. py:method:: get(integration_id: str) -> GetPublishedAppIntegrationOutput Get OAuth Published App Integration. - + Gets the Published OAuth App Integration for the given integration id. - + :param integration_id: str - + :returns: :class:`GetPublishedAppIntegrationOutput` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput] Get published oauth app integrations.
- + Get the list of published OAuth app integrations for the specified Databricks account - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` .. py:method:: update(integration_id: str [, token_access_policy: Optional[TokenAccessPolicy]]) Updates Published OAuth App Integration. - + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. - + :param integration_id: str :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy to be updated in the published OAuth app integration - - + + \ No newline at end of file diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst index f3335d87a..2f6292687 100644 --- a/docs/account/oauth2/service_principal_federation_policy.rst +++ b/docs/account/oauth2/service_principal_federation_policy.rst @@ -5,22 +5,22 @@ .. py:class:: ServicePrincipalFederationPolicyAPI These APIs manage service principal federation policies. - + Service principal federation, also known as Workload Identity Federation, allows your automated workloads running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets. With Workload Identity Federation, your application (or workload) authenticates to Databricks as a Databricks service principal, using tokens provided by the workload runtime. - + Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever possible. Workload Identity Federation is supported by many popular services, including Github Actions, Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others. - + Workload identity federation is configured in your Databricks account using a service principal federation policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the Databricks service principal - + To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the workload identity provider. * The required token __subject__, as specified in the “sub” claim of @@ -32,73 +32,73 @@ of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on the issuer’s well known endpoint for discovering public keys. - + An example service principal federation policy, for a Github Actions workload, is: ``` issuer: "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject: "repo:my-github-org/my-repo:environment:prod" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ``` { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub": "repo:my-github-org/my-repo:environment:prod" } ``` - + You may also need to configure the workload runtime to generate tokens for your workloads. 
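A minimal sketch of registering the GitHub Actions policy shown above for a service principal, assuming a placeholder service principal ID:

.. code-block::

    from databricks.sdk import AccountClient
    from databricks.sdk.service import oauth2

    a = AccountClient()

    # Allow the prod environment of my-repo to authenticate as this service principal.
    policy = a.service_principal_federation_policy.create(
        service_principal_id=123,
        policy=oauth2.FederationPolicy(
            oidc_policy=oauth2.OidcFederationPolicy(
                issuer="https://token.actions.githubusercontent.com",
                audiences=["https://github.com/my-github-org"],
                subject="repo:my-github-org/my-repo:environment:prod",
            )
        ),
    )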
- + You do not need to configure an OAuth application in Databricks to use token federation. .. py:method:: create(service_principal_id: int, policy: FederationPolicy [, policy_id: Optional[str]]) -> FederationPolicy Create service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` .. py:method:: delete(service_principal_id: int, policy_id: str) Delete service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. - - + + .. py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy Get service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. - + :returns: :class:`FederationPolicy` .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] List service principal federation policies. - + :param service_principal_id: int The service principal id for the federation policy. :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` .. py:method:: update(service_principal_id: int, policy_id: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy Update service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str @@ -110,6 +110,6 @@ should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. - + :returns: :class:`FederationPolicy` \ No newline at end of file diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst index 01965a19a..d72b4e8f7 100644 --- a/docs/account/oauth2/service_principal_secrets.rst +++ b/docs/account/oauth2/service_principal_secrets.rst @@ -5,54 +5,53 @@ .. py:class:: ServicePrincipalSecretsAPI These APIs enable administrators to manage service principal secrets. - + You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using OAuth tokens for service principals], - + In addition, the generated secrets can be used to configure the Databricks Terraform Provider to authenticate with the service principal. For more information, see [Databricks Terraform Provider]. - + [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal - .. py:method:: create(service_principal_id: int [, lifetime: Optional[str]]) -> CreateServicePrincipalSecretResponse Create service principal secret. 
- + Create a secret for the given service principal. - + :param service_principal_id: int The service principal ID. :param lifetime: str (optional) The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a default lifetime of 730 days (63072000s). - + :returns: :class:`CreateServicePrincipalSecretResponse` .. py:method:: delete(service_principal_id: int, secret_id: str) Delete service principal secret. - + Delete a secret from the given service principal. - + :param service_principal_id: int The service principal ID. :param secret_id: str The secret ID. - - + + .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo] List service principal secrets. - + List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the secret values. - + :param service_principal_id: int The service principal ID. :param page_token: str (optional) @@ -62,6 +61,6 @@ previous request. To list all of the secrets for a service principal, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete. - + :returns: Iterator over :class:`SecretInfo` \ No newline at end of file diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst index d023d4f1f..75abd62d4 100644 --- a/docs/account/provisioning/credentials.rst +++ b/docs/account/provisioning/credentials.rst @@ -24,49 +24,49 @@ a = AccountClient() - role = a.credentials.create( + creds = a.credentials.create( credentials_name=f"sdk-{time.time_ns()}", aws_credentials=provisioning.CreateCredentialAwsCredentials( - sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]) + sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"]) ), ) # cleanup - a.credentials.delete(credentials_id=role.credentials_id) + a.credentials.delete(credentials_id=creds.credentials_id) Create credential configuration. - + Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account ID) in the returned credential object, and configure the required access policy. - + Save the response's `credentials_id` field, which is the ID for your new credential configuration object. - + For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param credentials_name: str The human-readable name of the credential configuration object. :param aws_credentials: :class:`CreateCredentialAwsCredentials` - + :returns: :class:`Credential` .. py:method:: delete(credentials_id: str) Delete credential configuration. - + Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any workspace. - + :param credentials_id: str Databricks Account API credential configuration ID - - + + .. 
py:method:: get(credentials_id: str) -> Credential @@ -97,12 +97,12 @@ a.credentials.delete(credentials_id=role.credentials_id) Get credential configuration. - + Gets a Databricks credential configuration object for an account, both specified by ID. - + :param credentials_id: str Databricks Account API credential configuration ID - + :returns: :class:`Credential` @@ -120,8 +120,8 @@ configs = a.credentials.list() Get all credential configurations. - + Gets all Databricks credential configurations associated with an account specified by ID. - + :returns: Iterator over :class:`Credential` \ No newline at end of file diff --git a/docs/account/provisioning/encryption_keys.rst b/docs/account/provisioning/encryption_keys.rst index 1c00a2914..baf076a8a 100644 --- a/docs/account/provisioning/encryption_keys.rst +++ b/docs/account/provisioning/encryption_keys.rst @@ -7,11 +7,11 @@ These APIs manage encryption key configurations for this workspace (optional). A key configuration encapsulates the AWS KMS key information and some information about how the key configuration can be used. There are two possible uses for key configurations: - + * Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can be used to encrypt a workspace's DBFS and EBS data in the data plane. - + In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview feature is available if your account is on the E2 version of the platform. Updating a running workspace with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you @@ -44,7 +44,7 @@ a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) Create encryption key configuration. - + Creates a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks @@ -52,32 +52,32 @@ specified as a workspace's customer-managed key for workspace storage, the key encrypts the workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume data. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions that currently support creation of Databricks workspaces. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :param use_cases: List[:class:`KeyUseCase`] The cases that the key can be used for. :param aws_key_info: :class:`CreateAwsKeyInfo` (optional) :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional) - + :returns: :class:`CustomerManagedKey` .. py:method:: delete(customer_managed_key_id: str) Delete encryption key configuration. - + Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. - + :param customer_managed_key_id: str Databricks encryption key configuration ID. - - + + ..
py:method:: get(customer_managed_key_id: str) -> CustomerManagedKey @@ -108,7 +108,7 @@ a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) Get encryption key configuration. - + Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks @@ -116,15 +116,15 @@ specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume data. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions. - + This operation is available only if your account is on the E2 version of the platform. - + :param customer_managed_key_id: str Databricks encryption key configuration ID. - + :returns: :class:`CustomerManagedKey` @@ -142,17 +142,17 @@ all = a.encryption_keys.list() Get all encryption key configurations. - + Gets all customer-managed key configuration objects for an account. If the key is specified as a workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the workspace's root S3 bucket and optionally can encrypt cluster EBS volume data in the data plane. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions. - + This operation is available only if your account is on the E2 version of the platform. - + :returns: Iterator over :class:`CustomerManagedKey` \ No newline at end of file diff --git a/docs/account/provisioning/networks.rst b/docs/account/provisioning/networks.rst index 46bccd872..8ac512a72 100644 --- a/docs/account/provisioning/networks.rst +++ b/docs/account/provisioning/networks.rst @@ -28,10 +28,10 @@ ) Create network configuration. - + Creates a Databricks network configuration that represents a VPC and its resources. The VPC will be used for new Databricks clusters. This requires a pre-existing VPC and subnets. - + :param network_name: str The human-readable name of the network configuration. :param gcp_network_info: :class:`GcpNetworkInfo` (optional) @@ -46,28 +46,28 @@ :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional) If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink/ :param vpc_id: str (optional) The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations. - + :returns: :class:`Network` .. py:method:: delete(network_id: str) Delete a network configuration. - + Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. - + This operation is available only if your account is on the E2 version of the platform. - + :param network_id: str Databricks Account API network configuration ID. - - + + .. py:method:: get(network_id: str) -> Network @@ -93,12 +93,12 @@ by_id = a.networks.get(network_id=netw.network_id) Get a network configuration.
- + Gets a Databricks network configuration, which represents a cloud VPC and its resources. - + :param network_id: str Databricks Account API network configuration ID. - + :returns: :class:`Network` @@ -116,10 +116,10 @@ configs = a.networks.list() Get all network configurations. - + Gets a list of all Databricks network configurations for an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform. - + :returns: Iterator over :class:`Network` \ No newline at end of file diff --git a/docs/account/provisioning/private_access.rst b/docs/account/provisioning/private_access.rst index e30ed2585..5a1266de8 100644 --- a/docs/account/provisioning/private_access.rst +++ b/docs/account/provisioning/private_access.rst @@ -29,20 +29,20 @@ a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Create private access settings. - + Creates a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. - + You can share one private access settings object with multiple workspaces in a single account. However, private access settings are specific to AWS regions, so only workspaces in the same AWS region can use a given private access settings object. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_name: str The human-readable name of the private access settings object. :param region: str @@ -51,14 +51,14 @@ An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. - + Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints in your account that can connect to your workspace over AWS PrivateLink. - + If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this control only works for PrivateLink connections. To control how your workspace is accessed via public internet, see [IP access lists]. - + [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) The private access level controls which VPC endpoints can connect to the UI or API of any workspace @@ -70,26 +70,26 @@ Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. - + :returns: :class:`PrivateAccessSettings` .. py:method:: delete(private_access_settings_id: str) Delete a private access settings object. - + Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink].
- + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. - - + + .. py:method:: get(private_access_settings_id: str) -> PrivateAccessSettings @@ -117,18 +117,18 @@ a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Get a private access settings object. - + Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. - + :returns: :class:`PrivateAccessSettings` @@ -146,9 +146,9 @@ all = a.private_access.list() Get all private access settings objects. - + Gets a list of all private access settings objects for an account, specified by ID. - + :returns: Iterator over :class:`PrivateAccessSettings` @@ -181,25 +181,25 @@ a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Replace private access settings. - + Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. - + This operation completely overwrites your existing private access settings object attached to your workspaces. All workspaces attached to the private access settings are affected by any change. If `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of these changes might take several minutes to propagate to the workspace API. - + You can share one private access settings object with multiple workspaces in a single account. However, private access settings are specific to AWS regions, so only workspaces in the same AWS region can use a given private access settings object. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. :param private_access_settings_name: str @@ -210,14 +210,14 @@ An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. - + Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints in your account that can connect to your workspace over AWS PrivateLink. - + If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this control only works for PrivateLink connections. To control how your workspace is accessed via public internet, see [IP access lists].
- + [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) The private access level controls which VPC endpoints can connect to the UI or API of any workspace @@ -229,6 +229,6 @@ Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. - - + + \ No newline at end of file diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index b8e144f8c..baf760a82 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -16,7 +16,6 @@ .. code-block:: - import os import time from databricks.sdk import AccountClient @@ -24,45 +23,45 @@ a = AccountClient() - storage = a.storage.create( + bucket = a.storage.create( storage_configuration_name=f"sdk-{time.time_ns()}", - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]), + root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"), ) # cleanup - a.storage.delete(storage_configuration_id=storage.storage_configuration_id) + a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) Create new storage configuration. - + Creates new storage configuration for an account, specified by ID. Uploads a storage configuration object that represents the root AWS S3 bucket in your account. Databricks stores related workspace assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the required bucket policy. - + For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param storage_configuration_name: str The human-readable name of the storage configuration. :param root_bucket_info: :class:`RootBucketInfo` Root S3 bucket information. - + :returns: :class:`StorageConfiguration` .. py:method:: delete(storage_configuration_id: str) Delete storage configuration. - + Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - - + + .. py:method:: get(storage_configuration_id: str) -> StorageConfiguration @@ -87,12 +86,12 @@ by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id) Get storage configuration. - + Gets a Databricks storage configuration for an account, both specified by ID. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - + :returns: :class:`StorageConfiguration` @@ -110,8 +109,8 @@ configs = a.storage.list() Get all storage configurations. - + Gets a list of all Databricks storage configurations for your account, specified by ID. 
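As a sketch, the returned iterator can be consumed lazily (the field names printed here are illustrative of :class:`StorageConfiguration`):

.. code-block::

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Iterate over every storage configuration registered in the account.
    for config in a.storage.list():
        print(config.storage_configuration_id, config.storage_configuration_name)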
- + :returns: Iterator over :class:`StorageConfiguration` \ No newline at end of file diff --git a/docs/account/provisioning/vpc_endpoints.rst b/docs/account/provisioning/vpc_endpoints.rst index fecfbec5d..5d050ca63 100644 --- a/docs/account/provisioning/vpc_endpoints.rst +++ b/docs/account/provisioning/vpc_endpoints.rst @@ -30,20 +30,20 @@ a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) Create VPC endpoint configuration. - + Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically accepts the VPC endpoint. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html - + :param vpc_endpoint_name: str The human-readable name of the storage configuration. :param aws_vpc_endpoint_id: str (optional) @@ -52,27 +52,27 @@ The Google Cloud specific information for this Private Service Connect endpoint. :param region: str (optional) The AWS region in which this VPC endpoint object exists. - + :returns: :class:`VpcEndpoint` .. py:method:: delete(vpc_endpoint_id: str) Delete VPC endpoint configuration. - + Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - - + + .. py:method:: get(vpc_endpoint_id: str) -> VpcEndpoint @@ -101,16 +101,16 @@ a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) Get a VPC endpoint configuration. - + Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - + :returns: :class:`VpcEndpoint` @@ -128,12 +128,12 @@ all = a.vpc_endpoints.list() Get all VPC endpoint configurations. - + Gets a list of all VPC endpoints for an account, specified by ID. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :returns: Iterator over :class:`VpcEndpoint` \ No newline at end of file diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst index 26ec685e5..6962dcdcd 100644 --- a/docs/account/provisioning/workspaces.rst +++ b/docs/account/provisioning/workspaces.rst @@ -7,7 +7,7 @@ These APIs manage workspaces for this account. 
A Databricks workspace is an environment for accessing all of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into folders, and provides access to data and computational resources such as clusters and jobs. - + These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. @@ -51,16 +51,16 @@ a.workspaces.delete(workspace_id=waiter.workspace_id) Create a new workspace. - + Creates a new workspace. - + **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request has been accepted and is in progress, but does not mean that the workspace deployed successfully and is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + :param workspace_name: str The workspace's human-readable name. :param aws_region: str (optional) @@ -82,22 +82,22 @@ deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the set of characters that are allowed in a subdomain. - + To set this value, you must have a deployment name prefix. Contact your Databricks account team to add an account deployment name prefix to your account. - + Workspace deployment names follow the account prefix and a hyphen. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be `acme-workspace-1.cloud.databricks.com`. - + You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment name to only include the deployment prefix. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and the workspace URL is `acme.cloud.databricks.com`. - + This value must be unique across all non-deleted deployments across all AWS regions. - + If a new workspace omits this property, the server generates a unique deployment name for you with the pattern `dbc-xxxxxxxx-xxxx`. :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional) @@ -105,19 +105,19 @@ is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks detects an IP range overlap. - + Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - + The sizes of these IP ranges affect the maximum number of nodes for the workspace. - + **Important**: Confirm the IP ranges used by your Databricks workspace before creating the workspace. You cannot change them after your workspace is deployed. If the IP address ranges for your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To determine the address range sizes that you need, Databricks provides a calculator as a Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace]. 
- + [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html :param gke_config: :class:`GkeConfig` (optional) The configurations for the GKE cluster of a Databricks workspace. @@ -132,15 +132,15 @@ :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - + [AWS Pricing]: https://databricks.com/product/aws-pricing :param private_access_settings_id: str (optional) ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), back-end (data plane to control plane connection), or both connection types. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html :param storage_configuration_id: str (optional) @@ -149,7 +149,7 @@ The ID of the workspace's storage encryption key configuration object. This is used to encrypt the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The provided key configuration object property `use_cases` must contain `STORAGE`. - + :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. @@ -161,18 +161,18 @@ .. py:method:: delete(workspace_id: int) Delete a workspace. - + Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. However, it might take a few minutes for all workspace resources to be deleted, depending on the size and number of workspace resources. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :param workspace_id: int Workspace ID. - - + + .. py:method:: get(workspace_id: int) -> Workspace @@ -191,23 +191,23 @@ by_id = a.workspaces.get(workspace_id=created.workspace_id) Get a workspace. - + Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + For information about how to create a new workspace with this API **including error handling**, see [Create a new workspace using the Account API]. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. - + :returns: :class:`Workspace` @@ -225,12 +225,12 @@ all = a.workspaces.list() Get all workspaces. - + Gets a list of all workspaces associated with an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account.
- + :returns: Iterator over :class:`Workspace` @@ -267,10 +267,10 @@ a.credentials.delete(credentials_id=update_role.credentials_id) Update workspace configuration. - + Updates a workspace configuration for either a running workspace or a failed workspace. The elements that can be updated varies between these two use cases. - + ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace deployment for some fields, but not all fields. For a failed workspace, this request supports updates to the following fields only: - Credential configuration ID - Storage configuration ID - Network @@ -292,14 +292,14 @@ update the network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from the workspace once attached, you can only switch to another one. - + After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` requests with the workspace ID and check the workspace status. The workspace is successful if the status changes to `RUNNING`. - + For information about how to create a new workspace with this API **including error handling**, see [Create a new workspace using the Account API]. - + ### Update a running workspace You can update a Databricks workspace configuration for running workspaces for some fields, but not all fields. For a running workspace, this request supports updating the following fields only: - Credential configuration ID - Network configuration ID. Used @@ -325,12 +325,12 @@ network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from the workspace once attached, you can only switch to another one. - + **Important**: To update a running workspace, your workspace must have no running compute resources that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not terminate all cluster instances in the workspace before calling this API, the request will fail. - + ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` requests with the workspace ID and check the workspace status and the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes @@ -346,22 +346,22 @@ silently to its original configuration. After the workspace has been updated, you cannot use or create clusters for another 20 minutes. If you create or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could cause other unexpected behavior. - + If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20 minute wait. - + **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment types and subscription types. If you have questions about availability, contact your Databricks representative. 
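A hedged sketch of the patch-then-poll pattern described above (the ID values are placeholders, and `result()` is assumed to block on the returned waiter until the workspace reaches `RUNNING`):

.. code-block::

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Swap the credential configuration on a workspace, then wait for the
    # workspace status to return to RUNNING before using it again.
    workspace = a.workspaces.update(
        workspace_id=1234567890,
        credentials_id="<credential-configuration-id>",
    ).result()
    print(workspace.workspace_status)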
- + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. :param aws_region: str (optional) @@ -391,7 +391,7 @@ :param storage_customer_managed_key_id: str (optional) The ID of the key configuration object for workspace storage. This parameter is available for updating both failed and running workspaces. - + :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst index a2b8cb91a..885aae89f 100644 --- a/docs/account/settings/csp_enablement_account.rst +++ b/docs/account/settings/csp_enablement_account.rst @@ -7,32 +7,32 @@ The compliance security profile settings at the account level control whether to enable it for new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable the compliance security profile individually for each workspace. - + This setting can be disabled so that new workspaces do not have compliance security profile enabled by default. .. py:method:: get( [, etag: Optional[str]]) -> CspEnablementAccountSetting Get the compliance security profile setting for new workspaces. - + Gets the compliance security profile setting for new workspaces. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`CspEnablementAccountSetting` .. py:method:: update(allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str) -> CspEnablementAccountSetting Update the compliance security profile setting for new workspaces. - + Updates the value of the compliance security profile setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`CspEnablementAccountSetting` @@ -42,10 +42,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`CspEnablementAccountSetting` \ No newline at end of file diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst index 212e3f98e..b10d7e2dc 100644 --- a/docs/account/settings/disable_legacy_features.rst +++ b/docs/account/settings/disable_legacy_features.rst @@ -5,7 +5,7 @@ ..
py:class:: DisableLegacyFeaturesAPI Disable legacy features for new Databricks workspaces. - + For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions prior to 13.3LTS. @@ -13,41 +13,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse Delete the disable legacy features setting. - + Deletes the disable legacy features setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyFeaturesResponse` .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures Get the disable legacy features setting. - + Gets the value of the disable legacy features setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyFeatures` .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures Update the disable legacy features setting. - + Updates the value of the disable legacy features setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyFeatures` @@ -57,10 +57,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyFeatures` \ No newline at end of file diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst index b570b2e37..9485b7332 100644 --- a/docs/account/settings/enable_ip_access_lists.rst +++ b/docs/account/settings/enable_ip_access_lists.rst @@ -10,41 +10,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse Delete the account IP access toggle setting. - + Reverts the value of the account IP access toggle setting to default (ON) - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. 
It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAccountIpAccessEnableResponse` .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable Get the account IP access toggle setting. - + Gets the value of the account IP access toggle setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AccountIpAccessEnable` .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable Update the account IP access toggle setting. - + Updates the value of the account IP access toggle setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AccountIpAccessEnable` @@ -54,10 +54,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AccountIpAccessEnable` \ No newline at end of file diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst index e14d1a71f..e9359d907 100644 --- a/docs/account/settings/esm_enablement_account.rst +++ b/docs/account/settings/esm_enablement_account.rst @@ -11,25 +11,25 @@ .. py:method:: get( [, etag: Optional[str]]) -> EsmEnablementAccountSetting Get the enhanced security monitoring setting for new workspaces. - + Gets the enhanced security monitoring setting for new workspaces. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`EsmEnablementAccountSetting` .. py:method:: update(allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str) -> EsmEnablementAccountSetting Update the enhanced security monitoring setting for new workspaces. - + Updates the value of the enhanced security monitoring setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EsmEnablementAccountSetting` @@ -39,10 +39,10 @@ `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EsmEnablementAccountSetting` \ No newline at end of file diff --git a/docs/account/settings/ip_access_lists.rst b/docs/account/settings/ip_access_lists.rst index 031354b15..7718d0c54 100644 --- a/docs/account/settings/ip_access_lists.rst +++ b/docs/account/settings/ip_access_lists.rst @@ -6,92 +6,92 @@ The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console. - + Account IP Access Lists affect web application access and REST API access to the account console and account APIs. If the feature is disabled for the account, all access is allowed for this account. There is support for allow lists (inclusion) and block lists (exclusion). - + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the account, the connection is allowed only if the IP address matches an allow list. If there are no allow lists for the account, all IP addresses are allowed. - + For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the account-level IP access lists, it can take a few minutes for changes to take effect. .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse Create access list. - + Creates an IP access list for the account. - + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` .. py:method:: delete(ip_access_list_id: str) Delete access list. - + Deletes an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + .. py:method:: get(ip_access_list_id: str) -> GetIpAccessListResponse Get IP access list. - + Gets an IP access list, specified by its list ID. 
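As an illustrative sketch (the `a.ip_access_lists` accessor and the response shape follow the SDK's usual conventions, but treat them as assumptions):

.. code-block::

    from databricks.sdk import AccountClient
    from databricks.sdk.service import settings

    a = AccountClient()

    # Create a small allow list, then fetch it back by its list ID.
    created = a.ip_access_lists.create(
        label="office",
        list_type=settings.ListType.ALLOW,
        ip_addresses=["203.0.113.0/24"],
    )
    fetched = a.ip_access_lists.get(ip_access_list_id=created.ip_access_list.list_id)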
- + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`GetIpAccessListResponse` .. py:method:: list() -> Iterator[IpAccessListInfo] Get access lists. - + Gets all IP access lists for the specified account. - + :returns: Iterator over :class:`IpAccessListInfo` .. py:method:: replace(ip_access_list_id: str, label: str, list_type: ListType, enabled: bool [, ip_addresses: Optional[List[str]]]) Replace access list. - + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -99,41 +99,41 @@ `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) - - + + .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]]) Update access list. - + Updates an existing IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -143,9 +143,9 @@ Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. - - + + \ No newline at end of file diff --git a/docs/account/settings/llm_proxy_partner_powered_account.rst b/docs/account/settings/llm_proxy_partner_powered_account.rst index fe5a55183..f0affb743 100644 --- a/docs/account/settings/llm_proxy_partner_powered_account.rst +++ b/docs/account/settings/llm_proxy_partner_powered_account.rst @@ -9,25 +9,25 @@ .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredAccount Get the enable partner powered AI features account setting. - + Gets the enable partner powered AI features account setting. 
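A one-line sketch (the `a.settings.llm_proxy_partner_powered_account` accessor name is assumed from this page's module path):

.. code-block::

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Read the current account-level value of the setting.
    setting = a.settings.llm_proxy_partner_powered_account.get()
    print(setting)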
- + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredAccount` .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str) -> LlmProxyPartnerPoweredAccount Update the enable partner powered AI features account setting. - + Updates the enable partner powered AI features account setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredAccount` @@ -37,10 +37,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredAccount` \ No newline at end of file diff --git a/docs/account/settings/llm_proxy_partner_powered_enforce.rst b/docs/account/settings/llm_proxy_partner_powered_enforce.rst index 084b744e0..ae07c81db 100644 --- a/docs/account/settings/llm_proxy_partner_powered_enforce.rst +++ b/docs/account/settings/llm_proxy_partner_powered_enforce.rst @@ -10,25 +10,25 @@ .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredEnforce Get the enforcement status of partner powered AI features account setting. - + Gets the enforcement status of partner powered AI features account setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str) -> LlmProxyPartnerPoweredEnforce Update the enforcement status of partner powered AI features account setting. - + Updates the enable enforcement status of partner powered AI features account setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredEnforce` @@ -38,10 +38,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` \ No newline at end of file diff --git a/docs/account/settings/network_connectivity.rst b/docs/account/settings/network_connectivity.rst index 2ed5c167a..88f52ea0f 100644 --- a/docs/account/settings/network_connectivity.rst +++ b/docs/account/settings/network_connectivity.rst @@ -9,145 +9,145 @@ your Azure Storage accounts to allow access from Databricks. You can also use the API to provision private endpoints for Databricks to privately connect serverless compute resources to your Azure resources using Azure Private Link. See [configure serverless secure connectivity]. - - [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security .. py:method:: create_network_connectivity_configuration(network_connectivity_config: CreateNetworkConnectivityConfiguration) -> NetworkConnectivityConfiguration Create a network connectivity configuration. - + Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when accessing your Azure Storage accounts. You can also use a network connectivity configuration to create Databricks managed private endpoints so that Databricks serverless compute resources privately access your resources. - + **IMPORTANT**: After you create the network connectivity configuration, you must assign one or more workspaces to the new network connectivity configuration. You can share one network connectivity configuration with multiple workspaces from the same Azure region within the same Databricks account. See [configure serverless secure connectivity]. - + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security - + :param network_connectivity_config: :class:`CreateNetworkConnectivityConfiguration` Properties of the new network connectivity configuration. - + :returns: :class:`NetworkConnectivityConfiguration` - .. py:method:: create_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule) -> NccAzurePrivateEndpointRule + .. py:method:: create_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule) -> NccPrivateEndpointRule Create a private endpoint rule. - + Create a private endpoint rule for the specified network connectivity config object. Once the object is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure resource. - + **IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to complete the connection. To get the information of the private endpoint created, make a `GET` request on the new private endpoint rule. See [serverless private link]. - + [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule: :class:`CreatePrivateEndpointRule` Properties of the new private endpoint rule. 
Note that you must approve the endpoint in Azure portal after initialization. - - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` .. py:method:: delete_network_connectivity_configuration(network_connectivity_config_id: str) Delete a network connectivity configuration. - + Deletes a network connectivity configuration. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. - - + + - .. py:method:: delete_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule + .. py:method:: delete_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccPrivateEndpointRule Delete a private endpoint rule. - + Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is set to `true` and the private endpoint is not available to your serverless compute resources. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule_id: str Your private endpoint rule ID. - - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` .. py:method:: get_network_connectivity_configuration(network_connectivity_config_id: str) -> NetworkConnectivityConfiguration Get a network connectivity configuration. - + Gets a network connectivity configuration. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. - + :returns: :class:`NetworkConnectivityConfiguration` - .. py:method:: get_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule + .. py:method:: get_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccPrivateEndpointRule Gets a private endpoint rule. - + Gets the private endpoint rule. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule_id: str Your private endpoint rule ID. - - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` .. py:method:: list_network_connectivity_configurations( [, page_token: Optional[str]]) -> Iterator[NetworkConnectivityConfiguration] List network connectivity configurations. - + Gets an array of network connectivity configurations. - + :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`NetworkConnectivityConfiguration` - .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_token: Optional[str]]) -> Iterator[NccAzurePrivateEndpointRule] + .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_token: Optional[str]]) -> Iterator[NccPrivateEndpointRule] List private endpoint rules. - + Gets an array of private endpoint rules. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param page_token: str (optional) Pagination token to go to next page based on previous query. - - :returns: Iterator over :class:`NccAzurePrivateEndpointRule` + + :returns: Iterator over :class:`NccPrivateEndpointRule` - ..
py:method:: update_ncc_azure_private_endpoint_rule_public(network_connectivity_config_id: str, private_endpoint_rule_id: str, private_endpoint_rule: UpdatePrivateEndpointRule, update_mask: str) -> NccAzurePrivateEndpointRule + .. py:method:: update_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str, private_endpoint_rule: UpdatePrivateEndpointRule, update_mask: str) -> NccPrivateEndpointRule Update a private endpoint rule. - + Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed resources is allowed to be updated. - + :param network_connectivity_config_id: str - Your Network Connectivity Configuration ID. + The ID of a network connectivity configuration, which is the parent resource of this private + endpoint rule object. :param private_endpoint_rule_id: str Your private endpoint rule ID. :param private_endpoint_rule: :class:`UpdatePrivateEndpointRule` @@ -159,6 +159,6 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - - :returns: :class:`NccAzurePrivateEndpointRule` + + :returns: :class:`NccPrivateEndpointRule` \ No newline at end of file diff --git a/docs/account/settings/network_policies.rst b/docs/account/settings/network_policies.rst index 7eb489bb8..c9969393d 100644 --- a/docs/account/settings/network_policies.rst +++ b/docs/account/settings/network_policies.rst @@ -14,60 +14,60 @@ .. py:method:: create_network_policy_rpc(network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy Create a network policy. - + Creates a new network policy to manage which network destinations can be accessed from the Databricks environment. - + :param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` .. py:method:: delete_network_policy_rpc(network_policy_id: str) Delete a network policy. - + Deletes a network policy. Cannot be called on 'default-policy'. - + :param network_policy_id: str The unique identifier of the network policy to delete. - - + + .. py:method:: get_network_policy_rpc(network_policy_id: str) -> AccountNetworkPolicy Get a network policy. - + Gets a network policy. - + :param network_policy_id: str The unique identifier of the network policy to retrieve. - + :returns: :class:`AccountNetworkPolicy` .. py:method:: list_network_policies_rpc( [, page_token: Optional[str]]) -> Iterator[AccountNetworkPolicy] List network policies. - + Gets an array of network policies. - + :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`AccountNetworkPolicy` .. py:method:: update_network_policy_rpc(network_policy_id: str, network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy Update a network policy. - + Updates a network policy. This allows you to modify the configuration of a network policy. - + :param network_policy_id: str The unique identifier for the network policy. 
:param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` \ No newline at end of file diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst index 58b35e7f7..54e958a28 100644 --- a/docs/account/settings/personal_compute.rst +++ b/docs/account/settings/personal_compute.rst @@ -7,7 +7,7 @@ The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can change the setting to instead let individual workspaces configure access control (DELEGATE). - + There is only one instance of this setting per account. Since this setting has a default value, this setting is present on all accounts even though it's never set on a given account. Deletion reverts the value of the setting back to the default value. @@ -15,41 +15,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeletePersonalComputeSettingResponse Delete Personal Compute setting. - + Reverts back the Personal Compute setting value to default (ON) - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeletePersonalComputeSettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> PersonalComputeSetting Get Personal Compute setting. - + Gets the value of the Personal Compute setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`PersonalComputeSetting` .. py:method:: update(allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting Update Personal Compute setting. - + Updates the value of the Personal Compute setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`PersonalComputeSetting` @@ -59,10 +59,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
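An illustrative sketch of such an explicit mask (the `personal_compute.value` path and the message/enum names are assumptions based on the SDK's settings module):

.. code-block::

    from databricks.sdk import AccountClient
    from databricks.sdk.service import settings

    a = AccountClient()

    # Delegate the Personal Compute decision to individual workspaces,
    # updating only the single field named in the mask.
    updated = a.settings.personal_compute.update(
        allow_missing=True,
        setting=settings.PersonalComputeSetting(
            personal_compute=settings.PersonalComputeMessage(
                value=settings.PersonalComputeMessageEnum.DELEGATE
            )
        ),
        field_mask="personal_compute.value",
    )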
- + :returns: :class:`PersonalComputeSetting` \ No newline at end of file diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst index e96e06a7c..2efa1fd09 100644 --- a/docs/account/settings/settings.rst +++ b/docs/account/settings/settings.rst @@ -12,7 +12,7 @@ The compliance security profile settings at the account level control whether to enable it for new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable the compliance security profile individually for each workspace. - + This setting can be disabled so that new workspaces do not have compliance security profile enabled by default. @@ -20,7 +20,7 @@ :type: DisableLegacyFeaturesAPI Disable legacy features for new Databricks workspaces. - + For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions prior to 13.3LTS. @@ -55,7 +55,7 @@ The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can change the setting to instead let individual workspaces configure access control (DELEGATE). - + There is only one instance of this setting per account. Since this setting has a default value, this setting is present on all accounts even though it's never set on a given account. Deletion reverts the value of the setting back to the default value. \ No newline at end of file diff --git a/docs/account/settings/workspace_network_configuration.rst b/docs/account/settings/workspace_network_configuration.rst index 98ff84202..63a61c9a4 100644 --- a/docs/account/settings/workspace_network_configuration.rst +++ b/docs/account/settings/workspace_network_configuration.rst @@ -1,39 +1,38 @@ -``a.workspace_network_configuration``: Workspace Network Configuration -====================================================================== +``a.workspace_network_configuration``: Workspace Network Option +=============================================================== .. currentmodule:: databricks.sdk.service.settings .. py:class:: WorkspaceNetworkConfigurationAPI - These APIs allow configuration of network settings for Databricks workspaces. Each workspace is always - associated with exactly one network policy that controls which network destinations can be accessed from - the Databricks environment. By default, workspaces are associated with the 'default-policy' network - policy. You cannot create or delete a workspace's network configuration, only update it to associate the - workspace with a different policy. + These APIs allow configuration of network settings for Databricks workspaces by selecting which network + policy to associate with the workspace. Each workspace is always associated with exactly one network + policy that controls which network destinations can be accessed from the Databricks environment. By + default, workspaces are associated with the 'default-policy' network policy. You cannot create or delete a + workspace's network option, only update it to associate the workspace with a different policy .. py:method:: get_workspace_network_option_rpc(workspace_id: int) -> WorkspaceNetworkOption - Get workspace network configuration. - - Gets the network configuration for a workspace.
Every workspace has exactly one network policy - binding, with 'default-policy' used if no explicit assignment exists. - + Get workspace network option. + + Gets the network option for a workspace. Every workspace has exactly one network policy binding, with + 'default-policy' used if no explicit assignment exists. + :param workspace_id: int The workspace ID. - + :returns: :class:`WorkspaceNetworkOption` .. py:method:: update_workspace_network_option_rpc(workspace_id: int, workspace_network_option: WorkspaceNetworkOption) -> WorkspaceNetworkOption - Update workspace network configuration. - - Updates the network configuration for a workspace. This operation associates the workspace with the - specified network policy. To revert to the default policy, specify 'default-policy' as the - network_policy_id. - + Update workspace network option. + + Updates the network option for a workspace. This operation associates the workspace with the specified + network policy. To revert to the default policy, specify 'default-policy' as the network_policy_id. + :param workspace_id: int The workspace ID. :param workspace_network_option: :class:`WorkspaceNetworkOption` - + :returns: :class:`WorkspaceNetworkOption` \ No newline at end of file diff --git a/docs/dbdataclasses/aibuilder.rst b/docs/dbdataclasses/aibuilder.rst new file mode 100644 index 000000000..a8f37542e --- /dev/null +++ b/docs/dbdataclasses/aibuilder.rst @@ -0,0 +1,59 @@ +AI Builder +========== + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.aibuilder`` module. + +.. py:currentmodule:: databricks.sdk.service.aibuilder +.. autoclass:: CancelCustomLlmOptimizationRunRequest + :members: + :undoc-members: + +.. autoclass:: CancelResponse + :members: + :undoc-members: + +.. autoclass:: CustomLlm + :members: + :undoc-members: + +.. autoclass:: Dataset + :members: + :undoc-members: + +.. autoclass:: StartCustomLlmOptimizationRunRequest + :members: + :undoc-members: + +.. py:class:: State + + States of Custom LLM optimization lifecycle. + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: COMPLETED + :value: "COMPLETED" + + .. py:attribute:: CREATED + :value: "CREATED" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: RUNNING + :value: "RUNNING" + +.. autoclass:: Table + :members: + :undoc-members: + +.. autoclass:: Token + :members: + :undoc-members: + +.. autoclass:: UpdateCustomLlmRequest + :members: + :undoc-members: diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index bbd625c62..78ef0c339 100644 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -226,3 +226,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: StopAppRequest :members: :undoc-members: + +.. autoclass:: Token + :members: + :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index 590fd693e..625c93a80 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -107,7 +107,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: DeliveryStatus - The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. 
* `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account. + * The status string for log delivery. Possible values are: `CREATED`: There were no log delivery attempts since the config was created. `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. `USER_FAILURE`: The latest attempt of log delivery failed because of misconfigured customer-provided permissions on the role or storage. `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon. `NOT_FOUND`: The log delivery status is not found because the configuration has been disabled since the release of this feature or there are no workspaces in the account. .. py:attribute:: CREATED :value: "CREATED" @@ -140,6 +140,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetLogDeliveryConfigurationResponse + :members: + :undoc-members: + .. autoclass:: LimitConfig :members: :undoc-members: @@ -154,7 +158,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: LogDeliveryConfigStatus - Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. + * Log Delivery Status + `ENABLED`: Log delivery for this configuration is enabled. `DISABLED`: Log delivery for this configuration is disabled. .. py:attribute:: DISABLED :value: "DISABLED" @@ -172,10 +177,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: LogType - Log delivery type. Supported values are: - * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the [View billable usage]. - * `AUDIT_LOGS` — Configure [audit log delivery]. For the JSON schema, see [Configure audit logging] - [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html + * Log Delivery Type .. py:attribute:: AUDIT_LOGS :value: "AUDIT_LOGS" @@ -185,9 +187,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: OutputFormat - The file type of log delivery. - * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated values) format is supported. For the schema, see the [View billable usage] * If `log_type` is `AUDIT_LOGS`, this value must be `JSON`. Only the JSON (JavaScript Object Notation) format is supported. For the schema, see the [Configuring audit logs]. 
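The docstrings removed in this hunk spell out a pairing rule that still holds for the API: billable-usage logs are delivered as CSV and audit logs as JSON. A sketch of that constraint using only the enum members listed above:

.. code-block:: python

    from databricks.sdk.service.billing import LogType, OutputFormat

    # The output format is dictated by the log type: BILLABLE_USAGE
    # requires CSV, AUDIT_LOGS requires JSON.
    REQUIRED_FORMAT = {
        LogType.BILLABLE_USAGE: OutputFormat.CSV,
        LogType.AUDIT_LOGS: OutputFormat.JSON,
    }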
- [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html + * Log Delivery Output Format .. py:attribute:: CSV :value: "CSV" @@ -208,6 +208,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: POLICY_NAME :value: "POLICY_NAME" +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UpdateBudgetConfigurationBudget :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index efbb6d06c..172eb0478 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -246,7 +246,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ConnectionType - The type of connection. + Next Id: 31 .. py:attribute:: BIGQUERY :value: "BIGQUERY" @@ -254,6 +254,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DATABRICKS :value: "DATABRICKS" + .. py:attribute:: GA4_RAW_DATA + :value: "GA4_RAW_DATA" + .. py:attribute:: GLUE :value: "GLUE" @@ -272,9 +275,21 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: POSTGRESQL :value: "POSTGRESQL" + .. py:attribute:: POWER_BI + :value: "POWER_BI" + .. py:attribute:: REDSHIFT :value: "REDSHIFT" + .. py:attribute:: SALESFORCE + :value: "SALESFORCE" + + .. py:attribute:: SALESFORCE_DATA_CLOUD + :value: "SALESFORCE_DATA_CLOUD" + + .. py:attribute:: SERVICENOW + :value: "SERVICENOW" + .. py:attribute:: SNOWFLAKE :value: "SNOWFLAKE" @@ -287,6 +302,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TERADATA :value: "TERADATA" + .. py:attribute:: UNKNOWN_CONNECTION_TYPE + :value: "UNKNOWN_CONNECTION_TYPE" + + .. py:attribute:: WORKDAY_RAAS + :value: "WORKDAY_RAAS" + .. autoclass:: ContinuousUpdateStatus :members: :undoc-members: @@ -402,11 +423,41 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: CredentialType - The type of credential. + Next Id: 12 .. py:attribute:: BEARER_TOKEN :value: "BEARER_TOKEN" + .. py:attribute:: OAUTH_ACCESS_TOKEN + :value: "OAUTH_ACCESS_TOKEN" + + .. py:attribute:: OAUTH_M2M + :value: "OAUTH_M2M" + + .. py:attribute:: OAUTH_REFRESH_TOKEN + :value: "OAUTH_REFRESH_TOKEN" + + .. py:attribute:: OAUTH_RESOURCE_OWNER_PASSWORD + :value: "OAUTH_RESOURCE_OWNER_PASSWORD" + + .. py:attribute:: OAUTH_U2M + :value: "OAUTH_U2M" + + .. py:attribute:: OAUTH_U2M_MAPPING + :value: "OAUTH_U2M_MAPPING" + + .. py:attribute:: OIDC_TOKEN + :value: "OIDC_TOKEN" + + .. py:attribute:: PEM_PRIVATE_KEY + :value: "PEM_PRIVATE_KEY" + + .. py:attribute:: SERVICE_CREDENTIAL + :value: "SERVICE_CREDENTIAL" + + .. py:attribute:: UNKNOWN_CREDENTIAL_TYPE + :value: "UNKNOWN_CREDENTIAL_TYPE" + .. py:attribute:: USERNAME_PASSWORD :value: "USERNAME_PASSWORD" @@ -487,34 +538,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WORKDAY_RAAS_FORMAT :value: "WORKDAY_RAAS_FORMAT" -.. autoclass:: DatabaseCatalog - :members: - :undoc-members: - -.. autoclass:: DatabaseInstance - :members: - :undoc-members: - -.. py:class:: DatabaseInstanceState - - .. py:attribute:: AVAILABLE - :value: "AVAILABLE" - - .. py:attribute:: DELETING - :value: "DELETING" - - .. py:attribute:: FAILING_OVER - :value: "FAILING_OVER" - - .. 
py:attribute:: STARTING - :value: "STARTING" - - .. py:attribute:: STOPPED - :value: "STOPPED" - - .. py:attribute:: UPDATING - :value: "UPDATING" - .. autoclass:: DatabricksGcpServiceAccount :members: :undoc-members: @@ -535,25 +558,21 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteDatabaseCatalogResponse +.. autoclass:: DeleteResponse :members: :undoc-members: -.. autoclass:: DeleteDatabaseInstanceResponse +.. autoclass:: DeltaRuntimePropertiesKvPairs :members: :undoc-members: -.. autoclass:: DeleteResponse - :members: - :undoc-members: +.. py:class:: DeltaSharingScopeEnum -.. autoclass:: DeleteSyncedDatabaseTableResponse - :members: - :undoc-members: + .. py:attribute:: INTERNAL + :value: "INTERNAL" -.. autoclass:: DeltaRuntimePropertiesKvPairs - :members: - :undoc-members: + .. py:attribute:: INTERNAL_AND_EXTERNAL + :value: "INTERNAL_AND_EXTERNAL" .. autoclass:: Dependency :members: @@ -738,15 +757,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: GetMetastoreSummaryResponseDeltaSharingScope - - The scope of Delta Sharing enabled for the metastore. - - .. py:attribute:: INTERNAL - :value: "INTERNAL" - - .. py:attribute:: INTERNAL_AND_EXTERNAL - :value: "INTERNAL_AND_EXTERNAL" +.. autoclass:: GetPermissionsResponse + :members: + :undoc-members: .. autoclass:: GetQuotaResponse :members: @@ -784,10 +797,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListDatabaseInstancesResponse - :members: - :undoc-members: - .. autoclass:: ListExternalLocationsResponse :members: :undoc-members: @@ -851,16 +860,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: MetastoreInfoDeltaSharingScope - - The scope of Delta Sharing enabled for the metastore. - - .. py:attribute:: INTERNAL - :value: "INTERNAL" - - .. py:attribute:: INTERNAL_AND_EXTERNAL - :value: "INTERNAL_AND_EXTERNAL" - .. autoclass:: ModelVersionInfo :members: :undoc-members: @@ -1007,10 +1006,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: NewPipelineSpec - :members: - :undoc-members: - .. autoclass:: OnlineTable :members: :undoc-members: @@ -1072,10 +1067,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PermissionsList - :members: - :undoc-members: - .. autoclass:: PipelineProgress :members: :undoc-members: @@ -1375,25 +1366,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SyncedDatabaseTable - :members: - :undoc-members: - -.. py:class:: SyncedTableSchedulingPolicy - - .. py:attribute:: CONTINUOUS - :value: "CONTINUOUS" - - .. py:attribute:: SNAPSHOT - :value: "SNAPSHOT" - - .. py:attribute:: TRIGGERED - :value: "TRIGGERED" - -.. autoclass:: SyncedTableSpec - :members: - :undoc-members: - .. autoclass:: SystemSchemaInfo :members: :undoc-members: @@ -1464,6 +1436,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. 
autoclass:: TriggeredUpdateStatus :members: :undoc-members: @@ -1508,16 +1484,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: UpdateMetastoreDeltaSharingScope - - The scope of Delta Sharing enabled for the metastore. - - .. py:attribute:: INTERNAL - :value: "INTERNAL" - - .. py:attribute:: INTERNAL_AND_EXTERNAL - :value: "INTERNAL_AND_EXTERNAL" - .. autoclass:: UpdateModelVersionRequest :members: :undoc-members: @@ -1530,6 +1496,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdatePermissionsResponse + :members: + :undoc-members: + .. autoclass:: UpdateRegisteredModelRequest :members: :undoc-members: @@ -1546,6 +1516,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateTableRequest + :members: + :undoc-members: + .. autoclass:: UpdateVolumeRequestContent :members: :undoc-members: diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst index b07745b6f..d2c1cd609 100644 --- a/docs/dbdataclasses/cleanrooms.rst +++ b/docs/dbdataclasses/cleanrooms.rst @@ -176,6 +176,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UpdateCleanRoomRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index fcee1a56c..d78a9d4f1 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -516,6 +516,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: AUTOSCALING_STATS_REPORT :value: "AUTOSCALING_STATS_REPORT" + .. py:attribute:: CLUSTER_MIGRATED + :value: "CLUSTER_MIGRATED" + .. py:attribute:: CREATING :value: "CREATING" @@ -1328,6 +1331,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DRIVER_UNEXPECTED_FAILURE :value: "DRIVER_UNEXPECTED_FAILURE" + .. py:attribute:: DRIVER_UNHEALTHY + :value: "DRIVER_UNHEALTHY" + .. py:attribute:: DRIVER_UNREACHABLE :value: "DRIVER_UNREACHABLE" @@ -1650,6 +1656,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UninstallLibraries :members: :undoc-members: diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index 5ac3f0f4b..16912e3f7 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -12,14 +12,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CancelQueryExecutionResponse - :members: - :undoc-members: - -.. autoclass:: CancelQueryExecutionResponseStatus - :members: - :undoc-members: - .. autoclass:: CronSchedule :members: :undoc-members: @@ -41,18 +33,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Empty - :members: - :undoc-members: - -.. autoclass:: ExecutePublishedDashboardQueryRequest - :members: - :undoc-members: - -.. autoclass:: ExecuteQueryResponse - :members: - :undoc-members: - .. 
autoclass:: GenieAttachment :members: :undoc-members: @@ -77,6 +57,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenieListSpacesResponse + :members: + :undoc-members: + .. autoclass:: GenieMessage :members: :undoc-members: @@ -101,10 +85,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GetPublishedDashboardEmbeddedResponse - :members: - :undoc-members: - .. autoclass:: GetPublishedDashboardTokenInfoResponse :members: :undoc-members: @@ -332,18 +312,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PendingStatus - :members: - :undoc-members: - -.. autoclass:: PollQueryStatusResponse - :members: - :undoc-members: - -.. autoclass:: PollQueryStatusResponseData - :members: - :undoc-members: - .. autoclass:: PublishRequest :members: :undoc-members: @@ -352,10 +320,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: QueryResponseStatus - :members: - :undoc-members: - .. autoclass:: Result :members: :undoc-members: @@ -388,11 +352,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SuccessStatus +.. autoclass:: TextAttachment :members: :undoc-members: -.. autoclass:: TextAttachment +.. autoclass:: Token :members: :undoc-members: diff --git a/docs/dbdataclasses/database.rst b/docs/dbdataclasses/database.rst new file mode 100644 index 000000000..28d9e99c5 --- /dev/null +++ b/docs/dbdataclasses/database.rst @@ -0,0 +1,173 @@ +Database Instances +================== + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.database`` module. + +.. py:currentmodule:: databricks.sdk.service.database +.. autoclass:: DatabaseCatalog + :members: + :undoc-members: + +.. autoclass:: DatabaseCredential + :members: + :undoc-members: + +.. autoclass:: DatabaseInstance + :members: + :undoc-members: + +.. py:class:: DatabaseInstanceState + + .. py:attribute:: AVAILABLE + :value: "AVAILABLE" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: FAILING_OVER + :value: "FAILING_OVER" + + .. py:attribute:: STARTING + :value: "STARTING" + + .. py:attribute:: STOPPED + :value: "STOPPED" + + .. py:attribute:: UPDATING + :value: "UPDATING" + +.. autoclass:: DatabaseTable + :members: + :undoc-members: + +.. autoclass:: DeleteDatabaseCatalogResponse + :members: + :undoc-members: + +.. autoclass:: DeleteDatabaseInstanceResponse + :members: + :undoc-members: + +.. autoclass:: DeleteDatabaseTableResponse + :members: + :undoc-members: + +.. autoclass:: DeleteSyncedDatabaseTableResponse + :members: + :undoc-members: + +.. autoclass:: GenerateDatabaseCredentialRequest + :members: + :undoc-members: + +.. autoclass:: ListDatabaseInstancesResponse + :members: + :undoc-members: + +.. autoclass:: NewPipelineSpec + :members: + :undoc-members: + +.. py:class:: ProvisioningInfoState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: DEGRADED + :value: "DEGRADED" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: PROVISIONING + :value: "PROVISIONING" + + .. py:attribute:: UPDATING + :value: "UPDATING" + +.. autoclass:: SyncedDatabaseTable + :members: + :undoc-members: + +.. 
autoclass:: SyncedTableContinuousUpdateStatus + :members: + :undoc-members: + +.. autoclass:: SyncedTableFailedStatus + :members: + :undoc-members: + +.. autoclass:: SyncedTablePipelineProgress + :members: + :undoc-members: + +.. autoclass:: SyncedTableProvisioningStatus + :members: + :undoc-members: + +.. py:class:: SyncedTableSchedulingPolicy + + .. py:attribute:: CONTINUOUS + :value: "CONTINUOUS" + + .. py:attribute:: SNAPSHOT + :value: "SNAPSHOT" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + +.. autoclass:: SyncedTableSpec + :members: + :undoc-members: + +.. py:class:: SyncedTableState + + The state of a synced table. + + .. py:attribute:: SYNCED_TABLED_OFFLINE + :value: "SYNCED_TABLED_OFFLINE" + + .. py:attribute:: SYNCED_TABLE_OFFLINE_FAILED + :value: "SYNCED_TABLE_OFFLINE_FAILED" + + .. py:attribute:: SYNCED_TABLE_ONLINE + :value: "SYNCED_TABLE_ONLINE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE + :value: "SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE + :value: "SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_PIPELINE_FAILED + :value: "SYNCED_TABLE_ONLINE_PIPELINE_FAILED" + + .. py:attribute:: SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE + :value: "SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE" + + .. py:attribute:: SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES + :value: "SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES" + + .. py:attribute:: SYNCED_TABLE_PROVISIONING + :value: "SYNCED_TABLE_PROVISIONING" + + .. py:attribute:: SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT + :value: "SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT" + + .. py:attribute:: SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES + :value: "SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES" + +.. autoclass:: SyncedTableStatus + :members: + :undoc-members: + +.. autoclass:: SyncedTableTriggeredUpdateStatus + :members: + :undoc-members: + +.. autoclass:: Token + :members: + :undoc-members: diff --git a/docs/dbdataclasses/files.rst b/docs/dbdataclasses/files.rst index 2b0d9845d..acc4920d9 100644 --- a/docs/dbdataclasses/files.rst +++ b/docs/dbdataclasses/files.rst @@ -100,6 +100,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UploadResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index a471503a7..a170fe6c3 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -299,6 +299,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UpdateObjectPermissions :members: :undoc-members: diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst index 3ecb9c13f..ca9fd5a4d 100644 --- a/docs/dbdataclasses/index.rst +++ b/docs/dbdataclasses/index.rst @@ -5,12 +5,14 @@ Dataclasses .. toctree:: :maxdepth: 1 + aibuilder apps billing catalog cleanrooms compute dashboards + database files iam jobs @@ -19,6 +21,7 @@ Dataclasses oauth2 pipelines provisioning + qualitymonitorv2 serving settings sharing diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 670e83685..0e22f457a 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -200,6 +200,40 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: DbtCloudJobRunStep + :members: + :undoc-members: + +.. py:class:: DbtCloudRunStatus + + Response enumeration from calling the dbt Cloud API, for inclusion in output + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: STARTING + :value: "STARTING" + + .. py:attribute:: SUCCESS + :value: "SUCCESS" + +.. autoclass:: DbtCloudTask + :members: + :undoc-members: + +.. autoclass:: DbtCloudTaskOutput + :members: + :undoc-members: + .. autoclass:: DbtOutput :members: :undoc-members: @@ -244,6 +278,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: FileArrivalTriggerState + :members: + :undoc-members: + .. autoclass:: ForEachStats :members: :undoc-members: @@ -1063,6 +1101,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: TriggerInfo :members: :undoc-members: @@ -1071,6 +1113,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: TriggerStateProto + :members: + :undoc-members: + .. py:class:: TriggerType The type of trigger that fired this run. diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index 02e48c381..47a72865a 100644 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -527,6 +527,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: TokenDetail :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index 1778c5837..3ffa046e3 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -61,30 +61,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ArtifactCredentialInfo - :members: - :undoc-members: - -.. autoclass:: ArtifactCredentialInfoHttpHeader - :members: - :undoc-members: - -.. py:class:: ArtifactCredentialType - - The type of a given artifact access credential - - .. py:attribute:: AWS_PRESIGNED_URL - :value: "AWS_PRESIGNED_URL" - - .. py:attribute:: AZURE_ADLS_GEN2_SAS_URI - :value: "AZURE_ADLS_GEN2_SAS_URI" - - .. py:attribute:: AZURE_SAS_URI - :value: "AZURE_SAS_URI" - - .. py:attribute:: GCP_SIGNED_URL - :value: "GCP_SIGNED_URL" - .. py:class:: CommentActivityAction An action that a user (with sufficient permissions) could take on a comment. Valid values are: * `EDIT_COMMENT`: Edit the comment @@ -340,14 +316,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCEEDED :value: "SUCCEEDED" -.. autoclass:: GetCredentialsForTraceDataDownloadResponse - :members: - :undoc-members: - -.. autoclass:: GetCredentialsForTraceDataUploadResponse - :members: - :undoc-members: - .. autoclass:: GetExperimentByNameResponse :members: :undoc-members: @@ -424,10 +392,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListLoggedModelArtifactsResponse - :members: - :undoc-members: - .. 
autoclass:: ListModelsResponse :members: :undoc-members: @@ -907,6 +871,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: TransitionModelVersionStageDatabricks :members: :undoc-members: diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst index 4097add9e..b0219a5ff 100644 --- a/docs/dbdataclasses/oauth2.rst +++ b/docs/dbdataclasses/oauth2.rst @@ -84,6 +84,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: TokenAccessPolicy :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index fbad8a4f3..ec7d7a80b 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -165,6 +165,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SQLSERVER :value: "SQLSERVER" + .. py:attribute:: TERADATA + :value: "TERADATA" + .. py:attribute:: WORKDAY_RAAS :value: "WORKDAY_RAAS" @@ -419,6 +422,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SCD_TYPE_2 :value: "SCD_TYPE_2" +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UpdateInfo :members: :undoc-members: diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst index 4c909d488..41d5e2f71 100644 --- a/docs/dbdataclasses/provisioning.rst +++ b/docs/dbdataclasses/provisioning.rst @@ -221,6 +221,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UpdateResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/qualitymonitorv2.rst b/docs/dbdataclasses/qualitymonitorv2.rst new file mode 100644 index 000000000..6787d6a09 --- /dev/null +++ b/docs/dbdataclasses/qualitymonitorv2.rst @@ -0,0 +1,53 @@ +Quality Monitor +=============== + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.qualitymonitorv2`` module. + +.. py:currentmodule:: databricks.sdk.service.qualitymonitorv2 +.. autoclass:: AnomalyDetectionConfig + :members: + :undoc-members: + +.. py:class:: AnomalyDetectionRunStatus + + Status of Anomaly Detection Job Run + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_CANCELED + :value: "ANOMALY_DETECTION_RUN_STATUS_CANCELED" + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_FAILED + :value: "ANOMALY_DETECTION_RUN_STATUS_FAILED" + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED + :value: "ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED" + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_PENDING + :value: "ANOMALY_DETECTION_RUN_STATUS_PENDING" + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_RUNNING + :value: "ANOMALY_DETECTION_RUN_STATUS_RUNNING" + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_SUCCESS + :value: "ANOMALY_DETECTION_RUN_STATUS_SUCCESS" + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_UNKNOWN + :value: "ANOMALY_DETECTION_RUN_STATUS_UNKNOWN" + + .. py:attribute:: ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR + :value: "ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR" + +.. autoclass:: DeleteQualityMonitorResponse + :members: + :undoc-members: + +.. 
autoclass:: ListQualityMonitorResponse + :members: + :undoc-members: + +.. autoclass:: QualityMonitor + :members: + :undoc-members: + +.. autoclass:: Token + :members: + :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 01249dced..80dec3c18 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -532,6 +532,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: MULTIGPU_MEDIUM :value: "MULTIGPU_MEDIUM" +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: TrafficConfig :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index 0917028d7..f65058b9e 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -212,6 +212,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule + :members: + :undoc-members: + +.. py:class:: CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + + .. py:attribute:: DISCONNECTED + :value: "DISCONNECTED" + + .. py:attribute:: ESTABLISHED + :value: "ESTABLISHED" + + .. py:attribute:: EXPIRED + :value: "EXPIRED" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: REJECTED + :value: "REJECTED" + +.. autoclass:: DashboardEmailSubscriptions + :members: + :undoc-members: + .. autoclass:: DefaultNamespaceSetting :members: :undoc-members: @@ -228,6 +253,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteDashboardEmailSubscriptionsResponse + :members: + :undoc-members: + .. autoclass:: DeleteDefaultNamespaceSettingResponse :members: :undoc-members: @@ -268,6 +297,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteSqlResultsDownloadResponse + :members: + :undoc-members: + .. py:class:: DestinationType .. py:attribute:: EMAIL @@ -521,10 +554,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListNccAzurePrivateEndpointRulesResponse - :members: - :undoc-members: - .. autoclass:: ListNetworkConnectivityConfigurationsResponse :members: :undoc-members: @@ -541,6 +570,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListPrivateEndpointRulesResponse + :members: + :undoc-members: + .. autoclass:: ListPublicTokensResponse :members: :undoc-members: @@ -620,6 +653,27 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: NccPrivateEndpointRule + :members: + :undoc-members: + +.. py:class:: NccPrivateEndpointRulePrivateLinkConnectionState + + .. py:attribute:: DISCONNECTED + :value: "DISCONNECTED" + + .. py:attribute:: ESTABLISHED + :value: "ESTABLISHED" + + .. py:attribute:: EXPIRED + :value: "EXPIRED" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: REJECTED + :value: "REJECTED" + .. autoclass:: NetworkConnectivityConfiguration :members: :undoc-members: @@ -702,10 +756,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SqlResultsDownload + :members: + :undoc-members: + .. 
autoclass:: StringMessage :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: TokenAccessControlRequest :members: :undoc-members: @@ -784,6 +846,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateDashboardEmailSubscriptionsRequest + :members: + :undoc-members: + .. autoclass:: UpdateDefaultNamespaceSettingRequest :members: :undoc-members: @@ -856,6 +922,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateSqlResultsDownloadRequest + :members: + :undoc-members: + .. autoclass:: WorkspaceNetworkOption :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index 2c79baa22..ed208ebf6 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -14,6 +14,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: OAUTH_CLIENT_CREDENTIALS :value: "OAUTH_CLIENT_CREDENTIALS" + .. py:attribute:: OIDC_FEDERATION + :value: "OIDC_FEDERATION" + .. py:attribute:: TOKEN :value: "TOKEN" @@ -512,6 +515,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VIEW :value: "VIEW" +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UpdateProvider :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 60712bc2e..913da36c3 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -302,6 +302,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CreateQueryVisualizationsLegacyRequest + :members: + :undoc-members: + .. autoclass:: CreateVisualizationRequest :members: :undoc-members: @@ -1029,6 +1033,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WORKSPACE_TEMPORARILY_UNAVAILABLE :value: "WORKSPACE_TEMPORARILY_UNAVAILABLE" +.. autoclass:: SetRequest + :members: + :undoc-members: + .. autoclass:: SetResponse :members: :undoc-members: @@ -1156,6 +1164,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" +.. autoclass:: TaskTimeOverRange + :members: + :undoc-members: + +.. autoclass:: TaskTimeOverRangeEntry + :members: + :undoc-members: + .. autoclass:: TerminationReason :members: :undoc-members: @@ -1425,10 +1441,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: TransferOwnershipObjectId :members: :undoc-members: +.. autoclass:: TransferOwnershipRequest + :members: + :undoc-members: + .. autoclass:: UpdateAlertRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index 5433f2673..b443acae0 100644 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -169,6 +169,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. 
autoclass:: UpdateEndpointCustomTagsRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index e20f4ac7d..0b0e3a16f 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -331,6 +331,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Token + :members: + :undoc-members: + .. autoclass:: UpdateCredentialsRequest :members: :undoc-members: diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py index 6ebfa7bab..66235d26c 100644 --- a/docs/gen-client-docs.py +++ b/docs/gen-client-docs.py @@ -249,6 +249,21 @@ class Generator: Package("marketplace", "Marketplace", "Manage AI and analytics assets such as ML models, notebooks, applications in an open marketplace"), Package("apps", "Apps", "Build custom applications on Databricks"), Package("cleanrooms", "Clean Rooms", "Manage clean rooms and their assets and task runs"), + Package( + "qualitymonitorv2", + "Quality Monitor", + "Manage quality monitor on Unity Catalog objects." + ), + Package( + "database", + "Database Instances", + "Create Database Instances and manage their configurations, including integrations with Unity Catalog" + ), + Package( + "aibuilder", + "AI Builder", + "Create and manage AI Builder resources." + ) ] def __init__(self): @@ -277,11 +292,11 @@ def _load_mapping(self) -> dict[str, Tag]: key = f"{key}.{clean_parent_service}" key = f"{key}.{tag['x-databricks-service']}".lower() - + package = tag['x-databricks-package'] t = Tag(name=tag['name'], service=tag['x-databricks-service'], is_account=tag.get('x-databricks-is-accounts', False), - package=pkgs[tag['x-databricks-package']]) + package=pkgs[package]) mapping[key] = t return mapping diff --git a/docs/workspace/aibuilder/custom_llms.rst b/docs/workspace/aibuilder/custom_llms.rst new file mode 100644 index 000000000..19a41fdc6 --- /dev/null +++ b/docs/workspace/aibuilder/custom_llms.rst @@ -0,0 +1,61 @@ +``w.custom_llms``: Custom LLMs Service +====================================== +.. currentmodule:: databricks.sdk.service.aibuilder + +.. py:class:: CustomLlmsAPI + + The Custom LLMs service manages state and powers the UI for the Custom LLM product. + + .. py:method:: cancel(id: str) + + Cancel a Custom LLM Optimization Run. + + :param id: str + + + + + .. py:method:: create(id: str) -> CustomLlm + + Start a Custom LLM Optimization Run. + + :param id: str + The Id of the tile. + + :returns: :class:`CustomLlm` + + + .. py:method:: get(id: str) -> CustomLlm + + Get a Custom LLM. + + :param id: str + The id of the custom llm + + :returns: :class:`CustomLlm` + + + .. py:method:: update(id: str, custom_llm: CustomLlm, update_mask: str) -> CustomLlm + + Update a Custom LLM. + + :param id: str + The id of the custom llm + :param custom_llm: :class:`CustomLlm` + The CustomLlm containing the fields which should be updated. + :param update_mask: str + The list of the CustomLlm fields to update. These should correspond to the values (or lack thereof) + present in `custom_llm`. + + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. 
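A sketch of a partial update under the field-mask rules described here and concluded just below; the ``name`` mask entry is illustrative, and it assumes ``CustomLlm`` exposes a field of that name:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    llm = w.custom_llms.get(id="my-custom-llm-id")  # placeholder id

    # Mask entries are comma-separated with no spaces and must match
    # CustomLlm field names exactly; "name" here is illustrative.
    updated = w.custom_llms.update(
        id="my-custom-llm-id",
        custom_llm=llm,
        update_mask="name",
    )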
+ + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`CustomLlm` + \ No newline at end of file diff --git a/docs/workspace/aibuilder/index.rst b/docs/workspace/aibuilder/index.rst new file mode 100644 index 000000000..ff3ba49e7 --- /dev/null +++ b/docs/workspace/aibuilder/index.rst @@ -0,0 +1,10 @@ + +AI Builder +========== + +Create and manage AI Builder resources. + +.. toctree:: + :maxdepth: 1 + + custom_llms \ No newline at end of file diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst index be094be30..34aa3f7c4 100644 --- a/docs/workspace/apps/apps.rst +++ b/docs/workspace/apps/apps.rst @@ -10,13 +10,13 @@ .. py:method:: create(app: App [, no_compute: Optional[bool]]) -> Wait[App] Create an app. - + Creates a new app. - + :param app: :class:`App` :param no_compute: bool (optional) If true, the app will not be started after creation. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. @@ -28,25 +28,25 @@ .. py:method:: delete(name: str) -> App Delete an app. - + Deletes an app. - + :param name: str The name of the app. - + :returns: :class:`App` .. py:method:: deploy(app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment] Create an app deployment. - + Creates an app deployment for the app with the supplied name. - + :param app_name: str The name of the app. :param app_deployment: :class:`AppDeployment` - + :returns: Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. @@ -58,106 +58,106 @@ .. py:method:: get(name: str) -> App Get an app. - + Retrieves information for the app with the supplied name. - + :param name: str The name of the app. - + :returns: :class:`App` .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment Get an app deployment. - + Retrieves information for the app deployment with the supplied name and deployment id. - + :param app_name: str The name of the app. :param deployment_id: str The unique id of the deployment. - + :returns: :class:`AppDeployment` .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse Get app permission levels. - + Gets the permission levels that a user can have on an object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`GetAppPermissionLevelsResponse` .. py:method:: get_permissions(app_name: str) -> AppPermissions Get app permissions. - + Gets the permissions of an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`AppPermissions` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App] List apps. - + Lists all apps in the workspace. - + :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. - + :returns: Iterator over :class:`App` .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment] List app deployments. - + Lists all app deployments for the app with the supplied name. - + :param app_name: str The name of the app. 
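Several of the app methods above return a ``Wait`` object rather than a finished resource; a sketch of the blocking pattern, assuming an ``App`` payload with only a name is accepted:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.apps import App

    w = WorkspaceClient()

    # create() returns a long-running operation waiter; result() blocks
    # until wait_get_app_active resolves.
    app = w.apps.create(app=App(name="my-sdk-app")).result()

    # stop() follows the same pattern via wait_get_app_stopped.
    w.apps.stop(name=app.name).result()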
:param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. - + :returns: Iterator over :class:`AppDeployment` .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions Set app permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` .. py:method:: start(name: str) -> Wait[App] Start an app. - + Start the last active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. @@ -169,12 +169,12 @@ .. py:method:: stop(name: str) -> Wait[App] Stop an app. - + Stops the active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_stopped for more details. @@ -186,27 +186,27 @@ .. py:method:: update(name: str, app: App) -> App Update an app. - + Updates the app with the supplied name. - + :param name: str The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace. :param app: :class:`App` - + :returns: :class:`App` .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions Update app permissions. - + Updates the permissions on an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` diff --git a/docs/workspace/catalog/artifact_allowlists.rst b/docs/workspace/catalog/artifact_allowlists.rst index f153dee79..d84666398 100644 --- a/docs/workspace/catalog/artifact_allowlists.rst +++ b/docs/workspace/catalog/artifact_allowlists.rst @@ -10,24 +10,24 @@ .. py:method:: get(artifact_type: ArtifactType) -> ArtifactAllowlistInfo Get an artifact allowlist. - + Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. - + :param artifact_type: :class:`ArtifactType` The artifact type of the allowlist. - + :returns: :class:`ArtifactAllowlistInfo` .. py:method:: update(artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher] [, created_at: Optional[int], created_by: Optional[str], metastore_id: Optional[str]]) -> ArtifactAllowlistInfo Set an artifact allowlist. - + Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. - + :param artifact_type: :class:`ArtifactType` The artifact type of the allowlist. :param artifact_matchers: List[:class:`ArtifactMatcher`] @@ -38,6 +38,6 @@ Username of the user who set the artifact allowlist. :param metastore_id: str (optional) Unique identifier of parent metastore. 
- + :returns: :class:`ArtifactAllowlistInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 60959cad4..51ad5ca8e 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -6,7 +6,7 @@ A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission. - + In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of the workspaces in a Databricks account. Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog. @@ -24,16 +24,16 @@ w = WorkspaceClient() - created = w.catalogs.create(name=f"sdk-{time.time_ns()}") + created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}") # cleanup - w.catalogs.delete(name=created.name, force=True) + w.catalogs.delete(name=created_catalog.name, force=True) Create a catalog. - + Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. - + :param name: str Name of catalog. :param comment: str (optional) @@ -46,29 +46,29 @@ A map of key-value properties attached to the securable. :param provider_name: str (optional) The name of delta sharing provider. - + A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server. :param share_name: str (optional) The name of the share under the share provider. :param storage_root: str (optional) Storage root URL for managed tables within catalog. - + :returns: :class:`CatalogInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete a catalog. - + Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner of the catalog. - + :param name: str The name of the catalog. :param force: bool (optional) Force deletion even if the catalog is not empty. - - + + .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> CatalogInfo @@ -92,16 +92,16 @@ w.catalogs.delete(name=created.name, force=True) Get a catalog. - + Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** privilege set for their account. - + :param name: str The name of the catalog. :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata for - + :returns: :class:`CatalogInfo` @@ -120,12 +120,12 @@ all = w.catalogs.list(catalog.ListCatalogsRequest()) List catalogs. - + Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata for @@ -139,7 +139,7 @@ response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CatalogInfo` @@ -164,10 +164,10 @@ w.catalogs.delete(name=created.name, force=True) Update a catalog. - + Updates the catalog that matches the supplied name. 
The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field of the catalog). - + :param name: str The name of the catalog. :param comment: str (optional) @@ -184,6 +184,6 @@ Username of current owner of catalog. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. - + :returns: :class:`CatalogInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst index 463c9493e..8aa4ebf7f 100644 --- a/docs/workspace/catalog/connections.rst +++ b/docs/workspace/catalog/connections.rst @@ -5,7 +5,7 @@ .. py:class:: ConnectionsAPI Connections allow for creating a connection to an external data source. - + A connection is an abstraction of an external data source that can be connected from Databricks Compute. Creating a connection object is the first step to managing external data sources within Unity Catalog, with the second step being creating a data object (catalog, schema, or table) using the connection. Data @@ -42,12 +42,12 @@ w.connections.delete(name=conn_create.name) Create a connection. - + Creates a new connection - + Creates a new connection to an external data source. It allows users to specify connection details and configurations for interaction with the external server. - + :param name: str Name of the connection. :param connection_type: :class:`ConnectionType` @@ -57,23 +57,23 @@ :param comment: str (optional) User-provided free-form text description. :param properties: Dict[str,str] (optional) - An object containing map of key-value properties attached to the connection. + A map of key-value properties attached to the securable. :param read_only: bool (optional) If the connection is read only. - + :returns: :class:`ConnectionInfo` .. py:method:: delete(name: str) Delete a connection. - + Deletes the connection that matches the supplied name. - + :param name: str The name of the connection to be deleted. - - + + .. py:method:: get(name: str) -> ConnectionInfo @@ -116,12 +116,12 @@ w.connections.delete(name=conn_create.name) Get a connection. - + Gets a connection from it's name. - + :param name: str Name of the connection. - + :returns: :class:`ConnectionInfo` @@ -140,9 +140,9 @@ conn_list = w.connections.list(catalog.ListConnectionsRequest()) List connections. - + List all connections. - + :param max_results: int (optional) Maximum number of connections to return. - If not set, all connections are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of this value and @@ -150,7 +150,7 @@ (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ConnectionInfo` @@ -192,9 +192,9 @@ w.connections.delete(name=conn_create.name) Update a connection. - + Updates the connection that matches the supplied name. - + :param name: str Name of the connection. :param options: Dict[str,str] @@ -203,6 +203,6 @@ New name for the connection. :param owner: str (optional) Username of current owner of the connection. 
- + :returns: :class:`ConnectionInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst index 661d955b0..829bd174f 100644 --- a/docs/workspace/catalog/credentials.rst +++ b/docs/workspace/catalog/credentials.rst @@ -7,7 +7,7 @@ A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant. Each credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. - + To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user or group to manage permissions on it. @@ -15,13 +15,13 @@ .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo Create a credential. - + Creates a new credential. The type of credential to be created is determined by the **purpose** field, which should be either **SERVICE** or **STORAGE**. - + The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials. - + :param name: str The credential name. The name must be unique among storage and service credentials within the metastore. @@ -42,66 +42,66 @@ **STORAGE**. :param skip_validation: bool (optional) Optional. Supplying true to this argument skips validation of the created set of credentials. - + :returns: :class:`CredentialInfo` .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]]) Delete a credential. - + Deletes a service or storage credential from the metastore. The caller must be an owner of the credential. - + :param name_arg: str Name of the credential. :param force: bool (optional) Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**). - - + + .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials Generate a temporary service credential. - + Returns a set of temporary credentials generated using the specified service credential. The caller must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. - + :param credential_name: str The name of the service credential used to generate a temporary credential :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) The Azure cloud options to customize the requested temporary credential :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) The GCP cloud options to customize the requested temporary credential - + :returns: :class:`TemporaryCredentials` .. py:method:: get_credential(name_arg: str) -> CredentialInfo Get a credential. - + Gets a service or storage credential from the metastore. 
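This page carries no inline usage example for the unified credentials API; a sketch of creating a service credential and exchanging it for temporary credentials, assuming ``AwsIamRole`` and ``CredentialPurpose`` are importable from ``databricks.sdk.service.catalog`` (the ARN is a placeholder):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

    w = WorkspaceClient()

    # The purpose field decides whether a SERVICE or a STORAGE
    # credential is created.
    cred = w.credentials.create_credential(
        name="sdk-service-cred",
        aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/example"),
        purpose=CredentialPurpose.SERVICE,
    )

    # Exchange the service credential for short-lived cloud credentials.
    tmp = w.credentials.generate_temporary_service_credential(
        credential_name=cred.name,
    )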
The caller must be a metastore admin, the owner of the credential, or have any permission on the credential. - + :param name_arg: str Name of the credential. - + :returns: :class:`CredentialInfo` .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo] List credentials. - + Gets an array of credentials (as __CredentialInfo__ objects). - + The array is limited to only the credentials that the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of credentials to return. - If not set, the default max page size is used. - When set to a value greater than 0, the page length is the minimum of this value and a server-configured @@ -111,19 +111,19 @@ Opaque token to retrieve the next page of results. :param purpose: :class:`CredentialPurpose` (optional) Return only credentials for the specified purpose. - + :returns: Iterator over :class:`CredentialInfo` .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo Update a credential. - + Updates a service or storage credential on the metastore. - + The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. If the caller is a metastore admin, only the __owner__ field can be changed. - + :param name_arg: str Name of the credential. :param aws_iam_role: :class:`AwsIamRole` (optional) @@ -150,28 +150,28 @@ **STORAGE**. :param skip_validation: bool (optional) Supply true to this argument to skip validation of the updated credential. - + :returns: :class:`CredentialInfo` .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse Validate a credential. - + Validates a credential. - + For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific credential must be provided. - + For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used for validation. And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the credential owner or have the required permission on the metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). 
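A sketch of storage-credential validation under the rules above (when both a URL and an external location name are supplied, the URL is used); the names are placeholders:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import CredentialPurpose

    w = WorkspaceClient()

    result = w.credentials.validate_credential(
        credential_name="sdk-storage-cred",  # placeholder
        purpose=CredentialPurpose.STORAGE,
        url="s3://example-bucket/prefix",  # used for validation when present
    )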
- + :param aws_iam_role: :class:`AwsIamRole` (optional) The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) @@ -190,6 +190,6 @@ (purpose is **STORAGE**.) :param url: str (optional) The external location url to validate. Only applicable when purpose is **STORAGE**. - + :returns: :class:`ValidateCredentialResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index c9f1e3e37..164c21a86 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -9,9 +9,9 @@ access-control policies that control which users and groups can access the credential. If a user does not have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. - + To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege. @@ -30,29 +30,27 @@ w = WorkspaceClient() - storage_credential = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), - comment="created via SDK", ) - external_location = w.external_locations.create( + created = w.external_locations.create( name=f"sdk-{time.time_ns()}", - credential_name=storage_credential.name, - comment="created via SDK", - url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}", + credential_name=credential.name, + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) # cleanup - w.storage_credentials.delete(name=storage_credential.name) - w.external_locations.delete(name=external_location.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Create an external location. - + Creates a new external location entry in the metastore. The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage credential. - + :param name: str Name of the external location. :param url: str @@ -75,23 +73,23 @@ Indicates whether the external location is read-only. :param skip_validation: bool (optional) Skips validation of the storage credential associated with the external location. - + :returns: :class:`ExternalLocationInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete an external location. - + Deletes the specified external location from the metastore. The caller must be the owner of the external location. - + :param name: str Name of the external location. :param force: bool (optional) Force deletion even if there are dependent external tables or mounts. - - + + .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> ExternalLocationInfo @@ -127,16 +125,16 @@ w.external_locations.delete(name=created.name) Get an external location. - + Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. - + :param name: str Name of the external location. 
:param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for - + :returns: :class:`ExternalLocationInfo` @@ -154,11 +152,11 @@ all = w.external_locations.list() List external locations. - + Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for @@ -169,7 +167,7 @@ value (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ExternalLocationInfo` @@ -190,31 +188,31 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) _ = w.external_locations.update( name=created.name, credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) # cleanup - w.storage_credentials.delete(delete=credential.name) - w.external_locations.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Update an external location. - + Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external location. - + :param name: str Name of the external location. :param comment: str (optional) @@ -244,6 +242,6 @@ Skips validation of the storage credential associated with the external location. :param url: str (optional) Path URL of the external location. - + :returns: :class:`ExternalLocationInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/functions.rst b/docs/workspace/catalog/functions.rst index 3c736e714..646488074 100644 --- a/docs/workspace/catalog/functions.rst +++ b/docs/workspace/catalog/functions.rst @@ -5,7 +5,7 @@ .. py:class:: FunctionsAPI Functions implement User-Defined Functions (UDFs) in Unity Catalog. - + The function implementation can be any SQL expression or Query, and it can be invoked wherever a table reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it can be referenced with the form __catalog_name__.__schema_name__.__function_name__. @@ -13,71 +13,71 @@ .. py:method:: create(function_info: CreateFunction) -> FunctionInfo Create a function. 
- + **WARNING: This API is experimental and will change in future versions** - + Creates a new function - + The user must have the following permissions in order for the function to be created: - **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the function's parent schema - + :param function_info: :class:`CreateFunction` Partial __FunctionInfo__ specifying the function to be created. - + :returns: :class:`FunctionInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete a function. - + Deletes the function that matches the supplied name. For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog and the **USE_SCHEMA** privilege on its parent schema - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param force: bool (optional) Force deletion even if the function is not empty. - - + + .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> FunctionInfo Get a function. - + Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the **USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the function itself - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param include_browse: bool (optional) Whether to include functions in the response for which the principal can only access selective metadata for - + :returns: :class:`FunctionInfo` .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FunctionInfo] List functions. - + List functions within the specified parent catalog and schema. If the user is a metastore admin, all functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for functions of interest. :param schema_name: str @@ -92,26 +92,26 @@ (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`FunctionInfo` .. py:method:: update(name: str [, owner: Optional[str]]) -> FunctionInfo Update a function. - + Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must be a member of the group that is the new function owner.
- Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the **USE_SCHEMA** privilege on the function's parent schema. - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param owner: str (optional) Username of current owner of function. - + :returns: :class:`FunctionInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/grants.rst b/docs/workspace/catalog/grants.rst index 73b3dae28..4a46cb5a6 100644 --- a/docs/workspace/catalog/grants.rst +++ b/docs/workspace/catalog/grants.rst @@ -8,13 +8,13 @@ Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. - + Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. This means that granting a privilege on the catalog automatically grants the privilege to all current and future objects within the catalog. Similarly, privileges granted on a schema are inherited by all current and future objects within that schema. - .. py:method:: get(securable_type: SecurableType, full_name: str [, principal: Optional[str]]) -> PermissionsList + .. py:method:: get(securable_type: str, full_name: str [, max_results: Optional[int], page_token: Optional[str], principal: Optional[str]]) -> GetPermissionsResponse Usage: @@ -61,20 +61,32 @@ w.tables.delete(full_name=table_full_name) Get permissions. - - Gets the permissions for a securable. - - :param securable_type: :class:`SecurableType` + + Gets the permissions for a securable. Does not include inherited permissions. + + :param securable_type: str Type of securable. :param full_name: str Full name of securable. + :param max_results: int (optional) + Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment + present in a single page response is guaranteed to contain all the privileges granted on the + requested Securable for the respective principal. + + If not set, all the permissions are returned. If set to - lesser than 0: invalid parameter error - + 0: page length is set to a server configured value - lesser than 150 but greater than 0: invalid + parameter error (this is to ensure that server is able to return at least one complete + PrivilegeAssignment in a single page response) - greater than (or equal to) 150: page length is the + minimum of this value and a server configured value + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :param principal: str (optional) If provided, only the permissions for the specified principal (user or group) are returned. - - :returns: :class:`PermissionsList` + + :returns: :class:`GetPermissionsResponse` - .. py:method:: get_effective(securable_type: SecurableType, full_name: str [, principal: Optional[str]]) -> EffectivePermissionsList + .. py:method:: get_effective(securable_type: str, full_name: str [, max_results: Optional[int], page_token: Optional[str], principal: Optional[str]]) -> EffectivePermissionsList Usage: @@ -121,21 +133,35 @@ w.tables.delete(full_name=table_full_name) Get effective permissions. - - Gets the effective permissions for a securable. 
- - :param securable_type: :class:`SecurableType` + + Gets the effective permissions for a securable. Includes inherited permissions from any parent + securables. + + :param securable_type: str Type of securable. :param full_name: str Full name of securable. + :param max_results: int (optional) + Specifies the maximum number of privileges to return (page length). Every + EffectivePrivilegeAssignment present in a single page response is guaranteed to contain all the + effective privileges granted on (or inherited by) the requested Securable for the respective + principal. + + If not set, all the effective permissions are returned. If set to - lesser than 0: invalid parameter + error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: + invalid parameter error (this is to ensure that server is able to return at least one complete + EffectivePrivilegeAssignment in a single page response) - greater than (or equal to) 150: page + length is the minimum of this value and a server configured value + :param page_token: str (optional) + Opaque token for the next page of results (pagination). :param principal: str (optional) If provided, only the effective permissions for the specified principal (user or group) are returned. - + :returns: :class:`EffectivePermissionsList` - .. py:method:: update(securable_type: SecurableType, full_name: str [, changes: Optional[List[PermissionsChange]]]) -> PermissionsList + .. py:method:: update(securable_type: str, full_name: str [, changes: Optional[List[PermissionsChange]]]) -> UpdatePermissionsResponse Usage: @@ -190,15 +216,15 @@ w.tables.delete(full_name=table_full_name) Update permissions. - + Updates the permissions for a securable. - - :param securable_type: :class:`SecurableType` + + :param securable_type: str Type of securable. :param full_name: str Full name of securable. :param changes: List[:class:`PermissionsChange`] (optional) Array of permissions change objects. - - :returns: :class:`PermissionsList` + + :returns: :class:`UpdatePermissionsResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst index 7549bc487..471804098 100644 --- a/docs/workspace/catalog/index.rst +++ b/docs/workspace/catalog/index.rst @@ -11,7 +11,6 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas, catalogs connections credentials - database_instances external_locations functions grants diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst index 52c36437e..628fe13f1 100644 --- a/docs/workspace/catalog/metastores.rst +++ b/docs/workspace/catalog/metastores.rst @@ -8,10 +8,10 @@ views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use Unity Catalog, it must have a Unity Catalog metastore attached. - + Each metastore is configured with a root storage location in a cloud storage account. This storage location is used for metadata and managed tables data. - + NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is available in a catalog named hive_metastore. @@ -43,20 +43,20 @@ w.metastores.delete(id=created.metastore_id, force=True) Create an assignment. - + Creates a new metastore assignment. 
If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin. - + :param workspace_id: int A workspace ID. :param metastore_id: str The unique ID of the metastore. :param default_catalog_name: str - The name of the default catalog in the metastore. This field is depracted. Please use "Default + The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace. - - + + .. py:method:: create(name: str [, region: Optional[str], storage_root: Optional[str]]) -> MetastoreInfo @@ -82,21 +82,19 @@ w.metastores.delete(id=created.metastore_id, force=True) Create a metastore. - + Creates a new metastore based on a provided name and optional storage root path. By default (if the __owner__ field is not set), the owner of the new metastore is the user calling the __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is assigned to the System User instead. - + :param name: str The user-specified name of the metastore. :param region: str (optional) - Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted in - the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is omitted, - the region of the workspace receiving the request will be used. + Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). :param storage_root: str (optional) The storage root URL for metastore - + :returns: :class:`MetastoreInfo` @@ -114,24 +112,24 @@ current_metastore = w.metastores.current() Get metastore assignment for workspace. - + Gets the metastore assignment for the workspace being accessed. - + :returns: :class:`MetastoreAssignment` .. py:method:: delete(id: str [, force: Optional[bool]]) Delete a metastore. - + Deletes a metastore. The caller must be a metastore admin. - + :param id: str Unique ID of the metastore. :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - + + .. py:method:: get(id: str) -> MetastoreInfo @@ -159,17 +157,17 @@ w.metastores.delete(id=created.metastore_id, force=True) Get a metastore. - + Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info. - + :param id: str Unique ID of the metastore. - + :returns: :class:`MetastoreInfo` - .. py:method:: list() -> Iterator[MetastoreInfo] + .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[MetastoreInfo] Usage: @@ -183,10 +181,21 @@ all = w.metastores.list() List metastores. - + Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in the array. - + + :param max_results: int (optional) + Maximum number of metastores to return. - when set to a value greater than 0, the page length is the + minimum of this value and a server configured value; - when set to 0, the page length is set to a + server configured value (recommended); - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all the metastores are returned (not recommended). - Note: The number of + returned metastores might be less than the specified max_results size, even zero. 
The only + definitive indication that no further metastores can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + :returns: Iterator over :class:`MetastoreInfo` @@ -204,10 +213,10 @@ summary = w.metastores.summary() Get a metastore summary. - + Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID. - + :returns: :class:`GetMetastoreSummaryResponse` @@ -238,18 +247,18 @@ w.metastores.delete(id=created.metastore_id, force=True) Delete an assignment. - + Deletes a metastore assignment. The caller must be an account administrator. - + :param workspace_id: int A workspace ID. :param metastore_id: str Query for the ID of the metastore to delete. - - + + - .. py:method:: update(id: str [, delta_sharing_organization_name: Optional[str], delta_sharing_recipient_token_lifetime_in_seconds: Optional[int], delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope], new_name: Optional[str], owner: Optional[str], privilege_model_version: Optional[str], storage_root_credential_id: Optional[str]]) -> MetastoreInfo + .. py:method:: update(id: str [, delta_sharing_organization_name: Optional[str], delta_sharing_recipient_token_lifetime_in_seconds: Optional[int], delta_sharing_scope: Optional[DeltaSharingScopeEnum], new_name: Optional[str], owner: Optional[str], privilege_model_version: Optional[str], storage_root_credential_id: Optional[str]]) -> MetastoreInfo Usage: @@ -274,10 +283,10 @@ w.metastores.delete(id=created.metastore_id, force=True) Update a metastore. - + Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ field is set to the empty string (**""**), the ownership is updated to the System User. - + :param id: str Unique ID of the metastore. :param delta_sharing_organization_name: str (optional) @@ -285,7 +294,7 @@ Sharing as the official name. :param delta_sharing_recipient_token_lifetime_in_seconds: int (optional) The lifetime of delta sharing recipient token in seconds. - :param delta_sharing_scope: :class:`UpdateMetastoreDeltaSharingScope` (optional) + :param delta_sharing_scope: :class:`DeltaSharingScopeEnum` (optional) The scope of Delta Sharing enabled for the metastore. :param new_name: str (optional) New name for the metastore. @@ -295,26 +304,26 @@ Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). :param storage_root_credential_id: str (optional) UUID of storage credential to access the metastore storage_root. - + :returns: :class:`MetastoreInfo` .. py:method:: update_assignment(workspace_id: int [, default_catalog_name: Optional[str], metastore_id: Optional[str]]) Update an assignment. - + Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a Workspace admin. - + :param workspace_id: int A workspace ID. :param default_catalog_name: str (optional) - The name of the default catalog in the metastore. This field is depracted. Please use "Default + The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace. 
:param metastore_id: str (optional) The unique ID of the metastore. - - + + \ No newline at end of file diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst index 99b62ae03..bae6f25f8 100644 --- a/docs/workspace/catalog/model_versions.rst +++ b/docs/workspace/catalog/model_versions.rst @@ -7,39 +7,39 @@ Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more details, see the [registered models API docs](/api/workspace/registeredmodels). .. py:method:: delete(full_name: str, version: int) Delete a Model Version. - + Deletes a model version from the specified registered model. Any aliases assigned to the model version will also be deleted. - + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version - - + + .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo Get a Model Version. - + Get a model version. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int @@ -49,46 +49,46 @@ :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - + :returns: :class:`ModelVersionInfo` .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo Get Model Version By Alias. - + Get a model version by alias. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias :param include_aliases: bool (optional) Whether to include aliases associated with the model version in the response - + :returns: :class:`ModelVersionInfo` .. py:method:: list(full_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ModelVersionInfo] List Model Versions. - + List model versions. You can list model versions under a particular schema, or list all model versions in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the model versions. A regular user needs to be the owner or have the **EXECUTE** privilege on the parent registered model to receive the model versions in the response.
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. The elements in the response will not contain any aliases or tags. - + :param full_name: str The full three-level name of the registered model under which to list model versions :param include_browse: bool (optional) @@ -102,28 +102,28 @@ value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ModelVersionInfo` .. py:method:: update(full_name: str, version: int [, comment: Optional[str]]) -> ModelVersionInfo Update a Model Version. - + Updates the specified model version. - + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the comment of the model version can be updated. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version :param comment: str (optional) The comment attached to the model version - + :returns: :class:`ModelVersionInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst index 898d00eb3..6cc5f20ae 100644 --- a/docs/workspace/catalog/online_tables.rst +++ b/docs/workspace/catalog/online_tables.rst @@ -9,12 +9,12 @@ .. py:method:: create(table: OnlineTable) -> Wait[OnlineTable] Create an Online Table. - + Create a new Online Table. - + :param table: :class:`OnlineTable` Online Table information. - + :returns: Long-running operation waiter for :class:`OnlineTable`. See :method:wait_get_online_table_active for more details. @@ -26,26 +26,26 @@ .. py:method:: delete(name: str) Delete an Online Table. - + Delete an online table. Warning: This will delete all the data in the online table. If the source Delta table was deleted or modified since this Online Table was created, this will lose the data forever! - + :param name: str Full three-part (catalog, schema, table) name of the table. - - + + .. py:method:: get(name: str) -> OnlineTable Get an Online Table. - + Get information about an existing online table and its status. - + :param name: str Full three-part (catalog, schema, table) name of the table. - + :returns: :class:`OnlineTable` diff --git a/docs/workspace/catalog/quality_monitors.rst b/docs/workspace/catalog/quality_monitors.rst index 255076aac..93f05b69a 100644 --- a/docs/workspace/catalog/quality_monitors.rst +++ b/docs/workspace/catalog/quality_monitors.rst @@ -6,7 +6,7 @@ A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics tables and a dashboard that you can use to monitor table health and set alerts. - + Most write operations require the user to be the owner of the table (or its parent schema or parent catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**). @@ -14,38 +14,38 @@ .. py:method:: cancel_refresh(table_name: str, refresh_id: str) Cancel refresh. 
- + Cancel an active monitor refresh for the given refresh ID. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. - - + + .. py:method:: create(table_name: str, assets_dir: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], skip_builtin_dashboard: Optional[bool], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries], warehouse_id: Optional[str]]) -> MonitorInfo Create a table monitor. - + Creates a new monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Workspace assets, such as the dashboard, will be created in the workspace where this call was made. - + :param table_name: str Full name of the table. :param assets_dir: str @@ -79,152 +79,152 @@ :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard creation. If not specified, the first running warehouse will be used. - + :returns: :class:`MonitorInfo` .. py:method:: delete(table_name: str) Delete a table monitor. - + Deletes a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + Note that the metric tables and dashboard will not be deleted as part of this call; those assets must be manually cleaned up (if desired). - + :param table_name: str Full name of the table. - - + + .. py:method:: get(table_name: str) -> MonitorInfo Get a table monitor. - + Gets a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + The returned information includes configuration values, as well as information on assets created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different workspace than where the monitor was created. 
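A minimal sketch of reading a monitor back, assuming a workspace client and a monitored table ``main.default.my_table`` (the table name is hypothetical):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Fetch the monitor configuration and status for a monitored table;
    # requires SELECT on the table plus USE_SCHEMA and USE_CATALOG.
    info = w.quality_monitors.get(table_name="main.default.my_table")
    print(info.status)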
- + :param table_name: str Full name of the table. - + :returns: :class:`MonitorInfo` .. py:method:: get_refresh(table_name: str, refresh_id: str) -> MonitorRefreshInfo Get refresh. - + Gets info about a specific monitor refresh using the given refresh ID. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. - + :returns: :class:`MonitorRefreshInfo` .. py:method:: list_refreshes(table_name: str) -> MonitorRefreshListResponse List refreshes. - + Gets an array containing the history of the most recent refreshes (up to 25) for this table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorRefreshListResponse` .. py:method:: regenerate_dashboard(table_name: str [, warehouse_id: Optional[str]]) -> RegenerateDashboardResponse Regenerate a monitoring dashboard. - + Regenerates the monitoring dashboard for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + The call must be made from the workspace where the monitor was created. The dashboard will be regenerated in the assets directory that was specified when the monitor was created. - + :param table_name: str Full name of the table. :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first running warehouse will be used. - + :returns: :class:`RegenerateDashboardResponse` .. py:method:: run_refresh(table_name: str) -> MonitorRefreshInfo Queue a metric refresh for a monitor. - + Queues a metric refresh on the monitor for the specified table. The refresh will execute in the background. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorRefreshInfo` .. 
py:method:: update(table_name: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], dashboard_id: Optional[str], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries]]) -> MonitorInfo Update a table monitor. - + Updates a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created, and the caller must be the original creator of the monitor. - + Certain configuration fields, such as output asset identifiers, cannot be updated. - + :param table_name: str Full name of the table. :param output_schema_name: str @@ -254,6 +254,6 @@ Configuration for monitoring snapshot tables. :param time_series: :class:`MonitorTimeSeries` (optional) Configuration for monitoring time series tables. - + :returns: :class:`MonitorInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst index 3f7ced621..b05a702b5 100644 --- a/docs/workspace/catalog/registered_models.rst +++ b/docs/workspace/catalog/registered_models.rst @@ -7,17 +7,17 @@ Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace. Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating new model versions currently requires use of the MLflow Python client. Once model versions are created, you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time serving using Databricks Model Serving. - + All operations on registered models and model versions require USE_CATALOG permissions on the enclosing catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional privileges are required for various operations: - + * To create a registered model, users must additionally have the CREATE_MODEL permission on the target schema. * To view registered model or model version metadata, model version data files, or invoke a model version, users must additionally have the EXECUTE permission on the registered model * To update @@ -25,24 +25,24 @@ registered model * To update other registered model or model version metadata (comments, aliases) create a new model version, or update permissions on the registered model, users must be owners of the registered model. - + Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that specify a securable type, use "FUNCTION" as the securable type. .. py:method:: create(catalog_name: str, schema_name: str, name: str [, comment: Optional[str], storage_location: Optional[str]]) -> RegisteredModelInfo Create a Registered Model. 
- + Creates a new registered model in Unity Catalog. - + File storage for model versions in the registered model will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For registered model creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema. - + :param catalog_name: str The name of the catalog where the schema and the registered model reside :param schema_name: str @@ -53,54 +53,54 @@ The comment attached to the registered model :param storage_location: str (optional) The storage location on the cloud under which model version data files are stored - + :returns: :class:`RegisteredModelInfo` .. py:method:: delete(full_name: str) Delete a Registered Model. - + Deletes a registered model and all its model versions from the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model - - + + .. py:method:: delete_alias(full_name: str, alias: str) Delete a Registered Model Alias. - + Deletes a registered model alias. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias - - + + .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo Get a Registered Model. - + Get a registered model. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param include_aliases: bool (optional) @@ -108,25 +108,25 @@ :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for - + :returns: :class:`RegisteredModelInfo` .. py:method:: list( [, catalog_name: Optional[str], include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name: Optional[str]]) -> Iterator[RegisteredModelInfo] List Registered Models. - + List registered models. You can list registered models under a particular schema, or list all registered models in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the registered models. A regular user needs to be the owner or have the **EXECUTE** privilege on the registered model to receive the registered models in the response.
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. - + :param catalog_name: str (optional) The identifier of the catalog under which to list registered models. If specified, schema_name must be specified. @@ -135,13 +135,13 @@ selective metadata for :param max_results: int (optional) Max number of registered models to return. - + If both catalog and schema are specified: - when max_results is not specified, the page length is set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); - when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when set to a value less than 0, an invalid parameter error is returned; - + If neither schema nor catalog is specified: - when max_results is not specified, the page length is set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); - @@ -152,42 +152,42 @@ :param schema_name: str (optional) The identifier of the schema under which to list registered models. If specified, catalog_name must be specified. - + :returns: Iterator over :class:`RegisteredModelInfo` .. py:method:: set_alias(full_name: str, alias: str, version_num: int) -> RegisteredModelAlias Set a Registered Model Alias. - + Set an alias on the specified registered model. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the registered model :param alias: str The name of the alias :param version_num: int The version number of the model version to which the alias points - + :returns: :class:`RegisteredModelAlias` .. py:method:: update(full_name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str]]) -> RegisteredModelInfo Update a Registered Model. - + Updates the specified registered model. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the name, the owner or the comment of the registered model can be updated. - + :param full_name: str The three-level (fully qualified) name of the registered model :param comment: str (optional) @@ -196,6 +196,6 @@ New name for the registered model. :param owner: str (optional) The identifier of the user who owns the registered model - + :returns: :class:`RegisteredModelInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst index c1e14687c..3396011f0 100644 --- a/docs/workspace/catalog/resource_quotas.rst +++ b/docs/workspace/catalog/resource_quotas.rst @@ -8,39 +8,38 @@ can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and limits. 
For more information on resource quotas see the [Unity Catalog documentation]. - - [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas .. py:method:: get_quota(parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse Get information for a single resource quota. - + The GetQuota API returns usage information for a single resource quota, defined as a child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered asynchronously. The updated count might not be returned in the first call. - + :param parent_securable_type: str Securable type of the quota parent. :param parent_full_name: str Full name of the parent resource. Provide the metastore ID if the parent is a metastore. :param quota_name: str Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. - + :returns: :class:`GetQuotaResponse` .. py:method:: list_quotas( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QuotaInfo] List all resource quotas under a metastore. - + ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the counts returned. This API does not trigger a refresh of quota counts. - + :param max_results: int (optional) The number of quotas to return. :param page_token: str (optional) Opaque token for the next page of results. - + :returns: Iterator over :class:`QuotaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst index d646a7489..35f556239 100644 --- a/docs/workspace/catalog/schemas.rst +++ b/docs/workspace/catalog/schemas.rst @@ -31,10 +31,10 @@ w.schemas.delete(full_name=created_schema.full_name) Create a schema. - + Creates a new schema for a catalog in the metastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. - + :param name: str Name of schema, relative to parent catalog. :param catalog_name: str @@ -45,23 +45,23 @@ A map of key-value properties attached to the securable. :param storage_root: str (optional) Storage root URL for managed tables within schema. - + :returns: :class:`SchemaInfo` .. py:method:: delete(full_name: str [, force: Optional[bool]]) Delete a schema. - + Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an owner of the parent catalog. - + :param full_name: str Full name of the schema. :param force: bool (optional) Force deletion even if the schema is not empty. - - + + .. py:method:: get(full_name: str [, include_browse: Optional[bool]]) -> SchemaInfo @@ -88,16 +88,16 @@ w.schemas.delete(full_name=created.full_name) Get a schema. - + Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema. - + :param full_name: str Full name of the schema. :param include_browse: bool (optional) Whether to include schemas in the response for which the principal can only access selective metadata for - + :returns: :class:`SchemaInfo` @@ -122,12 +122,12 @@ w.catalogs.delete(name=new_catalog.name, force=True) List schemas. - + Gets an array of schemas for a catalog in the metastore.
If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Parent catalog for schemas of interest. :param include_browse: bool (optional) @@ -140,7 +140,7 @@ (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`SchemaInfo` @@ -168,23 +168,24 @@ w.schemas.delete(full_name=created.full_name) Update a schema. - + Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** privilege on the parent catalog. - + :param full_name: str Full name of the schema. :param comment: str (optional) User-provided free-form text description. :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional) + Whether predictive optimization should be enabled for this object and objects under it. :param new_name: str (optional) New name for the schema. :param owner: str (optional) Username of current owner of schema. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. - + :returns: :class:`SchemaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index a1b985155..947174a01 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -9,9 +9,9 @@ control which users and groups can access the credential. If a user does not have access to a storage credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. - + To create storage credentials, you must be a Databricks account admin. The account admin who creates the storage credential can delegate ownership to another user or group to manage permissions on it. @@ -30,18 +30,18 @@ w = WorkspaceClient() - created = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(name=credential.name) Create a storage credential. - + Creates a new storage credential. - + :param name: str The credential name. The name must be unique within the metastore. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -60,23 +60,23 @@ Whether the storage credential is only usable for read operations. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the created credential. - + :returns: :class:`StorageCredentialInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete a credential. - + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. 
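A minimal sketch of deleting a credential, assuming a workspace client and a storage credential named ``sdk-example-cred`` (hypothetical); ``force=True`` removes it even when dependent objects remain:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Drop the storage credential even if external locations or external
    # tables still reference it.
    w.storage_credentials.delete(name="sdk-example-cred", force=True)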
- + :param name: str Name of the storage credential. :param force: bool (optional) Force deletion even if there are dependent external locations or external tables. - - + + .. py:method:: get(name: str) -> StorageCredentialInfo @@ -105,13 +105,13 @@ w.storage_credentials.delete(delete=created.name) Get a credential. - + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. - + :param name: str Name of the storage credential. - + :returns: :class:`StorageCredentialInfo` @@ -123,18 +123,19 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.storage_credentials.list() + all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) List credentials. - + Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of storage credentials to return. If not set, all the storage credentials are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of @@ -143,7 +144,7 @@ returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`StorageCredentialInfo` @@ -164,22 +165,22 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) _ = w.storage_credentials.update( name=created.name, comment=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(delete=created.name) Update a credential. - + Updates a storage credential on the metastore. - + :param name: str Name of the storage credential. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -205,24 +206,24 @@ Whether the storage credential is only usable for read operations. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the updated credential. - + :returns: :class:`StorageCredentialInfo` .. py:method:: validate( [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityRequest], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], external_location_name: Optional[str], read_only: Optional[bool], storage_credential_name: Optional[str], url: Optional[str]]) -> ValidateStorageCredentialResponse Validate a storage credential. - + Validates a storage credential. At least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used for validation. 
And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. - + Either the __storage_credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the storage credential owner or have the **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential. - + :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional) @@ -241,6 +242,6 @@ The name of the storage credential to validate. :param url: str (optional) The external location url to validate. - + :returns: :class:`ValidateStorageCredentialResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst index 545a3b2e2..97debf034 100644 --- a/docs/workspace/catalog/system_schemas.rst +++ b/docs/workspace/catalog/system_schemas.rst @@ -10,42 +10,42 @@ .. py:method:: disable(metastore_id: str, schema_name: str) Disable a system schema. - + Disables the system schema and removes it from the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. - - + + .. py:method:: enable(metastore_id: str, schema_name: str [, catalog_name: Optional[str]]) Enable a system schema. - + Enables the system schema and adds it to the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. :param catalog_name: str (optional) the catalog in which the system schema is to be enabled - - + + .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo] List system schemas. - + Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The ID for the metastore in which the system schema resides. :param max_results: int (optional) @@ -55,6 +55,6 @@ is returned; - If not set, all the schemas are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`SystemSchemaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/table_constraints.rst b/docs/workspace/catalog/table_constraints.rst index 6b974c463..dd46c42f3 100644 --- a/docs/workspace/catalog/table_constraints.rst +++ b/docs/workspace/catalog/table_constraints.rst @@ -5,51 +5,51 @@ .. py:class:: TableConstraintsAPI Primary key and foreign key constraints encode relationships between fields in tables. - + Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a primary key in another table. This primary key is the parent constraint of the foreign key and the table this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child constraint of its referenced primary key; the table of the foreign key is the child table of the primary key. - + You can declare primary keys and foreign keys as part of the table specification during table creation. You can also add or drop constraints on existing tables.
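As a sketch of how parent and child constraints fit together (the table and constraint names are illustrative, assuming both tables already exist):

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # declare a primary key on the parent table
    w.table_constraints.create(
        full_name_arg="main.default.customers",
        constraint=catalog.TableConstraint(
            primary_key_constraint=catalog.PrimaryKeyConstraint(
                name="pk_customers", child_columns=["customer_id"]
            )
        ),
    )

    # add a foreign key on the child table referencing the parent's primary key
    w.table_constraints.create(
        full_name_arg="main.default.orders",
        constraint=catalog.TableConstraint(
            foreign_key_constraint=catalog.ForeignKeyConstraint(
                name="fk_orders_customers",
                child_columns=["customer_id"],
                parent_table="main.default.customers",
                parent_columns=["customer_id"],
            )
        ),
    )

.. 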
py:method:: create(full_name_arg: str, constraint: TableConstraint) -> TableConstraint Create a table constraint. - + Creates a new table constraint. - + For the table constraint creation to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the owner of the referenced parent table. - + :param full_name_arg: str The full name of the table referenced by the constraint. :param constraint: :class:`TableConstraint` A table constraint, as defined by *one* of the following fields being set: __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. - + :returns: :class:`TableConstraint` .. py:method:: delete(full_name: str, constraint_name: str, cascade: bool) Delete a table constraint. - + Deletes a table constraint. - + For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table. - if __cascade__ argument is **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG** privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner of the table. - + :param full_name: str Full name of the table referenced by the constraint. :param constraint_name: str @@ -57,6 +57,6 @@ :param cascade: bool If true, try deleting all child constraints of the current constraint. If false, reject this operation if the current constraint has any child constraints. - - + + \ No newline at end of file diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index 0ff5bb2d1..1c292eb5c 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -9,39 +9,39 @@ have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the USE_SCHEMA permission on its parent schema. - + A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table (rather than a managed or external table). .. py:method:: delete(full_name: str) Delete a table. - + Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. - - + + .. py:method:: exists(full_name: str) -> TableExistsResponse Get boolean reflecting if table exists. - + Gets if a table exists in the metastore for a specific catalog and schema. 
The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent schema. - + :param full_name: str Full name of the table. - + :returns: :class:`TableExistsResponse` @@ -86,13 +86,13 @@ w.tables.delete(full_name=table_full_name) Get a table. - + Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. - + :param full_name: str Full name of the table. :param include_browse: bool (optional) @@ -102,7 +102,7 @@ Whether delta metadata should be included in the response. :param include_manifest_capabilities: bool (optional) Whether to include a manifest containing capabilities the table has. - + :returns: :class:`TableInfo` @@ -123,20 +123,20 @@ created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name) - summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name) + all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name) # cleanup w.schemas.delete(full_name=created_schema.full_name) w.catalogs.delete(name=created_catalog.name, force=True) List tables. - + Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. :param schema_name: str @@ -162,7 +162,7 @@ not. :param page_token: str (optional) Opaque token to send for the next page of results (pagination). - + :returns: Iterator over :class:`TableInfo` @@ -190,18 +190,18 @@ w.catalogs.delete(name=created_catalog.name, force=True) List table summaries. - + Gets an array of summaries for tables for a schema and catalog within the metastore. The table summaries returned are either: - + * summaries for tables (within the current metastore and parent catalog and schema), when the user is a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the **USE_CATALOG** privilege on the parent catalog. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. 
:param include_manifest_capabilities: bool (optional) @@ -218,22 +218,22 @@ A SQL LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty. :param table_name_pattern: str (optional) A SQL LIKE pattern (% and _) for table names. All tables will be returned if not set or empty. - + :returns: Iterator over :class:`TableSummary` .. py:method:: update(full_name: str [, owner: Optional[str]]) Update a table owner. - + Change the owner of the table. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. :param owner: str (optional) - - + + \ No newline at end of file diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst index b6ebbe819..1acd462b7 100644 --- a/docs/workspace/catalog/temporary_table_credentials.rst +++ b/docs/workspace/catalog/temporary_table_credentials.rst @@ -20,17 +20,17 @@ .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse Generate a temporary table credential. - + Get a short-lived credential for directly accessing the table data on cloud storage. The metastore must have the external_access_enabled flag set to true (default false). The caller must have the EXTERNAL_USE_SCHEMA privilege on the parent schema, and this privilege can only be granted by catalog owners. - + :param operation: :class:`TableOperation` (optional) The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is specified, the credentials returned will have write permissions, otherwise, it will be read only. :param table_id: str (optional) UUID of the table to read or write. - + :returns: :class:`GenerateTemporaryTableCredentialResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/volumes.rst b/docs/workspace/catalog/volumes.rst index 5b6662f48..cd9234587 100644 --- a/docs/workspace/catalog/volumes.rst +++ b/docs/workspace/catalog/volumes.rst @@ -59,23 +59,23 @@ w.volumes.delete(name=created_volume.full_name) Create a Volume. - + Creates a new volume. - + The user can create either an external volume or a managed volume. An external volume will be created in the specified external location, while a managed volume will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For the volume creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have **CREATE VOLUME** privilege on the parent schema. - + For an external volume, the following conditions must also be satisfied: - The caller must have **CREATE EXTERNAL VOLUME** privilege on the external location. - There are no other tables or volumes in the specified storage location. - The specified storage location is not under the location of other tables, volumes, catalogs, or schemas. 
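For instance (a minimal sketch assuming an existing catalog ``main`` and schema ``default``; the names are illustrative), creating a managed volume requires no storage location:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # a managed volume inherits its storage location from the schema,
    # catalog, or metastore default
    created_volume = w.volumes.create(
        catalog_name="main",
        schema_name="default",
        name="my_volume",
        volume_type=catalog.VolumeType.MANAGED,
    )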
- + :param catalog_name: str The name of the catalog where the schema and the volume are :param schema_name: str @@ -86,30 +86,30 @@ The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. [Learn more] - + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) The storage location on the cloud - + :returns: :class:`VolumeInfo` .. py:method:: delete(name: str) Delete a Volume. - + Deletes a volume from the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param name: str The three-level (fully qualified) name of the volume - - + + .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[VolumeInfo] @@ -136,17 +136,17 @@ w.catalogs.delete(name=created_catalog.name, force=True) List Volumes. - + Gets an array of volumes for the current metastore under the parent catalog and schema. - + The returned volumes are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the volumes. A regular user needs to be the owner or have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str The identifier of the catalog :param schema_name: str @@ -156,20 +156,20 @@ metadata for :param max_results: int (optional) Maximum number of volumes to return (page length). - + If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter error is returned; - + Note: this parameter controls only the maximum number of volumes to return. The actual number of volumes returned in a page may be smaller than this value, including 0, even if there are more pages. :param page_token: str (optional) Opaque token returned by a previous request. It must be included in the request to retrieve the next page of results (pagination). - + :returns: Iterator over :class:`VolumeInfo` @@ -223,19 +223,19 @@ w.volumes.delete(name=created_volume.full_name) Get a Volume. - + Gets a volume from the metastore for a specific catalog and schema. - + The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
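As a quick sketch (using the SDK's ``read`` method, and assuming the managed volume created above; the three-level name is illustrative):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # fetch volume metadata by its three-level (fully qualified) name
    volume = w.volumes.read(name="main.default.my_volume")
    print(volume.volume_type, volume.storage_location)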
- + :param name: str The three-level (fully qualified) name of the volume :param include_browse: bool (optional) Whether to include volumes in the response for which the principal can only access selective metadata for - + :returns: :class:`VolumeInfo` @@ -291,15 +291,15 @@ w.volumes.delete(name=created_volume.full_name) Update a Volume. - + Updates the specified volume under the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the name, the owner or the comment of the volume could be updated. - + :param name: str The three-level (fully qualified) name of the volume :param comment: str (optional) @@ -308,6 +308,6 @@ New name for the volume. :param owner: str (optional) The identifier of the user who owns the volume - + :returns: :class:`VolumeInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst index c507d4c78..6571d97b7 100644 --- a/docs/workspace/catalog/workspace_bindings.rst +++ b/docs/workspace/catalog/workspace_bindings.rst @@ -7,16 +7,16 @@ A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable can be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list of workspaces. This API allows you to configure (bind) securables to workspaces. - + NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__. - + A securable's workspace bindings can be configured by a metastore admin or the owner of the securable. - + The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). - + Securable types that support binding: - catalog - storage_credential - credential - external_location .. py:method:: get(name: str) -> GetCatalogWorkspaceBindingsResponse @@ -40,23 +40,23 @@ w.catalogs.delete(name=created.name, force=True) Get catalog workspace bindings. - + Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. - + :returns: :class:`GetCatalogWorkspaceBindingsResponse` .. py:method:: get_bindings(securable_type: str, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding] Get securable workspace bindings. - + Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -69,7 +69,7 @@ error is returned; - If not set, all the workspace bindings are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`WorkspaceBinding` @@ -97,27 +97,27 @@ w.catalogs.delete(name=created.name, force=True) Update catalog workspace bindings. - + Updates workspace bindings of the catalog. 
The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. :param assign_workspaces: List[int] (optional) A list of workspace IDs. :param unassign_workspaces: List[int] (optional) A list of workspace IDs. - + :returns: :class:`UpdateCatalogWorkspaceBindingsResponse` .. py:method:: update_bindings(securable_type: str, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> UpdateWorkspaceBindingsResponse Update securable workspace bindings. - + Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -127,6 +127,6 @@ List of workspace bindings. :param remove: List[:class:`WorkspaceBinding`] (optional) List of workspace bindings. - + :returns: :class:`UpdateWorkspaceBindingsResponse` \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst index 5021629c8..4dced9ce2 100644 --- a/docs/workspace/cleanrooms/clean_room_assets.rst +++ b/docs/workspace/cleanrooms/clean_room_assets.rst @@ -10,71 +10,71 @@ .. py:method:: create(clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset Create an asset. - + Create a clean room asset: share an asset, such as a notebook or table, into the clean room. For each UC asset that is added through this method, the clean room owner must also have enough privilege on the asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to access the asset. Typically, you should use a group as the clean room owner. - + :param clean_room_name: str Name of the clean room. :param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` - .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) + .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) Delete an asset. - + Delete a clean room asset: unshare/remove the asset from the clean room. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. - :param asset_full_name: str + :param name: str The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset. - - + + - .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) -> CleanRoomAsset + .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) -> CleanRoomAsset Get an asset. - + Get the details of a clean room asset by its type and full name. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. - :param asset_full_name: str + :param name: str The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset. - + :returns: :class:`CleanRoomAsset` .. py:method:: list(clean_room_name: str [, page_token: Optional[str]]) -> Iterator[CleanRoomAsset] List assets. - + :param clean_room_name: str Name of the clean room. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomAsset`
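For example (a sketch; the clean room name is a placeholder), the returned iterator pages through results transparently:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # iterates over all assets, following page_token pagination internally
    for asset in w.clean_room_assets.list(clean_room_name="my-clean-room"):
        print(asset.name, asset.asset_type)

.. 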
py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset) -> CleanRoomAsset Update an asset. - + Update a clean room asset. For example, updating the content of a notebook; changing the shared partitions of a table; etc. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` @@ -82,13 +82,13 @@ :param name: str A fully qualified name that uniquely identifies the asset within the clean room. This is also the name displayed in the clean room UI. - + For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - + For notebooks, the name is the notebook file name. :param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst index b78bf2c2a..0a22a1f3e 100644 --- a/docs/workspace/cleanrooms/clean_room_task_runs.rst +++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst @@ -9,9 +9,9 @@ .. py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun] List notebook task runs. - + List all the historical notebook task runs in a clean room. - + :param clean_room_name: str Name of the clean room. :param notebook_name: str (optional) @@ -20,6 +20,6 @@ The maximum number of task runs to return. Currently ignored - all runs will be returned. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomNotebookTaskRun` \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst index 45981bd9c..a711120d5 100644 --- a/docs/workspace/cleanrooms/clean_rooms.rst +++ b/docs/workspace/cleanrooms/clean_rooms.rst @@ -11,85 +11,85 @@ .. py:method:: create(clean_room: CleanRoom) -> CleanRoom Create a clean room. - + Create a new clean room with the specified collaborators. This method is asynchronous; the returned name field inside the clean_room field can be used to poll the clean room status, using the :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean room will be usable once it enters an ACTIVE state. - + The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - + :param clean_room: :class:`CleanRoom` - + :returns: :class:`CleanRoom` .. py:method:: create_output_catalog(clean_room_name: str, output_catalog: CleanRoomOutputCatalog) -> CreateCleanRoomOutputCatalogResponse Create an output catalog. - + Create the output catalog of the clean room. - + :param clean_room_name: str Name of the clean room. :param output_catalog: :class:`CleanRoomOutputCatalog` - + :returns: :class:`CreateCleanRoomOutputCatalogResponse` .. py:method:: delete(name: str) Delete a clean room. - + Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other collaborators have not deleted the clean room, they will still have the clean room in their metastore, but it will be in a DELETED state and no operations other than deletion can be performed on it. - + :param name: str Name of the clean room. - - + + .. 
py:method:: get(name: str) -> CleanRoom Get a clean room. - + Get the details of a clean room given its name. - + :param name: str - + :returns: :class:`CleanRoom` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom] List clean rooms. - + Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are returned. - + :param page_size: int (optional) Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoom` .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom Update a clean room. - + Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** privilege, or be a metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + :param name: str Name of the clean room. :param clean_room: :class:`CleanRoom` (optional) - + :returns: :class:`CleanRoom` \ No newline at end of file diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst index 790315fd9..bf4c8ab61 100644 --- a/docs/workspace/compute/cluster_policies.rst +++ b/docs/workspace/compute/cluster_policies.rst @@ -7,18 +7,18 @@ You can use cluster policies to control users' ability to configure clusters based on a set of rules. These rules specify which attributes or attribute values can be used during cluster creation. Cluster policies have ACLs that limit their use to specific users and groups. - + With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding some fields. - Manage costs by setting limits on attributes that impact the hourly rate. - + Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted policy and create fully-configurable clusters. - A user who has both unrestricted cluster create permission and access to cluster policies can select the Unrestricted policy and policies they have access to. - A user who has access only to cluster policies can select the policies they have access to. - + If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create, edit, and delete policies. Admin users also have access to all policies. @@ -50,12 +50,12 @@ w.cluster_policies.delete(policy_id=created.policy_id) Create a new policy. - + Creates a new policy with prescribed settings. - + :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - + [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param description: str (optional) Additional human-readable description of the cluster policy. @@ -71,31 +71,31 @@ :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. 
- + You can use this to customize the policy definition inherited from the policy family. Policy rules specified here are merged into the inherited policy definition. - + [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param policy_family_id: str (optional) ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. - + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition. - + :returns: :class:`CreatePolicyResponse` .. py:method:: delete(policy_id: str) Delete a cluster policy. - + Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited. - + :param policy_id: str The ID of the policy to delete. - - + + .. py:method:: edit(policy_id: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) @@ -140,15 +140,15 @@ w.cluster_policies.delete(policy_id=created.policy_id) Update a cluster policy. - + Update an existing policy for a cluster. This operation may make some clusters governed by the previous policy invalid. - + :param policy_id: str The ID of the policy to update. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - + [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param description: str (optional) Additional human-readable description of the cluster policy. @@ -164,19 +164,19 @@ :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. - + You can use this to customize the policy definition inherited from the policy family. Policy rules specified here are merged into the inherited policy definition. - + [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param policy_family_id: str (optional) ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. - + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition. - - + + .. py:method:: get(policy_id: str) -> Policy @@ -209,37 +209,37 @@ w.cluster_policies.delete(policy_id=created.policy_id) Get a cluster policy. - + Get a cluster policy entity. Creation and editing are available to admins only. - + :param policy_id: str Canonical unique identifier for the Cluster Policy. - + :returns: :class:`Policy` .. py:method:: get_permission_levels(cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse Get cluster policy permission levels. - + Gets the permission levels that a user can have on an object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. - + :returns: :class:`GetClusterPolicyPermissionLevelsResponse` .. py:method:: get_permissions(cluster_policy_id: str) -> ClusterPolicyPermissions Get cluster policy permissions. - + Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root object. 
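For example (a sketch; the policy ID and the response field names used here are assumptions for illustration), inspecting the effective permissions, including inherited entries:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    perms = w.cluster_policies.get_permissions(cluster_policy_id="ABC123DEF456")
    for acl in perms.access_control_list or []:
        # each entry names a principal and the permission levels it holds
        print(acl.user_name or acl.group_name or acl.service_principal_name,
              [p.permission_level for p in acl.all_permissions or []])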
- + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. - + :returns: :class:`ClusterPolicyPermissions` @@ -258,43 +258,43 @@ all = w.cluster_policies.list(compute.ListClusterPoliciesRequest()) List cluster policies. - + Returns a list of policies accessible by the requesting user. - + :param sort_column: :class:`ListSortColumn` (optional) The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy creation time. * `POLICY_NAME` - Sort result list by policy name. :param sort_order: :class:`ListSortOrder` (optional) The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC` - Sort result list in ascending order. - + :returns: Iterator over :class:`Policy` .. py:method:: set_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions Set cluster policy permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional) - + :returns: :class:`ClusterPolicyPermissions` .. py:method:: update_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions Update cluster policy permissions. - + Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional) - + :returns: :class:`ClusterPolicyPermissions` \ No newline at end of file diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index fe9271c13..80a6609e9 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -5,22 +5,22 @@ .. py:class:: ClustersExt The Clusters API allows you to create, start, edit, list, terminate, and delete clusters. - + Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing page for a list of the supported instance types and their corresponding DBUs. - + A Databricks cluster is a set of computation resources and configurations on which you run data engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming analytics, ad-hoc analytics, and machine learning. - + You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs. - + You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list. 
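As a concrete sketch of this lifecycle (the node type and autotermination values are assumptions; ``.result()`` blocks until the cluster reaches a ``RUNNING`` state):

.. code-block::

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    latest = w.clusters.select_spark_version(latest=True)

    # create an all-purpose cluster and wait for it to start
    clstr = w.clusters.create(
        cluster_name=f"sdk-{time.time_ns()}",
        spark_version=latest,
        node_type_id="i3.xlarge",  # assumption: pick a node type valid for your cloud
        autotermination_minutes=15,
        num_workers=1,
    ).result()

    # terminate the cluster when done
    w.clusters.delete(cluster_id=clstr.cluster_id).result()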
@@ -60,16 +60,16 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Change cluster owner. - + Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform this operation. The service principal application ID can be supplied as an argument to `owner_username`. - + :param cluster_id: str :param owner_username: str New owner of the cluster_id after this RPC. - - + + .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails] @@ -102,22 +102,22 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Create new cluster. - + Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud provider limitations (account limits, spot price, etc.) or transient network issues. - + If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed. Otherwise the cluster will terminate with an informative error message. - + Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out the [create compute UI] and then copying the generated JSON definition from the UI. - + [create compute UI]: https://docs.databricks.com/compute/configure.html - + :param spark_version: str The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. @@ -152,18 +152,18 @@ :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. 
* `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -172,10 +172,10 @@ fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -189,7 +189,7 @@ :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -210,22 +210,22 @@ The optional ID of the instance pool to which the cluster belongs. :param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -235,7 +235,7 @@ :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -245,10 +245,10 @@ The ID of the cluster policy used to create the cluster if applicable. 
:param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. :param single_user_name: str (optional) @@ -261,11 +261,11 @@ An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -275,12 +275,12 @@ specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -321,14 +321,14 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Terminate cluster. - + Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a `TERMINATING` or `TERMINATED` state, nothing will happen. - + :param cluster_id: str The cluster to be terminated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_terminated for more details. @@ -376,19 +376,19 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Update cluster configuration. - + Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. - + Clusters created by the Databricks Jobs service cannot be edited. - + :param cluster_id: str ID of the cluster :param spark_version: str @@ -423,18 +423,18 @@ :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. 
Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -443,10 +443,10 @@ fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -460,7 +460,7 @@ :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -481,22 +481,22 @@ The optional ID of the instance pool to which the cluster belongs. :param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -506,7 +506,7 @@ :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. 
- + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -516,10 +516,10 @@ The ID of the cluster policy used to create the cluster if applicable. :param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. :param single_user_name: str (optional) @@ -532,11 +532,11 @@ An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -546,12 +546,12 @@ specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -617,10 +617,10 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) List cluster activity events. - + Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more events to read, the response includes all the parameters necessary to request the next page of events. - + :param cluster_id: str The ID of the cluster to retrieve events about. :param end_time: int (optional) @@ -629,12 +629,12 @@ An optional set of event types to filter on. If empty, all event types are returned. :param limit: int (optional) Deprecated: use page_token in combination with page_size instead. - + The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed value is 500. :param offset: int (optional) Deprecated: use page_token in combination with page_size instead. - + The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results are requested in descending order, the end_time field is required. :param order: :class:`GetEventsOrder` (optional) @@ -649,7 +649,7 @@ previous page of events respectively. If page_token is empty, the first page is returned. :param start_time: int (optional) The start time in epoch milliseconds. If empty, returns events starting from the beginning of time. 
- + :returns: Iterator over :class:`ClusterEvent` @@ -685,37 +685,37 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Get cluster info. - + Retrieves the information for a cluster given its identifier. Clusters can be described while they are running, or up to 60 days after they are terminated. - + :param cluster_id: str The cluster about which to retrieve information. - + :returns: :class:`ClusterDetails` .. py:method:: get_permission_levels(cluster_id: str) -> GetClusterPermissionLevelsResponse Get cluster permission levels. - + Gets the permission levels that a user can have on an object. - + :param cluster_id: str The cluster for which to get or manage permissions. - + :returns: :class:`GetClusterPermissionLevelsResponse` .. py:method:: get_permissions(cluster_id: str) -> ClusterPermissions Get cluster permissions. - + Gets the permissions of a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. - + :returns: :class:`ClusterPermissions` @@ -727,17 +727,16 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import compute w = WorkspaceClient() - all = w.clusters.list(compute.ListClustersRequest()) + nodes = w.clusters.list_node_types() List clusters. - + Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. - + :param filter_by: :class:`ListClustersFilterBy` (optional) Filters to apply to the list of clusters. :param page_size: int (optional) @@ -748,7 +747,7 @@ previous page of clusters respectively. :param sort_by: :class:`ListClustersSortBy` (optional) Sort the list of clusters by a specific criteria. - + :returns: Iterator over :class:`ClusterDetails` @@ -766,36 +765,36 @@ nodes = w.clusters.list_node_types() List node types. - + Returns a list of supported Spark node types. These node types can be used to launch a cluster. - + :returns: :class:`ListNodeTypesResponse` .. py:method:: list_zones() -> ListAvailableZonesResponse List availability zones. - + Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These zones can be used to launch a cluster. - + :returns: :class:`ListAvailableZonesResponse` .. py:method:: permanent_delete(cluster_id: str) Permanently delete cluster. - + Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. - + In addition, users will no longer see permanently deleted clusters in the cluster list, and API users can no longer perform any action on permanently deleted clusters. - + :param cluster_id: str The cluster to be deleted. - - + + .. py:method:: pin(cluster_id: str) @@ -830,13 +829,13 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Pin cluster. - + Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + .. py:method:: resize(cluster_id: str [, autoscale: Optional[AutoScale], num_workers: Optional[int]]) -> Wait[ClusterDetails] @@ -871,10 +870,10 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Resize cluster. - + Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a `RUNNING` state. - + :param cluster_id: str The cluster to be resized. 
:param autoscale: :class:`AutoScale` (optional) @@ -883,13 +882,13 @@ :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -930,14 +929,14 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Restart cluster. - + Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state, nothing will happen. - + :param cluster_id: str The cluster to be started. :param restart_user: str (optional) - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -1010,23 +1009,23 @@ .. py:method:: set_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions Set cluster permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` .. py:method:: spark_versions() -> GetSparkVersionsResponse List available Spark versions. - + Returns the list of available Spark versions. These versions can be used to launch a cluster. - + :returns: :class:`GetSparkVersionsResponse` @@ -1062,16 +1061,16 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Start terminated cluster. - + Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster starts with the last specified cluster size. - If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job cannot be started. - + :param cluster_id: str The cluster to be started. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -1112,20 +1111,20 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Unpin cluster. - + Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails] Update cluster configuration (partial). - + Updates the configuration of a cluster to match the partial set of attributes and size. Denote which fields to update using the `update_mask` field in the request body. 
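For example, a minimal sketch of a partial update that touches only the worker count — `UpdateClusterResource(num_workers=...)` is assumed from the dataclass documented for this method, and the cluster ID is a placeholder:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()

    # `update_mask` names exactly the fields being changed.
    updated = w.clusters.update(
        cluster_id="0123-456789-abcdefgh",
        update_mask="num_workers",
        cluster=compute.UpdateClusterResource(num_workers=4),
    ).result()  # block until the cluster is RUNNING again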
A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be @@ -1134,25 +1133,25 @@ is started using the `clusters/start` API. Attempts to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be updated. - + :param cluster_id: str ID of the cluster. :param update_mask: str Used to specify which cluster attributes and size fields to update. See https://google.aip.dev/161 for more details. - + The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. :param cluster: :class:`UpdateClusterResource` (optional) The cluster to be updated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -1164,13 +1163,13 @@ .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions Update cluster permissions. - + Updates the permissions on a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst index c96d044a2..f51daabe8 100644 --- a/docs/workspace/compute/command_execution.rst +++ b/docs/workspace/compute/command_execution.rst @@ -10,15 +10,15 @@ .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse] Cancel a command. - + Cancels a currently running command within an execution context. - + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str (optional) :param command_id: str (optional) :param context_id: str (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_cancelled for more details. @@ -30,27 +30,27 @@ .. py:method:: command_status(cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse Get command info. - + Gets the status of and, if available, the results from a currently executing command. - + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str :param context_id: str :param command_id: str - + :returns: :class:`CommandStatusResponse` .. py:method:: context_status(cluster_id: str, context_id: str) -> ContextStatusResponse Get status. - + Gets the status for an execution context. - + :param cluster_id: str :param context_id: str - + :returns: :class:`ContextStatusResponse` @@ -76,15 +76,15 @@ w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id) Create an execution context. 
- + Creates an execution context for running cluster commands. - + If successful, this method returns the ID of the new execution context. - + :param cluster_id: str (optional) Running cluster id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`ContextStatusResponse`. See :method:wait_context_status_command_execution_running for more details. @@ -96,13 +96,13 @@ .. py:method:: destroy(cluster_id: str, context_id: str) Delete an execution context. - + Deletes an execution context. - + :param cluster_id: str :param context_id: str - - + + .. py:method:: execute( [, cluster_id: Optional[str], command: Optional[str], context_id: Optional[str], language: Optional[Language]]) -> Wait[CommandStatusResponse] @@ -134,11 +134,11 @@ w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id) Run a command. - + Runs a cluster command in the given execution context, using the provided language. - + If successful, it returns an ID for tracking the status of the command's execution. - + :param cluster_id: str (optional) Running cluster id :param command: str (optional) @@ -146,7 +146,7 @@ :param context_id: str (optional) Running context id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_finished_or_error for more details. diff --git a/docs/workspace/compute/global_init_scripts.rst b/docs/workspace/compute/global_init_scripts.rst index e2eba7604..b4c044b95 100644 --- a/docs/workspace/compute/global_init_scripts.rst +++ b/docs/workspace/compute/global_init_scripts.rst @@ -6,7 +6,7 @@ The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace. These scripts run on every node in every cluster in the workspace. - + **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts. Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark container fails to launch and init scripts with later position are skipped. If enough containers fail, the @@ -37,9 +37,9 @@ w.global_init_scripts.delete(script_id=created.script_id) Create init script. - + Creates a new global init script in this workspace. - + :param name: str The name of the script :param script: str @@ -49,27 +49,27 @@ :param position: int (optional) The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. - + If you omit the numeric position for a new global init script, it defaults to last position. It will run after all current scripts. Setting any value greater than the position of the last script is equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2. Any position of (3) or greater puts the script in the last position. If an explicit position value conflicts with an existing script value, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1. - + :returns: :class:`CreateResponse` .. py:method:: delete(script_id: str) Delete init script. - + Deletes a global init script. - + :param script_id: str The ID of the global init script. - - + + .. py:method:: get(script_id: str) -> GlobalInitScriptDetailsWithContent @@ -99,12 +99,12 @@ w.global_init_scripts.delete(script_id=created.script_id) Get an init script. 
- + Gets all the details of a script, including its Base64-encoded contents. - + :param script_id: str The ID of the global init script. - + :returns: :class:`GlobalInitScriptDetailsWithContent` @@ -122,11 +122,11 @@ all = w.global_init_scripts.list() Get init scripts. - + Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. To retrieve the contents of a script, use the [get a global init script](:method:globalinitscripts/get) operation. - + :returns: Iterator over :class:`GlobalInitScriptDetails` @@ -161,10 +161,10 @@ w.global_init_scripts.delete(script_id=created.script_id) Update init script. - + Updates a global init script, specifying only the fields to change. All fields are optional. Unspecified fields retain their current value. - + :param script_id: str The ID of the global init script. :param name: str @@ -176,13 +176,13 @@ :param position: int (optional) The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. To move the script to run first, set its position to 0. - + To move the script to the end, set its position to any value greater or equal to the position of the last script. Example, three existing scripts with positions 0, 1, and 2. Any position value of 2 or greater puts the script in the last position (2). - + If an explicit position value conflicts with an existing script, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1. - - + + \ No newline at end of file diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst index 0614f2101..8fb46dbc9 100644 --- a/docs/workspace/compute/instance_pools.rst +++ b/docs/workspace/compute/instance_pools.rst @@ -6,16 +6,16 @@ Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times. - + Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle instances. If the pool has no idle instances, the pool expands by allocating a new instance from the instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that pool’s idle instances. - + You can specify a different pool for the driver node and worker nodes, or use the same pool for both. - + Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does apply. See pricing. @@ -40,9 +40,9 @@ w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Create a new instance pool. - + Creates a new instance pool using idle and ready-to-use cloud instances. - + :param instance_pool_name: str Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters. @@ -60,7 +60,7 @@ :param custom_tags: Dict[str,str] (optional) Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. 
Notes: - + - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. @@ -89,20 +89,20 @@ A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. - + :returns: :class:`CreateInstancePoolResponse` .. py:method:: delete(instance_pool_id: str) Delete an instance pool. - + Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously. - + :param instance_pool_id: str The instance pool to be terminated. - - + + .. py:method:: edit(instance_pool_id: str, instance_pool_name: str, node_type_id: str [, custom_tags: Optional[Dict[str, str]], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int]]) @@ -132,9 +132,9 @@ w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Edit an existing instance pool. - + Modifies the configuration of an existing instance pool. - + :param instance_pool_id: str Instance pool ID :param instance_pool_name: str @@ -148,7 +148,7 @@ :param custom_tags: Dict[str,str] (optional) Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time @@ -162,8 +162,8 @@ upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool - - + + .. py:method:: get(instance_pool_id: str) -> GetInstancePool @@ -189,37 +189,37 @@ w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Get instance pool information. - + Retrieve the information for an instance pool based on its identifier. - + :param instance_pool_id: str The canonical unique identifier for the instance pool. - + :returns: :class:`GetInstancePool` .. py:method:: get_permission_levels(instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse Get instance pool permission levels. - + Gets the permission levels that a user can have on an object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. - + :returns: :class:`GetInstancePoolPermissionLevelsResponse` .. py:method:: get_permissions(instance_pool_id: str) -> InstancePoolPermissions Get instance pool permissions. - + Gets the permissions of an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. - + :returns: :class:`InstancePoolPermissions` @@ -237,36 +237,36 @@ all = w.instance_pools.list() List instance pool info. - + Gets a list of instance pools with their statistics. - + :returns: Iterator over :class:`InstancePoolAndStats` .. py:method:: set_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions Set instance pool permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. 
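A brief, hedged sketch of such a replacement grant — the pool ID and group name are placeholders, and `CAN_ATTACH_TO` is assumed from :class:`InstancePoolPermissionLevel`:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()

    # Replaces all direct permissions on the pool with this single grant.
    w.instance_pools.set_permissions(
        instance_pool_id="pool-0123456789",
        access_control_list=[
            compute.InstancePoolAccessControlRequest(
                group_name="data-engineers",
                permission_level=compute.InstancePoolPermissionLevel.CAN_ATTACH_TO,
            )
        ],
    )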
Objects can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` .. py:method:: update_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions Update instance pool permissions. - + Updates the permissions on an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` \ No newline at end of file diff --git a/docs/workspace/compute/instance_profiles.rst b/docs/workspace/compute/instance_profiles.rst index 182e1aa79..abf959324 100644 --- a/docs/workspace/compute/instance_profiles.rst +++ b/docs/workspace/compute/instance_profiles.rst @@ -7,9 +7,8 @@ The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 buckets] using instance profiles for more information. - - [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html + [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html .. py:method:: add(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool], skip_validation: Optional[bool]]) @@ -31,21 +30,21 @@ ) Register an instance profile. - + Registers an instance profile in Databricks. In the UI, you can then give users the permission to use this instance profile when launching clusters. - + This API is only available to admin users. - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough @@ -58,8 +57,8 @@ fails with an error message that does not indicate an IAM related permission issue, (e.g. “Your requested instance type is not supported in your requested availability zone”), you can pass this flag to skip the validation and forcibly add the instance profile. - - + + .. py:method:: edit(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool]]) @@ -81,37 +80,37 @@ ) Edit an instance profile. - + The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both of the following are true: - + * Your role name and instance profile name do not match. The name is the part after the last slash in each ARN. * You want to use the instance profile with [Databricks SQL Serverless]. 
- + To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses]. - + This API is only available to admin users. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains an meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. This field is optional, the default value is `false`. - - + + .. py:method:: list() -> Iterator[InstanceProfile] @@ -128,25 +127,25 @@ all = w.instance_profiles.list() List available instance profiles. - + List the instance profiles that the calling user can use to launch a cluster. - + This API is available to all users. - + :returns: Iterator over :class:`InstanceProfile` .. py:method:: remove(instance_profile_arn: str) Remove the instance profile. - + Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. - + This API is only accessible to admin users. - + :param instance_profile_arn: str The ARN of the instance profile to remove. This field is required. - - + + \ No newline at end of file diff --git a/docs/workspace/compute/libraries.rst b/docs/workspace/compute/libraries.rst index 339f54de2..64f688fdc 100644 --- a/docs/workspace/compute/libraries.rst +++ b/docs/workspace/compute/libraries.rst @@ -6,70 +6,70 @@ The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster. - + To make third-party or custom code available to notebooks and jobs running on your clusters, you can install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories. - + Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library directly from a public repository such as PyPI or Maven, using a previously installed workspace library, or using an init script. - + When you uninstall a library from a cluster, the library is removed only when you restart the cluster. Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart. .. py:method:: all_cluster_statuses() -> Iterator[ClusterLibraryStatuses] Get all statuses. - + Get the status of all libraries on all clusters. A status is returned for all libraries installed on this cluster via the API or the libraries UI. - + :returns: Iterator over :class:`ClusterLibraryStatuses` .. py:method:: cluster_status(cluster_id: str) -> Iterator[LibraryFullStatus] Get status. - + Get the status of libraries on a cluster. 
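A hedged sketch of the install-then-poll pattern this API supports (the ``install`` endpoint itself is documented just below); the cluster ID is a placeholder:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    cluster_id = "0123-456789-abcdefgh"

    # Installation is asynchronous; the call returns once the request is accepted.
    w.libraries.install(
        cluster_id=cluster_id,
        libraries=[compute.Library(pypi=compute.PythonPyPiLibrary(package="beautifulsoup4"))],
    )

    # Poll per-library status until the install settles.
    for lib in w.libraries.cluster_status(cluster_id=cluster_id):
        print(lib.library, lib.status)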
A status is returned for all libraries installed on this cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries set to be installed on this cluster, in the order that the libraries were added to the cluster, are returned first. 2. Libraries that were previously requested to be installed on this cluster but are now marked for removal, in no particular order, are returned last. - + :param cluster_id: str Unique identifier of the cluster whose status should be retrieved. - + :returns: Iterator over :class:`LibraryFullStatus` .. py:method:: install(cluster_id: str, libraries: List[Library]) Add a library. - + Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. - + :param cluster_id: str Unique identifier for the cluster on which to install these libraries. :param libraries: List[:class:`Library`] The libraries to install. - - + + .. py:method:: uninstall(cluster_id: str, libraries: List[Library]) Uninstall libraries. - + Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. - + :param cluster_id: str Unique identifier for the cluster on which to uninstall these libraries. :param libraries: List[:class:`Library`] The libraries to uninstall. - - + + \ No newline at end of file diff --git a/docs/workspace/compute/policy_compliance_for_clusters.rst b/docs/workspace/compute/policy_compliance_for_clusters.rst index fea7a08f9..90c3aeb98 100644 --- a/docs/workspace/compute/policy_compliance_for_clusters.rst +++ b/docs/workspace/compute/policy_compliance_for_clusters.rst @@ -6,58 +6,58 @@ The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace. - + A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce compliance API allows you to update a cluster to be compliant with the current version of its policy. .. py:method:: enforce_compliance(cluster_id: str [, validate_only: Optional[bool]]) -> EnforceClusterComplianceResponse Enforce cluster policy compliance. - + Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the cluster is started, the new attributes will take effect. - + Policy compliance cannot be enforced by this API for clusters created by the Databricks Jobs, DLT, or Models services. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. - + :param cluster_id: str The ID of the cluster you want to enforce policy compliance on. :param validate_only: bool (optional) If set, previews the changes that would be made to a cluster to enforce compliance but does not update the cluster. - + :returns: :class:`EnforceClusterComplianceResponse` .. py:method:: get_compliance(cluster_id: str) -> GetClusterComplianceResponse Get cluster policy compliance. - + Returns the policy compliance status of a cluster.
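A minimal sketch of a dry-run run of the `enforce_compliance` method above using `validate_only` — the cluster ID is a placeholder, and the `has_changes`/`changes` fields are assumed from :class:`EnforceClusterComplianceResponse`:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Preview what enforcement would change without restarting the cluster.
    resp = w.policy_compliance_for_clusters.enforce_compliance(
        cluster_id="0123-456789-abcdefgh",
        validate_only=True,
    )
    print(resp.has_changes, resp.changes)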
Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param cluster_id: str The ID of the cluster to get the compliance status - + :returns: :class:`GetClusterComplianceResponse` .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ClusterCompliance] List cluster policy compliance. - + Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param policy_id: str Canonical unique identifier for the cluster policy. :param page_size: int (optional) @@ -66,6 +66,6 @@ :param page_token: str (optional) A page token that can be used to navigate to the next page or previous page as returned by `next_page_token` or `prev_page_token`. - + :returns: Iterator over :class:`ClusterCompliance` \ No newline at end of file diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst index 8bbcd039f..56e4f4275 100644 --- a/docs/workspace/compute/policy_families.rst +++ b/docs/workspace/compute/policy_families.rst @@ -6,10 +6,10 @@ View available policy families. A policy family contains a policy definition providing best practices for configuring clusters for a particular use case. - + Databricks manages and provides policy families for several common cluster use cases. You cannot create, edit, or delete policy families. - + Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a policy family. Cluster policies created using a policy family inherit the policy family's policy definition. @@ -31,14 +31,14 @@ first_family = w.policy_families.get(policy_family_id=all[0].policy_family_id) Get policy family information. - + Retrieve the information for an policy family based on its identifier and version - + :param policy_family_id: str The family ID about which to retrieve information. :param version: int (optional) The version number for the family to fetch. Defaults to the latest version. - + :returns: :class:`PolicyFamily` @@ -57,14 +57,14 @@ all = w.policy_families.list(compute.ListPolicyFamiliesRequest()) List policy families. - + Returns the list of policy definition types available to use at their latest version. This API is paginated. - + :param max_results: int (optional) Maximum number of policy families to return. :param page_token: str (optional) A token that can be used to get the next page of results. - + :returns: Iterator over :class:`PolicyFamily` \ No newline at end of file diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst index 60a1389f7..a3f0e1ccd 100644 --- a/docs/workspace/dashboards/genie.rst +++ b/docs/workspace/dashboards/genie.rst @@ -12,17 +12,17 @@ .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage] Create conversation message. - + Create new message in a [conversation](:method:genie/startconversation). The AI response uses all previously created messages in the conversation to respond. - + :param space_id: str The ID associated with the Genie space where the conversation is started. :param conversation_id: str The ID associated with the conversation. :param content: str User message content. - + :returns: Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details. @@ -34,10 +34,10 @@ .. 
py:method:: execute_message_attachment_query(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse Execute message attachment SQL query. - + Execute the SQL for a message query attachment. Use this API when the query attachment has expired and needs to be re-executed. - + :param space_id: str Genie space ID :param conversation_id: str @@ -46,36 +46,36 @@ Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse` .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse [Deprecated] Execute SQL query in a conversation message. - + Execute the SQL query in the message. - + :param space_id: str Genie space ID :param conversation_id: str Conversation ID :param message_id: str Message ID - + :returns: :class:`GenieGetMessageQueryResultResponse` .. py:method:: generate_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGenerateDownloadFullQueryResultResponse Generate full query result download. - + Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of the download. The query result is stored in an external link and can be retrieved using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. See [Execute Statement](:method:statementexecution/executestatement) for more details. - + :param space_id: str Genie space ID :param conversation_id: str @@ -84,14 +84,14 @@ Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` .. py:method:: get_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str) -> GenieGetDownloadFullQueryResultResponse Get download full query result. - + After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and successfully receiving a `download_id`, use this API to poll the download progress. When the download is complete, the API returns one or more external links to the query result files. Warning: Databricks @@ -99,7 +99,7 @@ You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute Statement](:method:statementexecution/executestatement) for more details. - + :param space_id: str Genie space ID :param conversation_id: str @@ -111,33 +111,33 @@ :param download_id: str Download ID. This ID is provided by the [Generate Download endpoint](:method:genie/generateDownloadFullQueryResult) - + :returns: :class:`GenieGetDownloadFullQueryResultResponse` .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage Get conversation message. - + Get message from conversation. - + :param space_id: str The ID associated with the Genie space where the target conversation is located. :param conversation_id: str The ID associated with the target conversation. :param message_id: str The ID associated with the target message from the identified conversation. - + :returns: :class:`GenieMessage` .. 
py:method:: get_message_attachment_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse Get message attachment SQL query result. - + Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. - + :param space_id: str Genie space ID :param conversation_id: str @@ -146,34 +146,34 @@ Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse` .. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse [Deprecated] Get conversation message SQL query result. - + Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY`. - + :param space_id: str Genie space ID :param conversation_id: str Conversation ID :param message_id: str Message ID - + :returns: :class:`GenieGetMessageQueryResultResponse` .. py:method:: get_message_query_result_by_attachment(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse [Deprecated] Get conversation message SQL query result. - + Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. - + :param space_id: str Genie space ID :param conversation_id: str @@ -182,33 +182,47 @@ Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse` .. py:method:: get_space(space_id: str) -> GenieSpace Get Genie Space. - + Get details of a Genie Space. - + :param space_id: str The ID associated with the Genie space - + :returns: :class:`GenieSpace` + .. py:method:: list_spaces( [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListSpacesResponse + + List Genie spaces. + + Get list of Genie Spaces. + + :param page_size: int (optional) + Maximum number of spaces to return per page + :param page_token: str (optional) + Pagination token for getting the next page of results + + :returns: :class:`GenieListSpacesResponse` + + .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage] Start conversation. - + Start a new conversation. - + :param space_id: str The ID associated with the Genie space where you want to start a conversation. :param content: str The text of the message that starts the conversation. - + :returns: Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details. diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst index 940efa5dd..877891d25 100644 --- a/docs/workspace/dashboards/index.rst +++ b/docs/workspace/dashboards/index.rst @@ -9,5 +9,4 @@ Manage Lakeview dashboards genie lakeview - lakeview_embedded - query_execution \ No newline at end of file + lakeview_embedded \ No newline at end of file diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst index 4becea5a7..944a95f15 100644 --- a/docs/workspace/dashboards/lakeview.rst +++ b/docs/workspace/dashboards/lakeview.rst @@ -10,42 +10,42 @@ .. py:method:: create(dashboard: Dashboard) -> Dashboard Create dashboard. - + Create a draft dashboard. 
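A hedged sketch of creating a draft dashboard — the field names used on the :class:`Dashboard` dataclass (`display_name`, `warehouse_id`, `serialized_dashboard`) and the minimal serialized layout are assumptions, and the warehouse ID is a placeholder:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.dashboards import Dashboard

    w = WorkspaceClient()

    # A draft is created first; make it visible with `publish` afterwards.
    draft = w.lakeview.create(
        dashboard=Dashboard(
            display_name="Sales overview",
            warehouse_id="abcdef0123456789",
            serialized_dashboard='{"pages": []}',  # assumed minimal empty layout
        )
    )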
- + :param dashboard: :class:`Dashboard` - + :returns: :class:`Dashboard` .. py:method:: create_schedule(dashboard_id: str, schedule: Schedule) -> Schedule Create dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule: :class:`Schedule` - + :returns: :class:`Schedule` .. py:method:: create_subscription(dashboard_id: str, schedule_id: str, subscription: Subscription) -> Subscription Create schedule subscription. - + :param dashboard_id: str UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str UUID identifying the schedule to which the subscription belongs. :param subscription: :class:`Subscription` - + :returns: :class:`Subscription` .. py:method:: delete_schedule(dashboard_id: str, schedule_id: str [, etag: Optional[str]]) Delete dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str @@ -53,14 +53,14 @@ :param etag: str (optional) The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been modified from its last retrieval. - - + + .. py:method:: delete_subscription(dashboard_id: str, schedule_id: str, subscription_id: str [, etag: Optional[str]]) Delete schedule subscription. - + :param dashboard_id: str UUID identifying the dashboard which the subscription belongs. :param schedule_id: str @@ -70,64 +70,64 @@ :param etag: str (optional) The etag for the subscription. Can be optionally provided to ensure that the subscription has not been modified since the last read. - - + + .. py:method:: get(dashboard_id: str) -> Dashboard Get dashboard. - + Get a draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard. - + :returns: :class:`Dashboard` .. py:method:: get_published(dashboard_id: str) -> PublishedDashboard Get published dashboard. - + Get the current published dashboard. - + :param dashboard_id: str UUID identifying the published dashboard. - + :returns: :class:`PublishedDashboard` .. py:method:: get_schedule(dashboard_id: str, schedule_id: str) -> Schedule Get dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str UUID identifying the schedule. - + :returns: :class:`Schedule` .. py:method:: get_subscription(dashboard_id: str, schedule_id: str, subscription_id: str) -> Subscription Get schedule subscription. - + :param dashboard_id: str UUID identifying the dashboard which the subscription belongs. :param schedule_id: str UUID identifying the schedule which the subscription belongs. :param subscription_id: str UUID identifying the subscription. - + :returns: :class:`Subscription` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str], show_trashed: Optional[bool], view: Optional[DashboardView]]) -> Iterator[Dashboard] List dashboards. - + :param page_size: int (optional) The number of dashboards to return per page. :param page_token: str (optional) @@ -138,14 +138,14 @@ returned. :param view: :class:`DashboardView` (optional) `DASHBOARD_VIEW_BASIC`only includes summary metadata from the dashboard. - + :returns: Iterator over :class:`Dashboard` .. py:method:: list_schedules(dashboard_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Schedule] List dashboard schedules. - + :param dashboard_id: str UUID identifying the dashboard to which the schedules belongs. 
:param page_size: int (optional) @@ -153,14 +153,14 @@ :param page_token: str (optional) A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent page. - + :returns: Iterator over :class:`Schedule` .. py:method:: list_subscriptions(dashboard_id: str, schedule_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Subscription] List schedule subscriptions. - + :param dashboard_id: str UUID identifying the dashboard which the subscriptions belongs. :param schedule_id: str @@ -170,16 +170,16 @@ :param page_token: str (optional) A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent page. - + :returns: Iterator over :class:`Subscription` .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard Migrate dashboard. - + Migrates a classic SQL dashboard to Lakeview. - + :param source_dashboard_id: str UUID of the dashboard to be migrated. :param display_name: str (optional) @@ -189,16 +189,16 @@ :param update_parameter_syntax: bool (optional) Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax (:param) when converting datasets in the dashboard. - + :returns: :class:`Dashboard` .. py:method:: publish(dashboard_id: str [, embed_credentials: Optional[bool], warehouse_id: Optional[str]]) -> PublishedDashboard Publish dashboard. - + Publish the current draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard to be published. :param embed_credentials: bool (optional) @@ -206,56 +206,56 @@ embedded credentials will be used to execute the published dashboard's queries. :param warehouse_id: str (optional) The ID of the warehouse that can be used to override the warehouse which was set in the draft. - + :returns: :class:`PublishedDashboard` .. py:method:: trash(dashboard_id: str) Trash dashboard. - + Trash a dashboard. - + :param dashboard_id: str UUID identifying the dashboard. - - + + .. py:method:: unpublish(dashboard_id: str) Unpublish dashboard. - + Unpublish the dashboard. - + :param dashboard_id: str UUID identifying the published dashboard. - - + + .. py:method:: update(dashboard_id: str, dashboard: Dashboard) -> Dashboard Update dashboard. - + Update a draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard. :param dashboard: :class:`Dashboard` - + :returns: :class:`Dashboard` .. py:method:: update_schedule(dashboard_id: str, schedule_id: str, schedule: Schedule) -> Schedule Update dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str UUID identifying the schedule. :param schedule: :class:`Schedule` - + :returns: :class:`Schedule` \ No newline at end of file diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst index fb22c47ad..ea9efe244 100644 --- a/docs/workspace/dashboards/lakeview_embedded.rst +++ b/docs/workspace/dashboards/lakeview_embedded.rst @@ -6,35 +6,23 @@ Token-based Lakeview APIs for embedding dashboards in external applications. - .. py:method:: get_published_dashboard_embedded(dashboard_id: str) - - Read a published dashboard in an embedded ui. - - Get the current published dashboard within an embedded context. - - :param dashboard_id: str - UUID identifying the published dashboard. - - - - .. 
py:method:: get_published_dashboard_token_info(dashboard_id: str [, external_value: Optional[str], external_viewer_id: Optional[str]]) -> GetPublishedDashboardTokenInfoResponse Read information about a published dashboard to mint an OAuth token. - + Get the required authorization details and scopes of a published dashboard to mint an OAuth token. The `authorization_details` can be enriched to apply additional restrictions. - + Example: Adding the following `authorization_details` object to downscope the viewer permission to a specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], object_type: "TABLE", object_full_path: "main.default.testdata" } ``` - + :param dashboard_id: str UUID identifying the published dashboard. :param external_value: str (optional) Provided external value to be included in the custom claim. :param external_viewer_id: str (optional) Provided external viewer id to be included in the custom claim. - + :returns: :class:`GetPublishedDashboardTokenInfoResponse` \ No newline at end of file diff --git a/docs/workspace/database/database.rst b/docs/workspace/database/database.rst new file mode 100644 index 000000000..d4a5d0864 --- /dev/null +++ b/docs/workspace/database/database.rst @@ -0,0 +1,175 @@ +``w.database``: Database Instances +================================== +.. currentmodule:: databricks.sdk.service.database + +.. py:class:: DatabaseAPI + + Database Instances provide access to a database via REST API or direct SQL. + + .. py:method:: create_database_catalog(catalog: DatabaseCatalog) -> DatabaseCatalog + + Create a Database Catalog. + + :param catalog: :class:`DatabaseCatalog` + + :returns: :class:`DatabaseCatalog` + + + .. py:method:: create_database_instance(database_instance: DatabaseInstance) -> DatabaseInstance + + Create a Database Instance. + + :param database_instance: :class:`DatabaseInstance` + A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + + :returns: :class:`DatabaseInstance` + + + .. py:method:: create_database_table(table: DatabaseTable) -> DatabaseTable + + Create a Database Table. + + :param table: :class:`DatabaseTable` + Next field marker: 13 + + :returns: :class:`DatabaseTable` + + + .. py:method:: create_synced_database_table(synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable + + Create a Synced Database Table. + + :param synced_table: :class:`SyncedDatabaseTable` + Next field marker: 12 + + :returns: :class:`SyncedDatabaseTable` + + + .. py:method:: delete_database_catalog(name: str) + + Delete a Database Catalog. + + :param name: str + + + + + .. py:method:: delete_database_instance(name: str [, force: Optional[bool], purge: Optional[bool]]) + + Delete a Database Instance. + + :param name: str + Name of the instance to delete. + :param force: bool (optional) + By default, an instance cannot be deleted if it has descendant instances created via PITR. If this + flag is specified as true, all descendant instances will be deleted as well. + :param purge: bool (optional) + If false, the database instance is soft deleted. Soft deleted instances behave as if they are + deleted, and cannot be used for CRUD operations nor connected to. However, they can be undeleted by + calling the undelete API for a limited time. If true, the database instance is hard deleted and + cannot be undeleted. + + + + + .. py:method:: delete_database_table(name: str) + + Delete a Database Table. + + :param name: str + + + + + ..
py:method:: delete_synced_database_table(name: str) + + Delete a Synced Database Table. + + :param name: str + + + + + .. py:method:: find_database_instance_by_uid( [, uid: Optional[str]]) -> DatabaseInstance + + Find a Database Instance by uid. + + :param uid: str (optional) + UID of the cluster to get. + + :returns: :class:`DatabaseInstance` + + + .. py:method:: generate_database_credential( [, instance_names: Optional[List[str]], request_id: Optional[str]]) -> DatabaseCredential + + Generates a credential that can be used to access database instances. + + :param instance_names: List[str] (optional) + Instances to which the token will be scoped. + :param request_id: str (optional) + + :returns: :class:`DatabaseCredential` + + + .. py:method:: get_database_catalog(name: str) -> DatabaseCatalog + + Get a Database Catalog. + + :param name: str + + :returns: :class:`DatabaseCatalog` + + + .. py:method:: get_database_instance(name: str) -> DatabaseInstance + + Get a Database Instance. + + :param name: str + Name of the cluster to get. + + :returns: :class:`DatabaseInstance` + + + .. py:method:: get_database_table(name: str) -> DatabaseTable + + Get a Database Table. + + :param name: str + + :returns: :class:`DatabaseTable` + + + .. py:method:: get_synced_database_table(name: str) -> SyncedDatabaseTable + + Get a Synced Database Table. + + :param name: str + + :returns: :class:`SyncedDatabaseTable` + + + .. py:method:: list_database_instances( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DatabaseInstance] + + List Database Instances. + + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of Database Instances. Requests first page if absent. + + :returns: Iterator over :class:`DatabaseInstance` + + + .. py:method:: update_database_instance(name: str, database_instance: DatabaseInstance, update_mask: str) -> DatabaseInstance + + Update a Database Instance. + + :param name: str + The name of the instance. This is the unique identifier for the instance. + :param database_instance: :class:`DatabaseInstance` + A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. + :param update_mask: str + The list of fields to update. + + :returns: :class:`DatabaseInstance` + \ No newline at end of file diff --git a/docs/workspace/database/index.rst b/docs/workspace/database/index.rst new file mode 100644 index 000000000..644c20f56 --- /dev/null +++ b/docs/workspace/database/index.rst @@ -0,0 +1,10 @@ + +Database Instances +================== + +Create Database Instances and manage their configurations, including integrations with Unity Catalog + +.. toctree:: + :maxdepth: 1 + + database \ No newline at end of file diff --git a/docs/workspace/files/dbfs.rst b/docs/workspace/files/dbfs.rst index 3f214908d..e6b31273a 100644 --- a/docs/workspace/files/dbfs.rst +++ b/docs/workspace/files/dbfs.rst @@ -10,31 +10,31 @@ .. py:method:: add_block(handle: int, data: str) Append data block. - + Appends a block of data to the stream specified by the input handle. If the handle does not exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. - + If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``. - + :param handle: int The handle on an open stream. :param data: str The base64-encoded data to append to the stream. This has a limit of 1 MB. - - + + .. py:method:: close(handle: int) Close the stream. 
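A heavily hedged sketch of the Database Instances workflow documented above — the instance name is a placeholder and `capacity="CU_1"` is an assumed value, not a confirmed one:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import database

    w = WorkspaceClient()

    # Create a logical Postgres instance, then mint a scoped credential for it.
    instance = w.database.create_database_instance(
        database.DatabaseInstance(name="my-instance", capacity="CU_1")
    )
    cred = w.database.generate_database_credential(instance_names=[instance.name])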
- + Closes the stream specified by the input handle. If the handle does not exist, this call throws an exception with ``RESOURCE_DOES_NOT_EXIST``. - + :param handle: int The handle on an open stream. - - + + .. py:method:: copy(src: str, dst: str [, recursive: bool = False, overwrite: bool = False]) @@ -44,21 +44,21 @@ .. py:method:: create(path: str [, overwrite: Optional[bool]]) -> CreateResponse Open a stream. - + Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``. - + A typical workflow for file upload would be: - + 1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with the handle you have. 3. Issue a ``close`` call with the handle you have. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing file/files. - + :returns: :class:`CreateResponse` @@ -98,13 +98,13 @@ .. py:method:: get_status(path: str) -> FileInfo Get the information of a file or directory. - + Gets the file information for a file or directory. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The path of the file or directory. The path should be the absolute DBFS path. - + :returns: :class:`FileInfo` @@ -130,18 +130,18 @@ .. py:method:: move(source_path: str, destination_path: str) Move a file. - + Moves a file from one location to another location within DBFS. If the source file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source path is a directory, this call always recursively moves all files. - + :param source_path: str The source path of the file or directory. The path should be the absolute DBFS path. :param destination_path: str The destination path of the file or directory. The path should be the absolute DBFS path. - - + + .. py:method:: move_(src: str, dst: str [, recursive: bool = False, overwrite: bool = False]) @@ -154,40 +154,40 @@ .. py:method:: put(path: str [, contents: Optional[str], overwrite: Optional[bool]]) Upload a file. - + Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but can also be used as a convenient single call for data upload. - + Alternatively you can pass contents as base64 string. - + The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded. - + If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create, :method:dbfs/addBlock, :method:dbfs/close. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param contents: str (optional) This parameter might be absent, and instead a posted file will be used. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing file/files. - - + + .. py:method:: read(path: str [, length: Optional[int], offset: Optional[int]]) -> ReadResponse Get the contents of a file. - + Returns the contents of a file. 
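A minimal sketch of the streaming upload workflow (`create` → `add_block` → `close`) described above — the DBFS path is a placeholder:

.. code-block::

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Open a stream, append base64-encoded blocks of at most 1 MB, then close.
    handle = w.dbfs.create(path="/tmp/example.txt", overwrite=True).handle
    w.dbfs.add_block(handle=handle, data=base64.b64encode(b"hello world").decode())
    w.dbfs.close(handle=handle)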
If the file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`. - + If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of file. - + :param path: str The path of the file to read. The path should be the absolute DBFS path. :param length: int (optional) @@ -195,7 +195,7 @@ of 0.5 MB. :param offset: int (optional) The offset to read from in bytes. - + :returns: :class:`ReadResponse` diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst index ad7bca57e..a6f1d938a 100644 --- a/docs/workspace/files/files.rst +++ b/docs/workspace/files/files.rst @@ -7,122 +7,124 @@ The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI. The API makes working with file content as raw bytes easier and more efficient. - + The API supports [Unity Catalog volumes], where files and directories to operate on are specified using their volume URI path, which follows the format /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>. - + The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI path. The path is always absolute. - + Some Files API client features are currently experimental. To enable them, set `enable_experimental_files_api_client = True` in your configuration profile or use the environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - + + Use of Files API may incur Databricks data transfer charges. + [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html .. py:method:: create_directory(directory_path: str) Create a directory. - + Creates an empty directory. If necessary, also creates any parent directories of the new, empty directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success response; this method is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + + .. py:method:: delete(file_path: str) Delete a file. - + Deletes a file. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. - - + + .. py:method:: delete_directory(directory_path: str) Delete a directory. - + Deletes an empty directory. - + To delete a non-empty directory, first delete all of its contents. This can be done by listing the directory contents and deleting each file and subdirectory recursively. - + :param directory_path: str The absolute path of a directory. - - + + .. py:method:: download(file_path: str) -> DownloadResponse Download a file. - + Downloads a file. The file contents are the response body. This is a standard HTTP file download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. - + :param file_path: str The absolute path of the file. - + :returns: :class:`DownloadResponse` .. py:method:: get_directory_metadata(directory_path: str) Get directory metadata. - + Get the metadata of a directory. 
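A minimal round-trip sketch for the file endpoints in this section — the volume path is a placeholder, and reading the response via its `contents` stream is assumed from :class:`DownloadResponse`:

.. code-block::

    import io

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    path = "/Volumes/main/default/my_volume/hello.txt"

    # Upload raw bytes, then stream them back.
    w.files.upload(file_path=path, contents=io.BytesIO(b"hello"), overwrite=True)
    resp = w.files.download(file_path=path)
    print(resp.contents.read())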
The response HTTP headers contain the metadata. There is no response body. - + This method is useful to check if a directory exists and the caller has access to it. - + If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory if it does not exist, and is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + + .. py:method:: get_metadata(file_path: str) -> GetMetadataResponse Get file metadata. - + Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body. - + :param file_path: str The absolute path of the file. - + :returns: :class:`GetMetadataResponse` .. py:method:: list_directory_contents(directory_path: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DirectoryEntry] List directory contents. - + Returns the contents of a directory. If there is no directory at the specified path, the API returns an HTTP 404 error. - + :param directory_path: str The absolute path of a directory. :param page_size: int (optional) The maximum number of directory entries to return. The response may contain fewer entries. If the response contains a `next_page_token`, there may be more entries, even if fewer than `page_size` entries are in the response. - + We recommend not setting this value unless you are intentionally listing less than the complete directory contents. - + If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values above 1000 will be coerced to 1000. :param page_token: str (optional) @@ -132,25 +134,25 @@ request. To list all of the entries in a directory, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete. - + :returns: Iterator over :class:`DirectoryEntry` .. py:method:: upload(file_path: str, contents: BinaryIO [, overwrite: Optional[bool]]) Upload a file. - + Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The contents of the resulting file will be exactly the bytes sent in the request body. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. :param contents: BinaryIO :param overwrite: bool (optional) If true or unspecified, an existing file will be overwritten. If false, an error will be returned if the path points to an existing file. - - + + \ No newline at end of file diff --git a/docs/workspace/iam/access_control.rst b/docs/workspace/iam/access_control.rst index 930af105a..a5f1feeda 100644 --- a/docs/workspace/iam/access_control.rst +++ b/docs/workspace/iam/access_control.rst @@ -9,7 +9,7 @@ .. py:method:: check_policy(actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity [, resource_info: Optional[ResourceInfo]]) -> CheckPolicyResponse Check access policy to a resource.
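To tie the Files API methods above together, here is a sketch of a simple upload/list/download round trip. It is illustrative only; the Unity Catalog volume path is hypothetical.

.. code-block:: python

    import io

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    directory = "/Volumes/main/default/my_volume/demo"
    w.files.create_directory(directory_path=directory)

    # Upload raw bytes; with overwrite=True an existing file is replaced.
    w.files.upload(
        file_path=f"{directory}/hello.txt",
        contents=io.BytesIO(b"hello"),
        overwrite=True,
    )

    # The iterator follows `next_page_token` for you until the listing is complete.
    for entry in w.files.list_directory_contents(directory_path=directory):
        print(entry.path)

    # Download the file; the contents come back as a raw byte stream.
    print(w.files.download(file_path=f"{directory}/hello.txt").contents.read())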
- + :param actor: :class:`Actor` :param permission: str :param resource: str @@ -18,6 +18,6 @@ :param consistency_token: :class:`ConsistencyToken` :param authz_identity: :class:`RequestAuthzIdentity` :param resource_info: :class:`ResourceInfo` (optional) - + :returns: :class:`CheckPolicyResponse` \ No newline at end of file diff --git a/docs/workspace/iam/account_access_control_proxy.rst b/docs/workspace/iam/account_access_control_proxy.rst index 66c396be5..1b92995c6 100644 --- a/docs/workspace/iam/account_access_control_proxy.rst +++ b/docs/workspace/iam/account_access_control_proxy.rst @@ -11,31 +11,31 @@ .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse Get assignable roles for a resource. - + Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse` .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse Get a rule set. - + Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account. `name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -48,24 +48,24 @@ modify -> write pattern to perform rule set updates in order to avoid race conditions; that is, get an etag from a GET rule set request and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoding a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse` .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse Update a rule set. - + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set. :param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse` \ No newline at end of file diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst index bf739025c..cff427451 100644 --- a/docs/workspace/iam/current_user.rst +++ b/docs/workspace/iam/current_user.rst @@ -20,8 +20,8 @@ me = w.current_user.me() Get current user info. - + Get details about the current method caller's identity.
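As a sketch of the read → modify → write etag pattern described above for rule sets (illustrative only; the account and group identifiers are hypothetical placeholders):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    name = "accounts/<account-id>/groups/<group-id>/ruleSets/default"

    # GET with an empty etag to read the current version of the rule set.
    current = w.account_access_control_proxy.get_rule_set(name=name, etag="")

    # PUT the (possibly modified) rules back, passing the etag from the GET so
    # the service can detect concurrent updates.
    w.account_access_control_proxy.update_rule_set(
        name=name,
        rule_set=iam.RuleSetUpdateRequest(
            name=name,
            etag=current.etag,
            grant_rules=current.grant_rules or [],
        ),
    )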
- + :returns: :class:`User` \ No newline at end of file diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst index 0b62b675a..c187eb9c3 100644 --- a/docs/workspace/iam/groups.rst +++ b/docs/workspace/iam/groups.rst @@ -6,7 +6,7 @@ Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks workspace identities can be assigned as members of groups, and members inherit permissions that are assigned to their group. @@ -30,15 +30,15 @@ w.groups.delete(id=group.id) Create a new group. - + Creates a group in the Databricks workspace with a unique name, using the supplied group details. - + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -51,7 +51,7 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - + :returns: :class:`Group` @@ -71,15 +71,18 @@ group = w.groups.create(display_name=f"sdk-{time.time_ns()}") w.groups.delete(id=group.id) + + # cleanup + w.groups.delete(id=group.id) Delete a group. - + Deletes a group from the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. - - + + .. py:method:: get(id: str) -> Group @@ -103,21 +106,21 @@ w.groups.delete(id=group.id) Get group details. - + Gets the information for a specific group in the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. - + :returns: :class:`Group` .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group] List group details. - + Gets all details of the groups associated with the Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -129,7 +132,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -137,7 +140,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`Group` @@ -183,24 +186,24 @@ w.groups.delete(id=group.id) Update group details. - + Partially updates the details of a group. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. 
py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) Replace a group. - + Updates the details of a group by replacing the entire group entity. - + :param id: str Databricks group ID :param display_name: str (optional) @@ -208,7 +211,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -219,6 +222,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - - + + \ No newline at end of file diff --git a/docs/workspace/iam/permission_migration.rst b/docs/workspace/iam/permission_migration.rst index 248b1b80d..8eef6e0e1 100644 --- a/docs/workspace/iam/permission_migration.rst +++ b/docs/workspace/iam/permission_migration.rst @@ -9,7 +9,7 @@ .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> MigratePermissionsResponse Migrate Permissions. - + :param workspace_id: int WorkspaceId of the associated workspace where the permission migration will occur. :param from_workspace_group_name: str @@ -18,6 +18,6 @@ The name of the account group that permissions will be migrated to. :param size: int (optional) The maximum number of permissions that will be migrated. - + :returns: :class:`MigratePermissionsResponse` \ No newline at end of file diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 0c3ef26fc..7d8b7eb26 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -24,7 +24,7 @@ the required permissions for specific actions or abilities and other important information, see [Access Control]. Note that to manage access control on service principals, use **[Account Access Control Proxy](:service:accountaccesscontrolproxy)**. - + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html .. py:method:: get(request_object_type: str, request_object_id: str) -> ObjectPermissions @@ -47,17 +47,17 @@ _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) Get object permissions. - + Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. - + :returns: :class:`ObjectPermissions` @@ -81,15 +81,15 @@ levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) Get object permission levels. - + Gets the permission levels that a user can have on an object. - + :param request_object_type: str The type of the request object. 
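As an aside on the SCIM-style ``filter`` and sort parameters documented for the Groups API above, a listing sketch (illustrative only; the ``sdk-`` name prefix is hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # Only groups whose displayName starts with "sdk-", sorted ascending.
    for group in w.groups.list(
        filter='displayName sw "sdk-"',
        attributes="id,displayName",
        sort_by="displayName",
        sort_order=iam.ListSortOrder.ASCENDING,
    ):
        print(group.id, group.display_name)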
Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str - + :returns: :class:`GetPermissionLevelsResponse` @@ -128,11 +128,11 @@ w.groups.delete(id=group.id) Set object permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -140,17 +140,17 @@ :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` .. py:method:: update(request_object_type: str, request_object_id: str [, access_control_list: Optional[List[AccessControlRequest]]]) -> ObjectPermissions Update object permissions. - + Updates the permissions on an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -158,6 +158,6 @@ :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` \ No newline at end of file diff --git a/docs/workspace/iam/service_principals.rst b/docs/workspace/iam/service_principals.rst index 40b65f6d5..29ee1ba3f 100644 --- a/docs/workspace/iam/service_principals.rst +++ b/docs/workspace/iam/service_principals.rst @@ -20,18 +20,24 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import iam w = WorkspaceClient() - created = w.service_principals.create(display_name=f"sdk-{time.time_ns()}") + groups = w.groups.group_display_name_to_id_map(iam.ListGroupsRequest()) + + spn = w.service_principals.create( + display_name=f"sdk-{time.time_ns()}", + groups=[iam.ComplexValue(value=groups["admins"])], + ) # cleanup - w.service_principals.delete(id=created.id) + w.service_principals.delete(id=spn.id) Create a service principal. - + Creates a new service principal in the Databricks workspace. - + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -41,7 +47,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -51,20 +57,20 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - + :returns: :class:`ServicePrincipal` .. py:method:: delete(id: str) Delete a service principal. - + Delete a single service principal in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. 
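For the ``set`` method of the Permissions API above, a sketch that replaces the direct permissions on a job (illustrative only; the job ID and group name are hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # Replaces all direct permissions on the object with this list.
    w.permissions.set(
        request_object_type="jobs",
        request_object_id="<job-id>",
        access_control_list=[
            iam.AccessControlRequest(
                group_name="data-engineers",
                permission_level=iam.PermissionLevel.CAN_MANAGE_RUN,
            )
        ],
    )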
- - + + .. py:method:: get(id: str) -> ServicePrincipal @@ -88,12 +94,12 @@ w.service_principals.delete(id=created.id) Get service principal details. - + Gets the details for a single service principal defined in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. - + :returns: :class:`ServicePrincipal` @@ -112,9 +118,9 @@ all = w.service_principals.list(iam.ListServicePrincipalsRequest()) List service principals. - + Gets the set of service principals associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -126,7 +132,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -134,7 +140,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`ServicePrincipal` @@ -166,16 +172,16 @@ w.service_principals.delete(id=created.id) Update service principal details. - + Partially updates the details of a single service principal in the Databricks workspace. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) @@ -204,11 +210,11 @@ w.service_principals.delete(id=created.id) Replace service principal. - + Updates the details of a single service principal. - + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -220,7 +226,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -228,6 +234,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - - + + \ No newline at end of file diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst index 34de48f3b..2956c4c73 100644 --- a/docs/workspace/iam/users.rst +++ b/docs/workspace/iam/users.rst @@ -5,7 +5,7 @@ .. py:class:: UsersAPI User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks workspace.
SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in Databricks workspace and give them the proper level of @@ -33,23 +33,23 @@ ) Create a new user. - + Creates a new user in the Databricks workspace. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -63,7 +63,7 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` @@ -80,22 +80,19 @@ w = WorkspaceClient() - user = w.users.create( - display_name=f"sdk-{time.time_ns()}", - user_name=f"sdk-{time.time_ns()}@example.com", - ) + other_owner = w.users.create(user_name=f"sdk-{time.time_ns()}@example.com") - w.users.delete(id=user.id) + w.users.delete(id=other_owner.id) Delete a user. - + Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks workspace. - - + + .. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User @@ -119,9 +116,9 @@ fetch = w.users.get(id=user.id) Get user details. - + Gets information for a specific user in Databricks workspace. - + :param id: str Unique ID for a user in the Databricks workspace. :param attributes: str (optional) @@ -135,7 +132,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -144,25 +141,25 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User` .. py:method:: get_permission_levels() -> GetPasswordPermissionLevelsResponse Get password permission levels. - + Gets the permission levels that a user can have on an object. - + :returns: :class:`GetPasswordPermissionLevelsResponse` .. py:method:: get_permissions() -> PasswordPermissions Get password permissions. - + Gets the permissions of all passwords. Passwords can inherit permissions from their root object. - + :returns: :class:`PasswordPermissions` @@ -185,9 +182,9 @@ ) List users. 
- + Gets details for all the users associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -199,7 +196,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -208,7 +205,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`User` @@ -238,27 +235,27 @@ ) Update user details. - + Partially updates a user resource by applying the supplied operations on specific user attributes. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: set_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions Set password permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions` @@ -283,9 +280,9 @@ w.users.update(id=user.id, user_name=user.user_name, active=True) Replace a user. - + Replaces a user's information with the data supplied in request. - + :param id: str Databricks user ID. :param active: bool (optional) @@ -294,13 +291,13 @@ String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -312,17 +309,17 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - - + + .. py:method:: update_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions Update password permissions. - + Updates the permissions on all passwords. Passwords can inherit permissions from their root object. 
- + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions` \ No newline at end of file diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst index dc86a0e78..d7ecc203e 100644 --- a/docs/workspace/index.rst +++ b/docs/workspace/index.rst @@ -7,17 +7,20 @@ These APIs are available from WorkspaceClient .. toctree:: :maxdepth: 1 + aibuilder/index apps/index catalog/index cleanrooms/index compute/index dashboards/index + database/index files/index iam/index jobs/index marketplace/index ml/index pipelines/index + qualitymonitorv2/index serving/index settings/index sharing/index diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 27eed0a54..a59f4155c 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -5,17 +5,17 @@ .. py:class:: JobsExt The Jobs API allows you to create, edit, and delete jobs. - + You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java applications. - + You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs. - + [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets @@ -60,17 +60,17 @@ w.jobs.delete(job_id=created_job.job_id) Cancel all runs of a job. - + Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. - + :param all_queued_runs: bool (optional) Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in the workspace are canceled. :param job_id: int (optional) The canonical identifier of the job to cancel all runs of. - - + + .. py:method:: cancel_run(run_id: int) -> Wait[Run] @@ -115,13 +115,13 @@ w.jobs.delete(job_id=created_job.job_id) Cancel a run. - + Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when this request completes. - + :param run_id: int This field is required. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -168,9 +168,9 @@ w.jobs.delete(job_id=created_job.job_id) Create a new job. - + Create a new job. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param budget_policy_id: str (optional) @@ -186,7 +186,7 @@ An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. - + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified. 
:param email_notifications: :class:`JobEmailNotifications` (optional) @@ -203,10 +203,10 @@ :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -233,7 +233,7 @@ :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -242,7 +242,7 @@ :param run_as: :class:`JobRunAs` (optional) Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. - + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -265,32 +265,32 @@ `runNow`. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. - + :returns: :class:`CreateResponse` .. py:method:: delete(job_id: int) Delete a job. - + Deletes a job. - + :param job_id: int The canonical identifier of the job to delete. This field is required. - - + + .. py:method:: delete_run(run_id: int) Delete a job run. - + Deletes a non-active run. Returns an error if the run is active. - + :param run_id: int ID of the run to delete. - - + + .. py:method:: export_run(run_id: int [, views_to_export: Optional[ViewsToExport]]) -> ExportRunOutput @@ -335,14 +335,14 @@ w.jobs.delete(job_id=created_job.job_id) Export and retrieve a job run. - + Export and retrieve the job run task. - + :param run_id: int The canonical identifier for the run. This field is required. :param views_to_export: :class:`ViewsToExport` (optional) Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE. - + :returns: :class:`ExportRunOutput` @@ -367,21 +367,23 @@ w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] ) - run = w.jobs.submit( - run_name=f"sdk-{time.time_ns()}", + created_job = w.jobs.create( + name=f"sdk-{time.time_ns()}", tasks=[ - jobs.SubmitTask( + jobs.Task( + description="test", existing_cluster_id=cluster_id, notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key=f"sdk-{time.time_ns()}", + task_key="test", + timeout_seconds=0, ) ], - ).result() + ) - output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) + by_id = w.jobs.get(job_id=created_job.job_id) # cleanup - w.jobs.delete_run(run_id=run.run_id) + w.jobs.delete(job_id=created_job.job_id) Get a single job. @@ -400,24 +402,24 @@ .. py:method:: get_permission_levels(job_id: str) -> GetJobPermissionLevelsResponse Get job permission levels. 
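Putting :method:jobs/create together with :method:jobs/runNow, a minimal sketch (illustrative only; the cluster ID and notebook path are hypothetical):

.. code-block:: python

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    created = w.jobs.create(
        name=f"sdk-{time.time_ns()}",
        tasks=[
            jobs.Task(
                task_key="main",
                existing_cluster_id="<cluster-id>",
                notebook_task=jobs.NotebookTask(notebook_path="/Users/me/my-notebook"),
            )
        ],
    )

    # Trigger a run and block until it reaches a terminal state.
    run = w.jobs.run_now(job_id=created.job_id).result()
    print(run.state)

    # cleanup
    w.jobs.delete(job_id=created.job_id)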
- + Gets the permission levels that a user can have on an object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`GetJobPermissionLevelsResponse` .. py:method:: get_permissions(job_id: str) -> JobPermissions Get job permissions. - + Gets the permissions of a job. Jobs can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`JobPermissions` @@ -513,19 +515,19 @@ w.jobs.delete_run(run_id=run.run_id) Get the output for a single run. - + Retrieve the output and metadata of a single task run. When a notebook task returns a value through the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks restricts this API to returning the first 5 MB of the output. To return a larger result, you can store job results in a cloud storage service. - + This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you want to reference them beyond 60 days, you must save old run results before they expire. - + :param run_id: int The canonical identifier for the run. - + :returns: :class:`RunOutput` @@ -715,10 +717,10 @@ w.jobs.delete(job_id=created_job.job_id) Repair a job run. - + Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job and task settings, and can be viewed in the history for the original job run. - + :param run_id: int The job run ID of the run to repair. The run must not be in progress. :param dbt_commands: List[str] (optional) @@ -730,9 +732,9 @@ task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. For example, `"param": "overriding_val"` :param latest_repair_id: int (optional) @@ -743,23 +745,23 @@ A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.
@@ -771,15 +773,15 @@ The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param rerun_all_failed_tasks: bool (optional) If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used. @@ -794,20 +796,20 @@ as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -863,19 +865,19 @@ w.jobs.delete(job_id=created_job.job_id) Update all job settings (reset). - + Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update job settings partially. - + :param job_id: int The canonical identifier of the job to reset. This field is required. :param new_settings: :class:`JobSettings` The new settings of the job. These settings completely replace the old settings. - + Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. - - + + .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run] @@ -918,9 +920,9 @@ w.jobs.delete(job_id=created_job.job_id) Trigger a new job run. - + Run a job and return the `run_id` of the triggered run. - + :param job_id: int The ID of the job to be executed :param dbt_commands: List[str] (optional) @@ -930,14 +932,14 @@ An optional token to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. 
- + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param jar_params: List[str] (optional) A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. @@ -945,9 +947,9 @@ task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` @@ -955,16 +957,16 @@ A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param only: List[str] (optional) @@ -974,7 +976,7 @@ The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -986,15 +988,15 @@ The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param queue: :class:`QueueSettings` (optional) The queue settings of the run. @@ -1004,20 +1006,20 @@ as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. 
- + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -1029,14 +1031,14 @@ .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions Set job permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` @@ -1076,11 +1078,11 @@ w.jobs.delete_run(run_id=run.run_id) Create and trigger a one-time run. - + Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param budget_policy_id: str (optional) @@ -1093,10 +1095,10 @@ :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -1105,14 +1107,14 @@ An optional token that can be used to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. - + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param notification_settings: :class:`JobNotificationSettings` (optional) Optional notification settings that are used when sending notifications to each of the @@ -1129,7 +1131,7 @@ An optional timeout applied to each run of this job. A value of `0` means no timeout. 
:param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when the run begins or completes. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -1183,10 +1185,10 @@ w.jobs.delete(job_id=created_job.job_id) Update job settings partially. - + Add, update, or remove specific settings of an existing job. Use the [_Reset_ endpoint](:method:jobs/reset) to overwrite all job settings. - + :param job_id: int The canonical identifier of the job to update. This field is required. :param fields_to_remove: List[str] (optional) @@ -1194,29 +1196,29 @@ tasks and job clusters (`tasks/task_1`). This field is optional. :param new_settings: :class:`JobSettings` (optional) The new settings for the job. - + Top-level fields specified in `new_settings` are completely replaced, except for arrays which are merged. That is, new and existing entries are completely replaced based on the respective key fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept. - + Partially updating nested fields is not supported. - + Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. - - + + .. py:method:: update_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions Update job permissions. - + Updates the permissions on a job. Jobs can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` diff --git a/docs/workspace/jobs/policy_compliance_for_jobs.rst b/docs/workspace/jobs/policy_compliance_for_jobs.rst index b75a73eab..69f211552 100644 --- a/docs/workspace/jobs/policy_compliance_for_jobs.rst +++ b/docs/workspace/jobs/policy_compliance_for_jobs.rst @@ -6,53 +6,53 @@ The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace. This API currently only supports compliance controls for cluster policies. - + A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last edited. The job is considered out of compliance if any of its clusters no longer comply with their updated policies. - + The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce compliance API allows you to update a job so that it becomes compliant with all of its policies. .. py:method:: enforce_compliance(job_id: int [, validate_only: Optional[bool]]) -> EnforcePolicyComplianceResponse Enforce job policy compliance. - + Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) are compliant with the current versions of their respective cluster policies. All-purpose clusters used in the job will not be updated. - + :param job_id: int The ID of the job you want to enforce policy compliance on. :param validate_only: bool (optional) If set, previews changes made to the job to comply with its policy, but does not update the job. - + :returns: :class:`EnforcePolicyComplianceResponse` .. py:method:: get_compliance(job_id: int) -> GetPolicyComplianceResponse Get job policy compliance. 
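For the partial-update semantics of :method:jobs/update described above (top-level fields replaced, arrays merged by ``task_key`` / ``job_cluster_key``), a sketch (illustrative only; the job ID and settings are hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Rename the job and change its timeout; per the docs, the timeout change
    # also applies to active runs. Listing "schedule" in fields_to_remove
    # drops the job's cron schedule.
    w.jobs.update(
        job_id=123,
        new_settings=jobs.JobSettings(name="nightly-etl", timeout_seconds=3600),
        fields_to_remove=["schedule"],
    )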
- + Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and some of its job clusters no longer comply with their updated policies. - + :param job_id: int The ID of the job whose compliance status you are requesting. - + :returns: :class:`GetPolicyComplianceResponse` .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[JobCompliance] List job policy compliance. - + Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and its job clusters no longer comply with the updated policy. - + :param policy_id: str Canonical unique identifier for the cluster policy. :param page_size: int (optional) @@ -61,6 +61,6 @@ :param page_token: str (optional) A page token that can be used to navigate to the next page or previous page as returned by `next_page_token` or `prev_page_token`. - + :returns: Iterator over :class:`JobCompliance` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_fulfillments.rst b/docs/workspace/marketplace/consumer_fulfillments.rst index 149ec6451..4ea7a9c29 100644 --- a/docs/workspace/marketplace/consumer_fulfillments.rst +++ b/docs/workspace/marketplace/consumer_fulfillments.rst @@ -9,28 +9,28 @@ .. py:method:: get(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SharedDataObject] Get listing content metadata. - + Get a high level preview of the metadata of listing installable content. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`SharedDataObject` .. py:method:: list(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListingFulfillment] List all listing fulfillments. - + Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential installation. Standard installations contain metadata about the attached share or git repo. Only one of these fields will be present. Personalized installations contain metadata about the attached share or git repo, as well as the Delta Sharing recipient type. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListingFulfillment` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_installations.rst b/docs/workspace/marketplace/consumer_installations.rst index a9539ad1f..3cdb00a5a 100644 --- a/docs/workspace/marketplace/consumer_installations.rst +++ b/docs/workspace/marketplace/consumer_installations.rst @@ -9,9 +9,9 @@ .. py:method:: create(listing_id: str [, accepted_consumer_terms: Optional[ConsumerTerms], catalog_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType], repo_detail: Optional[RepoInstallation], share_name: Optional[str]]) -> Installation Install from a listing. - + Install payload associated with a Databricks Marketplace listing. - + :param listing_id: str :param accepted_consumer_terms: :class:`ConsumerTerms` (optional) :param catalog_name: str (optional) @@ -19,60 +19,60 @@ :param repo_detail: :class:`RepoInstallation` (optional) for git repo installations :param share_name: str (optional) - + :returns: :class:`Installation` .. py:method:: delete(listing_id: str, installation_id: str) Uninstall from a listing. 
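For the Policy Compliance for Jobs API documented above, a sketch that scans for and fixes non-compliant jobs (illustrative only; the policy ID is hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Iterate the compliance status of every job using this cluster policy;
    # the iterator follows page tokens for you.
    for c in w.policy_compliance_for_jobs.list_compliance(policy_id="<policy-id>"):
        if not c.is_compliant:
            # Preview first with validate_only=True, then enforce for real.
            w.policy_compliance_for_jobs.enforce_compliance(
                job_id=c.job_id, validate_only=False
            )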
- + Uninstall an installation associated with a Databricks Marketplace listing. - + :param listing_id: str :param installation_id: str - - + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail] List all installations. - + List all installations across all listings. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`InstallationDetail` .. py:method:: list_listing_installations(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail] List installations for a listing. - + List all installations for a particular listing. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`InstallationDetail` .. py:method:: update(listing_id: str, installation_id: str, installation: InstallationDetail [, rotate_token: Optional[bool]]) -> UpdateInstallationResponse Update an installation. - + This is an update API that updates the fields defined in the installation table, and also interacts with external services according to the fields not included in the installation table: 1. the token will be rotated if the rotateToken flag is true; 2. the token will be forcibly rotated if the rotateToken flag is true and the tokenInfo field is empty - + :param listing_id: str :param installation_id: str :param installation: :class:`InstallationDetail` :param rotate_token: bool (optional) - + :returns: :class:`UpdateInstallationResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst index 15ec3790e..242a8fce7 100644 --- a/docs/workspace/marketplace/consumer_listings.rst +++ b/docs/workspace/marketplace/consumer_listings.rst @@ -10,31 +10,31 @@ .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetListingsResponse Get one batch of listings. One may specify up to 50 IDs per request. - + Batch get a published listing in the Databricks Marketplace that the consumer has access to. - + :param ids: List[str] (optional) - + :returns: :class:`BatchGetListingsResponse` .. py:method:: get(id: str) -> GetListingResponse Get listing. - + Get a published listing in the Databricks Marketplace that the consumer has access to. - + :param id: str - + :returns: :class:`GetListingResponse` .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing] List listings. - + List all published listings in the Databricks Marketplace that the consumer has access to. - + :param assets: List[:class:`AssetType`] (optional) Matches any of the following asset types :param categories: List[:class:`Category`] (optional) @@ -51,17 +51,17 @@ Matches any of the following provider ids :param tags: List[:class:`ListingTag`] (optional) Matches any of the following tags - + :returns: Iterator over :class:`Listing` .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing] Search listings.
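As a sketch for the ``list`` method of the Consumer Listings API above (illustrative only; the filter combination is arbitrary):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Browse free, staff-picked listings; the iterator pages through results
    # via `page_token` for you.
    for listing in w.consumer_listings.list(is_free=True, is_staff_pick=True):
        print(listing.id, listing.summary.name)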
- + Search published listings in the Databricks Marketplace that the consumer has access to. This query supports a variety of different search parameters and performs fuzzy matching. - + :param query: str Fuzzy matches query :param assets: List[:class:`AssetType`] (optional) @@ -74,6 +74,6 @@ :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - + :returns: Iterator over :class:`Listing` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_personalization_requests.rst b/docs/workspace/marketplace/consumer_personalization_requests.rst index 8624871ca..63ead75d3 100644 --- a/docs/workspace/marketplace/consumer_personalization_requests.rst +++ b/docs/workspace/marketplace/consumer_personalization_requests.rst @@ -9,9 +9,9 @@ .. py:method:: create(listing_id: str, intended_use: str, accepted_consumer_terms: ConsumerTerms [, comment: Optional[str], company: Optional[str], first_name: Optional[str], is_from_lighthouse: Optional[bool], last_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType]]) -> CreatePersonalizationRequestResponse Create a personalization request. - + Create a personalization request for a listing. - + :param listing_id: str :param intended_use: str :param accepted_consumer_terms: :class:`ConsumerTerms` @@ -21,30 +21,30 @@ :param is_from_lighthouse: bool (optional) :param last_name: str (optional) :param recipient_type: :class:`DeltaSharingRecipientType` (optional) - + :returns: :class:`CreatePersonalizationRequestResponse` .. py:method:: get(listing_id: str) -> GetPersonalizationRequestResponse Get the personalization request for a listing. - + Get the personalization request for a listing. Each consumer can make at *most* one personalization request for a listing. - + :param listing_id: str - + :returns: :class:`GetPersonalizationRequestResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest] List all personalization requests. - + List personalization requests for a consumer across all listings. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_providers.rst b/docs/workspace/marketplace/consumer_providers.rst index 615bf0752..13cca357e 100644 --- a/docs/workspace/marketplace/consumer_providers.rst +++ b/docs/workspace/marketplace/consumer_providers.rst @@ -9,34 +9,34 @@ .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetProvidersResponse Get one batch of providers. One may specify up to 50 IDs per request. - + Batch get a provider in the Databricks Marketplace with at least one visible listing. - + :param ids: List[str] (optional) - + :returns: :class:`BatchGetProvidersResponse` .. py:method:: get(id: str) -> GetProviderResponse Get a provider. - + Get a provider in the Databricks Marketplace with at least one visible listing. - + :param id: str - + :returns: :class:`GetProviderResponse` .. py:method:: list( [, is_featured: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] List providers. - + List all providers in the Databricks Marketplace with at least one visible listing. 
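The two read paths just described, batch and paged, might look as follows; the parameter list continues below. The `consumer_providers` accessor name and the IDs are assumptions::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Fetch up to 50 providers in a single batch call (IDs are placeholders).
    batch = w.consumer_providers.batch_get(ids=["<provider-id-1>", "<provider-id-2>"])

    # Or iterate featured providers one page at a time.
    for provider in w.consumer_providers.list(is_featured=True):
        print(provider)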
- + :param is_featured: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_exchange_filters.rst b/docs/workspace/marketplace/provider_exchange_filters.rst index 6c2254acd..ceca51e63 100644 --- a/docs/workspace/marketplace/provider_exchange_filters.rst +++ b/docs/workspace/marketplace/provider_exchange_filters.rst @@ -9,46 +9,46 @@ .. py:method:: create(filter: ExchangeFilter) -> CreateExchangeFilterResponse Create a new exchange filter. - + Add an exchange filter. - + :param filter: :class:`ExchangeFilter` - + :returns: :class:`CreateExchangeFilterResponse` .. py:method:: delete(id: str) Delete an exchange filter. - + Delete an exchange filter. - + :param id: str - - + + .. py:method:: list(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeFilter] List exchange filters. - + List exchange filters. - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeFilter` .. py:method:: update(id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse Update exchange filter. - + Update an exchange filter. - + :param id: str :param filter: :class:`ExchangeFilter` - + :returns: :class:`UpdateExchangeFilterResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_exchanges.rst b/docs/workspace/marketplace/provider_exchanges.rst index edaae76e1..d53fd823d 100644 --- a/docs/workspace/marketplace/provider_exchanges.rst +++ b/docs/workspace/marketplace/provider_exchanges.rst @@ -9,105 +9,105 @@ .. py:method:: add_listing_to_exchange(listing_id: str, exchange_id: str) -> AddExchangeForListingResponse Add an exchange for listing. - + Associate an exchange with a listing. - + :param listing_id: str :param exchange_id: str - + :returns: :class:`AddExchangeForListingResponse` .. py:method:: create(exchange: Exchange) -> CreateExchangeResponse Create an exchange. - + Create an exchange. - + :param exchange: :class:`Exchange` - + :returns: :class:`CreateExchangeResponse` .. py:method:: delete(id: str) Delete an exchange. - + This removes an exchange from the Marketplace. - + :param id: str - - + + .. py:method:: delete_listing_from_exchange(id: str) Remove an exchange for listing. - + Disassociate an exchange from a listing. - + :param id: str - - + + .. py:method:: get(id: str) -> GetExchangeResponse Get an exchange. - + Get an exchange. - + :param id: str - + :returns: :class:`GetExchangeResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Exchange] List exchanges. - + List exchanges visible to the provider. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Exchange` .. py:method:: list_exchanges_for_listing(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing] List exchanges for listing. - + List exchanges associated with a listing. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` .. py:method:: list_listings_for_exchange(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing] List listings for exchange.
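Before the listing details below, a sketch tying the exchange methods above together. The `provider_exchanges` accessor, the `Exchange(name=...)` constructor field, and all IDs are assumptions::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.marketplace import Exchange

    w = WorkspaceClient()

    # Create an exchange, then associate an existing listing with it.
    w.provider_exchanges.create(exchange=Exchange(name="partner-exchange"))
    w.provider_exchanges.add_listing_to_exchange(listing_id="<listing-id>", exchange_id="<exchange-id>")

    # Enumerate the listings now attached to the exchange.
    for el in w.provider_exchanges.list_listings_for_exchange(exchange_id="<exchange-id>"):
        print(el)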
- + List listings associated with an exchange - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` .. py:method:: update(id: str, exchange: Exchange) -> UpdateExchangeResponse Update exchange. - + Update an exchange - + :param id: str :param exchange: :class:`Exchange` - + :returns: :class:`UpdateExchangeResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_files.rst b/docs/workspace/marketplace/provider_files.rst index 413936020..f719ca65f 100644 --- a/docs/workspace/marketplace/provider_files.rst +++ b/docs/workspace/marketplace/provider_files.rst @@ -9,48 +9,48 @@ .. py:method:: create(file_parent: FileParent, marketplace_file_type: MarketplaceFileType, mime_type: str [, display_name: Optional[str]]) -> CreateFileResponse Create a file. - + Create a file. Currently, only provider icons and attached notebooks are supported. - + :param file_parent: :class:`FileParent` :param marketplace_file_type: :class:`MarketplaceFileType` :param mime_type: str :param display_name: str (optional) - + :returns: :class:`CreateFileResponse` .. py:method:: delete(file_id: str) Delete a file. - + Delete a file - + :param file_id: str - - + + .. py:method:: get(file_id: str) -> GetFileResponse Get a file. - + Get a file - + :param file_id: str - + :returns: :class:`GetFileResponse` .. py:method:: list(file_parent: FileParent [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FileInfo] List files. - + List files attached to a parent entity. - + :param file_parent: :class:`FileParent` :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FileInfo` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_listings.rst b/docs/workspace/marketplace/provider_listings.rst index dcfd45dd8..d26c5293e 100644 --- a/docs/workspace/marketplace/provider_listings.rst +++ b/docs/workspace/marketplace/provider_listings.rst @@ -10,56 +10,56 @@ .. py:method:: create(listing: Listing) -> CreateListingResponse Create a listing. - + Create a new listing - + :param listing: :class:`Listing` - + :returns: :class:`CreateListingResponse` .. py:method:: delete(id: str) Delete a listing. - + Delete a listing - + :param id: str - - + + .. py:method:: get(id: str) -> GetListingResponse Get a listing. - + Get a listing - + :param id: str - + :returns: :class:`GetListingResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Listing] List listings. - + List listings owned by this provider - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Listing` .. py:method:: update(id: str, listing: Listing) -> UpdateListingResponse Update listing. - + Update a listing - + :param id: str :param listing: :class:`Listing` - + :returns: :class:`UpdateListingResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_personalization_requests.rst b/docs/workspace/marketplace/provider_personalization_requests.rst index b9b5a0174..32cdbdbb3 100644 --- a/docs/workspace/marketplace/provider_personalization_requests.rst +++ b/docs/workspace/marketplace/provider_personalization_requests.rst @@ -10,27 +10,27 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest] All personalization requests across all listings. - + List personalization requests to this provider. 
This will return all personalization requests, regardless of which listing they are for. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` .. py:method:: update(listing_id: str, request_id: str, status: PersonalizationRequestStatus [, reason: Optional[str], share: Optional[ShareInfo]]) -> UpdatePersonalizationRequestResponse Update personalization request status. - + Update personalization request. This method only permits updating the status of the request. - + :param listing_id: str :param request_id: str :param status: :class:`PersonalizationRequestStatus` :param reason: str (optional) :param share: :class:`ShareInfo` (optional) - + :returns: :class:`UpdatePersonalizationRequestResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst index f77b9d436..cc29e089f 100644 --- a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst +++ b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst @@ -9,42 +9,42 @@ .. py:method:: create() -> ProviderAnalyticsDashboard Create provider analytics dashboard. - + Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the Lakeview dashboard id. - + :returns: :class:`ProviderAnalyticsDashboard` .. py:method:: get() -> ListProviderAnalyticsDashboardResponse Get provider analytics dashboard. - + Get provider analytics dashboard. - + :returns: :class:`ListProviderAnalyticsDashboardResponse` .. py:method:: get_latest_version() -> GetLatestVersionProviderAnalyticsDashboardResponse Get latest version of provider analytics dashboard. - + Get latest version of provider analytics dashboard. - + :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse` .. py:method:: update(id: str [, version: Optional[int]]) -> UpdateProviderAnalyticsDashboardResponse Update provider analytics dashboard. - + Update provider analytics dashboard. - + :param id: str id is immutable property and can't be updated. :param version: int (optional) this is the version of the dashboard template we want to update our user to current expectation is that it should be equal to latest version of the dashboard template - + :returns: :class:`UpdateProviderAnalyticsDashboardResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_providers.rst b/docs/workspace/marketplace/provider_providers.rst index ac8a4fdc3..610c9602e 100644 --- a/docs/workspace/marketplace/provider_providers.rst +++ b/docs/workspace/marketplace/provider_providers.rst @@ -9,56 +9,56 @@ .. py:method:: create(provider: ProviderInfo) -> CreateProviderResponse Create a provider. - + Create a provider - + :param provider: :class:`ProviderInfo` - + :returns: :class:`CreateProviderResponse` .. py:method:: delete(id: str) Delete provider. - + Delete provider - + :param id: str - - + + .. py:method:: get(id: str) -> GetProviderResponse Get provider. - + Get provider profile - + :param id: str - + :returns: :class:`GetProviderResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] List providers. - + List provider profiles for account. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` .. py:method:: update(id: str, provider: ProviderInfo) -> UpdateProviderResponse Update provider. 
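Before the provider-update details below, a sketch of the personalization-request review loop described above. The `provider_personalization_requests` accessor, both IDs, and the `FULFILLED` enum member are assumptions, not confirmed by this page::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.marketplace import PersonalizationRequestStatus

    w = WorkspaceClient()

    # Review open personalization requests across all listings.
    for req in w.provider_personalization_requests.list():
        print(req)

    # Resolve one request; only the status (plus an optional reason) can be updated.
    w.provider_personalization_requests.update(
        listing_id="<listing-id>",
        request_id="<request-id>",
        status=PersonalizationRequestStatus.FULFILLED,  # assumed member name
        reason="approved for evaluation",
    )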
- + Update provider profile - + :param id: str :param provider: :class:`ProviderInfo` - + :returns: :class:`UpdateProviderResponse` \ No newline at end of file diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst index f18915885..d0ca3e0f6 100644 --- a/docs/workspace/ml/experiments.rst +++ b/docs/workspace/ml/experiments.rst @@ -7,7 +7,7 @@ Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server. - + Experiments are located in the workspace file tree. You manage experiments using the same tools you use to manage other workspace objects such as folders, notebooks, and libraries. @@ -30,13 +30,13 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) Create experiment. - + Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that another experiment with the same name does not already exist and fails if another experiment with the same name already exists. - + Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists. - + :param name: str Experiment name. :param artifact_location: str (optional) @@ -47,14 +47,14 @@ depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to support up to 20 tags per request. - + :returns: :class:`CreateExperimentResponse` .. py:method:: create_logged_model(experiment_id: str [, model_type: Optional[str], name: Optional[str], params: Optional[List[LoggedModelParameter]], source_run_id: Optional[str], tags: Optional[List[LoggedModelTag]]]) -> CreateLoggedModelResponse Create a logged model. - + :param experiment_id: str The ID of the experiment that owns the model. :param model_type: str (optional) @@ -67,7 +67,7 @@ The ID of the run that created the model. :param tags: List[:class:`LoggedModelTag`] (optional) Tags attached to the model. - + :returns: :class:`CreateLoggedModelResponse` @@ -97,11 +97,11 @@ w.experiments.delete_run(run_id=created.run.info.run_id) Create a run. - + Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and `mlflowRunTag` associated with a single execution. - + :param experiment_id: str (optional) ID of the associated experiment. :param run_name: str (optional) @@ -113,65 +113,65 @@ :param user_id: str (optional) ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in a future MLflow release. Use 'mlflow.user' tag instead. - + :returns: :class:`CreateRunResponse` .. py:method:: delete_experiment(experiment_id: str) Delete an experiment. - + Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with the experiment are also deleted. - + :param experiment_id: str ID of the associated experiment. - - + + .. py:method:: delete_logged_model(model_id: str) Delete a logged model. - + :param model_id: str The ID of the logged model to delete. - - + + .. py:method:: delete_logged_model_tag(model_id: str, tag_key: str) Delete a tag on a logged model. 
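A hedged sketch of the logged-model helpers introduced above; the tag-deletion parameters follow below. Every ID, the model name, and the `model_type` value are placeholders::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Register a logged model under an existing experiment.
    w.experiments.create_logged_model(
        experiment_id="<experiment-id>",
        name="candidate-model",
        model_type="Agent",  # illustrative value
    )

    # Remove a single tag from a logged model, or the model itself, by ID.
    w.experiments.delete_logged_model_tag(model_id="<model-id>", tag_key="stage")
    w.experiments.delete_logged_model(model_id="<model-id>")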
- + :param model_id: str The ID of the logged model to delete the tag from. :param tag_key: str The tag key. - - + + .. py:method:: delete_run(run_id: str) Delete a run. - + Marks a run for deletion. - + :param run_id: str ID of the run to delete. - - + + .. py:method:: delete_runs(experiment_id: str, max_timestamp_millis: int [, max_runs: Optional[int]]) -> DeleteRunsResponse Delete runs by creation time. - + Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to delete. :param max_timestamp_millis: int @@ -180,74 +180,54 @@ :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to delete. The maximum allowed value for max_runs is 10000. - + :returns: :class:`DeleteRunsResponse` .. py:method:: delete_tag(run_id: str, key: str) Delete a tag on a run. - + Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. - + :param run_id: str ID of the run that the tag was logged under. Must be provided. :param key: str Name of the tag. Maximum size is 255 bytes. Must be provided. - - + + .. py:method:: finalize_logged_model(model_id: str, status: LoggedModelStatus) -> FinalizeLoggedModelResponse Finalize a logged model. - + :param model_id: str The ID of the logged model to finalize. :param status: :class:`LoggedModelStatus` Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that something - went wrong when logging the model weights / agent code). - + went wrong when logging the model weights / agent code. + :returns: :class:`FinalizeLoggedModelResponse` .. py:method:: get_by_name(experiment_name: str) -> GetExperimentByNameResponse Get an experiment by name. - + Gets metadata for an experiment. - + This endpoint will return deleted experiments, but prefers the active experiment if an active and deleted experiment share the same name. If multiple deleted experiments share the same name, the API will return one of them. - + Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists. - + :param experiment_name: str Name of the associated experiment. - - :returns: :class:`GetExperimentByNameResponse` - - - .. py:method:: get_credentials_for_trace_data_download(request_id: str) -> GetCredentialsForTraceDataDownloadResponse - - Get credentials to download trace data. - - :param request_id: str - The ID of the trace to fetch artifact download credentials for. - - :returns: :class:`GetCredentialsForTraceDataDownloadResponse` - - .. py:method:: get_credentials_for_trace_data_upload(request_id: str) -> GetCredentialsForTraceDataUploadResponse - - Get credentials to upload trace data. - - :param request_id: str - The ID of the trace to fetch artifact upload credentials for. - - :returns: :class:`GetCredentialsForTraceDataUploadResponse` + :returns: :class:`GetExperimentByNameResponse` .. py:method:: get_experiment(experiment_id: str) -> GetExperimentResponse @@ -271,21 +251,21 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) Get an experiment. - + Gets metadata for an experiment. This method works on deleted experiments. - + :param experiment_id: str ID of the associated experiment. - + :returns: :class:`GetExperimentResponse` .. 
py:method:: get_history(metric_key: str [, max_results: Optional[int], page_token: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[Metric] Get metric history for a run. - + Gets a list of all values for the specified metric for a given run. - + :param metric_key: str Name of the metric. :param max_results: int (optional) @@ -298,72 +278,72 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run from which to fetch metric values. This field will be removed in a future MLflow version. - + :returns: Iterator over :class:`Metric` .. py:method:: get_logged_model(model_id: str) -> GetLoggedModelResponse Get a logged model. - + :param model_id: str The ID of the logged model to retrieve. - + :returns: :class:`GetLoggedModelResponse` .. py:method:: get_permission_levels(experiment_id: str) -> GetExperimentPermissionLevelsResponse Get experiment permission levels. - + Gets the permission levels that a user can have on an object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`GetExperimentPermissionLevelsResponse` .. py:method:: get_permissions(experiment_id: str) -> ExperimentPermissions Get experiment permissions. - + Gets the permissions of an experiment. Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`ExperimentPermissions` .. py:method:: get_run(run_id: str [, run_uuid: Optional[str]]) -> GetRunResponse Get a run. - + Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. - + If there are multiple values with the latest timestamp, return the maximum of these values. - + :param run_id: str ID of the run to fetch. Must be provided. :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run to fetch. This field will be removed in a future MLflow version. - + :returns: :class:`GetRunResponse` .. py:method:: list_artifacts( [, page_token: Optional[str], path: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[FileInfo] List artifacts. - + List artifacts for a run. Takes an optional `artifact_path` prefix which if specified, the response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). - + :param page_token: str (optional) The token indicating the page of artifact results to fetch. `page_token` is not supported when listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. @@ -377,7 +357,7 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run whose artifacts to list. This field will be removed in a future MLflow version. - + :returns: Iterator over :class:`FileInfo` @@ -396,9 +376,9 @@ all = w.experiments.list_experiments(ml.ListExperimentsRequest()) List experiments. - + Gets a list of all experiments. - + :param max_results: int (optional) Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If `max_results` is too large, it'll be automatically capped at 1000. 
Callers of this endpoint are @@ -407,75 +387,55 @@ Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. - - :returns: Iterator over :class:`Experiment` - - .. py:method:: list_logged_model_artifacts(model_id: str [, artifact_directory_path: Optional[str], page_token: Optional[str]]) -> ListLoggedModelArtifactsResponse - - List artifacts for a logged model. - - List artifacts for a logged model. Takes an optional ``artifact_directory_path`` prefix which if - specified, the response contains only artifacts with the specified prefix. - - :param model_id: str - The ID of the logged model for which to list the artifacts. - :param artifact_directory_path: str (optional) - Filter artifacts matching this path (a relative path from the root artifact directory). - :param page_token: str (optional) - Token indicating the page of artifact results to fetch. `page_token` is not supported when listing - artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call - `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports - pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). - - :returns: :class:`ListLoggedModelArtifactsResponse` + :returns: Iterator over :class:`Experiment` .. py:method:: log_batch( [, metrics: Optional[List[Metric]], params: Optional[List[Param]], run_id: Optional[str], tags: Optional[List[RunTag]]]) Log a batch of metrics/params/tags for a run. - + Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server will respond with an error (non-200 status code). - + In case of error (due to internal server error or an invalid request), partial data may be written. - + You can write metrics, params, and tags in interleaving fashion, but within a given entity type are guaranteed to follow the order specified in the request body. - + The overwrite behavior for metrics, params, and tags is as follows: - + * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to the set of values for the metric with the provided key. - + * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple tag values with the same key are provided in the same API request, the last-provided tag value is written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent. - + * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will result in an error). However, logging the same param (key, value) is permitted. Specifically, logging a param is idempotent. - + Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB in size and contain: - + * No more than 1000 metrics, params, and tags in total - + * Up to 1000 metrics - + * Up to 100 params - + * Up to 100 tags - + For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900 metrics, 50 params, and 51 tags is invalid. - + The following limits also apply to metric, param, and tag keys and values: - + * Metric keys, param keys, and tag keys can be up to 250 characters in length - + * Parameter and tag values can be up to 250 characters in length - + :param metrics: List[:class:`Metric`] (optional) Metrics to log. 
A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and tags in total. @@ -487,52 +447,52 @@ :param tags: List[:class:`RunTag`] (optional) Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags in total. - - + + .. py:method:: log_inputs(run_id: str [, datasets: Optional[List[DatasetInput]], models: Optional[List[ModelInput]]]) Log inputs to a run. - + **NOTE:** Experimental: This API may change or be removed in a future release without warning. - + Logs inputs, such as datasets and models, to an MLflow Run. - + :param run_id: str ID of the run to log under :param datasets: List[:class:`DatasetInput`] (optional) Dataset inputs :param models: List[:class:`ModelInput`] (optional) Model inputs - - + + .. py:method:: log_logged_model_params(model_id: str [, params: Optional[List[LoggedModelParameter]]]) Log params for a logged model. - + Logs params for a logged model. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training. A param can be logged only once for a logged model, and attempting to overwrite an existing param with a different value will result in an error - + :param model_id: str The ID of the logged model to log params for. :param params: List[:class:`LoggedModelParameter`] (optional) Parameters to attach to the model. - - + + .. py:method:: log_metric(key: str, value: float, timestamp: int [, dataset_digest: Optional[str], dataset_name: Optional[str], model_id: Optional[str], run_id: Optional[str], run_uuid: Optional[str], step: Optional[int]]) Log a metric for a run. - + Log a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be logged multiple times. - + :param key: str Name of the metric. :param value: float @@ -554,48 +514,48 @@ removed in a future MLflow version. :param step: int (optional) Step at which to log the metric - - + + .. py:method:: log_model( [, model_json: Optional[str], run_id: Optional[str]]) Log a model. - + **NOTE:** Experimental: This API may change or be removed in a future release without warning. - + :param model_json: str (optional) MLmodel file in json format. :param run_id: str (optional) ID of the run to log under - - + + .. py:method:: log_outputs(run_id: str [, models: Optional[List[ModelOutput]]]) Log outputs from a run. - + **NOTE**: Experimental: This API may change or be removed in a future release without warning. - + Logs outputs, such as models, from an MLflow Run. - + :param run_id: str The ID of the Run from which to log outputs. :param models: List[:class:`ModelOutput`] (optional) The model outputs from the Run. - - + + .. py:method:: log_param(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]]) Log a param for a run. - + Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only once for a run. - + :param key: str Name of the param. Maximum size is 255 bytes. :param value: str @@ -605,48 +565,48 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the param. This field will be removed in a future MLflow version. - - + + .. py:method:: restore_experiment(experiment_id: str) Restore an experiment. 
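Before the restore details below, the `log_batch` limits above are easiest to see in code. A minimal sketch: the run ID and all values are placeholders, and the `Metric`, `Param`, and `RunTag` field names are assumed to follow the MLflow REST shapes::

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.ml import Metric, Param, RunTag

    w = WorkspaceClient()

    now = int(time.time() * 1000)

    # One request may carry at most 1000 metrics, 100 params, and 100 tags,
    # and no more than 1000 entities in total.
    w.experiments.log_batch(
        run_id="<run-id>",
        metrics=[Metric(key="rmse", value=0.73, timestamp=now, step=0)],
        params=[Param(key="alpha", value="0.5")],
        tags=[RunTag(key="team", value="forecasting")],
    )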
- + Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are also restored. - + Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted. - + :param experiment_id: str ID of the associated experiment. - - + + .. py:method:: restore_run(run_id: str) Restore a run. - + Restores a deleted run. This also restores associated metadata, runs, metrics, params, and tags. - + Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. - + :param run_id: str ID of the run to restore. - - + + .. py:method:: restore_runs(experiment_id: str, min_timestamp_millis: int [, max_runs: Optional[int]]) -> RestoreRunsResponse Restore runs by deletion time. - + Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to restore. :param min_timestamp_millis: int @@ -655,16 +615,16 @@ :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to restore. The maximum allowed value for max_runs is 10000. - + :returns: :class:`RestoreRunsResponse` .. py:method:: search_experiments( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], view_type: Optional[ViewType]]) -> Iterator[Experiment] Search experiments. - + Searches for experiments that satisfy specified search criteria. - + :param filter: str (optional) String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'") :param max_results: int (optional) @@ -677,16 +637,16 @@ Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. - + :returns: Iterator over :class:`Experiment` .. py:method:: search_logged_models( [, datasets: Optional[List[SearchLoggedModelsDataset]], experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[SearchLoggedModelsOrderBy]], page_token: Optional[str]]) -> SearchLoggedModelsResponse Search logged models. - + Search for Logged Models that satisfy specified search criteria. - + :param datasets: List[:class:`SearchLoggedModelsDataset`] (optional) List of datasets on which to apply the metrics filter clauses. For example, a filter with `metrics.accuracy > 0.9` and dataset info with name "test_dataset" means we will return all logged @@ -698,7 +658,7 @@ :param filter: str (optional) A filter expression over logged model info and data that allows returning a subset of logged models. The syntax is a subset of SQL that supports AND'ing together binary operations. - + Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``. :param max_results: int (optional) The maximum number of Logged Models to return. The maximum limit is 50. @@ -706,30 +666,30 @@ The list of columns for ordering the results, with additional fields for sorting criteria. :param page_token: str (optional) The token indicating the page of logged models to fetch. - + :returns: :class:`SearchLoggedModelsResponse` .. 
py:method:: search_runs( [, experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], run_view_type: Optional[ViewType]]) -> Iterator[Run] Search for runs. - + Searches for runs that satisfy expressions. - + Search expressions can use `mlflowMetric` and `mlflowParam` keys. - + :param experiment_ids: List[str] (optional) List of experiment IDs to search over. :param filter: str (optional) A filter expression over params, metrics, and tags, that allows returning a subset of runs. The syntax is a subset of SQL that supports ANDing together binary operations between a param, metric, or tag and a constant. - + Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'` - + You can select columns with special characters (hyphen, space, period, etc.) by using double quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` - + Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`. :param max_results: int (optional) Maximum number of runs desired. Max threshold is 50000 @@ -743,58 +703,58 @@ Token for the current page of runs. :param run_view_type: :class:`ViewType` (optional) Whether to display only active, only deleted, or all runs. Defaults to only active runs. - + :returns: Iterator over :class:`Run` .. py:method:: set_experiment_tag(experiment_id: str, key: str, value: str) Set a tag for an experiment. - + Sets a tag on an experiment. Experiment tags are metadata that can be updated. - + :param experiment_id: str ID of the experiment under which to log the tag. Must be provided. :param key: str Name of the tag. Keys up to 250 bytes in size are supported. :param value: str String value of the tag being logged. Values up to 64KB in size are supported. - - + + .. py:method:: set_logged_model_tags(model_id: str [, tags: Optional[List[LoggedModelTag]]]) Set a tag for a logged model. - + :param model_id: str The ID of the logged model to set the tags on. :param tags: List[:class:`LoggedModelTag`] (optional) The tags to set on the logged model. - - + + .. py:method:: set_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions Set experiment permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` .. py:method:: set_tag(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]]) Set a tag for a run. - + Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. - + :param key: str Name of the tag. Keys up to 250 bytes in size are supported. :param value: str @@ -804,8 +764,8 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the tag. This field will be removed in a future MLflow version. - - + + .. py:method:: update_experiment(experiment_id: str [, new_name: Optional[str]]) @@ -829,27 +789,27 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) Update an experiment. - + Updates experiment metadata. - + :param experiment_id: str ID of the associated experiment. 
:param new_name: str (optional) If provided, the experiment's name is changed to the new name. The new name must be unique. - - + + .. py:method:: update_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions Update experiment permissions. - + Updates the permissions on an experiment. Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` @@ -881,9 +841,9 @@ w.experiments.delete_run(run_id=created.run.info.run_id) Update a run. - + Updates run metadata. - + :param end_time: int (optional) Unix timestamp in milliseconds of when the run ended. :param run_id: str (optional) @@ -895,6 +855,6 @@ MLflow version. :param status: :class:`UpdateRunStatus` (optional) Updated status of the run. - + :returns: :class:`UpdateRunResponse` \ No newline at end of file diff --git a/docs/workspace/ml/forecasting.rst b/docs/workspace/ml/forecasting.rst index 79fca0ffe..79694a709 100644 --- a/docs/workspace/ml/forecasting.rst +++ b/docs/workspace/ml/forecasting.rst @@ -9,9 +9,9 @@ .. py:method:: create_experiment(train_data_path: str, target_column: str, time_column: str, forecast_granularity: str, forecast_horizon: int [, custom_weights_column: Optional[str], experiment_path: Optional[str], future_feature_data_path: Optional[str], holiday_regions: Optional[List[str]], include_features: Optional[List[str]], max_runtime: Optional[int], prediction_data_path: Optional[str], primary_metric: Optional[str], register_to: Optional[str], split_column: Optional[str], timeseries_identifier_columns: Optional[List[str]], training_frameworks: Optional[List[str]]]) -> Wait[ForecastingExperiment] Create a forecasting experiment. - + Creates a serverless forecasting experiment. Returns the experiment ID. - + :param train_data_path: str The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used as training data for the forecasting model. @@ -60,7 +60,7 @@ :param training_frameworks: List[str] (optional) List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', 'DeepAR'. An empty list includes all supported frameworks. - + :returns: Long-running operation waiter for :class:`ForecastingExperiment`. See :method:wait_get_experiment_forecasting_succeeded for more details. @@ -72,12 +72,12 @@ .. py:method:: get_experiment(experiment_id: str) -> ForecastingExperiment Get a forecasting experiment. - + Public RPC to get forecasting experiment - + :param experiment_id: str The unique ID of a forecasting experiment - + :returns: :class:`ForecastingExperiment` diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 23c357275..2d93defa9 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -8,35 +8,35 @@ [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry will be deprecated in the future. - + The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models. .. 
py:method:: approve_transition_request(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> ApproveTransitionRequestResponse Approve transition request. - + Approves a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`ApproveTransitionRequestResponse` @@ -67,17 +67,17 @@ w.model_registry.delete_comment(id=created.comment.id) Post a comment. - + Posts a comment on a model version. A comment can be submitted either by a user or programmatically to display relevant information about the model. For example, test results or deployment errors. - + :param name: str Name of the model. :param version: str Version of the model. :param comment: str User-provided comment on the action. - + :returns: :class:`CreateCommentResponse` @@ -94,23 +94,21 @@ w = WorkspaceClient() - model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - - mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") Create a model. - + Creates a new registered model with the name specified in the request body. - + Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. - + :param name: str Register models under this name :param description: str (optional) Optional description for registered model. :param tags: List[:class:`ModelTag`] (optional) Additional metadata for registered model. - + :returns: :class:`CreateModelResponse` @@ -129,12 +127,12 @@ model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Create a model version. - + Creates a model version. - + :param name: str Register model under this name :param source: str @@ -149,33 +147,33 @@ hosted at another instance of MLflow. :param tags: List[:class:`ModelVersionTag`] (optional) Additional metadata for model version. - + :returns: :class:`CreateModelVersionResponse` .. py:method:: create_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> CreateTransitionRequestResponse Make a transition request. - + Creates a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`CreateTransitionRequestResponse` @@ -203,116 +201,118 @@ w.model_registry.delete_webhook(id=created.webhook.id) Create a webhook. - + **NOTE**: This endpoint is in Public Preview. - + Creates a registry webhook. 
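A hedged sketch of creating a registry-wide webhook; the supported events are enumerated below. Omitting `model_name` is what makes the webhook registry-wide; the `HttpUrlSpec(url=...)` field and the enum member spelling are assumptions, while the cleanup call mirrors the usage example above::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.ml import HttpUrlSpec, RegistryWebhookEvent

    w = WorkspaceClient()

    # No model_name: fires for the listed events on all registered models.
    created = w.model_registry.create_webhook(
        events=[RegistryWebhookEvent.MODEL_VERSION_CREATED],  # assumed member name
        http_url_spec=HttpUrlSpec(url="https://example.com/hook"),
        description="notify on new model versions",
    )

    w.model_registry.delete_webhook(id=created.webhook.id)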
- + :param events: List[:class:`RegistryWebhookEvent`] Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. - + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param description: str (optional) User-specified description for the webhook. :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param model_name: str (optional) - Name of the model whose events would trigger this webhook. + If model name is not specified, a registry-wide webhook is created that listens for the specified + events across all versions of all registered models. :param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. - + :returns: :class:`CreateWebhookResponse` .. py:method:: delete_comment(id: str) Delete a comment. - + Deletes a comment on a model version. - + :param id: str - - + Unique identifier of an activity + + .. py:method:: delete_model(name: str) Delete a model. - + Deletes a registered model. - + :param name: str Registered model unique name identifier. - - + + .. py:method:: delete_model_tag(name: str, key: str) Delete a model tag. - + Deletes the tag for a registered model. - + :param name: str Name of the registered model that the tag was logged under. :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. - - + + .. py:method:: delete_model_version(name: str, version: str) Delete a model version. - + Deletes a model version. - + :param name: str Name of the registered model :param version: str Model version number - - + + .. py:method:: delete_model_version_tag(name: str, version: str, key: str) Delete a model version tag. - + Deletes a model version tag. - + :param name: str Name of the registered model that the tag was logged under. :param version: str @@ -320,64 +320,64 @@ :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. - - + + .. 
py:method:: delete_transition_request(name: str, version: str, stage: DeleteTransitionRequestStage, creator: str [, comment: Optional[str]]) Delete a transition request. - + Cancels a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`DeleteTransitionRequestStage` Target stage of the transition request. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param creator: str Username of the user who created this request. Of the transition requests matching the specified details, only the one transition created by this user will be deleted. :param comment: str (optional) User-provided comment on the action. - - + + .. py:method:: delete_webhook( [, id: Optional[str]]) Delete a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Deletes a registry webhook. - + :param id: str (optional) Webhook ID required to delete a registry webhook. - - + + .. py:method:: get_latest_versions(name: str [, stages: Optional[List[str]]]) -> Iterator[ModelVersion] Get the latest version. - + Gets the latest version of a registered model. - + :param name: str Registered model unique name identifier. :param stages: List[str] (optional) List of stages. - + :returns: Iterator over :class:`ModelVersion` @@ -399,69 +399,69 @@ model = w.model_registry.get_model(name=created.registered_model.name) Get model. - + Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel - + :param name: str Registered model unique name identifier. - + :returns: :class:`GetModelResponse` .. py:method:: get_model_version(name: str, version: str) -> GetModelVersionResponse Get a model version. - + Get a model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionResponse` .. py:method:: get_model_version_download_uri(name: str, version: str) -> GetModelVersionDownloadUriResponse Get a model version URI. - + Gets a URI to download the model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionDownloadUriResponse` .. py:method:: get_permission_levels(registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse Get registered model permission levels. - + Gets the permission levels that a user can have on an object. - + :param registered_model_id: str The registered model for which to get or manage permissions. - + :returns: :class:`GetRegisteredModelPermissionLevelsResponse` .. py:method:: get_permissions(registered_model_id: str) -> RegisteredModelPermissions Get registered model permissions. - + Gets the permissions of a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. - + :returns: :class:`RegisteredModelPermissions` @@ -480,28 +480,28 @@ all = w.model_registry.list_models(ml.ListModelsRequest()) List models. - + Lists all available registered models, up to the limit specified in __max_results__. 
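A minimal sketch of paging through registered models; the paging parameters are described just below. The `name` attribute access on each returned model is an assumption::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # The iterator follows pagination tokens transparently; max_results caps
    # each page (the documented threshold is 1000).
    for model in w.model_registry.list_models(max_results=100):
        print(model.name)  # assumed attribute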
- + :param max_results: int (optional) Maximum number of registered models desired. Max threshold is 1000. :param page_token: str (optional) Pagination token to go to the next page based on a previous query. - + :returns: Iterator over :class:`Model` .. py:method:: list_transition_requests(name: str, version: str) -> Iterator[Activity] List transition requests. - + Gets a list of all open stage transition requests for the model version. - + :param name: str Name of the model. :param version: str Version of the model. - + :returns: Iterator over :class:`Activity` @@ -520,11 +520,11 @@ all = w.model_registry.list_webhooks(ml.ListWebhooksRequest()) List registry webhooks. - + **NOTE:** This endpoint is in Public Preview. - + Lists all registry webhooks. - + :param events: List[:class:`RegistryWebhookEvent`] (optional) If `events` is specified, any webhook with one or more of the specified trigger events is included in the output. If `events` is not specified, webhooks of all event types are included in the output. @@ -533,56 +533,56 @@ associated model. :param page_token: str (optional) Token indicating the page of artifact results to fetch - + :returns: Iterator over :class:`RegistryWebhook` .. py:method:: reject_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> RejectTransitionRequestResponse Reject a transition request. - + Rejects a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`RejectTransitionRequestResponse` .. py:method:: rename_model(name: str [, new_name: Optional[str]]) -> RenameModelResponse Rename a model. - + Renames a registered model. - + :param name: str Registered model unique name identifier. :param new_name: str (optional) If provided, updates the name for this `registered_model`. - + :returns: :class:`RenameModelResponse` .. py:method:: search_model_versions( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[ModelVersion] Searches model versions. - + Searches for specific model versions based on the supplied __filter__. - + :param filter: str (optional) String filter condition, like "name='my-model-name'". Must be a single boolean condition, with string values wrapped in single quotes. @@ -594,16 +594,16 @@ timestamp, followed by name ASC, followed by version DESC. :param page_token: str (optional) Pagination token to go to next page based on previous search query. - + :returns: Iterator over :class:`ModelVersion` .. py:method:: search_models( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[Model] Search models. - + Search for registered models based on the specified __filter__. - + :param filter: str (optional) String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single @@ -616,16 +616,16 @@ name ASC. :param page_token: str (optional) Pagination token to go to the next page based on a previous search query. 
- + :returns: Iterator over :class:`Model` .. py:method:: set_model_tag(name: str, key: str, value: str) Set a tag. - + Sets a tag on a registered model. - + :param name: str Unique name of the model. :param key: str @@ -635,16 +635,16 @@ :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. - - + + .. py:method:: set_model_version_tag(name: str, version: str, key: str, value: str) Set a version tag. - + Sets a model version tag. - + :param name: str Unique name of the model. :param version: str @@ -656,69 +656,69 @@ :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. - - + + .. py:method:: set_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions Set registered model permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` .. py:method:: test_registry_webhook(id: str [, event: Optional[RegistryWebhookEvent]]) -> TestRegistryWebhookResponse Test a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Tests a registry webhook. - + :param id: str Webhook ID :param event: :class:`RegistryWebhookEvent` (optional) If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the test trigger uses a randomly chosen event associated with the webhook. - + :returns: :class:`TestRegistryWebhookResponse` .. py:method:: transition_stage(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> TransitionStageResponse Transition a stage. - + Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded.", - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`TransitionStageResponse` @@ -751,14 +751,14 @@ w.model_registry.delete_comment(id=created.comment.id) Update a comment. - + Post an edit to a comment on a model version. - + :param id: str Unique identifier of an activity :param comment: str User-provided comment on the action. - + :returns: :class:`UpdateCommentResponse` @@ -785,15 +785,15 @@ ) Update model. - + Updates a registered model. - + :param name: str Registered model unique name identifier. :param description: str (optional) If provided, updates the description for this `registered_model`. 
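Pulling together the stage transition documented above, a hedged sketch; the model name, the version, and the `Stage.STAGING` member spelling are assumptions::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.ml import Stage

    w = WorkspaceClient()

    # Move version 1 to Staging, archiving whatever currently occupies that
    # stage, with a comment recorded alongside the transition.
    w.model_registry.transition_stage(
        name="<model-name>",
        version="1",
        stage=Stage.STAGING,  # assumed member name
        archive_existing_versions=True,
        comment="promoted after offline evaluation",
    )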
- - + + .. py:method:: update_model_version(name: str, version: str [, description: Optional[str]]) @@ -820,30 +820,30 @@ ) Update model version. - + Updates the model version. - + :param name: str Name of the registered model :param version: str Model version number :param description: str (optional) If provided, updates the description for this `registered_model`. - - + + .. py:method:: update_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions Update registered model permissions. - + Updates the permissions on a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` @@ -873,11 +873,11 @@ w.model_registry.delete_webhook(id=created.webhook.id) Update a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Updates a registry webhook. - + :param id: str Webhook ID :param description: str (optional) @@ -885,42 +885,42 @@ :param events: List[:class:`RegistryWebhookEvent`] (optional) Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. - + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. - - + + \ No newline at end of file diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index 5b52818da..bf85f808e 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -5,17 +5,17 @@ .. py:class:: PipelinesAPI The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines. - + Delta Live Tables is a framework for building reliable, maintainable, and testable data processing pipelines. 
You define the transformations to perform on your data, and Delta Live Tables manages task orchestration, cluster management, monitoring, data quality, and error handling. - + Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables manages how your data is transformed based on a target schema you define for each processing step. You can also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], event_log: Optional[EventLogSpec], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], event_log: Optional[EventLogSpec], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse Usage: @@ -52,10 +52,10 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) Create a pipeline. - + Creates a new data processing pipeline based on the requested configuration. If successful, this method returns the ID of the new pipeline. - + :param allow_duplicate_names: bool (optional) If false, deployment will fail if name conflicts with that of another pipeline. :param budget_policy_id: str (optional) @@ -107,7 +107,7 @@ :param run_as: :class:`RunAs` (optional) Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. :param schema: str (optional) @@ -116,25 +116,29 @@ Whether serverless compute is enabled for this pipeline. :param storage: str (optional) DBFS root directory for storing checkpoints and tables. 
+ :param tags: Dict[str,str] (optional) + A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and + are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline. :param target: str (optional) Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. - + :returns: :class:`CreatePipelineResponse` .. py:method:: delete(pipeline_id: str) Delete a pipeline. - - Deletes a pipeline. - + + Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and + its tables. You cannot undo this action. + :param pipeline_id: str - - + + .. py:method:: get(pipeline_id: str) -> GetPipelineResponse @@ -176,47 +180,47 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) Get a pipeline. - + :param pipeline_id: str - + :returns: :class:`GetPipelineResponse` .. py:method:: get_permission_levels(pipeline_id: str) -> GetPipelinePermissionLevelsResponse Get pipeline permission levels. - + Gets the permission levels that a user can have on an object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. - + :returns: :class:`GetPipelinePermissionLevelsResponse` .. py:method:: get_permissions(pipeline_id: str) -> PipelinePermissions Get pipeline permissions. - + Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. - + :returns: :class:`PipelinePermissions` .. py:method:: get_update(pipeline_id: str, update_id: str) -> GetUpdateResponse Get a pipeline update. - + Gets an update from an active pipeline. - + :param pipeline_id: str The ID of the pipeline. :param update_id: str The ID of the update. - + :returns: :class:`GetUpdateResponse` @@ -259,16 +263,16 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) List pipeline events. - + Retrieves events for a pipeline. - + :param pipeline_id: str The pipeline to return events for. :param filter: str (optional) Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp > 'TIMESTAMP' (or >=,<,<=,=) - + Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp> '2021-07-22T06:37:33.083Z' :param max_results: int (optional) @@ -282,7 +286,7 @@ Page token returned by previous call. This field is mutually exclusive with all fields in this request except max_results. An error is returned if any fields other than max_results are set when this field is set. - + :returns: Iterator over :class:`PipelineEvent` @@ -301,16 +305,16 @@ all = w.pipelines.list_pipelines(pipelines.ListPipelinesRequest()) List pipelines. - + Lists pipelines defined in the Delta Live Tables system. - + :param filter: str (optional) Select a subset of results based on the specified criteria. The supported filters are: - + * `notebook=''` to select pipelines that reference the provided notebook path. * `name LIKE '[pattern]'` to select pipelines with a name that matches pattern. 
Wildcards are supported, for example: `name LIKE '%shopping%'` - + Composite filters are not supported. This field is optional. :param max_results: int (optional) The maximum number of entries to return in a single page. The system may return fewer than @@ -322,16 +326,16 @@ default is id asc. This field is optional. :param page_token: str (optional) Page token returned by previous call - + :returns: Iterator over :class:`PipelineStateInfo` .. py:method:: list_updates(pipeline_id: str [, max_results: Optional[int], page_token: Optional[str], until_update_id: Optional[str]]) -> ListUpdatesResponse List pipeline updates. - + List updates for an active pipeline. - + :param pipeline_id: str The pipeline to return updates for. :param max_results: int (optional) @@ -340,31 +344,31 @@ Page token returned by previous call :param until_update_id: str (optional) If present, returns updates until and including this update_id. - + :returns: :class:`ListUpdatesResponse` .. py:method:: set_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions Set pipeline permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional) - + :returns: :class:`PipelinePermissions` .. py:method:: start_update(pipeline_id: str [, cause: Optional[StartUpdateCause], full_refresh: Optional[bool], full_refresh_selection: Optional[List[str]], refresh_selection: Optional[List[str]], validate_only: Optional[bool]]) -> StartUpdateResponse Start a pipeline. - + Starts a new update for the pipeline. If there is already an active update for the pipeline, the request will fail and the active update will remain running. - + :param pipeline_id: str :param cause: :class:`StartUpdateCause` (optional) What triggered this update. @@ -381,19 +385,19 @@ :param validate_only: bool (optional) If true, this update only validates the correctness of pipeline source code but does not materialize or publish any datasets. - + :returns: :class:`StartUpdateResponse` .. py:method:: stop(pipeline_id: str) -> Wait[GetPipelineResponse] Stop a pipeline. - + Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this request is a no-op. - + :param pipeline_id: str - + :returns: Long-running operation waiter for :class:`GetPipelineResponse`. See :method:wait_get_pipeline_idle for more details. @@ -402,7 +406,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. 
py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], event_log: Optional[EventLogSpec], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) + .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], event_log: Optional[EventLogSpec], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], root_path: Optional[str], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], tags: Optional[Dict[str, str]], target: Optional[str], trigger: Optional[PipelineTrigger]]) Usage: @@ -455,9 +459,9 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) Edit a pipeline. - + Updates a pipeline with the supplied configuration. - + :param pipeline_id: str Unique identifier for this pipeline. :param allow_duplicate_names: bool (optional) @@ -513,7 +517,7 @@ :param run_as: :class:`RunAs` (optional) Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. :param schema: str (optional) @@ -522,26 +526,29 @@ Whether serverless compute is enabled for this pipeline. :param storage: str (optional) DBFS root directory for storing checkpoints and tables. + :param tags: Dict[str,str] (optional) + A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and + are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline. :param target: str (optional) Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. - - + + .. 
py:method:: update_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions

      Update pipeline permissions.
-
+
      Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
-
+
      :param pipeline_id: str
        The pipeline for which to get or manage permissions.
      :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
-
+
      :returns: :class:`PipelinePermissions`


diff --git a/docs/workspace/qualitymonitorv2/index.rst b/docs/workspace/qualitymonitorv2/index.rst
new file mode 100644
index 000000000..ccbac6108
--- /dev/null
+++ b/docs/workspace/qualitymonitorv2/index.rst
@@ -0,0 +1,10 @@
+
+Quality Monitor
+===============
+
+Manage quality monitors on Unity Catalog objects.
+
+.. toctree::
+  :maxdepth: 1
+
+  quality_monitor_v2
\ No newline at end of file
diff --git a/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst b/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst
new file mode 100644
index 000000000..129e75c14
--- /dev/null
+++ b/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst
@@ -0,0 +1,73 @@
+``w.quality_monitor_v2``: QualityMonitor.v2
+===========================================
+.. currentmodule:: databricks.sdk.service.qualitymonitorv2
+
+.. py:class:: QualityMonitorV2API
+
+    Manage data quality of UC objects (currently supports `schema`)
+
+    .. py:method:: create_quality_monitor(quality_monitor: QualityMonitor) -> QualityMonitor
+
+        Create a quality monitor.
+
+        Create a quality monitor on a UC object
+
+        :param quality_monitor: :class:`QualityMonitor`
+
+        :returns: :class:`QualityMonitor`
+
+
+    .. py:method:: delete_quality_monitor(object_type: str, object_id: str)
+
+        Delete a quality monitor.
+
+        Delete a quality monitor on a UC object
+
+        :param object_type: str
+          The type of the monitored object. Can be one of the following: schema.
+        :param object_id: str
+          The UUID of the request object. For example, a schema ID.
+
+
+
+
+    .. py:method:: get_quality_monitor(object_type: str, object_id: str) -> QualityMonitor
+
+        Read a quality monitor.
+
+        Read a quality monitor on a UC object
+
+        :param object_type: str
+          The type of the monitored object. Can be one of the following: schema.
+        :param object_id: str
+          The UUID of the request object. For example, a schema ID.
+
+        :returns: :class:`QualityMonitor`
+
+
+    .. py:method:: list_quality_monitor( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[QualityMonitor]
+
+        List quality monitors.
+
+        (Unimplemented) List quality monitors
+
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+
+        :returns: Iterator over :class:`QualityMonitor`
+
+
+    .. py:method:: update_quality_monitor(object_type: str, object_id: str, quality_monitor: QualityMonitor) -> QualityMonitor
+
+        Update a quality monitor.
+
+        (Unimplemented) Update a quality monitor on a UC object
+
+        :param object_type: str
+          The type of the monitored object. Can be one of the following: schema.
+        :param object_id: str
+          The UUID of the request object. For example, a schema ID.
+        :param quality_monitor: :class:`QualityMonitor`
+
+        :returns: :class:`QualityMonitor`
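+
+        A minimal usage sketch, assuming a configured workspace client and a hypothetical schema UUID;
+        the `QualityMonitor` field names follow the dataclasses in this module:
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.qualitymonitorv2 import QualityMonitor
+
+            w = WorkspaceClient()
+
+            # "schema" is currently the only supported object type; the id is illustrative.
+            created = w.quality_monitor_v2.create_quality_monitor(
+                quality_monitor=QualityMonitor(object_type="schema", object_id="<schema-uuid>")
+            )
+            fetched = w.quality_monitor_v2.get_quality_monitor(
+                object_type="schema", object_id=created.object_id
+            )
+
\ No newline at end of file
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index 1e2e32884..ad7da939a 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -5,7 +5,7 @@
 .. 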
py:class:: ServingEndpointsExt The Serving Endpoints API allows you to create, update, and delete model serving endpoints. - + You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog. Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means the endpoints and associated compute resources are fully managed by Databricks and will not appear in your @@ -18,21 +18,21 @@ .. py:method:: build_logs(name: str, served_model_name: str) -> BuildLogsResponse Get build logs for a served model. - + Retrieves the build logs associated with the provided served model. - + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. :param served_model_name: str The name of the served model that build logs will be retrieved for. This field is required. - + :returns: :class:`BuildLogsResponse` .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] Create a new serving endpoint. - + :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -51,7 +51,7 @@ Enable route optimization for the serving endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. @@ -63,7 +63,7 @@ .. py:method:: create_provisioned_throughput_endpoint(name: str, config: PtEndpointCoreConfig [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] Create a new PT serving endpoint. - + :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -75,7 +75,7 @@ The budget policy associated with the endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. @@ -87,34 +87,34 @@ .. py:method:: delete(name: str) Delete a serving endpoint. - + :param name: str - - + + .. py:method:: export_metrics(name: str) -> ExportMetricsResponse Get metrics of a serving endpoint. - + Retrieves the metrics associated with the provided serving endpoint in either Prometheus or OpenMetrics exposition format. - + :param name: str The name of the serving endpoint to retrieve metrics for. This field is required. - + :returns: :class:`ExportMetricsResponse` .. py:method:: get(name: str) -> ServingEndpointDetailed Get a single serving endpoint. - + Retrieves the details for a single serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. - + :returns: :class:`ServingEndpointDetailed` @@ -127,38 +127,38 @@ .. 
py:method:: get_open_api(name: str) -> GetOpenApiResponse

      Get the schema for a serving endpoint.
-
+
      Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for
      the supported paths, input and output format and datatypes.
-
+
      :param name: str
        The name of the serving endpoint that the served model belongs to. This field is required.
-
+
      :returns: :class:`GetOpenApiResponse`


   .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse

      Get serving endpoint permission levels.
-
+
      Gets the permission levels that a user can have on an object.
-
+
      :param serving_endpoint_id: str
        The serving endpoint for which to get or manage permissions.
-
+
      :returns: :class:`GetServingEndpointPermissionLevelsResponse`


   .. py:method:: get_permissions(serving_endpoint_id: str) -> ServingEndpointPermissions

      Get serving endpoint permissions.
-
+
      Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root
      object.
-
+
      :param serving_endpoint_id: str
        The serving endpoint for which to get or manage permissions.
-
+
      :returns: :class:`ServingEndpointPermissions`

@@ -185,61 +185,61 @@

   .. py:method:: list() -> Iterator[ServingEndpoint]

      Get all serving endpoints.
-
+
      :returns: Iterator over :class:`ServingEndpoint`


   .. py:method:: logs(name: str, served_model_name: str) -> ServerLogsResponse

      Get the latest logs for a served model.
-
+
      Retrieves the service logs associated with the provided served model.
-
+
      :param name: str
        The name of the serving endpoint that the served model belongs to. This field is required.
      :param served_model_name: str
        The name of the served model that logs will be retrieved for. This field is required.
-
+
      :returns: :class:`ServerLogsResponse`


   .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags

      Update tags of a serving endpoint.
-
+
      Used to batch add and delete tags from a serving endpoint with a single API call.
-
+
      :param name: str
        The name of the serving endpoint whose tags to patch. This field is required.
      :param add_tags: List[:class:`EndpointTag`] (optional)
        List of endpoint tags to add
      :param delete_tags: List[str] (optional)
        List of tag keys to delete
-
+
      :returns: :class:`EndpointTags`


   .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse

      Update rate limits of a serving endpoint.
-
+
      Deprecated: Please use AI Gateway to manage rate limits instead.
-
+
      :param name: str
        The name of the serving endpoint whose rate limits are being updated. This field is required.
      :param rate_limits: List[:class:`RateLimit`] (optional)
        The list of endpoint rate limits.
-
+
      :returns: :class:`PutResponse`


   .. py:method:: put_ai_gateway(name: str [, fallback_config: Optional[FallbackConfig], guardrails: Optional[AiGatewayGuardrails], inference_table_config: Optional[AiGatewayInferenceTableConfig], rate_limits: Optional[List[AiGatewayRateLimit]], usage_tracking_config: Optional[AiGatewayUsageTrackingConfig]]) -> PutAiGatewayResponse

      Update AI Gateway of a serving endpoint.
-
+
      Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned throughput, and
      pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.
-
+
      :param name: str
        The name of the serving endpoint whose AI Gateway is being updated. This field is required.
:param fallback_config: :class:`FallbackConfig` (optional) @@ -255,14 +255,14 @@ :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional) Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs. - + :returns: :class:`PutAiGatewayResponse` .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse Query a serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. :param dataframe_records: List[Any] (optional) @@ -306,32 +306,32 @@ The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. - + :returns: :class:`QueryEndpointResponse` .. py:method:: set_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions Set serving endpoint permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` .. py:method:: update_config(name: str [, auto_capture_config: Optional[AutoCaptureConfigInput], served_entities: Optional[List[ServedEntityInput]], served_models: Optional[List[ServedModelInput]], traffic_config: Optional[TrafficConfig]]) -> Wait[ServingEndpointDetailed] Update config of a serving endpoint. - + Updates any combination of the serving endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. An endpoint that already has an update in progress can not be updated until the current update completes or fails. - + :param name: str The name of the serving endpoint to update. This field is required. :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional) @@ -346,7 +346,7 @@ config. :param traffic_config: :class:`TrafficConfig` (optional) The traffic configuration associated with the serving endpoint config. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. @@ -358,29 +358,29 @@ .. py:method:: update_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions Update serving endpoint permissions. - + Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` .. 
py:method:: update_provisioned_throughput_endpoint_config(name: str, config: PtEndpointCoreConfig) -> Wait[ServingEndpointDetailed]

      Update config of a PT serving endpoint.
-
+
      Updates any combination of the PT endpoint's served entities, the compute configuration of those
      served entities, and the endpoint's traffic config. Updates are instantaneous, and the endpoint should
      be updated instantly.
-
+
      :param name: str
        The name of the PT endpoint to update. This field is required.
      :param config: :class:`PtEndpointCoreConfig`
-
+
      :returns:
        Long-running operation waiter for :class:`ServingEndpointDetailed`.
        See :method:wait_get_serving_endpoint_not_updating for more details.

diff --git a/docs/workspace/serving/serving_endpoints_data_plane.rst b/docs/workspace/serving/serving_endpoints_data_plane.rst
index bb22c3dd7..8fb09e7ff 100644
--- a/docs/workspace/serving/serving_endpoints_data_plane.rst
+++ b/docs/workspace/serving/serving_endpoints_data_plane.rst
@@ -10,7 +10,7 @@
   .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse

      Query a serving endpoint.
-
+
      :param name: str
        The name of the serving endpoint. This field is required.
      :param dataframe_records: List[Any] (optional)
@@ -54,6 +54,6 @@
        The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
        endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
        other chat/completions query fields.
-
+
      :returns: :class:`QueryEndpointResponse`
\ No newline at end of file
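      A minimal sketch of querying a serving endpoint through this API, assuming a configured workspace
      client and a hypothetical chat endpoint; the response field path follows this module's dataclasses:

      .. code-block::

          from databricks.sdk import WorkspaceClient
          from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

          w = WorkspaceClient()

          # Hypothetical endpoint name; `messages` and `max_tokens` mirror the
          # chat query fields documented above.
          response = w.serving_endpoints.query(
              name="my-chat-endpoint",
              messages=[ChatMessage(role=ChatMessageRole.USER, content="Hello!")],
              max_tokens=128,
          )
          print(response.choices[0].message.content)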
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
index 1d5244f0a..66c621997 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
@@ -10,42 +10,42 @@
   .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse

      Delete the AI/BI dashboard embedding access policy.
-
+
      Delete the AI/BI dashboard embedding access policy, reverting back to the default.
-
+
      :param etag: str (optional)
        etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
        optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
        each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
        to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
        request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
+
      :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`


   .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting

      Retrieve the AI/BI dashboard embedding access policy.
-
+
      Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
      permitting AI/BI dashboards to be embedded on approved domains.
-
+
      :param etag: str (optional)
        etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
        optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
        each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
        to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
        request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
+
      :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`


   .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting

      Update the AI/BI dashboard embedding access policy.
-
+
      Updates the AI/BI dashboard embedding access policy at the workspace level.
-
+
      :param allow_missing: bool
        This should always be set to true for Settings API. Added for AIP compliance.
      :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
@@ -55,10 +55,10 @@
        `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
        the entire collection field can be specified. Field names must exactly match the resource field
        names.
-
+
        A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
        fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
        changes in the future.
-
+
      :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
\ No newline at end of file
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
index 546d9ad7d..0c9294130 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
@@ -10,43 +10,43 @@
   .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse

      Delete AI/BI dashboard embedding approved domains.
-
+
      Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default
      empty list.
-
+
      :param etag: str (optional)
        etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
        optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
        each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
        to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
        request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
+
      :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`


   .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting

      Retrieve the list of domains approved to host embedded AI/BI dashboards.
-
+
      Retrieves the list of domains approved to host embedded AI/BI dashboards.
-
+
      :param etag: str (optional)
        etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
        optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
        each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
        to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
        request, and pass it with the DELETE request to identify the rule set version you are deleting.
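      A short usage sketch, assuming a configured workspace client; the nested response field names are
      assumptions based on this module's dataclasses:

      .. code-block::

          from databricks.sdk import WorkspaceClient

          w = WorkspaceClient()

          setting = w.settings.aibi_dashboard_embedding_approved_domains.get()
          # Assumed nested field path for the approved-domains list.
          print(setting.aibi_dashboard_embedding_approved_domains.approved_domains)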
- + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting Update the list of domains approved to host embedded AI/BI dashboards. - + Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` @@ -56,10 +56,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` \ No newline at end of file diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst index 748cf428a..350e0e713 100644 --- a/docs/workspace/settings/automatic_cluster_update.rst +++ b/docs/workspace/settings/automatic_cluster_update.rst @@ -10,28 +10,28 @@ .. py:method:: get( [, etag: Optional[str]]) -> AutomaticClusterUpdateSetting Get the automatic cluster update setting. - + Gets the automatic cluster update setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AutomaticClusterUpdateSetting` .. py:method:: update(allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str) -> AutomaticClusterUpdateSetting Update the automatic cluster update setting. - + Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AutomaticClusterUpdateSetting` @@ -41,10 +41,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
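      A compact sketch of the read-then-patch flow described above, assuming a configured workspace
      client; the mutated field path is an assumption:

      .. code-block::

          from databricks.sdk import WorkspaceClient

          w = WorkspaceClient()

          # GET first to obtain a fresh etag, then PATCH the mutated setting back.
          current = w.settings.automatic_cluster_update.get()
          current.automatic_cluster_update_workspace.enabled = True  # assumed field path
          updated = w.settings.automatic_cluster_update.update(
              allow_missing=True,
              setting=current,
              field_mask="automatic_cluster_update_workspace.enabled",
          )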
-
+
      :returns: :class:`AutomaticClusterUpdateSetting`
\ No newline at end of file
diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst
index 807dcc1c6..855451b82 100644
--- a/docs/workspace/settings/compliance_security_profile.rst
+++ b/docs/workspace/settings/compliance_security_profile.rst
@@ -6,34 +6,34 @@
    Controls whether to enable the compliance security profile for the current workspace. Enabling it on a
    workspace is permanent. By default, it is turned off.
-
+
    This setting can NOT be disabled once it is enabled.

   .. py:method:: get( [, etag: Optional[str]]) -> ComplianceSecurityProfileSetting

      Get the compliance security profile setting.
-
+
      Gets the compliance security profile setting.
-
+
      :param etag: str (optional)
        etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
        optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
        each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
        to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
        request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
+
      :returns: :class:`ComplianceSecurityProfileSetting`


   .. py:method:: update(allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str) -> ComplianceSecurityProfileSetting

      Update the compliance security profile setting.
-
+
      Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided
      in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
      request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
      the request must be retried by using the fresh etag in the 409 response.
-
+
      :param allow_missing: bool
        This should always be set to true for Settings API. Added for AIP compliance.
      :param setting: :class:`ComplianceSecurityProfileSetting`
@@ -43,10 +43,10 @@
        `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
        the entire collection field can be specified. Field names must exactly match the resource field
        names.
-
+
        A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
        fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
        changes in the future.
-
+
      :returns: :class:`ComplianceSecurityProfileSetting`
\ No newline at end of file
diff --git a/docs/workspace/settings/credentials_manager.rst b/docs/workspace/settings/credentials_manager.rst
index ea3162f6c..c8bfa4f30 100644
--- a/docs/workspace/settings/credentials_manager.rst
+++ b/docs/workspace/settings/credentials_manager.rst
@@ -10,16 +10,16 @@
   .. py:method:: exchange_token(partition_id: PartitionId, token_type: List[TokenType], scopes: List[str]) -> ExchangeTokenResponse

      Exchange token.
-
+
      Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to
      determine token permissions.
-
+
      :param partition_id: :class:`PartitionId`
        The partition of Credentials store
      :param token_type: List[:class:`TokenType`]
        A list of token types being requested
      :param scopes: List[str]
        Array of scopes for the token request.
- + :returns: :class:`ExchangeTokenResponse` \ No newline at end of file diff --git a/docs/workspace/settings/dashboard_email_subscriptions.rst b/docs/workspace/settings/dashboard_email_subscriptions.rst new file mode 100644 index 000000000..bdb4777eb --- /dev/null +++ b/docs/workspace/settings/dashboard_email_subscriptions.rst @@ -0,0 +1,64 @@ +``w.settings.dashboard_email_subscriptions``: Dashboard Email Subscriptions +=========================================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: DashboardEmailSubscriptionsAPI + + Controls whether schedules or workload tasks for refreshing AI/BI Dashboards in the workspace can send + subscription emails containing PDFs and/or images of the dashboard. By default, this setting is enabled + (set to `true`) + + .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDashboardEmailSubscriptionsResponse + + Delete the Dashboard Email Subscriptions setting. + + Reverts the Dashboard Email Subscriptions setting to its default value. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteDashboardEmailSubscriptionsResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> DashboardEmailSubscriptions + + Get the Dashboard Email Subscriptions setting. + + Gets the Dashboard Email Subscriptions setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DashboardEmailSubscriptions` + + + .. py:method:: update(allow_missing: bool, setting: DashboardEmailSubscriptions, field_mask: str) -> DashboardEmailSubscriptions + + Update the Dashboard Email Subscriptions setting. + + Updates the Dashboard Email Subscriptions setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`DashboardEmailSubscriptions` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
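+
+        A short sketch of disabling the setting and later reverting to the default, assuming a
+        configured workspace client; `boolean_val` is an assumed payload field name:
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.settings import BooleanMessage, DashboardEmailSubscriptions
+
+            w = WorkspaceClient()
+
+            # `boolean_val` is an assumption; check the DashboardEmailSubscriptions dataclass.
+            w.settings.dashboard_email_subscriptions.update(
+                allow_missing=True,
+                setting=DashboardEmailSubscriptions(boolean_val=BooleanMessage(value=False)),
+                field_mask="boolean_val.value",
+            )
+            w.settings.dashboard_email_subscriptions.delete()  # revert to the default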
+ + :returns: :class:`DashboardEmailSubscriptions` + \ No newline at end of file diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst index a98d09b41..960949930 100644 --- a/docs/workspace/settings/default_namespace.rst +++ b/docs/workspace/settings/default_namespace.rst @@ -6,61 +6,61 @@ The default namespace setting API allows users to configure the default namespace for a Databricks workspace. - + Through this API, users can retrieve, set, or modify the default namespace used when queries do not reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the object 'retail_prod.default.myTable' (the schema 'default' is always assumed). - + This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDefaultNamespaceSettingResponse Delete the default namespace setting. - + Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDefaultNamespaceSettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> DefaultNamespaceSetting Get the default namespace setting. - + Gets the default namespace setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DefaultNamespaceSetting` .. py:method:: update(allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting Update the default namespace setting. - + Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the etag is present in the error response, which should be set in the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. 
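      A minimal sketch of the flow above, assuming a configured workspace client and a hypothetical
      catalog name:

      .. code-block::

          from databricks.sdk import WorkspaceClient
          from databricks.sdk.service.settings import DefaultNamespaceSetting, StringMessage

          w = WorkspaceClient()

          # Fetch the current setting for its etag, then PATCH the new namespace.
          current = w.settings.default_namespace.get()
          updated = w.settings.default_namespace.update(
              allow_missing=True,
              setting=DefaultNamespaceSetting(etag=current.etag, namespace=StringMessage(value="retail_prod")),
              field_mask="namespace.value",
          )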
- + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DefaultNamespaceSetting` @@ -77,10 +77,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DefaultNamespaceSetting` \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst index b72398c44..214fd7dfb 100644 --- a/docs/workspace/settings/disable_legacy_access.rst +++ b/docs/workspace/settings/disable_legacy_access.rst @@ -5,7 +5,7 @@ .. py:class:: DisableLegacyAccessAPI 'Disabling legacy access' has the following impacts: - + 1. Disables direct access to Hive Metastores from the workspace. However, you can still access a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external location access from the workspace. 3. Disables Databricks Runtime versions prior to 13.3LTS. @@ -13,41 +13,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse Delete Legacy Access Disablement Status. - + Deletes legacy access disablement status. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyAccessResponse` .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess Retrieve Legacy Access Disablement Status. - + Retrieves legacy access disablement Status. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyAccess` .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess Update Legacy Access Disablement Status. - + Updates legacy access disablement status. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyAccess` @@ -57,10 +57,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyAccess` \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst index 8d56e058c..f340c193d 100644 --- a/docs/workspace/settings/disable_legacy_dbfs.rst +++ b/docs/workspace/settings/disable_legacy_dbfs.rst @@ -5,10 +5,10 @@ .. py:class:: DisableLegacyDbfsAPI Disabling legacy DBFS has the following implications: - + 1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). 2. Disables Databricks Runtime versions prior to 13.3LTS. - + When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a manual restart of all-purpose compute clusters and SQL warehouses. @@ -16,41 +16,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyDbfsResponse Delete the disable legacy DBFS setting. - + Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyDbfsResponse` .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyDbfs Get the disable legacy DBFS setting. - + Gets the disable legacy DBFS setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyDbfs` .. py:method:: update(allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs Update the disable legacy DBFS setting. - + Updates the disable legacy DBFS setting for the workspace. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyDbfs` @@ -60,10 +60,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
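      A brief sketch of enabling the restriction, assuming a configured workspace client; the payload
      field mirroring the setting name is an assumption:

      .. code-block::

          from databricks.sdk import WorkspaceClient
          from databricks.sdk.service.settings import BooleanMessage, DisableLegacyDbfs

          w = WorkspaceClient()

          # `disable_legacy_dbfs` is the assumed payload field for this setting.
          w.settings.disable_legacy_dbfs.update(
              allow_missing=True,
              setting=DisableLegacyDbfs(disable_legacy_dbfs=BooleanMessage(value=True)),
              field_mask="disable_legacy_dbfs.value",
          )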
- + :returns: :class:`DisableLegacyDbfs` \ No newline at end of file diff --git a/docs/workspace/settings/enable_export_notebook.rst b/docs/workspace/settings/enable_export_notebook.rst index 30d17e46a..4f4e61784 100644 --- a/docs/workspace/settings/enable_export_notebook.rst +++ b/docs/workspace/settings/enable_export_notebook.rst @@ -10,19 +10,19 @@ .. py:method:: get_enable_export_notebook() -> EnableExportNotebook Get the Notebook and File exporting setting. - + Gets the Notebook and File exporting setting. - + :returns: :class:`EnableExportNotebook` .. py:method:: patch_enable_export_notebook(allow_missing: bool, setting: EnableExportNotebook, field_mask: str) -> EnableExportNotebook Update the Notebook and File exporting setting. - + Updates the Notebook and File exporting setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableExportNotebook` @@ -32,10 +32,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnableExportNotebook` \ No newline at end of file diff --git a/docs/workspace/settings/enable_notebook_table_clipboard.rst b/docs/workspace/settings/enable_notebook_table_clipboard.rst index 2a9c394a0..6305058c3 100644 --- a/docs/workspace/settings/enable_notebook_table_clipboard.rst +++ b/docs/workspace/settings/enable_notebook_table_clipboard.rst @@ -10,19 +10,19 @@ .. py:method:: get_enable_notebook_table_clipboard() -> EnableNotebookTableClipboard Get the Results Table Clipboard features setting. - + Gets the Results Table Clipboard features setting. - + :returns: :class:`EnableNotebookTableClipboard` .. py:method:: patch_enable_notebook_table_clipboard(allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str) -> EnableNotebookTableClipboard Update the Results Table Clipboard features setting. - + Updates the Results Table Clipboard features setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableNotebookTableClipboard` @@ -32,10 +32,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnableNotebookTableClipboard` \ No newline at end of file diff --git a/docs/workspace/settings/enable_results_downloading.rst b/docs/workspace/settings/enable_results_downloading.rst index 0769eca22..ba2954df5 100644 --- a/docs/workspace/settings/enable_results_downloading.rst +++ b/docs/workspace/settings/enable_results_downloading.rst @@ -9,19 +9,19 @@ .. 
py:method:: get_enable_results_downloading() -> EnableResultsDownloading Get the Notebook results download setting. - + Gets the Notebook results download setting. - + :returns: :class:`EnableResultsDownloading` .. py:method:: patch_enable_results_downloading(allow_missing: bool, setting: EnableResultsDownloading, field_mask: str) -> EnableResultsDownloading Update the Notebook results download setting. - + Updates the Notebook results download setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableResultsDownloading` @@ -31,10 +31,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnableResultsDownloading` \ No newline at end of file diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst index d0f9eee3d..c9dfb547d 100644 --- a/docs/workspace/settings/enhanced_security_monitoring.rst +++ b/docs/workspace/settings/enhanced_security_monitoring.rst @@ -7,35 +7,35 @@ Controls whether enhanced security monitoring is enabled for the current workspace. By default, it is disabled. However, if the compliance security profile is enabled, this is automatically enabled. - + If the compliance security profile is disabled, you can enable or disable this setting and it is not permanent. .. py:method:: get( [, etag: Optional[str]]) -> EnhancedSecurityMonitoringSetting Get the enhanced security monitoring setting. - + Gets the enhanced security monitoring setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`EnhancedSecurityMonitoringSetting` .. py:method:: update(allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str) -> EnhancedSecurityMonitoringSetting Update the enhanced security monitoring setting. - + Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnhancedSecurityMonitoringSetting` @@ -45,10 +45,10 @@ `author.given_name`).
Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnhancedSecurityMonitoringSetting` \ No newline at end of file diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst index c1b45519c..6c0858b7a 100644 --- a/docs/workspace/settings/index.rst +++ b/docs/workspace/settings/index.rst @@ -15,6 +15,7 @@ Manage security settings for Accounts and Workspaces aibi_dashboard_embedding_approved_domains automatic_cluster_update compliance_security_profile + dashboard_email_subscriptions default_namespace disable_legacy_access disable_legacy_dbfs @@ -24,6 +25,7 @@ Manage security settings for Accounts and Workspaces enhanced_security_monitoring llm_proxy_partner_powered_workspace restrict_workspace_admins + sql_results_download token_management tokens workspace_conf \ No newline at end of file diff --git a/docs/workspace/settings/ip_access_lists.rst b/docs/workspace/settings/ip_access_lists.rst index 03061165d..deed40548 100644 --- a/docs/workspace/settings/ip_access_lists.rst +++ b/docs/workspace/settings/ip_access_lists.rst @@ -5,21 +5,21 @@ .. py:class:: IpAccessListsAPI IP Access List enables admins to configure IP access lists. - + IP access lists affect web application access and REST API access to this workspace only. If the feature is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists (inclusion) and block lists (exclusion). - + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the workspace, the connection is allowed only if the IP address matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed. - + For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the IP access list feature, it can take a few minutes for changes to take effect. .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse @@ -46,44 +46,44 @@ w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Create access list. - + Creates an IP access list for this workspace. - + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no effect until you enable the feature. 
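Before enabling the feature, it can help to sanity-check a list against the evaluation order described at the top of this section; the following is a plain-Python illustration of those documented semantics, not the service's actual implementation:

.. code-block::

    import ipaddress

    def is_connection_allowed(ip: str, block_lists: list, allow_lists: list) -> bool:
        addr = ipaddress.ip_address(ip)
        # 1. Block lists are checked first; any match rejects the connection.
        if any(addr in ipaddress.ip_network(c) for lst in block_lists for c in lst):
            return False
        # 2. If no allow lists exist, all remaining addresses are allowed.
        if not allow_lists:
            return True
        # 3. Otherwise the address must match at least one allow list.
        return any(addr in ipaddress.ip_network(c) for lst in allow_lists for c in lst)

    # Blocked even though an allow list also matches:
    assert not is_connection_allowed("10.0.0.5", [["10.0.0.0/24"]], [["10.0.0.0/8"]])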
See :method:workspaceconf/setStatus - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` .. py:method:: delete(ip_access_list_id: str) Delete access list. - + Deletes an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + .. py:method:: get(ip_access_list_id: str) -> FetchIpAccessListResponse @@ -112,12 +112,12 @@ w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Get access list. - + Gets an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`FetchIpAccessListResponse` @@ -135,9 +135,9 @@ all = w.ip_access_lists.list() Get access lists. - + Gets all IP access lists for the specified workspace. - + :returns: Iterator over :class:`IpAccessListInfo` @@ -173,9 +173,9 @@ w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Replace access list. - + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -184,42 +184,42 @@ returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) - - + + .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]]) Update access list. - + Updates an existing IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. 
See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -229,9 +229,9 @@ Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. - - + + \ No newline at end of file diff --git a/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst b/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst index 6f464addb..2e6ff8626 100644 --- a/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst +++ b/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst @@ -9,41 +9,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse Delete the enable partner powered AI features workspace setting. - + Reverts the enable partner powered AI features workspace setting to its default value. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteLlmProxyPartnerPoweredWorkspaceResponse` .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredWorkspace Get the enable partner powered AI features workspace setting. - + Gets the enable partner powered AI features workspace setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredWorkspace` .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str) -> LlmProxyPartnerPoweredWorkspace Update the enable partner powered AI features workspace setting. - + Updates the enable partner powered AI features workspace setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredWorkspace` @@ -53,10 +53,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`LlmProxyPartnerPoweredWorkspace` \ No newline at end of file diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst index 45c8abea1..8fb2d0c3c 100644 --- a/docs/workspace/settings/notification_destinations.rst +++ b/docs/workspace/settings/notification_destinations.rst @@ -12,64 +12,64 @@ .. py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination Create a notification destination. - + Creates a notification destination. Requires workspace admin permissions. - + :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. - + :returns: :class:`NotificationDestination` .. py:method:: delete(id: str) Delete a notification destination. - + Deletes a notification destination. Requires workspace admin permissions. - + :param id: str - - + + .. py:method:: get(id: str) -> NotificationDestination Get a notification destination. - + Gets a notification destination. - + :param id: str - + :returns: :class:`NotificationDestination` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult] List notification destinations. - + Lists notification destinations. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListNotificationDestinationsResult` .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination Update a notification destination. - + Updates a notification destination. Requires workspace admin permissions. At least one field is required in the request body. - + :param id: str UUID identifying notification destination. :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. - + :returns: :class:`NotificationDestination` \ No newline at end of file diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst index c2853d133..b025112cc 100644 --- a/docs/workspace/settings/restrict_workspace_admins.rst +++ b/docs/workspace/settings/restrict_workspace_admins.rst @@ -17,47 +17,47 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteRestrictWorkspaceAdminsSettingResponse Delete the restrict workspace admins setting. - + Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> RestrictWorkspaceAdminsSetting Get the restrict workspace admins setting. - + Gets the restrict workspace admins setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`RestrictWorkspaceAdminsSetting` .. py:method:: update(allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str) -> RestrictWorkspaceAdminsSetting Update the restrict workspace admins setting. - + Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`RestrictWorkspaceAdminsSetting` @@ -67,10 +67,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`RestrictWorkspaceAdminsSetting` \ No newline at end of file diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst index 6eddb2508..0d6647301 100644 --- a/docs/workspace/settings/settings.rst +++ b/docs/workspace/settings/settings.rst @@ -29,20 +29,27 @@ Controls whether to enable the compliance security profile for the current workspace. Enabling it on a workspace is permanent. By default, it is turned off. - + This setting can NOT be disabled once it is enabled. + .. py:property:: dashboard_email_subscriptions + :type: DashboardEmailSubscriptionsAPI + + Controls whether schedules or workload tasks for refreshing AI/BI Dashboards in the workspace can send + subscription emails containing PDFs and/or images of the dashboard. By default, this setting is enabled + (set to `true`). + .. py:property:: default_namespace :type: DefaultNamespaceAPI The default namespace setting API allows users to configure the default namespace for a Databricks workspace. - + Through this API, users can retrieve, set, or modify the default namespace used when queries do not reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the object 'retail_prod.default.myTable' (the schema 'default' is always assumed).
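The 'retail_prod' example above, sketched with the SDK; the `StringMessage` wrapper and the `namespace.value` field path are assumed from the SDK's conventions rather than taken from this patch:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # After this, `SELECT * FROM myTable` resolves to
    # `retail_prod.default.myTable` on Unity Catalog-enabled compute.
    w.settings.default_namespace.update(
        allow_missing=True,
        setting=settings.DefaultNamespaceSetting(
            namespace=settings.StringMessage(value="retail_prod")
        ),
        field_mask="namespace.value",  # assumed field path
    )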
- + This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. @@ -50,7 +57,7 @@ :type: DisableLegacyAccessAPI 'Disabling legacy access' has the following impacts: - + 1. Disables direct access to Hive Metastores from the workspace. However, you can still access a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external location access from the workspace. 3. Disables Databricks Runtime versions prior to 13.3LTS. @@ -59,10 +66,10 @@ :type: DisableLegacyDbfsAPI Disabling legacy DBFS has the following implications: - + 1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). 2. Disables Databricks Runtime versions prior to 13.3LTS. - + When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a manual restart of all-purpose compute clusters and SQL warehouses. @@ -90,7 +97,7 @@ Controls whether enhanced security monitoring is enabled for the current workspace. By default, it is disabled. However, if the compliance security profile is enabled, this is automatically enabled. - + If the compliance security profile is disabled, you can enable or disable this setting and it is not permanent. @@ -110,4 +117,10 @@ RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of service principals they have the Service Principal User role on. They can also only change a job owner to themselves. And they can change the job run_as setting to themselves or to a service principal on which - they have the Service Principal User role. \ No newline at end of file + they have the Service Principal User role. + + .. py:property:: sql_results_download + :type: SqlResultsDownloadAPI + + Controls whether users within the workspace are allowed to download results from the SQL Editor and AI/BI + Dashboards UIs. By default, this setting is enabled (set to `true`). \ No newline at end of file diff --git a/docs/workspace/settings/sql_results_download.rst b/docs/workspace/settings/sql_results_download.rst new file mode 100644 index 000000000..1e378f530 --- /dev/null +++ b/docs/workspace/settings/sql_results_download.rst @@ -0,0 +1,63 @@ +``w.settings.sql_results_download``: SQL Results Download +========================================================= +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: SqlResultsDownloadAPI + + Controls whether users within the workspace are allowed to download results from the SQL Editor and AI/BI + Dashboards UIs. By default, this setting is enabled (set to `true`). + + .. py:method:: delete( [, etag: Optional[str]]) -> DeleteSqlResultsDownloadResponse + + Delete the SQL Results Download setting. + + Reverts the SQL Results Download setting to its default value. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions.
That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteSqlResultsDownloadResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> SqlResultsDownload + + Get the SQL Results Download setting. + + Gets the SQL Results Download setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`SqlResultsDownload` + + + .. py:method:: update(allow_missing: bool, setting: SqlResultsDownload, field_mask: str) -> SqlResultsDownload + + Update the SQL Results Download setting. + + Updates the SQL Results Download setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`SqlResultsDownload` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`SqlResultsDownload` + \ No newline at end of file diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst index ceaa64cc6..b36f08396 100644 --- a/docs/workspace/settings/token_management.rst +++ b/docs/workspace/settings/token_management.rst @@ -35,29 +35,29 @@ w.token_management.delete(token_id=obo.token_info.token_id) Create on-behalf token. - + Creates a token on behalf of a service principal. - + :param application_id: str Application ID of the service principal. :param comment: str (optional) Comment that describes the purpose of the token. :param lifetime_seconds: int (optional) The number of seconds before the token expires. - + :returns: :class:`CreateOboTokenResponse` .. py:method:: delete(token_id: str) Delete a token. - + Deletes a token, specified by its ID. - + :param token_id: str The ID of the token to revoke. - - + + .. py:method:: get(token_id: str) -> GetTokenResponse @@ -90,30 +90,30 @@ w.token_management.delete(token_id=obo.token_info.token_id) Get token info. - + Gets information about a token, specified by its ID. - + :param token_id: str The ID of the token to get. - + :returns: :class:`GetTokenResponse` .. py:method:: get_permission_levels() -> GetTokenPermissionLevelsResponse Get token permission levels. - + Gets the permission levels that a user can have on an object. - + :returns: :class:`GetTokenPermissionLevelsResponse` .. py:method:: get_permissions() -> TokenPermissions Get token permissions. - + Gets the permissions of all tokens. Tokens can inherit permissions from their root object. 
- + :returns: :class:`TokenPermissions` @@ -132,36 +132,36 @@ all = w.token_management.list(settings.ListTokenManagementRequest()) List all tokens. - + Lists all tokens associated with the specified workspace or user. - + :param created_by_id: int (optional) User ID of the user that created the token. :param created_by_username: str (optional) Username of the user that created the token. - + :returns: Iterator over :class:`TokenInfo` .. py:method:: set_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions Set token permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` .. py:method:: update_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions Update token permissions. - + Updates the permissions on all tokens. Tokens can inherit permissions from their root object. - + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` \ No newline at end of file diff --git a/docs/workspace/settings/tokens.rst b/docs/workspace/settings/tokens.rst index 200eb9c83..966f1093d 100644 --- a/docs/workspace/settings/tokens.rst +++ b/docs/workspace/settings/tokens.rst @@ -26,33 +26,33 @@ w.tokens.delete(token_id=token.token_info.token_id) Create a user token. - + Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns an error **QUOTA_EXCEEDED**. - + :param comment: str (optional) Optional description to attach to the token. :param lifetime_seconds: int (optional) The lifetime of the token, in seconds. - + If the lifetime is not specified, this token remains valid indefinitely. - + :returns: :class:`CreateTokenResponse` .. py:method:: delete(token_id: str) Revoke token. - + Revokes an access token. - + If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. - + :param token_id: str The ID of the token to be revoked. - - + + .. py:method:: list() -> Iterator[PublicTokenInfo] @@ -69,8 +69,8 @@ all = w.tokens.list() List tokens. - + Lists all the valid tokens for a user-workspace pair. - + :returns: Iterator over :class:`PublicTokenInfo` \ No newline at end of file diff --git a/docs/workspace/settings/workspace_conf.rst b/docs/workspace/settings/workspace_conf.rst index d73b16180..3759de043 100644 --- a/docs/workspace/settings/workspace_conf.rst +++ b/docs/workspace/settings/workspace_conf.rst @@ -20,20 +20,20 @@ conf = w.workspace_conf.get_status(keys="enableWorkspaceFilesystem") Check configuration status. - + Gets the configuration status for a workspace. - + :param keys: str - + :returns: Dict[str,str] .. py:method:: set_status(contents: Dict[str, str]) Enable/disable features. - + Sets the configuration status for a workspace, including enabling or disabling it. 
- - - + + + \ No newline at end of file diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index 263545400..90e922180 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -33,10 +33,10 @@ w.providers.delete(name=created.name) Create an auth provider. - + Creates a new authentication provider minimally based on a name and authentication type. The caller must be an admin on the metastore. - + :param name: str The name of the Provider. :param authentication_type: :class:`AuthenticationType` @@ -46,21 +46,21 @@ :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. - + :returns: :class:`ProviderInfo` .. py:method:: delete(name: str) Delete a provider. - + Deletes an authentication provider, if the caller is a metastore admin or is the owner of the provider. - + :param name: str Name of the provider. - - + + .. py:method:: get(name: str) -> ProviderInfo @@ -91,13 +91,13 @@ w.providers.delete(name=created.name) Get a provider. - + Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the provider. - + :param name: str Name of the provider. - + :returns: :class:`ProviderInfo` @@ -108,19 +108,32 @@ .. code-block:: + import time + from databricks.sdk import WorkspaceClient - from databricks.sdk.service import sharing w = WorkspaceClient() - all = w.providers.list(sharing.ListProvidersRequest()) + public_share_recipient = """{ + "shareCredentialsVersion":1, + "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz", + "endpoint":"https://sharing.delta.io/delta-sharing/" + } + """ + + created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient) + + shares = w.providers.list_shares(name=created.name) + + # cleanup + w.providers.delete(name=created.name) List providers. - + Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There is no guarantee of a specific ordering of the elements in the array. - + :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. @@ -134,17 +147,17 @@ from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ProviderInfo` .. py:method:: list_provider_share_assets(provider_name: str, share_name: str [, function_max_results: Optional[int], notebook_max_results: Optional[int], table_max_results: Optional[int], volume_max_results: Optional[int]]) -> ListProviderShareAssetsResponse List assets by provider share. - + Get arrays of assets associated with a specified provider's share. The caller is the recipient of the share. - + :param provider_name: str The name of the provider who owns the share. :param share_name: str @@ -157,7 +170,7 @@ Maximum number of tables to return. :param volume_max_results: int (optional) Maximum number of volumes to return. - + :returns: :class:`ListProviderShareAssetsResponse` @@ -189,11 +202,11 @@ w.providers.delete(name=created.name) List shares by Provider. 
- + Gets an array of a specified provider's shares within the metastore where: - + * the caller is a metastore admin, or * the caller is the owner. - + :param name: str Name of the provider in which to list shares. :param max_results: int (optional) @@ -206,7 +219,7 @@ response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ProviderShare` @@ -238,11 +251,11 @@ w.providers.delete(name=created.name) Update a provider. - + Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. If the update changes the provider name, the caller must be both a metastore admin and the owner of the provider. - + :param name: str Name of the provider. :param comment: str (optional) @@ -254,6 +267,6 @@ :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. - + :returns: :class:`ProviderInfo` \ No newline at end of file diff --git a/docs/workspace/sharing/recipient_activation.rst b/docs/workspace/sharing/recipient_activation.rst index bc8ac2715..2c214d9c0 100644 --- a/docs/workspace/sharing/recipient_activation.rst +++ b/docs/workspace/sharing/recipient_activation.rst @@ -8,30 +8,30 @@ the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data provider to download the credential file that includes the access token. The recipient will then use the credential file to establish a secure connection with the provider to receive the shared data. - + Note that you can download the credential file only once. Recipients should treat the downloaded credential as a secret and must not share it outside of their organization. .. py:method:: get_activation_url_info(activation_url: str) Get a share activation URL. - + Gets an activation URL for a share. - + :param activation_url: str The one time activation url. It also accepts activation token. - - + + .. py:method:: retrieve_token(activation_url: str) -> RetrieveTokenResponse Get an access token. - + Retrieve access token with an activation url. This is a public API without any authentication. - + :param activation_url: str The one time activation url. It also accepts activation token. - + :returns: :class:`RetrieveTokenResponse` \ No newline at end of file diff --git a/docs/workspace/sharing/recipient_federation_policies.rst b/docs/workspace/sharing/recipient_federation_policies.rst index bd63cb0a8..b8fe0b856 100644 --- a/docs/workspace/sharing/recipient_federation_policies.rst +++ b/docs/workspace/sharing/recipient_federation_policies.rst @@ -19,7 +19,7 @@ Multi-Factor Authentication (MFA), and enhances security by minimizing the risk of credential leakage through the use of short-lived, expiring tokens. It is designed for strong identity governance, secure cross-platform data sharing, and reduced operational overhead for credential management. - + For more information, see https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security and https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed @@ -27,88 +27,88 @@ .. py:method:: create(recipient_name: str, policy: FederationPolicy) -> FederationPolicy Create recipient federation policy. - + Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must be the owner of the recipient. 
When sharing data from Databricks to non-Databricks clients, you can define a federation policy to authenticate non-Databricks recipients. The federation policy validates OIDC claims in federated tokens and is defined at the recipient level. This enables secretless sharing clients to authenticate using OIDC tokens. - + Supported scenarios for federation policies: 1. **User-to-Machine (U2M) flow** (e.g., PowerBI): A user accesses a resource using their own identity. 2. **Machine-to-Machine (M2M) flow** (e.g., OAuth App): An OAuth App accesses a resource using its own identity, typically for tasks like running nightly jobs. - + For an overview, refer to: - Blog post: Overview of feature: https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security - + For detailed configuration guides based on your use case: - Creating a Federation Policy as a provider: https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed - Configuration and usage for Machine-to-Machine (M2M) applications (e.g., Python Delta Sharing Client): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-m2m - Configuration and usage for User-to-Machine (U2M) applications (e.g., PowerBI): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-u2m - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being created. :param policy: :class:`FederationPolicy` - + :returns: :class:`FederationPolicy` .. py:method:: delete(recipient_name: str, name: str) Delete recipient federation policy. - + Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being deleted. :param name: str Name of the policy. This is the name of the policy to be deleted. - - + + .. py:method:: get_federation_policy(recipient_name: str, name: str) -> FederationPolicy Get recipient federation policy. - + Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being retrieved. :param name: str Name of the policy. This is the name of the policy to be retrieved. - + :returns: :class:`FederationPolicy` .. py:method:: list(recipient_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] List recipient federation policies. - + Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policies are being listed. :param max_results: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` .. py:method:: update(recipient_name: str, name: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy Update recipient federation policy. - + Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being updated. 
:param name: str @@ -120,6 +120,6 @@ should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'comment,oidc_policy.audiences'. - + :returns: :class:`FederationPolicy` \ No newline at end of file diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst index e640bf038..2afcc4baf 100644 --- a/docs/workspace/sharing/recipients.rst +++ b/docs/workspace/sharing/recipients.rst @@ -7,12 +7,12 @@ A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares. The way how sharing works differs depending on whether or not your recipient has access to a Databricks workspace that is enabled for Unity Catalog: - + - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier is the key identifier that enables the secure connection. This sharing mode is called **Databricks-to-Databricks sharing**. - + - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you create a recipient object, Databricks generates an activation link you can send to the recipient. The recipient follows the activation link to download the credential file, and then uses the credential file @@ -37,10 +37,10 @@ w.recipients.delete(name=created.name) Create a share recipient. - + Creates a new recipient with the delta sharing authentication type in the metastore. The caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. - + :param name: str Name of Recipient. :param authentication_type: :class:`AuthenticationType` @@ -64,20 +64,20 @@ :param sharing_code: str (optional) The one-time sharing code provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. - + :returns: :class:`RecipientInfo` .. py:method:: delete(name: str) Delete a share recipient. - + Deletes the specified recipient from the metastore. The caller must be the owner of the recipient. - + :param name: str Name of the recipient. - - + + .. py:method:: get(name: str) -> RecipientInfo @@ -101,14 +101,14 @@ w.recipients.delete(name=created.name) Get a share recipient. - + Gets a share recipient from the metastore if: - + * the caller is the owner of the share recipient, or: * is a metastore admin - + :param name: str Name of the recipient. - + :returns: :class:`RecipientInfo` @@ -127,12 +127,12 @@ all = w.recipients.list(sharing.ListRecipientsRequest()) List share recipients. - + Gets an array of all share recipients within the current metastore where: - + * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. @@ -146,7 +146,7 @@ from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`RecipientInfo` @@ -171,17 +171,17 @@ w.recipients.delete(name=created.name) Rotate a token. - + Refreshes the specified recipient's delta sharing authentication token with the provided token info. 
The caller must be the owner of the recipient. - + :param name: str The name of the Recipient. :param existing_token_expire_in_seconds: int The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire the existing token immediately, negative number will return an error. - + :returns: :class:`RecipientInfo` @@ -206,10 +206,10 @@ w.recipients.delete(name=created.name) Get recipient share permissions. - + Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the owner of the Recipient. - + :param name: str The name of the Recipient. :param max_results: int (optional) @@ -222,7 +222,7 @@ unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetRecipientSharePermissionsResponse` @@ -247,11 +247,11 @@ w.recipients.delete(name=created.name) Update a share recipient. - + Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. If the recipient name will be updated, the user must be both a metastore admin and the owner of the recipient. - + :param name: str Name of the recipient. :param comment: str (optional) @@ -268,6 +268,6 @@ Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write. - + :returns: :class:`RecipientInfo` \ No newline at end of file diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst index 8d1dfea02..87d001e10 100644 --- a/docs/workspace/sharing/shares.rst +++ b/docs/workspace/sharing/shares.rst @@ -28,30 +28,30 @@ w.shares.delete(name=created_share.name) Create a share. - + Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. - + :param name: str Name of the share. :param comment: str (optional) User-provided free-form text description. :param storage_root: str (optional) Storage root URL for the share. - + :returns: :class:`ShareInfo` .. py:method:: delete(name: str) Delete a share. - + Deletes a data object share from the metastore. The caller must be an owner of the share. - + :param name: str The name of the share. - - + + .. py:method:: get(name: str [, include_shared_data: Optional[bool]]) -> ShareInfo @@ -75,15 +75,15 @@ w.shares.delete(name=created_share.name) Get a share. - + Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param include_shared_data: bool (optional) Query for data to include in the share. - + :returns: :class:`ShareInfo` @@ -102,10 +102,10 @@ all = w.shares.list(sharing.ListSharesRequest()) List shares. - + Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of shares to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum of this @@ -116,17 +116,17 @@ response. 
:param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ShareInfo` .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetSharePermissionsResponse Get permissions. - + Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param max_results: int (optional) @@ -139,7 +139,7 @@ unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetSharePermissionsResponse` @@ -196,23 +196,23 @@ w.shares.delete(name=created_share.name) Update a share. - + Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + In the case that the share name is changed, **updateShare** requires that the caller is both the share owner and a metastore admin. - + If there are notebook files in the share, the __storage_root__ field cannot be updated. - + For each table that is added through this method, the share owner must also have **SELECT** privilege on the table. This privilege must be maintained indefinitely for recipients to be able to access the table. Typically, you should use a group as the share owner. - + Table removals through **update** do not require additional privileges. - + :param name: str The name of the share. :param comment: str (optional) @@ -225,26 +225,26 @@ Storage root URL for the share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) Array of shared data object updates. - + :returns: :class:`ShareInfo` .. py:method:: update_permissions(name: str [, changes: Optional[List[PermissionsChange]], omit_permissions_list: Optional[bool]]) -> UpdateSharePermissionsResponse Update permissions. - + Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. - + For new recipient grants, the user must also be the recipient owner or metastore admin. recipient revocations do not require additional privileges. - + :param name: str The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) - Array of permission changes. + Array of permissions change objects. :param omit_permissions_list: bool (optional) Optional. Whether to return the latest permissions list of the share in the response. - + :returns: :class:`UpdateSharePermissionsResponse` \ No newline at end of file diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst index be09efec5..bd0a7f766 100644 --- a/docs/workspace/sql/alerts.rst +++ b/docs/workspace/sql/alerts.rst @@ -51,28 +51,28 @@ w.alerts.delete(id=alert.id) Create an alert. - + Creates an alert. - + :param alert: :class:`CreateAlertRequestAlert` (optional) :param auto_resolve_display_name: bool (optional) If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. - + :returns: :class:`Alert` .. py:method:: delete(id: str) Delete an alert. - + Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. 
- + :param id: str - - + + .. py:method:: get(id: str) -> Alert @@ -119,11 +119,11 @@ w.alerts.delete(id=alert.id) Get an alert. - + Gets an alert. - + :param id: str - + :returns: :class:`Alert` @@ -142,13 +142,13 @@ all = w.alerts.list(sql.ListAlertsRequest()) List alerts. - + Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListAlertsResponseAlert` @@ -200,9 +200,9 @@ w.alerts.delete(id=alert.id) Update an alert. - + Updates an alert. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -210,7 +210,7 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. @@ -218,6 +218,6 @@ :param auto_resolve_display_name: bool (optional) If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. - + :returns: :class:`Alert` \ No newline at end of file diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst index 5b048d2bf..6dfd96128 100644 --- a/docs/workspace/sql/alerts_legacy.rst +++ b/docs/workspace/sql/alerts_legacy.rst @@ -8,24 +8,24 @@ periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert Create an alert. - + Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification destinations if the condition was met. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param name: str Name of the alert. :param options: :class:`AlertOptions` @@ -37,68 +37,68 @@ :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - + :returns: :class:`LegacyAlert` .. py:method:: delete(alert_id: str) Delete an alert. - + Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to the trash. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete instead. 
[Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str - - + + .. py:method:: get(alert_id: str) -> LegacyAlert Get an alert. - + Gets an alert. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str - + :returns: :class:`LegacyAlert` .. py:method:: list() -> Iterator[LegacyAlert] Get alerts. - + Gets a list of alerts. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`LegacyAlert` .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]]) Update an alert. - + Updates an alert. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str :param name: str Name of the alert. @@ -109,6 +109,6 @@ :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - - + + \ No newline at end of file diff --git a/docs/workspace/sql/alerts_v2.rst b/docs/workspace/sql/alerts_v2.rst index 2ff773949..68c761cdc 100644 --- a/docs/workspace/sql/alerts_v2.rst +++ b/docs/workspace/sql/alerts_v2.rst @@ -4,61 +4,61 @@ .. py:class:: AlertsV2API - TODO: Add description + New version of SQL Alerts .. py:method:: create_alert(alert: AlertV2) -> AlertV2 Create an alert. - + Create Alert - + :param alert: :class:`AlertV2` - + :returns: :class:`AlertV2` .. py:method:: get_alert(id: str) -> AlertV2 Get an alert. - + Gets an alert. - + :param id: str - + :returns: :class:`AlertV2` .. py:method:: list_alerts( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AlertV2] List alerts. - + Gets a list of alerts accessible to the user, ordered by creation time. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`AlertV2` .. py:method:: trash_alert(id: str) Delete an alert. - + Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. - + :param id: str - - + + .. py:method:: update_alert(id: str, alert: AlertV2, update_mask: str) -> AlertV2 Update an alert. - + Update alert - + :param id: str UUID identifying the alert. :param alert: :class:`AlertV2` @@ -68,10 +68,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
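As a hedged illustration of the field-mask convention described above, the following sketch renames an alert while leaving all other fields untouched; `alert_id` is a hypothetical placeholder and `display_name` is assumed to be a mutable field of :class:`AlertV2`:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    alert = w.alerts_v2.get_alert(id=alert_id)  # alert_id: hypothetical existing alert UUID
    alert.display_name = "nightly-latency-check"

    # Only the field named in update_mask is replaced; `*` would replace everything.
    updated = w.alerts_v2.update_alert(id=alert_id, alert=alert, update_mask="display_name")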
- + :returns: :class:`AlertV2` \ No newline at end of file diff --git a/docs/workspace/sql/dashboard_widgets.rst b/docs/workspace/sql/dashboard_widgets.rst index 63e100640..d4bbcde1d 100644 --- a/docs/workspace/sql/dashboard_widgets.rst +++ b/docs/workspace/sql/dashboard_widgets.rst @@ -10,7 +10,7 @@ .. py:method:: create(dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget Add widget to a dashboard. - + :param dashboard_id: str Dashboard ID returned by :method:dashboards/create. :param options: :class:`WidgetOptions` @@ -21,24 +21,24 @@ contains a visualization in the `visualization` field. :param visualization_id: str (optional) Query Visualization ID returned by :method:queryvisualizations/create. - + :returns: :class:`Widget` .. py:method:: delete(id: str) Remove widget. - + :param id: str Widget ID returned by :method:dashboardwidgets/create - - + + .. py:method:: update(id: str, dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget Update existing widget. - + :param id: str Widget ID returned by :method:dashboardwidgets/create :param dashboard_id: str @@ -51,6 +51,6 @@ contains a visualization in the `visualization` field. :param visualization_id: str (optional) Query Visualization ID returned by :method:queryvisualizations/create. - + :returns: :class:`Widget` \ No newline at end of file diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst index 3ed0c4b77..fec726459 100644 --- a/docs/workspace/sql/dashboards.rst +++ b/docs/workspace/sql/dashboards.rst @@ -29,7 +29,7 @@ w.dashboards.delete(dashboard_id=created.id) Create a dashboard object. - + :param name: str The title of this dashboard that appears in list views and at the top of the dashboard page. :param dashboard_filters_enabled: bool (optional) @@ -42,7 +42,7 @@ Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`Dashboard` @@ -67,13 +67,13 @@ w.dashboards.delete(dashboard_id=created.id) Remove a dashboard. - + Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared. - + :param dashboard_id: str - - + + .. py:method:: get(dashboard_id: str) -> Dashboard @@ -97,11 +97,11 @@ w.dashboards.delete(dashboard_id=created.id) Retrieve a definition. - + Returns a JSON representation of a dashboard object, including its visualization and query objects. - + :param dashboard_id: str - + :returns: :class:`Dashboard` @@ -120,12 +120,12 @@ all = w.dashboards.list(sql.ListDashboardsRequest()) Get dashboard objects. - + Fetch a paginated list of dashboard objects. - + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + :param order: :class:`ListOrder` (optional) Name of dashboard attribute to order by. :param page: int (optional) @@ -134,7 +134,7 @@ Number of dashboards to return per page. :param q: str (optional) Full text search term. - + :returns: Iterator over :class:`Dashboard` @@ -159,23 +159,23 @@ w.dashboards.delete(dashboard_id=created.id) Restore a dashboard. - + A restored dashboard appears in list views and searches and can be shared. - + :param dashboard_id: str - - + + ..
py:method:: update(dashboard_id: str [, name: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard Change a dashboard definition. - + Modify this dashboard definition. This operation only affects attributes of the dashboard object. It does not add, modify, or remove widgets. - + **Note**: You cannot undo this operation. - + :param dashboard_id: str :param name: str (optional) The title of this dashboard that appears in list views and at the top of the dashboard page. @@ -183,6 +183,6 @@ Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`Dashboard` \ No newline at end of file diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst index 472bdfb0e..8f7321fa0 100644 --- a/docs/workspace/sql/data_sources.rst +++ b/docs/workspace/sql/data_sources.rst @@ -7,13 +7,13 @@ This API is provided to assist you in making new query objects. When creating a query object, you may optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't already know the `data_source_id` for your desired SQL warehouse, this API will help you find it. - + This API does not support searches. It returns the full list of SQL warehouses in your workspace. We advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: list() -> Iterator[DataSource] @@ -30,15 +30,15 @@ srcs = w.data_sources.list() Get a list of SQL warehouses. - + Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`DataSource` \ No newline at end of file diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst index a5bd010f1..7f9e5d19c 100644 --- a/docs/workspace/sql/dbsql_permissions.rst +++ b/docs/workspace/sql/dbsql_permissions.rst @@ -7,76 +7,76 @@ The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this exposes only one endpoint, which gets the Access Control List for a given object. You cannot modify any permissions using this API. - + There are three levels of permission: - + - `CAN_VIEW`: Allows read-only access - + - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`) - + - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse Get object ACL. - + Gets a JSON representation of the access control list (ACL) for a specified object. - + **Note**: A new version of the Databricks SQL API is now available. 
Please use :method:workspace/getpermissions instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. :param object_id: str Object ID. An ACL is returned for the object with this UUID. - + :returns: :class:`GetResponse` .. py:method:: set(object_type: ObjectTypePlural, object_id: str [, access_control_list: Optional[List[AccessControl]]]) -> SetResponse Set object ACL. - + Sets the access control list (ACL) for a specified object. This operation will completely rewrite the ACL. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:workspace/setpermissions instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`ObjectTypePlural` The type of object permission to set. :param object_id: str Object ID. The ACL for the object with this UUID is overwritten by this request's POST content. :param access_control_list: List[:class:`AccessControl`] (optional) - + :returns: :class:`SetResponse` .. py:method:: transfer_ownership(object_type: OwnableObjectType, object_id: TransferOwnershipObjectId [, new_owner: Optional[str]]) -> Success Transfer object ownership. - + Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. - + **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use :method:queries/update and :method:alerts/update respectively instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. :param object_id: :class:`TransferOwnershipObjectId` The ID of the object on which to change ownership. :param new_owner: str (optional) Email address for the new owner, who must exist in the workspace. - + :returns: :class:`Success` \ No newline at end of file diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index f8553bead..66c9f0be1 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -37,28 +37,28 @@ w.queries.delete(id=query.id) Create a query. - + Creates a query. - + :param auto_resolve_display_name: bool (optional) If true, automatically resolve query display name conflicts. Otherwise, fail the request if the query's display name conflicts with an existing query's display name. :param query: :class:`CreateQueryRequestQuery` (optional) - + :returns: :class:`Query` .. py:method:: delete(id: str) Delete a query. - + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is permanently deleted after 30 days. - + :param id: str - - + + .. py:method:: get(id: str) -> Query @@ -92,37 +92,37 @@ w.queries.delete(id=query.id) Get a query. - + Gets a query. - + :param id: str - + :returns: :class:`Query` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery] List queries. - + Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
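Given the throttling warning above, sequential iteration with a modest page size is the safe pattern. A minimal sketch, assuming a configured client (`display_name` is assumed to be present on the returned objects):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # The SDK iterator follows page_token internally; page_size only bounds each request.
    for q in w.queries.list(page_size=100):
        print(q.display_name)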
- + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListQueryObjectsResponseQuery` .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization] List visualizations on a query. - + Gets a list of visualizations on a query. - + :param id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Visualization` @@ -165,9 +165,9 @@ w.queries.delete(id=query.id) Update a query. - + Updates a query. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -175,7 +175,7 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. @@ -183,6 +183,6 @@ If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. :param query: :class:`UpdateQueryRequestQuery` (optional) - + :returns: :class:`Query` \ No newline at end of file diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst index c35ed9b69..a7ab56836 100644 --- a/docs/workspace/sql/queries_legacy.rst +++ b/docs/workspace/sql/queries_legacy.rst @@ -7,34 +7,34 @@ These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery Create a new query definition. - + Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. - + The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the `data_source_id` from an existing query. - + **Note**: You cannot add a visualization until you create the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] - + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list :param description: str (optional) General description that conveys additional information about this query such as usage notes. @@ -52,71 +52,71 @@ Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`LegacyQuery` .. py:method:: delete(query_id: str) Delete a query. - + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - - + + .. py:method:: get(query_id: str) -> LegacyQuery Get a query definition. - + Retrieve a query object definition along with contextual permissions information about the currently authenticated user. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - + :returns: :class:`LegacyQuery` .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery] Get a list of queries. - + Gets a list of queries. Optionally, this list can be filtered by a search term. - + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order descending instead. - + - `name`: The name of the query. - + - `created_at`: The timestamp the query was created. - + - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank value is treated as the highest value for sorting. - + - `executed_at`: The timestamp when the query was last run. - + - `created_by`: The user name of the user that created the query. :param page: int (optional) Page number to retrieve. @@ -124,45 +124,45 @@ Number of queries to return per page. :param q: str (optional) Full text search term - + :returns: Iterator over :class:`LegacyQuery` .. py:method:: restore(query_id: str) Restore a query. - + Restore a query that has been moved to the trash. A restored query appears in list views and searches. You can use restored queries for alerts. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - - + + .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery Change a query definition. - + Modify this query definition. - + **Note**: You cannot undo this operation. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update instead. 
[Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] - + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list :param description: str (optional) General description that conveys additional information about this query such as usage notes. @@ -178,6 +178,6 @@ Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`LegacyQuery` \ No newline at end of file diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst index 5acfb5127..f2bccdd67 100644 --- a/docs/workspace/sql/query_history.rst +++ b/docs/workspace/sql/query_history.rst @@ -26,13 +26,13 @@ ) List Queries. - + List the history of queries through SQL warehouses and serverless compute. - + You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are returned first (up to max_results in request). The pagination token returned in response can be used to list subsequent query statuses. - + :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. :param include_metrics: bool (optional) @@ -44,6 +44,6 @@ A token that can be used to get the next page of results. The token can contain characters that need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by %2B. This field is optional. - + :returns: :class:`ListQueriesResponse` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst index f0865ae0a..ac3d6c565 100644 --- a/docs/workspace/sql/query_visualizations.rst +++ b/docs/workspace/sql/query_visualizations.rst @@ -10,31 +10,31 @@ .. py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization Add a visualization to a query. - + Adds a visualization to a query. - + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) - + :returns: :class:`Visualization` .. py:method:: delete(id: str) Remove a visualization. - + Removes a visualization. - + :param id: str - - + + .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization Update a visualization. - + Updates a visualization. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -42,11 +42,11 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.
:param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) - + :returns: :class:`Visualization` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst index d91b97c8c..f56f78a5f 100644 --- a/docs/workspace/sql/query_visualizations_legacy.rst +++ b/docs/workspace/sql/query_visualizations_legacy.rst @@ -6,23 +6,23 @@ This is an evolving API that facilitates the addition and removal of visualizations from existing queries within the Databricks Workspace. Data structures may change over time. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization Add visualization to a query. - + Creates a visualization in the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str The identifier returned by :method:queries/create :param type: str @@ -34,38 +34,38 @@ A short description of this visualization. This is not displayed in the UI. :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. - + :returns: :class:`LegacyVisualization` .. py:method:: delete(id: str) Remove visualization. - + Removes a visualization from the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/delete instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param id: str Widget ID returned by :method:queryvisualizations/create - - + + .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization Edit existing visualization. - + Updates a visualization in the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param id: str The UUID for this visualization. :param created_at: str (optional) @@ -80,6 +80,6 @@ :param type: str (optional) The type of visualization: chart, table, pivot table, and so on. :param updated_at: str (optional) - + :returns: :class:`LegacyVisualization` \ No newline at end of file diff --git a/docs/workspace/sql/redash_config.rst b/docs/workspace/sql/redash_config.rst index cee23c1bd..9b4382dd5 100644 --- a/docs/workspace/sql/redash_config.rst +++ b/docs/workspace/sql/redash_config.rst @@ -9,6 +9,6 @@ .. py:method:: get_config() -> ClientConfig Read workspace configuration for Redash-v2. - + :returns: :class:`ClientConfig` \ No newline at end of file diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst index 5dabcc0d2..44f64b512 100644 --- a/docs/workspace/sql/statement_execution.rst +++ b/docs/workspace/sql/statement_execution.rst @@ -6,13 +6,13 @@ The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result.
- + **Getting started** - + We suggest beginning with the [Databricks SQL Statement Execution API tutorial]. - + **Overview of statement execution and result fetching** - + Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format. If no other parameters are specified, the server will wait for up to 10s before returning a response. If @@ -20,7 +20,7 @@ array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response will provide the statement ID that can be used to poll for results by using a :method:statementexecution/getStatement request. - + You can specify whether the call should behave synchronously, asynchronously or start synchronously with a fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout` settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to @@ -28,7 +28,7 @@ statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, or it can be set to `CANCEL`, which cancels the statement. - + In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30 seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns @@ -40,38 +40,38 @@ seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can be used to fetch status and results in the same way as in the asynchronous mode. - + Depending on the size, the result can be split into multiple chunks. If the statement execution is successful, the statement response contains a manifest and the first chunk of the result. The manifest contains schema information and provides metadata for each chunk in the result. Result chunks can be retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a `next_chunk_index` and `next_chunk_internal_link` that point to the next chunk. - + A statement can be canceled with :method:statementexecution/cancelExecution. - + **Fetching result data: format and disposition** - + To specify the format of the result data, use the `format` field, which can be set to one of the following options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`. - + There are two ways to receive statement results, controlled by the `disposition` setting, which can be either `INLINE` or `EXTERNAL_LINKS`: - + - `INLINE`: In this mode, the result data is directly included in the response. It's best suited for smaller results. This mode can only be used with the `JSON_ARRAY` format. - + - `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`. 
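To make the two dispositions concrete, a minimal sketch, assuming a configured client and a hypothetical `warehouse_id` placeholder:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.sql import Disposition, Format

    w = WorkspaceClient()

    # Small result: data is returned inline in the response body.
    inline = w.statement_execution.execute_statement(
        statement="SELECT 1 AS one",
        warehouse_id=warehouse_id,  # hypothetical placeholder
        format=Format.JSON_ARRAY,
        disposition=Disposition.INLINE,
    )

    # Large result: the response carries links for downloading chunks separately.
    external = w.statement_execution.execute_statement(
        statement="SELECT * FROM samples.nyctaxi.trips",  # assumed sample table
        warehouse_id=warehouse_id,
        format=Format.ARROW_STREAM,
        disposition=Disposition.EXTERNAL_LINKS,
    )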
- + By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`. - + **Limits and limitations** - + Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly match the byte count of the actual payload. - + - Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. - Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum @@ -84,34 +84,33 @@ once every 15 minutes. - The results are only available for one hour after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it. - + [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html - .. py:method:: cancel_execution(statement_id: str) Cancel statement execution. - + Requests that an executing statement be canceled. Callers must poll for status to see the terminal state. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - - + + .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse Execute a SQL statement. - + :param statement: str The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. :param warehouse_id: str Warehouse upon which to execute a statement. See also [What are SQL warehouses?] - + [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html :param byte_limit: int (optional) Applies the given byte limit to the statement's result size. Byte counts are based on internal data @@ -121,37 +120,37 @@ explicitly set. :param catalog: str (optional) Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL. - + [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. - + Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS` disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition. - + When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values, where each value is either the *string representation* of a value, or `null`. For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would look like this: - + ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ``` - + When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result contains compact JSON with no indentation or extra whitespace.
- + When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format]. - + When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a CSV according to [RFC 4180] standard. All the column values will have *string representation* similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first chunk in the result would contain a header row with column names. For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this: - + ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ``` - + [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180 :param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional) @@ -166,27 +165,27 @@ of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a string. - + If the type is given, parameters will be checked for type correctness according to the given type. A value is correct if the provided string can be converted to the requested type using the `cast` function. The exact semantics are described in the section [`cast` function] of the SQL language reference. - + For example, the following statement contains two parameters, `my_name` and `my_date`: - + SELECT * FROM my_table WHERE name = :my_name AND date = :my_date - + The parameters can be passed in the request body as follows: - + { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": "2020-01-01", "type": "DATE" } ] } - + Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL Statement Execution API. - + Also see the section [Parameter markers] of the SQL language reference. - + [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html :param row_limit: int (optional) @@ -195,59 +194,59 @@ the limit or not. :param schema: str (optional) Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. - + [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html :param wait_timeout: str (optional) The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set to 0 or to a value between 5 and 50. - + When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID which can be used for polling with :method:statementexecution/getStatement. - + When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait for the statement execution to finish. If the execution finishes within this time, the call returns immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached.
py:method:: get_statement(statement_id: str) -> StatementResponse Get status, manifest, and result first chunk. - + This request can be used to poll for the statement's status. When the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and further calls will receive an HTTP 404 response. - + **NOTE** This call currently might take up to 5 seconds to get the latest status and result. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - + :returns: :class:`StatementResponse` .. py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData Get result chunk by index. - + After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index. Whereas the first chunk with `chunk_index=0` is typically fetched with :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request can be used to fetch subsequent chunks. The response structure is identical to the nested `result` element described in the :method:statementexecution/getStatement request, and similarly includes the `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. :param chunk_index: int - + :returns: :class:`ResultData` \ No newline at end of file diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst index 7695dbc8b..0ea8b3fc0 100644 --- a/docs/workspace/sql/warehouses.rst +++ b/docs/workspace/sql/warehouses.rst @@ -35,16 +35,16 @@ w.warehouses.delete(id=created.id) Create a warehouse. - + Creates a new SQL warehouse. - + :param auto_stop_mins: int (optional) The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped. - + Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for non-serverless warehouses - 0 indicates no autostop. - + Defaults to 120 mins :param channel: :class:`Channel` (optional) Channel Details @@ -52,14 +52,14 @@ Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, please tune max_num_clusters. - + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large :param creator_name: str (optional) warehouse creator name :param enable_photon: bool (optional) Configures whether the warehouse should use Photon optimized clusters. - + Defaults to false. :param enable_serverless_compute: bool (optional) Configures whether the warehouse should use serverless compute @@ -67,33 +67,33 @@ Deprecated. Instance profile used to pass IAM role to the cluster :param max_num_clusters: int (optional) Maximum number of clusters that the autoscaler will create to handle concurrent queries. - + Supported values: - Must be >= min_num_clusters - Must be <= 30. - + Defaults to min_clusters if unset. 
:param min_num_clusters: int (optional) Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce the cold start time for new queries. This is similar to reserved vs. revocable cores in a resource manager. - + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - + Defaults to 1 :param name: str (optional) Logical name for the cluster. - + Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) Configures whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. - + Supported values: - Number of tags < 45. :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional) Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. @@ -105,13 +105,13 @@ .. py:method:: delete(id: str) Delete a warehouse. - + Deletes a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - - + + .. py:method:: edit(id: str [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[EditWarehouseRequestWarehouseType]]) -> Wait[GetWarehouseResponse] @@ -150,17 +150,17 @@ w.warehouses.delete(id=created.id) Update a warehouse. - + Updates the configuration for a SQL warehouse. - + :param id: str Required. Id of the warehouse to configure. :param auto_stop_mins: int (optional) The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped. - + Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. - + Defaults to 120 mins :param channel: :class:`Channel` (optional) Channel Details @@ -168,14 +168,14 @@ Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, please tune max_num_clusters. - + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large :param creator_name: str (optional) warehouse creator name :param enable_photon: bool (optional) Configures whether the warehouse should use Photon optimized clusters. - + Defaults to false. :param enable_serverless_compute: bool (optional) Configures whether the warehouse should use serverless compute. @@ -183,33 +183,33 @@ Deprecated. Instance profile used to pass IAM role to the cluster :param max_num_clusters: int (optional) Maximum number of clusters that the autoscaler will create to handle concurrent queries. - + Supported values: - Must be >= min_num_clusters - Must be <= 30. - + Defaults to min_clusters if unset.
:param min_num_clusters: int (optional) Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce the cold start time for new queries. This is similar to reserved vs. revocable cores in a resource manager. - + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - + Defaults to 1 :param name: str (optional) Logical name for the cluster. - + Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) Configures whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. - + Supported values: - Number of tags < 45. :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional) Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. @@ -248,46 +248,46 @@ w.warehouses.delete(id=created.id) Get warehouse info. - + Gets the information for a single SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: :class:`GetWarehouseResponse` .. py:method:: get_permission_levels(warehouse_id: str) -> GetWarehousePermissionLevelsResponse Get SQL warehouse permission levels. - + Gets the permission levels that a user can have on an object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. - + :returns: :class:`GetWarehousePermissionLevelsResponse` .. py:method:: get_permissions(warehouse_id: str) -> WarehousePermissions Get SQL warehouse permissions. - + Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. - + :returns: :class:`WarehousePermissions` .. py:method:: get_workspace_warehouse_config() -> GetWorkspaceWarehouseConfigResponse Get the workspace configuration. - + Gets the workspace level configuration that is shared by all SQL warehouses in a workspace. - + :returns: :class:`GetWorkspaceWarehouseConfigResponse` @@ -306,36 +306,36 @@ all = w.warehouses.list(sql.ListWarehousesRequest()) List warehouses. - + Lists all SQL warehouses that a user has manager permissions on. - + :param run_as_user_id: int (optional) Service Principal which will be used to fetch the list of warehouses. If not specified, the user from the session header is used. - + :returns: Iterator over :class:`EndpointInfo` .. py:method:: set_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions Set SQL warehouse permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` ..
py:method:: set_workspace_warehouse_config( [, channel: Optional[Channel], config_param: Optional[RepeatedEndpointConfPairs], data_access_config: Optional[List[EndpointConfPair]], enabled_warehouse_types: Optional[List[WarehouseTypePair]], global_param: Optional[RepeatedEndpointConfPairs], google_service_account: Optional[str], instance_profile_arn: Optional[str], security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy], sql_configuration_parameters: Optional[RepeatedEndpointConfPairs]]) Set the workspace configuration. - + Sets the workspace level configuration that is shared by all SQL warehouses in a workspace. - + :param channel: :class:`Channel` (optional) Optional: Channel selection details :param config_param: :class:`RepeatedEndpointConfPairs` (optional) @@ -358,19 +358,19 @@ Security policy for warehouses :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional) SQL configuration parameters - - + + .. py:method:: start(id: str) -> Wait[GetWarehouseResponse] Start a warehouse. - + Starts a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. @@ -382,12 +382,12 @@ .. py:method:: stop(id: str) -> Wait[GetWarehouseResponse] Stop a warehouse. - + Stops a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_stopped for more details. @@ -399,14 +399,14 @@ .. py:method:: update_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions Update SQL warehouse permissions. - + Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` diff --git a/docs/workspace/vectorsearch/vector_search_endpoints.rst b/docs/workspace/vectorsearch/vector_search_endpoints.rst index 50c335064..07f6783d6 100644 --- a/docs/workspace/vectorsearch/vector_search_endpoints.rst +++ b/docs/workspace/vectorsearch/vector_search_endpoints.rst @@ -9,16 +9,16 @@ .. py:method:: create_endpoint(name: str, endpoint_type: EndpointType [, budget_policy_id: Optional[str]]) -> Wait[EndpointInfo] Create an endpoint. - + Create a new endpoint. - + :param name: str Name of the vector search endpoint :param endpoint_type: :class:`EndpointType` Type of endpoint :param budget_policy_id: str (optional) The budget policy id to be applied - + :returns: Long-running operation waiter for :class:`EndpointInfo`. See :method:wait_get_endpoint_vector_search_endpoint_online for more details. @@ -30,62 +30,62 @@ .. py:method:: delete_endpoint(endpoint_name: str) Delete an endpoint. - + Delete a vector search endpoint. - + :param endpoint_name: str Name of the vector search endpoint - - + + .. py:method:: get_endpoint(endpoint_name: str) -> EndpointInfo Get an endpoint. - + Get details for a single vector search endpoint. - + :param endpoint_name: str Name of the endpoint - + :returns: :class:`EndpointInfo` .. py:method:: list_endpoints( [, page_token: Optional[str]]) -> Iterator[EndpointInfo] List all endpoints. - + List all vector search endpoints in the workspace. 
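A minimal listing sketch, assuming a configured client; the returned iterator follows `page_token` internally:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for ep in w.vector_search_endpoints.list_endpoints():
        print(ep.name, ep.endpoint_type)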
- + :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`EndpointInfo` .. py:method:: update_endpoint_budget_policy(endpoint_name: str, budget_policy_id: str) -> PatchEndpointBudgetPolicyResponse Update the budget policy of an endpoint. - + Update the budget policy of an endpoint - + :param endpoint_name: str Name of the vector search endpoint :param budget_policy_id: str The budget policy id to be applied - + :returns: :class:`PatchEndpointBudgetPolicyResponse` .. py:method:: update_endpoint_custom_tags(endpoint_name: str, custom_tags: List[CustomTag]) -> UpdateEndpointCustomTagsResponse Update the custom tags of an endpoint. - + :param endpoint_name: str Name of the vector search endpoint :param custom_tags: List[:class:`CustomTag`] The new custom tags for the vector search endpoint - + :returns: :class:`UpdateEndpointCustomTagsResponse` diff --git a/docs/workspace/vectorsearch/vector_search_indexes.rst b/docs/workspace/vectorsearch/vector_search_indexes.rst index 90762b275..b7d945dcd 100644 --- a/docs/workspace/vectorsearch/vector_search_indexes.rst +++ b/docs/workspace/vectorsearch/vector_search_indexes.rst @@ -6,7 +6,7 @@ **Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries. - + There are 2 types of Vector Search indexes: - **Delta Sync Index**: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - **Direct Vector Access Index**: An index that supports direct read and write of @@ -15,9 +15,9 @@ .. py:method:: create_index(name: str, endpoint_name: str, primary_key: str, index_type: VectorIndexType [, delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest], direct_access_index_spec: Optional[DirectAccessVectorIndexSpec]]) -> VectorIndex Create an index. - + Create a new index. - + :param name: str Name of the index :param endpoint_name: str @@ -33,68 +33,68 @@ Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`. :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional) Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`. - + :returns: :class:`VectorIndex` .. py:method:: delete_data_vector_index(index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse Delete data from index. - + Handles the deletion of data from a specified vector index. - + :param index_name: str Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. :param primary_keys: List[str] List of primary keys for the data to be deleted. - + :returns: :class:`DeleteDataVectorIndexResponse` .. py:method:: delete_index(index_name: str) Delete an index. - + Delete an index. - + :param index_name: str Name of the index - - + + .. py:method:: get_index(index_name: str) -> VectorIndex Get an index. - + Get an index. - + :param index_name: str Name of the index - + :returns: :class:`VectorIndex` .. py:method:: list_indexes(endpoint_name: str [, page_token: Optional[str]]) -> Iterator[MiniVectorIndex] List indexes. - + List all indexes in the given endpoint. - + :param endpoint_name: str Name of the endpoint :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`MiniVectorIndex` .. 
py:method:: query_index(index_name: str, columns: List[str] [, columns_to_rerank: Optional[List[str]], filters_json: Optional[str], num_results: Optional[int], query_text: Optional[str], query_type: Optional[str], query_vector: Optional[List[float]], score_threshold: Optional[float]]) -> QueryVectorIndexResponse Query an index. - + Query the specified vector index. - + :param index_name: str Name of the vector index to query. :param columns: List[str] @@ -103,9 +103,9 @@ Column names used to retrieve data to send to the reranker. :param filters_json: str (optional) JSON string representing query filters. - + Example filters: - + - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - `{"id <=": 5}`: Filter for id less than or equal to 5. - `{"id >=": 5}`: Filter for id greater than or equal to 5. - `{"id": 5}`: Filter for id equal to 5. @@ -120,66 +120,66 @@ vectors. :param score_threshold: float (optional) Threshold for the approximate nearest neighbor search. Defaults to 0.0. - + :returns: :class:`QueryVectorIndexResponse` .. py:method:: query_next_page(index_name: str [, endpoint_name: Optional[str], page_token: Optional[str]]) -> QueryVectorIndexResponse Query next page. - + Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request to fetch next page of results. - + :param index_name: str Name of the vector index to query. :param endpoint_name: str (optional) Name of the endpoint. :param page_token: str (optional) Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API. - + :returns: :class:`QueryVectorIndexResponse` .. py:method:: scan_index(index_name: str [, last_primary_key: Optional[str], num_results: Optional[int]]) -> ScanVectorIndexResponse Scan an index. - + Scan the specified vector index and return the first `num_results` entries after the exclusive `primary_key`. - + :param index_name: str Name of the vector index to scan. :param last_primary_key: str (optional) Primary key of the last entry returned in the previous scan. :param num_results: int (optional) Number of results to return. Defaults to 10. - + :returns: :class:`ScanVectorIndexResponse` .. py:method:: sync_index(index_name: str) Synchronize an index. - + Triggers a synchronization process for a specified vector index. - + :param index_name: str Name of the vector index to synchronize. Must be a Delta Sync Index. - - + + .. py:method:: upsert_data_vector_index(index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse Upsert data into an index. - + Handles the upserting of data into a specified vector index. - + :param index_name: str Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index. :param inputs_json: str JSON string representing the data to be upserted. - + :returns: :class:`UpsertDataVectorIndexResponse` \ No newline at end of file diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst index d5efd62eb..51989b370 100644 --- a/docs/workspace/workspace/git_credentials.rst +++ b/docs/workspace/workspace/git_credentials.rst @@ -5,9 +5,9 @@ .. py:class:: GitCredentialsAPI Registers a personal access token for Databricks to do operations on behalf of the user. - + See [more info]. - + [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html ..
py:method:: create(git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]]) -> CreateCredentialsResponse @@ -27,11 +27,11 @@ w.git_credentials.delete(credential_id=cr.credential_id) Create a credential entry. - + Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempt to create credentials when an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. - + :param git_provider: str Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, @@ -45,22 +45,22 @@ :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - + :returns: :class:`CreateCredentialsResponse` .. py:method:: delete(credential_id: int) Delete a credential. - + Deletes the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. - - + + .. py:method:: get(credential_id: int) -> GetCredentialsResponse @@ -82,12 +82,12 @@ w.git_credentials.delete(credential_id=cr.credential_id) Get a credential entry. - + Gets the Git credential with the specified credential ID. - + :param credential_id: int The ID for the corresponding credential to access. - + :returns: :class:`GetCredentialsResponse` @@ -105,9 +105,9 @@ list = w.git_credentials.list() Get Git credentials. - + Lists the calling user's Git credentials. One credential per user is supported. - + :returns: Iterator over :class:`CredentialInfo` @@ -137,9 +137,9 @@ w.git_credentials.delete(credential_id=cr.credential_id) Update a credential. - + Updates the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. :param git_provider: str @@ -155,8 +155,8 @@ :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - - + + \ No newline at end of file diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst index 7388ffe6b..2e7520906 100644 --- a/docs/workspace/workspace/repos.rst +++ b/docs/workspace/workspace/repos.rst @@ -6,11 +6,11 @@ The Repos API allows users to manage their git repos. Users can use the API to access all repos that they have manage permissions on. - + Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a repository, committing and pushing, pulling, branch management, and visual comparison of diffs when committing. - + Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD. @@ -39,10 +39,10 @@ w.repos.delete(repo_id=ri.id) Create a repo. - + Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created programmatically must be linked to a remote Git repo, unlike repos created in the browser.
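For example, a minimal sketch of linking and then removing a repo (the remote URL and workspace path here are illustrative, not required values):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Link a workspace Git folder to a remote repo (illustrative URL and path).
    ri = w.repos.create(
        url="https://github.com/databricks/databricks-sdk-py",
        provider="gitHub",
        path=f"/Users/{w.current_user.me().user_name}/sdk-example-repo",
    )

    # Remove the example Git folder again.
    w.repos.delete(repo_id=ri.id)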
- + :param url: str URL of the Git repository to be linked. :param provider: str @@ -55,20 +55,20 @@ :param sparse_checkout: :class:`SparseCheckout` (optional) If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable sparse checkout after the repo is created. - + :returns: :class:`CreateRepoResponse` .. py:method:: delete(repo_id: int) Delete a repo. - + Deletes the specified repo. - + :param repo_id: int The ID for the corresponding repo to delete. - - + + .. py:method:: get(repo_id: int) -> GetRepoResponse @@ -98,36 +98,36 @@ w.repos.delete(repo_id=ri.id) Get a repo. - + Returns the repo with the given repo ID. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. - + :returns: :class:`GetRepoResponse` .. py:method:: get_permission_levels(repo_id: str) -> GetRepoPermissionLevelsResponse Get repo permission levels. - + Gets the permission levels that a user can have on an object. - + :param repo_id: str The repo for which to get or manage permissions. - + :returns: :class:`GetRepoPermissionLevelsResponse` .. py:method:: get_permissions(repo_id: str) -> RepoPermissions Get repo permissions. - + Gets the permissions of a repo. Repos can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. - + :returns: :class:`RepoPermissions` @@ -146,10 +146,10 @@ all = w.repos.list(workspace.ListReposRequest()) Get repos. - + Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate through additional pages. - + :param next_page_token: str (optional) Token used to get the next page of results. If not specified, returns the first page of results as well as a next page token if there are more results. @@ -157,21 +157,21 @@ Filters repos that have paths starting with the given path prefix. If not provided or when provided an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will be served. - + :returns: Iterator over :class:`RepoInfo` .. py:method:: set_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions Set repo permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` @@ -202,10 +202,10 @@ w.repos.delete(repo_id=ri.id) Update a repo. - + Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same branch. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. :param branch: str (optional) @@ -217,19 +217,19 @@ Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD. - - + + .. py:method:: update_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions Update repo permissions. - + Updates the permissions on a repo. Repos can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. 
:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` \ No newline at end of file diff --git a/docs/workspace/workspace/secrets.rst b/docs/workspace/workspace/secrets.rst index 2dc261114..6071802f5 100644 --- a/docs/workspace/workspace/secrets.rst +++ b/docs/workspace/workspace/secrets.rst @@ -5,11 +5,11 @@ .. py:class:: SecretsAPI The Secrets API allows you to manage secrets, secret scopes, and access permissions. - + Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of directly entering your credentials into a notebook, use Databricks secrets to store your credentials and reference them in notebooks and jobs. - + Administrators, secret creators, and users granted permission can read Databricks secrets. While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets. @@ -38,10 +38,10 @@ w.secrets.delete_scope(scope=scope_name) Create a new secret scope. - + The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. - + :param scope: str Scope name requested by the user. Scope names are unique. :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional) @@ -50,98 +50,98 @@ The principal that is initially granted `MANAGE` permission to the created scope. :param scope_backend_type: :class:`ScopeBackendType` (optional) The backend type the scope will be created with. If not specified, will default to `DATABRICKS` - - + + .. py:method:: delete_acl(scope: str, principal: str) Delete an ACL. - + Deletes the given ACL on the given scope. - + Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to remove permissions from. :param principal: str The principal to remove an existing ACL from. - - + + .. py:method:: delete_scope(scope: str) Delete a secret scope. - + Deletes a secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str Name of the scope to delete. - - + + .. py:method:: delete_secret(scope: str, key: str) Delete a secret. - + Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope that contains the secret to delete. :param key: str Name of the secret to delete. - - + + .. py:method:: get_acl(scope: str, principal: str) -> AclItem Get secret ACL details. - + Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to fetch ACL information from. :param principal: str The principal to fetch ACL information for. - + :returns: :class:`AclItem` .. 
py:method:: get_secret(scope: str, key: str) -> GetSecretResponse Get a secret. - + Gets the bytes representation of a secret value for the specified scope and key. - + Users need the READ permission to make this call. - + Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the caller in DBUtils and the type the data is decoded into. - + Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. - + :param scope: str The name of the scope to fetch secret information from. :param key: str The key to fetch secret for. - + :returns: :class:`GetSecretResponse` @@ -171,15 +171,15 @@ w.secrets.delete_scope(scope=scope_name) Lists ACLs. - + List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to fetch ACL information from. - + :returns: Iterator over :class:`AclItem` @@ -197,11 +197,11 @@ scopes = w.secrets.list_scopes() List all scopes. - + Lists all secret scopes available in the workspace. - + Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :returns: Iterator over :class:`SecretScope` @@ -231,17 +231,17 @@ w.secrets.delete_scope(scope=scope_name) List secret keys. - + Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. - + The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to list secrets within. - + :returns: Iterator over :class:`SecretMetadata` @@ -279,40 +279,40 @@ w.secrets.delete_scope(scope=scope_name) Create/update an ACL. - + Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. - + In general, a user or group will use the most powerful permission available to them, and permissions are ordered as follows: - + * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what secrets are available. - + Note that in general, secret values can only be read from within a command on a cluster (for example, through a notebook). There is no API to read the actual secret value material outside of a cluster. However, the user's permission will be applied based on who is executing the command, and they must have at least READ permission. - + Users must have the `MANAGE` permission to invoke this API. - + The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to apply permissions to. 
:param principal: str The principal to which the permission is applied. :param permission: :class:`AclPermission` The permission level applied to the principal. - - + + .. py:method:: put_secret(scope: str, key: str [, bytes_value: Optional[str], string_value: Optional[str]]) @@ -341,23 +341,23 @@ w.secrets.delete_scope(scope=scope_name) Add a secret. - + Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using the secret scope's encryption settings before storing it. - + You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. - + The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine the value returned when the secret value is requested. Exactly one must be specified. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if the maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope with which the secret will be associated. :param key: str @@ -366,6 +366,6 @@ If specified, value will be stored as bytes. :param string_value: str (optional) If specified, note that the value will be stored in UTF-8 (MB4) form. - - + + \ No newline at end of file diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index a33023065..0f7a6eacb 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -5,29 +5,29 @@ .. py:class:: WorkspaceExt The Workspace API allows you to list, import, export, and delete notebooks and folders. - + A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text. .. py:method:: delete(path: str [, recursive: Optional[bool]]) Delete a workspace object. - + Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. - + Object deletion cannot be undone and deleting a directory recursively is not atomic. - + :param path: str The absolute path of the notebook or directory. :param recursive: bool (optional) The flag that specifies whether to delete the object recursively. It is `false` by default. Please note that deleting a directory is not atomic: if it fails partway through, some objects under this directory may already be deleted and cannot be undone. - - + + .. py:method:: download(path: str [, format: ExportFormat]) -> BinaryIO @@ -84,58 +84,58 @@ export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook) Export a workspace object. - + Exports an object or the contents of an entire directory. - + If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + If the exported data would exceed the size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`.
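As a sketch, assuming the notebook path below exists and that `ExportResponse.content` carries base64-encoded data (as in the usage example above), the exported source can be decoded like this:

.. code-block::

    import base64

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import workspace

    w = WorkspaceClient()

    # Export a notebook as source code; the path is illustrative.
    resp = w.workspace.export(
        path=f"/Users/{w.current_user.me().user_name}/my-notebook",
        format=workspace.ExportFormat.SOURCE,
    )

    # The returned content is base64-encoded; decode it to text.
    print(base64.b64decode(resp.content).decode("utf-8"))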
Currently, this API does not support exporting a library. - + :param path: str The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`, `SOURCE`, and `AUTO` formats. :param format: :class:`ExportFormat` (optional) This specifies the format of the exported file. By default, this is `SOURCE`. - + The value is case sensitive. - + - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format. Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory is exported depending on the object's type. Directory exports will include notebooks and workspace files. - + :returns: :class:`ExportResponse` .. py:method:: get_permission_levels(workspace_object_type: str, workspace_object_id: str) -> GetWorkspaceObjectPermissionLevelsResponse Get workspace object permission levels. - + Gets the permission levels that a user can have on an object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`GetWorkspaceObjectPermissionLevelsResponse` .. py:method:: get_permissions(workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions Get workspace object permissions. - + Gets the permissions of a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`WorkspaceObjectPermissions` @@ -157,13 +157,13 @@ obj = w.workspace.get_status(path=notebook_path) Get status. - + Gets the status of an object or a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The absolute path of the notebook or directory. - + :returns: :class:`ObjectInfo` @@ -188,31 +188,31 @@ content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(), format=workspace.ImportFormat.SOURCE, language=workspace.Language.SQL, overwrite=True, path=notebook_path, ) Import a workspace object. - + Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you must set the `language` field. - + :param path: str The absolute path of the object or directory. Importing a directory is only supported for the `DBC` and `SOURCE` formats. :param content: str (optional) The base64-encoded content. This has a limit of 10 MB. - + If the limit (10MB) is exceeded, an exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. This parameter might be absent, and instead a posted file is used. :param format: :class:`ImportFormat` (optional) This specifies the format of the file to be imported. - + The value is case sensitive. 
- + - `AUTO`: The item is imported depending on an analysis of the item's extension and the header content provided in the request. If the item is imported as a notebook, then the item's extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`: @@ -224,8 +224,8 @@ :param overwrite: bool (optional) The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC` format, `overwrite` is not supported since it may contain a directory. - - + + .. py:method:: list(path: str [, notebooks_modified_after: int, recursive: bool = False]) -> ObjectInfo @@ -235,16 +235,14 @@ .. code-block:: - import os - import time - from databricks.sdk import WorkspaceClient w = WorkspaceClient() - notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" - - objects = w.workspace.list(path=os.path.dirname(notebook)) + names = [] + for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True): + names.append(i.path) + assert len(names) > 0 List workspace objects @@ -257,51 +255,51 @@ .. py:method:: mkdirs(path: str) Create a directory. - + Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`. - + Note that if this operation fails it may have succeeded in creating some of the necessary parent directories. - + :param path: str The absolute path of the directory. If the parent directories do not exist, it will also create them. If the directory already exists, this command will do nothing and succeed. - - + + .. py:method:: set_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions Set workspace object permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions` .. py:method:: update_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions Update workspace object permissions. - + Updates the permissions on a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. 
:param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions` From 0e57a66c52d59bcfe4579f8200a4a88c03bc9bb1 Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Thu, 5 Jun 2025 09:54:19 +0000 Subject: [PATCH 2/3] update --- .codegen.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.codegen.json b/.codegen.json index 592104872..65077c1cc 100644 --- a/.codegen.json +++ b/.codegen.json @@ -14,6 +14,8 @@ "pip install '.[dev]'" ], "post_generate": [ + "make fmt", + "pytest -m 'not integration' --cov=databricks --cov-report html tests", "pip install .", "python3.12 docs/gen-client-docs.py" ] From 90262c223f9ff5be9238a45a0f9d2634c1107db0 Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Thu, 5 Jun 2025 10:49:31 +0000 Subject: [PATCH 3/3] update --- databricks/sdk/__init__.py | 707 +- databricks/sdk/errors/overrides.py | 36 +- databricks/sdk/errors/platform.py | 114 +- databricks/sdk/service/aibuilder.py | 336 +- databricks/sdk/service/apps.py | 1679 +- databricks/sdk/service/billing.py | 1901 ++- databricks/sdk/service/catalog.py | 14231 +++++++++------- databricks/sdk/service/cleanrooms.py | 1425 +- databricks/sdk/service/compute.py | 10971 +++++++----- databricks/sdk/service/dashboards.py | 2595 +-- databricks/sdk/service/database.py | 1222 +- databricks/sdk/service/files.py | 1131 +- databricks/sdk/service/iam.py | 4525 ++--- databricks/sdk/service/jobs.py | 8582 ++++++---- databricks/sdk/service/marketplace.py | 5040 +++--- databricks/sdk/service/ml.py | 8244 +++++---- databricks/sdk/service/oauth2.py | 2031 +-- databricks/sdk/service/pipelines.py | 3858 +++-- databricks/sdk/service/provisioning.py | 3232 ++-- databricks/sdk/service/qualitymonitorv2.py | 307 +- databricks/sdk/service/serving.py | 4599 +++-- databricks/sdk/service/settings.py | 8947 +++++----- databricks/sdk/service/sharing.py | 3748 ++-- databricks/sdk/service/sql.py | 10662 +++++++----- databricks/sdk/service/vectorsearch.py | 1970 ++- databricks/sdk/service/workspace.py | 2969 ++-- docs/account/billing/billable_usage.rst | 10 +- docs/account/billing/budget_policy.rst | 34 +- docs/account/billing/budgets.rst | 32 +- docs/account/billing/log_delivery.rst | 34 +- docs/account/billing/usage_dashboards.rst | 12 +- .../account/catalog/metastore_assignments.rst | 36 +- docs/account/catalog/metastores.rst | 30 +- docs/account/catalog/storage_credentials.rst | 36 +- docs/account/iam/access_control.rst | 24 +- docs/account/iam/groups.rst | 50 +- docs/account/iam/service_principals.rst | 50 +- docs/account/iam/users.rst | 56 +- docs/account/iam/workspace_assignment.rst | 26 +- .../account/oauth2/custom_app_integration.rst | 36 +- docs/account/oauth2/federation_policy.rst | 40 +- docs/account/oauth2/o_auth_published_apps.rst | 6 +- .../oauth2/published_app_integration.rst | 36 +- .../service_principal_federation_policy.rst | 38 +- .../oauth2/service_principal_secrets.rst | 27 +- docs/account/provisioning/credentials.rst | 30 +- docs/account/provisioning/encryption_keys.rst | 40 +- docs/account/provisioning/networks.rst | 30 +- docs/account/provisioning/private_access.rst | 66 +- docs/account/provisioning/storage.rst | 28 +- docs/account/provisioning/vpc_endpoints.rst | 40 +- docs/account/provisioning/workspaces.rst | 88 +- .../settings/csp_enablement_account.rst | 16 +- .../settings/disable_legacy_features.rst | 22 +- .../settings/enable_ip_access_lists.rst | 20 +- .../settings/esm_enablement_account.rst | 14 +- docs/account/settings/ip_access_lists.rst 
| 74 +- .../llm_proxy_partner_powered_account.rst | 14 +- .../llm_proxy_partner_powered_enforce.rst | 14 +- .../account/settings/network_connectivity.rst | 67 +- docs/account/settings/network_policies.rst | 32 +- docs/account/settings/personal_compute.rst | 22 +- docs/account/settings/settings.rst | 6 +- .../workspace_network_configuration.rst | 12 +- docs/dbdataclasses/aibuilder.rst | 4 - docs/dbdataclasses/apps.rst | 4 - docs/dbdataclasses/billing.rst | 4 - docs/dbdataclasses/catalog.rst | 4 - docs/dbdataclasses/cleanrooms.rst | 4 - docs/dbdataclasses/compute.rst | 4 - docs/dbdataclasses/dashboards.rst | 4 - docs/dbdataclasses/database.rst | 4 - docs/dbdataclasses/files.rst | 4 - docs/dbdataclasses/iam.rst | 4 - docs/dbdataclasses/jobs.rst | 4 - docs/dbdataclasses/marketplace.rst | 4 - docs/dbdataclasses/ml.rst | 4 - docs/dbdataclasses/oauth2.rst | 4 - docs/dbdataclasses/pipelines.rst | 4 - docs/dbdataclasses/provisioning.rst | 4 - docs/dbdataclasses/qualitymonitorv2.rst | 4 - docs/dbdataclasses/serving.rst | 4 - docs/dbdataclasses/settings.rst | 4 - docs/dbdataclasses/sharing.rst | 4 - docs/dbdataclasses/sql.rst | 4 - docs/dbdataclasses/vectorsearch.rst | 4 - docs/dbdataclasses/workspace.rst | 4 - docs/workspace/aibuilder/custom_llms.rst | 22 +- docs/workspace/apps/apps.rst | 84 +- .../workspace/catalog/artifact_allowlists.rst | 12 +- docs/workspace/catalog/catalogs.rst | 36 +- docs/workspace/catalog/connections.rst | 36 +- docs/workspace/catalog/credentials.rst | 58 +- docs/workspace/catalog/external_locations.rst | 36 +- docs/workspace/catalog/functions.rst | 38 +- docs/workspace/catalog/grants.rst | 24 +- docs/workspace/catalog/metastores.rst | 68 +- docs/workspace/catalog/model_versions.rst | 48 +- docs/workspace/catalog/online_tables.rst | 20 +- docs/workspace/catalog/quality_monitors.rst | 100 +- docs/workspace/catalog/registered_models.rst | 78 +- docs/workspace/catalog/resource_quotas.rst | 15 +- docs/workspace/catalog/schemas.rst | 32 +- .../workspace/catalog/storage_credentials.rst | 46 +- docs/workspace/catalog/system_schemas.rst | 22 +- docs/workspace/catalog/table_constraints.rst | 22 +- docs/workspace/catalog/tables.rst | 46 +- .../catalog/temporary_table_credentials.rst | 6 +- docs/workspace/catalog/volumes.rst | 56 +- docs/workspace/catalog/workspace_bindings.rst | 32 +- .../cleanrooms/clean_room_assets.rst | 34 +- .../cleanrooms/clean_room_task_runs.rst | 6 +- docs/workspace/cleanrooms/clean_rooms.rst | 42 +- docs/workspace/compute/cluster_policies.rst | 80 +- docs/workspace/compute/clusters.rst | 234 +- docs/workspace/compute/command_execution.rst | 46 +- .../workspace/compute/global_init_scripts.rst | 40 +- docs/workspace/compute/instance_pools.rst | 66 +- docs/workspace/compute/instance_profiles.rst | 53 +- docs/workspace/compute/libraries.rst | 32 +- .../policy_compliance_for_clusters.rst | 28 +- docs/workspace/compute/policy_families.rst | 16 +- docs/workspace/dashboards/genie.rst | 72 +- docs/workspace/dashboards/lakeview.rst | 96 +- .../dashboards/lakeview_embedded.rst | 8 +- docs/workspace/database/database.rst | 72 +- docs/workspace/files/dbfs.rst | 64 +- docs/workspace/files/files.rst | 78 +- docs/workspace/iam/access_control.rst | 4 +- .../iam/account_access_control_proxy.rst | 24 +- docs/workspace/iam/current_user.rst | 4 +- docs/workspace/iam/groups.rst | 50 +- docs/workspace/iam/permission_migration.rst | 4 +- docs/workspace/iam/permissions.rst | 26 +- docs/workspace/iam/service_principals.rst | 50 +- docs/workspace/iam/users.rst | 76 +- 
docs/workspace/jobs/jobs.rst | 216 +- .../jobs/policy_compliance_for_jobs.rst | 22 +- .../marketplace/consumer_fulfillments.rst | 12 +- .../marketplace/consumer_installations.rst | 32 +- .../marketplace/consumer_listings.rst | 24 +- .../consumer_personalization_requests.rst | 18 +- .../marketplace/consumer_providers.rst | 18 +- .../marketplace/provider_exchange_filters.rst | 26 +- .../marketplace/provider_exchanges.rst | 58 +- docs/workspace/marketplace/provider_files.rst | 26 +- .../marketplace/provider_listings.rst | 32 +- .../provider_personalization_requests.rst | 12 +- ...provider_provider_analytics_dashboards.rst | 18 +- .../marketplace/provider_providers.rst | 32 +- docs/workspace/ml/experiments.rst | 316 +- docs/workspace/ml/forecasting.rst | 12 +- docs/workspace/ml/model_registry.rst | 350 +- docs/workspace/pipelines/pipelines.rst | 100 +- .../qualitymonitorv2/quality_monitor_v2.rst | 32 +- docs/workspace/serving/serving_endpoints.rst | 106 +- .../serving/serving_endpoints_data_plane.rst | 4 +- ...aibi_dashboard_embedding_access_policy.rst | 20 +- ...i_dashboard_embedding_approved_domains.rst | 20 +- .../settings/automatic_cluster_update.rst | 14 +- .../settings/compliance_security_profile.rst | 16 +- .../settings/credentials_manager.rst | 6 +- .../dashboard_email_subscriptions.rst | 20 +- docs/workspace/settings/default_namespace.rst | 24 +- .../settings/disable_legacy_access.rst | 22 +- .../settings/disable_legacy_dbfs.rst | 24 +- .../settings/enable_export_notebook.rst | 12 +- .../enable_notebook_table_clipboard.rst | 12 +- .../settings/enable_results_downloading.rst | 12 +- .../settings/enhanced_security_monitoring.rst | 16 +- docs/workspace/settings/ip_access_lists.rst | 74 +- .../llm_proxy_partner_powered_workspace.rst | 20 +- .../settings/notification_destinations.rst | 32 +- .../settings/restrict_workspace_admins.rst | 20 +- docs/workspace/settings/settings.rst | 14 +- .../settings/sql_results_download.rst | 20 +- docs/workspace/settings/token_management.rst | 46 +- docs/workspace/settings/tokens.rst | 22 +- docs/workspace/settings/workspace_conf.rst | 14 +- docs/workspace/sharing/providers.rst | 46 +- .../sharing/recipient_activation.rst | 16 +- .../sharing/recipient_federation_policies.rst | 40 +- docs/workspace/sharing/recipients.rst | 52 +- docs/workspace/sharing/shares.rst | 56 +- docs/workspace/sql/alerts.rst | 34 +- docs/workspace/sql/alerts_legacy.rst | 56 +- docs/workspace/sql/alerts_v2.rst | 34 +- docs/workspace/sql/dashboard_widgets.rst | 14 +- docs/workspace/sql/dashboards.rst | 42 +- docs/workspace/sql/data_sources.rst | 14 +- docs/workspace/sql/dbsql_permissions.rst | 42 +- docs/workspace/sql/queries.rst | 40 +- docs/workspace/sql/queries_legacy.rst | 90 +- docs/workspace/sql/query_history.rst | 8 +- docs/workspace/sql/query_visualizations.rst | 22 +- .../sql/query_visualizations_legacy.rst | 36 +- docs/workspace/sql/redash_config.rst | 2 +- docs/workspace/sql/statement_execution.rst | 105 +- docs/workspace/sql/warehouses.rst | 120 +- .../vectorsearch/vector_search_endpoints.rst | 36 +- .../vectorsearch/vector_search_indexes.rst | 70 +- docs/workspace/workspace/git_credentials.rst | 40 +- docs/workspace/workspace/repos.rst | 62 +- docs/workspace/workspace/secrets.rst | 118 +- docs/workspace/workspace/workspace.rst | 80 +- 205 files changed, 61540 insertions(+), 50325 deletions(-) diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 4fbaddab8..612d1dd1b 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -1,11 
+1,18 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. import logging +from typing import Optional import databricks.sdk.core as client import databricks.sdk.dbutils as dbutils +from databricks.sdk import azure from databricks.sdk.credentials_provider import CredentialsStrategy from databricks.sdk.data_plane import DataPlaneTokenSource +from databricks.sdk.mixins.compute import ClustersExt +from databricks.sdk.mixins.files import DbfsExt, FilesExt +from databricks.sdk.mixins.jobs import JobsExt +from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt +from databricks.sdk.mixins.workspace import WorkspaceExt from databricks.sdk.service import aibuilder as pkg_aibuilder from databricks.sdk.service import apps as pkg_apps from databricks.sdk.service import billing as pkg_billing @@ -29,176 +36,111 @@ from databricks.sdk.service import sql as pkg_sql from databricks.sdk.service import vectorsearch as pkg_vectorsearch from databricks.sdk.service import workspace as pkg_workspace - -from databricks.sdk.mixins.files import DbfsExt, FilesExt -from databricks.sdk.mixins.compute import ClustersExt -from databricks.sdk.mixins.workspace import WorkspaceExt -from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt -from databricks.sdk.mixins.jobs import JobsExt -from databricks.sdk.service.iam import AccessControlAPI -from databricks.sdk.service.iam import AccountAccessControlAPI -from databricks.sdk.service.iam import AccountAccessControlProxyAPI -from databricks.sdk.service.settings import AibiDashboardEmbeddingAccessPolicyAPI -from databricks.sdk.service.settings import AibiDashboardEmbeddingApprovedDomainsAPI -from databricks.sdk.service.sql import AlertsAPI -from databricks.sdk.service.sql import AlertsLegacyAPI -from databricks.sdk.service.sql import AlertsV2API -from databricks.sdk.service.apps import AppsAPI -from databricks.sdk.service.catalog import ArtifactAllowlistsAPI -from databricks.sdk.service.settings import AutomaticClusterUpdateAPI -from databricks.sdk.service.billing import BillableUsageAPI -from databricks.sdk.service.billing import BudgetPolicyAPI -from databricks.sdk.service.catalog import CatalogsAPI -from databricks.sdk.service.cleanrooms import CleanRoomAssetsAPI -from databricks.sdk.service.cleanrooms import CleanRoomTaskRunsAPI -from databricks.sdk.service.cleanrooms import CleanRoomsAPI -from databricks.sdk.service.compute import ClusterPoliciesAPI -from databricks.sdk.service.compute import ClustersAPI -from databricks.sdk.service.compute import CommandExecutionAPI -from databricks.sdk.service.settings import ComplianceSecurityProfileAPI -from databricks.sdk.service.catalog import ConnectionsAPI -from databricks.sdk.service.marketplace import ConsumerFulfillmentsAPI -from databricks.sdk.service.marketplace import ConsumerInstallationsAPI -from databricks.sdk.service.marketplace import ConsumerListingsAPI -from databricks.sdk.service.marketplace import ConsumerPersonalizationRequestsAPI -from databricks.sdk.service.marketplace import ConsumerProvidersAPI -from databricks.sdk.service.catalog import CredentialsAPI -from databricks.sdk.service.provisioning import CredentialsAPI -from databricks.sdk.service.settings import CredentialsManagerAPI -from databricks.sdk.service.settings import CspEnablementAccountAPI -from databricks.sdk.service.iam import CurrentUserAPI -from databricks.sdk.service.oauth2 import CustomAppIntegrationAPI from databricks.sdk.service.aibuilder import CustomLlmsAPI -from 
databricks.sdk.service.settings import DashboardEmailSubscriptionsAPI -from databricks.sdk.service.sql import DashboardWidgetsAPI -from databricks.sdk.service.sql import DashboardsAPI -from databricks.sdk.service.sql import DataSourcesAPI +from databricks.sdk.service.apps import AppsAPI +from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI, + BudgetsAPI, LogDeliveryAPI, + UsageDashboardsAPI) +from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, + AccountMetastoresAPI, + AccountStorageCredentialsAPI, + ArtifactAllowlistsAPI, CatalogsAPI, + ConnectionsAPI, CredentialsAPI, + ExternalLocationsAPI, FunctionsAPI, + GrantsAPI, MetastoresAPI, + ModelVersionsAPI, OnlineTablesAPI, + QualityMonitorsAPI, + RegisteredModelsAPI, + ResourceQuotasAPI, SchemasAPI, + StorageCredentialsAPI, + SystemSchemasAPI, + TableConstraintsAPI, TablesAPI, + TemporaryTableCredentialsAPI, + VolumesAPI, WorkspaceBindingsAPI) +from databricks.sdk.service.cleanrooms import (CleanRoomAssetsAPI, + CleanRoomsAPI, + CleanRoomTaskRunsAPI) +from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI, + CommandExecutionAPI, + GlobalInitScriptsAPI, + InstancePoolsAPI, + InstanceProfilesAPI, LibrariesAPI, + PolicyComplianceForClustersAPI, + PolicyFamiliesAPI) +from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI, + LakeviewEmbeddedAPI) from databricks.sdk.service.database import DatabaseAPI -from databricks.sdk.service.files import DbfsAPI -from databricks.sdk.service.sql import DbsqlPermissionsAPI -from databricks.sdk.service.settings import DefaultNamespaceAPI -from databricks.sdk.service.settings import DisableLegacyAccessAPI -from databricks.sdk.service.settings import DisableLegacyDbfsAPI -from databricks.sdk.service.settings import DisableLegacyFeaturesAPI -from databricks.sdk.service.settings import EnableExportNotebookAPI -from databricks.sdk.service.settings import EnableIpAccessListsAPI -from databricks.sdk.service.settings import EnableNotebookTableClipboardAPI -from databricks.sdk.service.settings import EnableResultsDownloadingAPI -from databricks.sdk.service.provisioning import EncryptionKeysAPI -from databricks.sdk.service.settings import EnhancedSecurityMonitoringAPI -from databricks.sdk.service.settings import EsmEnablementAccountAPI -from databricks.sdk.service.ml import ExperimentsAPI -from databricks.sdk.service.catalog import ExternalLocationsAPI -from databricks.sdk.service.oauth2 import AccountFederationPolicyAPI -from databricks.sdk.service.files import FilesAPI -from databricks.sdk.service.catalog import FunctionsAPI -from databricks.sdk.service.dashboards import GenieAPI -from databricks.sdk.service.workspace import GitCredentialsAPI -from databricks.sdk.service.compute import GlobalInitScriptsAPI -from databricks.sdk.service.catalog import GrantsAPI -from databricks.sdk.service.iam import GroupsAPI -from databricks.sdk.service.iam import AccountGroupsAPI -from databricks.sdk.service.compute import InstancePoolsAPI -from databricks.sdk.service.compute import InstanceProfilesAPI -from databricks.sdk.service.settings import IpAccessListsAPI -from databricks.sdk.service.settings import AccountIpAccessListsAPI -from databricks.sdk.service.jobs import JobsAPI -from databricks.sdk.service.dashboards import LakeviewAPI -from databricks.sdk.service.dashboards import LakeviewEmbeddedAPI -from databricks.sdk.service.compute import LibrariesAPI -from databricks.sdk.service.settings import LlmProxyPartnerPoweredAccountAPI -from 
databricks.sdk.service.settings import LlmProxyPartnerPoweredEnforceAPI -from databricks.sdk.service.settings import LlmProxyPartnerPoweredWorkspaceAPI -from databricks.sdk.service.billing import LogDeliveryAPI -from databricks.sdk.service.catalog import AccountMetastoreAssignmentsAPI -from databricks.sdk.service.catalog import MetastoresAPI -from databricks.sdk.service.catalog import AccountMetastoresAPI -from databricks.sdk.service.ml import ModelRegistryAPI -from databricks.sdk.service.catalog import ModelVersionsAPI -from databricks.sdk.service.settings import NetworkConnectivityAPI -from databricks.sdk.service.settings import NetworkPoliciesAPI -from databricks.sdk.service.provisioning import NetworksAPI -from databricks.sdk.service.settings import NotificationDestinationsAPI -from databricks.sdk.service.oauth2 import OAuthPublishedAppsAPI -from databricks.sdk.service.catalog import OnlineTablesAPI -from databricks.sdk.service.iam import PermissionMigrationAPI -from databricks.sdk.service.iam import PermissionsAPI -from databricks.sdk.service.settings import PersonalComputeAPI +from databricks.sdk.service.files import DbfsAPI, FilesAPI +from databricks.sdk.service.iam import (AccessControlAPI, + AccountAccessControlAPI, + AccountAccessControlProxyAPI, + AccountGroupsAPI, + AccountServicePrincipalsAPI, + AccountUsersAPI, CurrentUserAPI, + GroupsAPI, PermissionMigrationAPI, + PermissionsAPI, ServicePrincipalsAPI, + UsersAPI, WorkspaceAssignmentAPI) +from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI +from databricks.sdk.service.marketplace import ( + ConsumerFulfillmentsAPI, ConsumerInstallationsAPI, ConsumerListingsAPI, + ConsumerPersonalizationRequestsAPI, ConsumerProvidersAPI, + ProviderExchangeFiltersAPI, ProviderExchangesAPI, ProviderFilesAPI, + ProviderListingsAPI, ProviderPersonalizationRequestsAPI, + ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI) +from databricks.sdk.service.ml import (ExperimentsAPI, ForecastingAPI, + ModelRegistryAPI) +from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI, + CustomAppIntegrationAPI, + OAuthPublishedAppsAPI, + PublishedAppIntegrationAPI, + ServicePrincipalFederationPolicyAPI, + ServicePrincipalSecretsAPI) from databricks.sdk.service.pipelines import PipelinesAPI -from databricks.sdk.service.compute import PolicyComplianceForClustersAPI -from databricks.sdk.service.jobs import PolicyComplianceForJobsAPI -from databricks.sdk.service.compute import PolicyFamiliesAPI -from databricks.sdk.service.provisioning import PrivateAccessAPI -from databricks.sdk.service.marketplace import ProviderExchangeFiltersAPI -from databricks.sdk.service.marketplace import ProviderExchangesAPI -from databricks.sdk.service.marketplace import ProviderFilesAPI -from databricks.sdk.service.marketplace import ProviderListingsAPI -from databricks.sdk.service.marketplace import ProviderPersonalizationRequestsAPI -from databricks.sdk.service.marketplace import ProviderProviderAnalyticsDashboardsAPI -from databricks.sdk.service.marketplace import ProviderProvidersAPI -from databricks.sdk.service.sharing import ProvidersAPI -from databricks.sdk.service.oauth2 import PublishedAppIntegrationAPI +from databricks.sdk.service.provisioning import (CredentialsAPI, + EncryptionKeysAPI, + NetworksAPI, PrivateAccessAPI, + StorageAPI, VpcEndpointsAPI, + Workspace, WorkspacesAPI) from databricks.sdk.service.qualitymonitorv2 import QualityMonitorV2API -from databricks.sdk.service.catalog import QualityMonitorsAPI -from 
databricks.sdk.service.sql import QueriesAPI -from databricks.sdk.service.sql import QueriesLegacyAPI -from databricks.sdk.service.sql import QueryHistoryAPI -from databricks.sdk.service.sql import QueryVisualizationsAPI -from databricks.sdk.service.sql import QueryVisualizationsLegacyAPI -from databricks.sdk.service.sharing import RecipientActivationAPI -from databricks.sdk.service.sharing import RecipientFederationPoliciesAPI -from databricks.sdk.service.sharing import RecipientsAPI -from databricks.sdk.service.sql import RedashConfigAPI -from databricks.sdk.service.catalog import RegisteredModelsAPI -from databricks.sdk.service.workspace import ReposAPI -from databricks.sdk.service.catalog import ResourceQuotasAPI -from databricks.sdk.service.settings import RestrictWorkspaceAdminsAPI -from databricks.sdk.service.catalog import SchemasAPI -from databricks.sdk.service.workspace import SecretsAPI -from databricks.sdk.service.oauth2 import ServicePrincipalFederationPolicyAPI -from databricks.sdk.service.oauth2 import ServicePrincipalSecretsAPI -from databricks.sdk.service.iam import ServicePrincipalsAPI -from databricks.sdk.service.iam import AccountServicePrincipalsAPI -from databricks.sdk.service.serving import ServingEndpointsAPI -from databricks.sdk.service.serving import ServingEndpointsDataPlaneAPI -from databricks.sdk.service.settings import SettingsAPI -from databricks.sdk.service.settings import AccountSettingsAPI -from databricks.sdk.service.sharing import SharesAPI -from databricks.sdk.service.settings import SqlResultsDownloadAPI -from databricks.sdk.service.sql import StatementExecutionAPI -from databricks.sdk.service.provisioning import StorageAPI -from databricks.sdk.service.catalog import StorageCredentialsAPI -from databricks.sdk.service.catalog import AccountStorageCredentialsAPI -from databricks.sdk.service.catalog import SystemSchemasAPI -from databricks.sdk.service.catalog import TableConstraintsAPI -from databricks.sdk.service.catalog import TablesAPI -from databricks.sdk.service.catalog import TemporaryTableCredentialsAPI -from databricks.sdk.service.settings import TokenManagementAPI -from databricks.sdk.service.settings import TokensAPI -from databricks.sdk.service.billing import UsageDashboardsAPI -from databricks.sdk.service.iam import UsersAPI -from databricks.sdk.service.iam import AccountUsersAPI -from databricks.sdk.service.vectorsearch import VectorSearchEndpointsAPI -from databricks.sdk.service.vectorsearch import VectorSearchIndexesAPI -from databricks.sdk.service.catalog import VolumesAPI -from databricks.sdk.service.provisioning import VpcEndpointsAPI -from databricks.sdk.service.sql import WarehousesAPI -from databricks.sdk.service.workspace import WorkspaceAPI -from databricks.sdk.service.iam import WorkspaceAssignmentAPI -from databricks.sdk.service.catalog import WorkspaceBindingsAPI -from databricks.sdk.service.settings import WorkspaceConfAPI -from databricks.sdk.service.settings import WorkspaceNetworkConfigurationAPI -from databricks.sdk.service.provisioning import WorkspacesAPI -from databricks.sdk.service.billing import BudgetsAPI -from databricks.sdk.service.ml import ForecastingAPI -from databricks.sdk.service.provisioning import Workspace -from databricks.sdk import azure -from typing import Optional - - +from databricks.sdk.service.serving import (ServingEndpointsAPI, + ServingEndpointsDataPlaneAPI) +from databricks.sdk.service.settings import ( + AccountIpAccessListsAPI, AccountSettingsAPI, + AibiDashboardEmbeddingAccessPolicyAPI, + 
AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI, + ComplianceSecurityProfileAPI, CredentialsManagerAPI, + CspEnablementAccountAPI, DashboardEmailSubscriptionsAPI, + DefaultNamespaceAPI, DisableLegacyAccessAPI, DisableLegacyDbfsAPI, + DisableLegacyFeaturesAPI, EnableExportNotebookAPI, EnableIpAccessListsAPI, + EnableNotebookTableClipboardAPI, EnableResultsDownloadingAPI, + EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI, + LlmProxyPartnerPoweredAccountAPI, LlmProxyPartnerPoweredEnforceAPI, + LlmProxyPartnerPoweredWorkspaceAPI, NetworkConnectivityAPI, + NetworkPoliciesAPI, NotificationDestinationsAPI, PersonalComputeAPI, + RestrictWorkspaceAdminsAPI, SettingsAPI, SqlResultsDownloadAPI, + TokenManagementAPI, TokensAPI, WorkspaceConfAPI, + WorkspaceNetworkConfigurationAPI) +from databricks.sdk.service.sharing import (ProvidersAPI, + RecipientActivationAPI, + RecipientFederationPoliciesAPI, + RecipientsAPI, SharesAPI) +from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI, + AlertsV2API, DashboardsAPI, + DashboardWidgetsAPI, DataSourcesAPI, + DbsqlPermissionsAPI, QueriesAPI, + QueriesLegacyAPI, QueryHistoryAPI, + QueryVisualizationsAPI, + QueryVisualizationsLegacyAPI, + RedashConfigAPI, StatementExecutionAPI, + WarehousesAPI) +from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, + VectorSearchIndexesAPI) +from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI, + SecretsAPI, WorkspaceAPI) _LOG = logging.getLogger(__name__) + def _make_dbutils(config: client.Config): # We try to directly check if we are in runtime, instead of # trying to import from databricks.sdk.runtime. This is to prevent @@ -212,6 +154,7 @@ def _make_dbutils(config: client.Config): # We are in runtime, so we can use the runtime dbutils from databricks.sdk.runtime import dbutils as runtime_dbutils + return runtime_dbutils @@ -227,25 +170,65 @@ class WorkspaceClient: """ The WorkspaceClient is a client for the workspace-level Databricks REST API. 
""" - def __init__(self, *, host: Optional[str] = None, account_id: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, client_id: Optional[str] = None, client_secret: Optional[str] = None, token: Optional[str] = None, profile: Optional[str] = None, config_file: Optional[str] = None, azure_workspace_resource_id: Optional[str] = None, azure_client_secret: Optional[str] = None, azure_client_id: Optional[str] = None, azure_tenant_id: Optional[str] = None, azure_environment: Optional[str] = None, auth_type: Optional[str] = None, cluster_id: Optional[str] = None, google_credentials: Optional[str] = None, google_service_account: Optional[str] = None, - debug_truncate_bytes: Optional[int] = None, - debug_headers: Optional[bool] = None, - product="unknown", - product_version="0.0.0", - credentials_strategy: Optional[CredentialsStrategy] = None, - credentials_provider: Optional[CredentialsStrategy] = None, - token_audience: Optional[str] = None, - config: Optional[client.Config] = None): + + def __init__( + self, + *, + host: Optional[str] = None, + account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, + product="unknown", + product_version="0.0.0", + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + token_audience: Optional[str] = None, + config: Optional[client.Config] = None, + ): if not config: - config = client.Config(host=host, account_id=account_id, username=username, password=password, client_id=client_id, client_secret=client_secret, token=token, profile=profile, config_file=config_file, azure_workspace_resource_id=azure_workspace_resource_id, azure_client_secret=azure_client_secret, azure_client_id=azure_client_id, azure_tenant_id=azure_tenant_id, azure_environment=azure_environment, auth_type=auth_type, cluster_id=cluster_id, google_credentials=google_credentials, google_service_account=google_service_account, - credentials_strategy=credentials_strategy, - credentials_provider=credentials_provider, - debug_truncate_bytes=debug_truncate_bytes, - debug_headers=debug_headers, - product=product, - product_version=product_version, - token_audience=token_audience, - ) + config = client.Config( + host=host, + account_id=account_id, + username=username, + password=password, + client_id=client_id, + client_secret=client_secret, + token=token, + profile=profile, + config_file=config_file, + azure_workspace_resource_id=azure_workspace_resource_id, + azure_client_secret=azure_client_secret, + azure_client_id=azure_client_id, + azure_tenant_id=azure_tenant_id, + azure_environment=azure_environment, + auth_type=auth_type, + cluster_id=cluster_id, + google_credentials=google_credentials, + google_service_account=google_service_account, + credentials_strategy=credentials_strategy, + 
credentials_provider=credentials_provider, + debug_truncate_bytes=debug_truncate_bytes, + debug_headers=debug_headers, + product=product, + product_version=product_version, + token_audience=token_audience, + ) self._config = config.copy() self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) @@ -312,7 +295,9 @@ def __init__(self, *, host: Optional[str] = None, account_id: Optional[str] = No self._provider_files = pkg_marketplace.ProviderFilesAPI(self._api_client) self._provider_listings = pkg_marketplace.ProviderListingsAPI(self._api_client) self._provider_personalization_requests = pkg_marketplace.ProviderPersonalizationRequestsAPI(self._api_client) - self._provider_provider_analytics_dashboards = pkg_marketplace.ProviderProviderAnalyticsDashboardsAPI(self._api_client) + self._provider_provider_analytics_dashboards = pkg_marketplace.ProviderProviderAnalyticsDashboardsAPI( + self._api_client + ) self._provider_providers = pkg_marketplace.ProviderProvidersAPI(self._api_client) self._providers = pkg_sharing.ProvidersAPI(self._api_client) self._quality_monitor_v2 = pkg_qualitymonitorv2.QualityMonitorV2API(self._api_client) @@ -333,8 +318,12 @@ def __init__(self, *, host: Optional[str] = None, account_id: Optional[str] = No self._secrets = pkg_workspace.SecretsAPI(self._api_client) self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client) self._serving_endpoints = serving_endpoints - serving_endpoints_data_plane_token_source = DataPlaneTokenSource(self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh) - self._serving_endpoints_data_plane = pkg_serving.ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source) + serving_endpoints_data_plane_token_source = DataPlaneTokenSource( + self._config.host, self._config.oauth_token, self._config.disable_async_token_refresh + ) + self._serving_endpoints_data_plane = pkg_serving.ServingEndpointsDataPlaneAPI( + self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source + ) self._settings = pkg_settings.SettingsAPI(self._api_client) self._shares = pkg_sharing.SharesAPI(self._api_client) self._statement_execution = pkg_sql.StatementExecutionAPI(self._api_client) @@ -366,556 +355,594 @@ def api_client(self) -> client.ApiClient: @property def dbutils(self) -> dbutils.RemoteDbUtils: return self._dbutils + @property def access_control(self) -> pkg_iam.AccessControlAPI: """Rule based Access Control for Databricks Resources.""" return self._access_control - + @property def account_access_control_proxy(self) -> pkg_iam.AccountAccessControlProxyAPI: """These APIs manage access rules on resources in an account.""" return self._account_access_control_proxy - + @property def alerts(self) -> pkg_sql.AlertsAPI: """The alerts API can be used to perform CRUD operations on alerts.""" return self._alerts - + @property def alerts_legacy(self) -> pkg_sql.AlertsLegacyAPI: """The alerts API can be used to perform CRUD operations on alerts.""" return self._alerts_legacy - + @property def alerts_v2(self) -> pkg_sql.AlertsV2API: """New version of SQL Alerts.""" return self._alerts_v2 - + @property def apps(self) -> pkg_apps.AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" return self._apps - + @property def artifact_allowlists(self) -> pkg_catalog.ArtifactAllowlistsAPI: """In Databricks 
Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode.""" return self._artifact_allowlists - + @property def catalogs(self) -> pkg_catalog.CatalogsAPI: """A catalog is the first layer of Unity Catalog’s three-level namespace.""" return self._catalogs - + @property def clean_room_assets(self) -> pkg_cleanrooms.CleanRoomAssetsAPI: """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room.""" return self._clean_room_assets - + @property def clean_room_task_runs(self) -> pkg_cleanrooms.CleanRoomTaskRunsAPI: """Clean room task runs are the executions of notebooks in a clean room.""" return self._clean_room_task_runs - + @property def clean_rooms(self) -> pkg_cleanrooms.CleanRoomsAPI: """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data.""" return self._clean_rooms - + @property def cluster_policies(self) -> pkg_compute.ClusterPoliciesAPI: """You can use cluster policies to control users' ability to configure clusters based on a set of rules.""" return self._cluster_policies - + @property def clusters(self) -> ClustersExt: """The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.""" return self._clusters - + @property def command_execution(self) -> pkg_compute.CommandExecutionAPI: """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.""" return self._command_execution - + @property def connections(self) -> pkg_catalog.ConnectionsAPI: """Connections allow for creating a connection to an external data source.""" return self._connections - + @property def consumer_fulfillments(self) -> pkg_marketplace.ConsumerFulfillmentsAPI: """Fulfillments are entities that allow consumers to preview installations.""" return self._consumer_fulfillments - + @property def consumer_installations(self) -> pkg_marketplace.ConsumerInstallationsAPI: """Installations are entities that allow consumers to interact with Databricks Marketplace listings.""" return self._consumer_installations - + @property def consumer_listings(self) -> pkg_marketplace.ConsumerListingsAPI: """Listings are the core entities in the Marketplace.""" return self._consumer_listings - + @property def consumer_personalization_requests(self) -> pkg_marketplace.ConsumerPersonalizationRequestsAPI: """Personalization Requests allow customers to interact with the individualized Marketplace listing flow.""" return self._consumer_personalization_requests - + @property def consumer_providers(self) -> pkg_marketplace.ConsumerProvidersAPI: """Providers are the entities that publish listings to the Marketplace.""" return self._consumer_providers - + @property def credentials(self) -> pkg_catalog.CredentialsAPI: """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant.""" return self._credentials - + @property def credentials_manager(self) -> pkg_settings.CredentialsManagerAPI: """Credentials manager interacts with Identity Providers to perform token exchanges using stored credentials and refresh tokens.""" return self._credentials_manager - + @property def current_user(self) -> pkg_iam.CurrentUserAPI: """This API allows retrieving information about the currently
authenticated user or service principal.""" return self._current_user - + @property def custom_llms(self) -> pkg_aibuilder.CustomLlmsAPI: """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" return self._custom_llms - + @property def dashboard_widgets(self) -> pkg_sql.DashboardWidgetsAPI: """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace.""" return self._dashboard_widgets - + @property def dashboards(self) -> pkg_sql.DashboardsAPI: """In general, there is little need to modify dashboards using the API.""" return self._dashboards - + @property def data_sources(self) -> pkg_sql.DataSourcesAPI: """This API is provided to assist you in making new query objects.""" return self._data_sources - + @property def database(self) -> pkg_database.DatabaseAPI: """Database Instances provide access to a database via REST API or direct SQL.""" return self._database - + @property def dbfs(self) -> DbfsExt: """DBFS API makes it simple to interact with various data sources without having to include a user's credentials every time to read a file.""" return self._dbfs - + @property def dbsql_permissions(self) -> pkg_sql.DbsqlPermissionsAPI: """The SQL Permissions API is similar to the endpoints of the :method:permissions/set.""" return self._dbsql_permissions - + @property def experiments(self) -> pkg_ml.ExperimentsAPI: """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.""" return self._experiments - + @property def external_locations(self) -> pkg_catalog.ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.""" return self._external_locations - + @property def files(self) -> pkg_files.FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI.""" return self._files - + @property def functions(self) -> pkg_catalog.FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" return self._functions - + @property def genie(self) -> pkg_dashboards.GenieAPI: """Genie provides a no-code experience for business users, powered by AI/BI.""" return self._genie - + @property def git_credentials(self) -> pkg_workspace.GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user.""" return self._git_credentials - + @property def global_init_scripts(self) -> pkg_compute.GlobalInitScriptsAPI: """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.""" return self._global_init_scripts - + @property def grants(self) -> pkg_catalog.GrantsAPI: """In Unity Catalog, data is secure by default.""" return self._grants - + @property def groups(self) -> pkg_iam.GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.""" return self._groups - + @property def instance_pools(self) -> pkg_compute.InstancePoolsAPI: """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances, which reduces cluster start and auto-scaling times.""" return self._instance_pools - + @property def instance_profiles(self) -> pkg_compute.InstanceProfilesAPI: """The Instance Profiles API allows admins to add,
list, and remove instance profiles that users can launch clusters with.""" return self._instance_profiles - + @property def ip_access_lists(self) -> pkg_settings.IpAccessListsAPI: """IP Access List enables admins to configure IP access lists.""" return self._ip_access_lists - + @property def jobs(self) -> JobsExt: """The Jobs API allows you to create, edit, and delete jobs.""" return self._jobs - + @property def lakeview(self) -> pkg_dashboards.LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards.""" return self._lakeview - + @property def lakeview_embedded(self) -> pkg_dashboards.LakeviewEmbeddedAPI: """Token-based Lakeview APIs for embedding dashboards in external applications.""" return self._lakeview_embedded - + @property def libraries(self) -> pkg_compute.LibrariesAPI: """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.""" return self._libraries - + @property def metastores(self) -> pkg_catalog.MetastoresAPI: """A metastore is the top-level container of objects in Unity Catalog.""" return self._metastores - + @property def model_registry(self) -> pkg_ml.ModelRegistryAPI: """Note: This API reference documents APIs for the Workspace Model Registry.""" return self._model_registry - + @property def model_versions(self) -> pkg_catalog.ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._model_versions - + @property def notification_destinations(self) -> pkg_settings.NotificationDestinationsAPI: """The notification destinations API lets you programmatically manage a workspace's notification destinations.""" return self._notification_destinations - + @property def online_tables(self) -> pkg_catalog.OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" return self._online_tables - + @property def permission_migration(self) -> pkg_iam.PermissionMigrationAPI: """APIs for migrating ACL permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx.""" return self._permission_migration - + @property def permissions(self) -> pkg_iam.PermissionsAPI: """Permissions API are used to create, read, write, edit, update and manage access for various users on different objects and endpoints.""" return self._permissions - + @property def pipelines(self) -> pkg_pipelines.PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines - + @property def policy_compliance_for_clusters(self) -> pkg_compute.PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" return self._policy_compliance_for_clusters - + @property def policy_compliance_for_jobs(self) -> pkg_jobs.PolicyComplianceForJobsAPI: """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.""" return self._policy_compliance_for_jobs - + @property def policy_families(self) -> pkg_compute.PolicyFamiliesAPI: """View available policy families.""" return self._policy_families - + @property def provider_exchange_filters(self) -> pkg_marketplace.ProviderExchangeFiltersAPI: """Marketplace exchange filters curate which groups can access an exchange.""" return self._provider_exchange_filters - + @property def provider_exchanges(self) -> pkg_marketplace.ProviderExchangesAPI: """Marketplace exchanges allow
providers to share their listings with a curated set of customers.""" return self._provider_exchanges - + @property def provider_files(self) -> pkg_marketplace.ProviderFilesAPI: """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons.""" return self._provider_files - + @property def provider_listings(self) -> pkg_marketplace.ProviderListingsAPI: """Listings are the core entities in the Marketplace.""" return self._provider_listings - + @property def provider_personalization_requests(self) -> pkg_marketplace.ProviderPersonalizationRequestsAPI: """Personalization requests are an alternative to instantly available listings.""" return self._provider_personalization_requests - + @property def provider_provider_analytics_dashboards(self) -> pkg_marketplace.ProviderProviderAnalyticsDashboardsAPI: """Manage templated analytics solution for providers.""" return self._provider_provider_analytics_dashboards - + @property def provider_providers(self) -> pkg_marketplace.ProviderProvidersAPI: """Providers are entities that manage assets in Marketplace.""" return self._provider_providers - + @property def providers(self) -> pkg_sharing.ProvidersAPI: """A data provider is an object representing the organization in the real world who shares the data.""" return self._providers - + @property def quality_monitor_v2(self) -> pkg_qualitymonitorv2.QualityMonitorV2API: """Manage data quality of UC objects (currently support `schema`).""" return self._quality_monitor_v2 - + @property def quality_monitors(self) -> pkg_catalog.QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time.""" return self._quality_monitors - + @property def queries(self) -> pkg_sql.QueriesAPI: """The queries API can be used to perform CRUD operations on queries.""" return self._queries - + @property def queries_legacy(self) -> pkg_sql.QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions.""" return self._queries_legacy - + @property def query_history(self) -> pkg_sql.QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" return self._query_history - + @property def query_visualizations(self) -> pkg_sql.QueryVisualizationsAPI: """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace.""" return self._query_visualizations - + @property def query_visualizations_legacy(self) -> pkg_sql.QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of visualizations from existing queries within the Databricks Workspace.""" return self._query_visualizations_legacy - + @property def recipient_activation(self) -> pkg_sharing.RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`.""" return self._recipient_activation - + @property def recipient_federation_policies(self) -> pkg_sharing.RecipientFederationPoliciesAPI: """The Recipient Federation Policies APIs are only applicable in the open sharing model where the recipient object has the authentication type of `OIDC_RECIPIENT`, enabling data sharing from Databricks to non-Databricks recipients.""" return self._recipient_federation_policies - + @property def recipients(self) -> pkg_sharing.RecipientsAPI: """A recipient is an object you create using
:method:recipients/create to represent an organization which you want to allow to access shares.""" return self._recipients - + @property def redash_config(self) -> pkg_sql.RedashConfigAPI: """Redash V2 service for workspace configurations (internal).""" return self._redash_config - + @property def registered_models(self) -> pkg_catalog.RegisteredModelsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._registered_models - + @property def repos(self) -> pkg_workspace.ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos - + @property def resource_quotas(self) -> pkg_catalog.ResourceQuotasAPI: """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.""" return self._resource_quotas - + @property def schemas(self) -> pkg_catalog.SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.""" return self._schemas - + @property def secrets(self) -> pkg_workspace.SecretsAPI: """The Secrets API allows you to manage secrets, secret scopes, and access permissions.""" return self._secrets - + @property def service_principals(self) -> pkg_iam.ServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" return self._service_principals - + @property def serving_endpoints(self) -> ServingEndpointsExt: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints.""" return self._serving_endpoints - + @property def serving_endpoints_data_plane(self) -> pkg_serving.ServingEndpointsDataPlaneAPI: """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" return self._serving_endpoints_data_plane - + @property def settings(self) -> pkg_settings.SettingsAPI: """Workspace Settings API allows users to manage settings at the workspace level.""" return self._settings - + @property def shares(self) -> pkg_sharing.SharesAPI: """A share is a container instantiated with :method:shares/create.""" return self._shares - + @property def statement_execution(self) -> pkg_sql.StatementExecutionAPI: """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result.""" return self._statement_execution - + @property def storage_credentials(self) -> pkg_catalog.StorageCredentialsAPI: """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.""" return self._storage_credentials - + @property def system_schemas(self) -> pkg_catalog.SystemSchemasAPI: """A system schema is a schema that lives within the system catalog.""" return self._system_schemas - + @property def table_constraints(self) -> pkg_catalog.TableConstraintsAPI: """Primary key and foreign key constraints encode relationships between fields in tables.""" return self._table_constraints - + @property def tables(self) -> pkg_catalog.TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace.""" return self._tables - + @property def temporary_table_credentials(self) -> pkg_catalog.TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locations where table data is stored in Databricks.""" return self._temporary_table_credentials - + @property def token_management(self)
-> pkg_settings.TokenManagementAPI: """Enables administrators to get all tokens and delete tokens for other users.""" return self._token_management - + @property def tokens(self) -> pkg_settings.TokensAPI: """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.""" return self._tokens - + @property def users(self) -> pkg_iam.UsersAPI: """User identities recognized by Databricks and represented by email addresses.""" return self._users - + @property def vector_search_endpoints(self) -> pkg_vectorsearch.VectorSearchEndpointsAPI: """**Endpoint**: Represents the compute resources to host vector search indexes.""" return self._vector_search_endpoints - + @property def vector_search_indexes(self) -> pkg_vectorsearch.VectorSearchIndexesAPI: """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries.""" return self._vector_search_indexes - + @property def volumes(self) -> pkg_catalog.VolumesAPI: """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.""" return self._volumes - + @property def warehouses(self) -> pkg_sql.WarehousesAPI: """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.""" return self._warehouses - + @property def workspace(self) -> WorkspaceExt: """The Workspace API allows you to list, import, export, and delete notebooks and folders.""" return self._workspace - + @property def workspace_bindings(self) -> pkg_catalog.WorkspaceBindingsAPI: """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__.""" return self._workspace_bindings - + @property def workspace_conf(self) -> pkg_settings.WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" return self._workspace_conf - + @property def forecasting(self) -> pkg_ml.ForecastingAPI: """The Forecasting API allows you to create and get serverless forecasting experiments.""" return self._forecasting - def get_workspace_id(self) -> int: """Get the workspace ID of the workspace that this client is connected to.""" - response = self._api_client.do("GET", - "/api/2.0/preview/scim/v2/Me", - response_headers=['X-Databricks-Org-Id']) + response = self._api_client.do("GET", "/api/2.0/preview/scim/v2/Me", response_headers=["X-Databricks-Org-Id"]) return int(response["X-Databricks-Org-Id"]) def __repr__(self): return f"WorkspaceClient(host='{self._config.host}', auth_type='{self._config.auth_type}', ...)" + class AccountClient: """ The AccountClient is a client for the account-level Databricks REST API. 
""" - def __init__(self, *, host: Optional[str] = None, account_id: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, client_id: Optional[str] = None, client_secret: Optional[str] = None, token: Optional[str] = None, profile: Optional[str] = None, config_file: Optional[str] = None, azure_workspace_resource_id: Optional[str] = None, azure_client_secret: Optional[str] = None, azure_client_id: Optional[str] = None, azure_tenant_id: Optional[str] = None, azure_environment: Optional[str] = None, auth_type: Optional[str] = None, cluster_id: Optional[str] = None, google_credentials: Optional[str] = None, google_service_account: Optional[str] = None, - debug_truncate_bytes: Optional[int] = None, - debug_headers: Optional[bool] = None, - product="unknown", - product_version="0.0.0", - credentials_strategy: Optional[CredentialsStrategy] = None, - credentials_provider: Optional[CredentialsStrategy] = None, - token_audience: Optional[str] = None, - config: Optional[client.Config] = None): + def __init__( + self, + *, + host: Optional[str] = None, + account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, + product="unknown", + product_version="0.0.0", + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + token_audience: Optional[str] = None, + config: Optional[client.Config] = None, + ): if not config: - config = client.Config(host=host, account_id=account_id, username=username, password=password, client_id=client_id, client_secret=client_secret, token=token, profile=profile, config_file=config_file, azure_workspace_resource_id=azure_workspace_resource_id, azure_client_secret=azure_client_secret, azure_client_id=azure_client_id, azure_tenant_id=azure_tenant_id, azure_environment=azure_environment, auth_type=auth_type, cluster_id=cluster_id, google_credentials=google_credentials, google_service_account=google_service_account, - credentials_strategy=credentials_strategy, - credentials_provider=credentials_provider, - debug_truncate_bytes=debug_truncate_bytes, - debug_headers=debug_headers, - product=product, - product_version=product_version, - token_audience=token_audience, - ) + config = client.Config( + host=host, + account_id=account_id, + username=username, + password=password, + client_id=client_id, + client_secret=client_secret, + token=token, + profile=profile, + config_file=config_file, + azure_workspace_resource_id=azure_workspace_resource_id, + azure_client_secret=azure_client_secret, + azure_client_id=azure_client_id, + azure_tenant_id=azure_tenant_id, + azure_environment=azure_environment, + auth_type=auth_type, + cluster_id=cluster_id, + google_credentials=google_credentials, + google_service_account=google_service_account, + credentials_strategy=credentials_strategy, + 
credentials_provider=credentials_provider, + debug_truncate_bytes=debug_truncate_bytes, + debug_headers=debug_headers, + product=product, + product_version=product_version, + token_audience=token_audience, + ) self._config = config.copy() self._api_client = client.ApiClient(self._config) self._access_control = pkg_iam.AccountAccessControlAPI(self._api_client) @@ -957,161 +984,161 @@ def config(self) -> client.Config: @property def api_client(self) -> client.ApiClient: return self._api_client + @property def access_control(self) -> pkg_iam.AccountAccessControlAPI: """These APIs manage access rules on resources in an account.""" return self._access_control - + @property def billable_usage(self) -> pkg_billing.BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range.""" return self._billable_usage - + @property def budget_policy(self) -> pkg_billing.BudgetPolicyAPI: """A service that serves the REST API for budget policies.""" return self._budget_policy - + @property def credentials(self) -> pkg_provisioning.CredentialsAPI: """These APIs manage credential configurations for this workspace.""" return self._credentials - + @property def custom_app_integration(self) -> pkg_oauth2.CustomAppIntegrationAPI: """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" return self._custom_app_integration - + @property def encryption_keys(self) -> pkg_provisioning.EncryptionKeysAPI: """These APIs manage encryption key configurations for this workspace (optional).""" return self._encryption_keys - + @property def federation_policy(self) -> pkg_oauth2.AccountFederationPolicyAPI: """These APIs manage account federation policies.""" return self._federation_policy - + @property def groups(self) -> pkg_iam.AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.""" return self._groups - + @property def ip_access_lists(self) -> pkg_settings.AccountIpAccessListsAPI: """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.""" return self._ip_access_lists - + @property def log_delivery(self) -> pkg_billing.LogDeliveryAPI: """These APIs manage Log delivery configurations for this account.""" return self._log_delivery - + @property def metastore_assignments(self) -> pkg_catalog.AccountMetastoreAssignmentsAPI: """These APIs manage metastore assignments to a workspace.""" return self._metastore_assignments - + @property def metastores(self) -> pkg_catalog.AccountMetastoresAPI: """These APIs manage Unity Catalog metastores for an account.""" return self._metastores - + @property def network_connectivity(self) -> pkg_settings.NetworkConnectivityAPI: """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.""" return self._network_connectivity - + @property def network_policies(self) -> pkg_settings.NetworkPoliciesAPI: """These APIs manage network policies for this account.""" return self._network_policies - + @property def networks(self) -> pkg_provisioning.NetworksAPI: """These APIs manage network configurations for customer-managed VPCs (optional).""" return self._networks - + @property def o_auth_published_apps(self) -> pkg_oauth2.OAuthPublishedAppsAPI: """These APIs enable administrators to view all the available
published OAuth applications in Databricks.""" return self._o_auth_published_apps - + @property def private_access(self) -> pkg_provisioning.PrivateAccessAPI: """These APIs manage private access settings for this account.""" return self._private_access - + @property def published_app_integration(self) -> pkg_oauth2.PublishedAppIntegrationAPI: """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" return self._published_app_integration - + @property def service_principal_federation_policy(self) -> pkg_oauth2.ServicePrincipalFederationPolicyAPI: """These APIs manage service principal federation policies.""" return self._service_principal_federation_policy - + @property def service_principal_secrets(self) -> pkg_oauth2.ServicePrincipalSecretsAPI: """These APIs enable administrators to manage service principal secrets.""" return self._service_principal_secrets - + @property def service_principals(self) -> pkg_iam.AccountServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" return self._service_principals - + @property def settings(self) -> pkg_settings.AccountSettingsAPI: """Accounts Settings API allows users to manage settings at the account level.""" return self._settings - + @property def storage(self) -> pkg_provisioning.StorageAPI: """These APIs manage storage configurations for this workspace.""" return self._storage - + @property def storage_credentials(self) -> pkg_catalog.AccountStorageCredentialsAPI: """These APIs manage storage credentials for a particular metastore.""" return self._storage_credentials - + @property def usage_dashboards(self) -> pkg_billing.UsageDashboardsAPI: """These APIs manage usage dashboards for this account.""" return self._usage_dashboards - + @property def users(self) -> pkg_iam.AccountUsersAPI: """User identities recognized by Databricks and represented by email addresses.""" return self._users - + @property def vpc_endpoints(self) -> pkg_provisioning.VpcEndpointsAPI: """These APIs manage VPC endpoint configurations for this account.""" return self._vpc_endpoints - + @property def workspace_assignment(self) -> pkg_iam.WorkspaceAssignmentAPI: """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.""" return self._workspace_assignment - + @property def workspace_network_configuration(self) -> pkg_settings.WorkspaceNetworkConfigurationAPI: """These APIs allow configuration of network settings for Databricks workspaces by selecting which network policy to associate with the workspace.""" return self._workspace_network_configuration - + @property def workspaces(self) -> pkg_provisioning.WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces - + @property def budgets(self) -> pkg_billing.BudgetsAPI: """These APIs manage budget configurations for this account.""" return self._budgets - def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: """Constructs a ``WorkspaceClient`` for the given workspace. 
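To make the account-to-workspace flow above concrete, here is a minimal usage sketch (illustrative, not part of this patch): it assumes an account-level profile named "ACCOUNT" exists in ~/.databrickscfg, and it uses the ``workspaces`` provisioning API and ``get_workspace_client`` documented above.

    from databricks.sdk import AccountClient

    a = AccountClient(profile="ACCOUNT")  # "ACCOUNT" is an assumed profile name
    for ws in a.workspaces.list():  # enumerate workspaces in the account
        w = a.get_workspace_client(ws)  # derive a WorkspaceClient with the same credentials
        print(ws.workspace_name, w.current_user.me().user_name)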
@@ -1143,4 +1170,4 @@ def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: return WorkspaceClient(config=config) def __repr__(self): - return f"AccountClient(account_id='{self._config.account_id}', auth_type='{self._config.auth_type}', ...)" \ No newline at end of file + return f"AccountClient(account_id='{self._config.account_id}', auth_type='{self._config.auth_type}', ...)" diff --git a/databricks/sdk/errors/overrides.py b/databricks/sdk/errors/overrides.py index 714e1033b..08311fa9f 100644 --- a/databricks/sdk/errors/overrides.py +++ b/databricks/sdk/errors/overrides.py @@ -1,34 +1,36 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -from .base import _ErrorOverride -from .platform import * import re +from .base import _ErrorOverride +from .platform import ResourceDoesNotExist _ALL_OVERRIDES = [ _ErrorOverride( debug_name="Clusters InvalidParameterValue=>ResourceDoesNotExist", - path_regex=re.compile(r'^/api/2\.\d/clusters/get'), + path_regex=re.compile(r"^/api/2\.\d/clusters/get"), verb="GET", - status_code_matcher=re.compile(r'^400$'), - error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'), - message_matcher=re.compile(r'Cluster .* does not exist'), + status_code_matcher=re.compile(r"^400$"), + error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"), + message_matcher=re.compile(r"Cluster .* does not exist"), custom_error=ResourceDoesNotExist, - ),_ErrorOverride( + ), + _ErrorOverride( debug_name="Jobs InvalidParameterValue=>ResourceDoesNotExist", - path_regex=re.compile(r'^/api/2\.\d/jobs/get'), + path_regex=re.compile(r"^/api/2\.\d/jobs/get"), verb="GET", - status_code_matcher=re.compile(r'^400$'), - error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'), - message_matcher=re.compile(r'Job .* does not exist'), + status_code_matcher=re.compile(r"^400$"), + error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"), + message_matcher=re.compile(r"Job .* does not exist"), custom_error=ResourceDoesNotExist, - ),_ErrorOverride( + ), + _ErrorOverride( debug_name="Job Runs InvalidParameterValue=>ResourceDoesNotExist", - path_regex=re.compile(r'^/api/2\.\d/jobs/runs/get'), + path_regex=re.compile(r"^/api/2\.\d/jobs/runs/get"), verb="GET", - status_code_matcher=re.compile(r'^400$'), - error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'), - message_matcher=re.compile(r'(Run .* does not exist|Run: .* in job: .* doesn\'t exist)'), + status_code_matcher=re.compile(r"^400$"), + error_code_matcher=re.compile(r"INVALID_PARAMETER_VALUE"), + message_matcher=re.compile(r"(Run .* does not exist|Run: .* in job: .* doesn\'t exist)"), custom_error=ResourceDoesNotExist, ), -] \ No newline at end of file +] diff --git a/databricks/sdk/errors/platform.py b/databricks/sdk/errors/platform.py index c2ee30b8e..a5b3cb111 100755 --- a/databricks/sdk/errors/platform.py +++ b/databricks/sdk/errors/platform.py @@ -4,93 +4,113 @@ class BadRequest(DatabricksError): - """the request is invalid""" + """the request is invalid""" + class Unauthenticated(DatabricksError): - """the request does not have valid authentication (AuthN) credentials for the operation""" + """the request does not have valid authentication (AuthN) credentials for the operation""" + class PermissionDenied(DatabricksError): - """the caller does not have permission to execute the specified operation""" + """the caller does not have permission to execute the specified operation""" + class NotFound(DatabricksError): - """the operation was performed on a resource that does not exist""" + 
"""the operation was performed on a resource that does not exist""" + class ResourceConflict(DatabricksError): - """maps to all HTTP 409 (Conflict) responses""" + """maps to all HTTP 409 (Conflict) responses""" + class TooManyRequests(DatabricksError): - """maps to HTTP code: 429 Too Many Requests""" + """maps to HTTP code: 429 Too Many Requests""" + class Cancelled(DatabricksError): - """the operation was explicitly canceled by the caller""" + """the operation was explicitly canceled by the caller""" + class InternalError(DatabricksError): - """some invariants expected by the underlying system have been broken""" + """some invariants expected by the underlying system have been broken""" + class NotImplemented(DatabricksError): - """the operation is not implemented or is not supported/enabled in this service""" + """the operation is not implemented or is not supported/enabled in this service""" + class TemporarilyUnavailable(DatabricksError): - """the service is currently unavailable""" + """the service is currently unavailable""" + class DeadlineExceeded(DatabricksError): - """the deadline expired before the operation could complete""" + """the deadline expired before the operation could complete""" + class InvalidState(BadRequest): - """unexpected state""" + """unexpected state""" + class InvalidParameterValue(BadRequest): - """supplied value for a parameter was invalid""" + """supplied value for a parameter was invalid""" + class ResourceDoesNotExist(NotFound): - """operation was performed on a resource that does not exist""" + """operation was performed on a resource that does not exist""" + class Aborted(ResourceConflict): - """the operation was aborted, typically due to a concurrency issue such as a sequencer check + """the operation was aborted, typically due to a concurrency issue such as a sequencer check failure""" + class AlreadyExists(ResourceConflict): - """operation was rejected due a conflict with an existing resource""" + """operation was rejected due a conflict with an existing resource""" + class ResourceAlreadyExists(ResourceConflict): - """operation was rejected due a conflict with an existing resource""" + """operation was rejected due a conflict with an existing resource""" + class ResourceExhausted(TooManyRequests): - """operation is rejected due to per-user rate limiting""" + """operation is rejected due to per-user rate limiting""" + class RequestLimitExceeded(TooManyRequests): - """cluster request was rejected because it would exceed a resource limit""" + """cluster request was rejected because it would exceed a resource limit""" + class Unknown(InternalError): - """this error is used as a fallback if the platform-side mapping is missing some reason""" + """this error is used as a fallback if the platform-side mapping is missing some reason""" + class DataLoss(InternalError): - """unrecoverable data loss or corruption""" - - -STATUS_CODE_MAPPING = { - 400: BadRequest, - 401: Unauthenticated, - 403: PermissionDenied, - 404: NotFound, - 409: ResourceConflict, - 429: TooManyRequests, - 499: Cancelled, - 500: InternalError, - 501: NotImplemented, - 503: TemporarilyUnavailable, - 504: DeadlineExceeded, + """unrecoverable data loss or corruption""" + + +STATUS_CODE_MAPPING = { + 400: BadRequest, + 401: Unauthenticated, + 403: PermissionDenied, + 404: NotFound, + 409: ResourceConflict, + 429: TooManyRequests, + 499: Cancelled, + 500: InternalError, + 501: NotImplemented, + 503: TemporarilyUnavailable, + 504: DeadlineExceeded, } -ERROR_CODE_MAPPING = { - 'INVALID_STATE': 
InvalidState, - 'INVALID_PARAMETER_VALUE': InvalidParameterValue, - 'RESOURCE_DOES_NOT_EXIST': ResourceDoesNotExist, - 'ABORTED': Aborted, - 'ALREADY_EXISTS': AlreadyExists, - 'RESOURCE_ALREADY_EXISTS': ResourceAlreadyExists, - 'RESOURCE_EXHAUSTED': ResourceExhausted, - 'REQUEST_LIMIT_EXCEEDED': RequestLimitExceeded, - 'UNKNOWN': Unknown, - 'DATA_LOSS': DataLoss, -} \ No newline at end of file +ERROR_CODE_MAPPING = { + "INVALID_STATE": InvalidState, + "INVALID_PARAMETER_VALUE": InvalidParameterValue, + "RESOURCE_DOES_NOT_EXIST": ResourceDoesNotExist, + "ABORTED": Aborted, + "ALREADY_EXISTS": AlreadyExists, + "RESOURCE_ALREADY_EXISTS": ResourceAlreadyExists, + "RESOURCE_EXHAUSTED": ResourceExhausted, + "REQUEST_LIMIT_EXCEEDED": RequestLimitExceeded, + "UNKNOWN": Unknown, + "DATA_LOSS": DataLoss, +} diff --git a/databricks/sdk/service/aibuilder.py b/databricks/sdk/service/aibuilder.py index 80e5ea033..3b37a2070 100755 --- a/databricks/sdk/service/aibuilder.py +++ b/databricks/sdk/service/aibuilder.py @@ -1,32 +1,23 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Dict, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ._internal import _enum, _from_dict, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class CancelCustomLlmOptimizationRunRequest: id: Optional[str] = None - - - @dataclass @@ -45,164 +36,194 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelResponse: """Deserializes the CancelResponse from a dictionary.""" return cls() - - @dataclass class CustomLlm: name: str """Name of the custom LLM""" - + instructions: str """Instructions for the custom LLM to follow""" - + optimization_state: State """If optimization is kicked off, tracks the state of the custom LLM""" - + agent_artifact_path: Optional[str] = None - + creation_time: Optional[str] = None """Creation timestamp of the custom LLM""" - + creator: Optional[str] = None """Creator of the custom LLM""" - + datasets: Optional[List[Dataset]] = None """Datasets used for training and evaluating the model, not for inference""" - + endpoint_name: Optional[str] = None """Name of the endpoint that will be used to serve the custom LLM""" - + guidelines: Optional[List[str]] = None """Guidelines for the custom LLM to adhere to""" - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CustomLlm into a dictionary suitable for use as a JSON request body.""" body = {} - if self.agent_artifact_path is not None: body['agent_artifact_path'] = self.agent_artifact_path - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.creator is not None: body['creator'] = self.creator - if self.datasets: body['datasets'] = [v.as_dict() for v in self.datasets] - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.guidelines: body['guidelines'] = [v for v in self.guidelines] - if self.id is 
not None: body['id'] = self.id - if self.instructions is not None: body['instructions'] = self.instructions - if self.name is not None: body['name'] = self.name - if self.optimization_state is not None: body['optimization_state'] = self.optimization_state.value + if self.agent_artifact_path is not None: + body["agent_artifact_path"] = self.agent_artifact_path + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.creator is not None: + body["creator"] = self.creator + if self.datasets: + body["datasets"] = [v.as_dict() for v in self.datasets] + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.guidelines: + body["guidelines"] = [v for v in self.guidelines] + if self.id is not None: + body["id"] = self.id + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = self.name + if self.optimization_state is not None: + body["optimization_state"] = self.optimization_state.value return body def as_shallow_dict(self) -> dict: """Serializes the CustomLlm into a shallow dictionary of its immediate attributes.""" body = {} - if self.agent_artifact_path is not None: body['agent_artifact_path'] = self.agent_artifact_path - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.creator is not None: body['creator'] = self.creator - if self.datasets: body['datasets'] = self.datasets - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.guidelines: body['guidelines'] = self.guidelines - if self.id is not None: body['id'] = self.id - if self.instructions is not None: body['instructions'] = self.instructions - if self.name is not None: body['name'] = self.name - if self.optimization_state is not None: body['optimization_state'] = self.optimization_state + if self.agent_artifact_path is not None: + body["agent_artifact_path"] = self.agent_artifact_path + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.creator is not None: + body["creator"] = self.creator + if self.datasets: + body["datasets"] = self.datasets + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.guidelines: + body["guidelines"] = self.guidelines + if self.id is not None: + body["id"] = self.id + if self.instructions is not None: + body["instructions"] = self.instructions + if self.name is not None: + body["name"] = self.name + if self.optimization_state is not None: + body["optimization_state"] = self.optimization_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomLlm: """Deserializes the CustomLlm from a dictionary.""" - return cls(agent_artifact_path=d.get('agent_artifact_path', None), creation_time=d.get('creation_time', None), creator=d.get('creator', None), datasets=_repeated_dict(d, 'datasets', Dataset), endpoint_name=d.get('endpoint_name', None), guidelines=d.get('guidelines', None), id=d.get('id', None), instructions=d.get('instructions', None), name=d.get('name', None), optimization_state=_enum(d, 'optimization_state', State)) - - + return cls( + agent_artifact_path=d.get("agent_artifact_path", None), + creation_time=d.get("creation_time", None), + creator=d.get("creator", None), + datasets=_repeated_dict(d, "datasets", Dataset), + endpoint_name=d.get("endpoint_name", None), + guidelines=d.get("guidelines", None), + id=d.get("id", None), + instructions=d.get("instructions", None), + name=d.get("name", None), + 
optimization_state=_enum(d, "optimization_state", State), + ) @dataclass class Dataset: table: Table - + def as_dict(self) -> dict: """Serializes the Dataset into a dictionary suitable for use as a JSON request body.""" body = {} - if self.table: body['table'] = self.table.as_dict() + if self.table: + body["table"] = self.table.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Dataset into a shallow dictionary of its immediate attributes.""" body = {} - if self.table: body['table'] = self.table + if self.table: + body["table"] = self.table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Dataset: """Deserializes the Dataset from a dictionary.""" - return cls(table=_from_dict(d, 'table', Table)) - - - - - + return cls(table=_from_dict(d, "table", Table)) @dataclass class StartCustomLlmOptimizationRunRequest: id: Optional[str] = None """The Id of the tile.""" - - - class State(Enum): """States of Custom LLM optimization lifecycle.""" - - CANCELLED = 'CANCELLED' - COMPLETED = 'COMPLETED' - CREATED = 'CREATED' - FAILED = 'FAILED' - PENDING = 'PENDING' - RUNNING = 'RUNNING' + + CANCELLED = "CANCELLED" + COMPLETED = "COMPLETED" + CREATED = "CREATED" + FAILED = "FAILED" + PENDING = "PENDING" + RUNNING = "RUNNING" + @dataclass class Table: table_path: str """Full UC table path in catalog.schema.table_name format""" - + request_col: str """Name of the request column""" - + response_col: Optional[str] = None """Optional: Name of the response column if the data is labeled""" - + def as_dict(self) -> dict: """Serializes the Table into a dictionary suitable for use as a JSON request body.""" body = {} - if self.request_col is not None: body['request_col'] = self.request_col - if self.response_col is not None: body['response_col'] = self.response_col - if self.table_path is not None: body['table_path'] = self.table_path + if self.request_col is not None: + body["request_col"] = self.request_col + if self.response_col is not None: + body["response_col"] = self.response_col + if self.table_path is not None: + body["table_path"] = self.table_path return body def as_shallow_dict(self) -> dict: """Serializes the Table into a shallow dictionary of its immediate attributes.""" body = {} - if self.request_col is not None: body['request_col'] = self.request_col - if self.response_col is not None: body['response_col'] = self.response_col - if self.table_path is not None: body['table_path'] = self.table_path + if self.request_col is not None: + body["request_col"] = self.request_col + if self.response_col is not None: + body["response_col"] = self.response_col + if self.table_path is not None: + body["table_path"] = self.table_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Table: """Deserializes the Table from a dictionary.""" - return cls(request_col=d.get('request_col', None), response_col=d.get('response_col', None), table_path=d.get('table_path', None)) - - + return cls( + request_col=d.get("request_col", None), + response_col=d.get("response_col", None), + table_path=d.get("table_path", None), + ) @dataclass class UpdateCustomLlmRequest: custom_llm: CustomLlm """The CustomLlm containing the fields which should be updated.""" - + update_mask: str """The list of the CustomLlm fields to update. These should correspond to the values (or lack thereof) present in `custom_llm`. @@ -216,123 +237,99 @@ class UpdateCustomLlmRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + id: Optional[str] = None """The id of the custom llm""" - + def as_dict(self) -> dict: """Serializes the UpdateCustomLlmRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_llm: body['custom_llm'] = self.custom_llm.as_dict() - if self.id is not None: body['id'] = self.id - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.custom_llm: + body["custom_llm"] = self.custom_llm.as_dict() + if self.id is not None: + body["id"] = self.id + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCustomLlmRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_llm: body['custom_llm'] = self.custom_llm - if self.id is not None: body['id'] = self.id - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.custom_llm: + body["custom_llm"] = self.custom_llm + if self.id is not None: + body["id"] = self.id + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomLlmRequest: """Deserializes the UpdateCustomLlmRequest from a dictionary.""" - return cls(custom_llm=_from_dict(d, 'custom_llm', CustomLlm), id=d.get('id', None), update_mask=d.get('update_mask', None)) - - - - + return cls( + custom_llm=_from_dict(d, "custom_llm", CustomLlm), + id=d.get("id", None), + update_mask=d.get("update_mask", None), + ) class CustomLlmsAPI: """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" - + def __init__(self, api_client): self._api = api_client - - + def cancel(self, id: str): + """Cancel a Custom LLM Optimization Run. - + :param id: str - - - def cancel(self - , id: str - ): - """Cancel a Custom LLM Optimization Run. - - :param id: str - - """ - - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST',f'/api/2.0/custom-llms/{id}/optimize/cancel' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", f"/api/2.0/custom-llms/{id}/optimize/cancel", headers=headers) - def create(self - , id: str - ) -> CustomLlm: + def create(self, id: str) -> CustomLlm: """Start a Custom LLM Optimization Run. - + :param id: str The Id of the tile. - + :returns: :class:`CustomLlm` """ - - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/custom-llms/{id}/optimize' - - , headers=headers - ) - return CustomLlm.from_dict(res) - - - + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - def get(self - , id: str - ) -> CustomLlm: + res = self._api.do("POST", f"/api/2.0/custom-llms/{id}/optimize", headers=headers) + return CustomLlm.from_dict(res) + + def get(self, id: str) -> CustomLlm: """Get a Custom LLM. 
- + :param id: str The id of the custom llm - + :returns: :class:`CustomLlm` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/custom-llms/{id}' - - , headers=headers - ) - return CustomLlm.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def update(self - , id: str, custom_llm: CustomLlm, update_mask: str - ) -> CustomLlm: + res = self._api.do("GET", f"/api/2.0/custom-llms/{id}", headers=headers) + return CustomLlm.from_dict(res) + + def update(self, id: str, custom_llm: CustomLlm, update_mask: str) -> CustomLlm: """Update a Custom LLM. - + :param id: str The id of the custom llm :param custom_llm: :class:`CustomLlm` @@ -340,29 +337,28 @@ def update(self :param update_mask: str The list of the CustomLlm fields to update. These should correspond to the values (or lack thereof) present in `custom_llm`. - + The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`CustomLlm` """ body = {} - if custom_llm is not None: body['custom_llm'] = custom_llm.as_dict() - if update_mask is not None: body['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/custom-llms/{id}', body=body - - , headers=headers - ) + if custom_llm is not None: + body["custom_llm"] = custom_llm.as_dict() + if update_mask is not None: + body["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/custom-llms/{id}", body=body, headers=headers) return CustomLlm.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index 1b1e77629..e0ca7d9a4 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -1,1507 +1,1568 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Callable, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class App: name: str """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. 
It must be unique within the workspace.""" - + active_deployment: Optional[AppDeployment] = None """The active deployment of the app. A deployment is considered active when it has been deployed to the app compute.""" - + app_status: Optional[ApplicationStatus] = None - + budget_policy_id: Optional[str] = None - + compute_status: Optional[ComputeStatus] = None - + create_time: Optional[str] = None """The creation time of the app. Formatted timestamp in ISO 8601.""" - + creator: Optional[str] = None """The email of the user that created the app.""" - + default_source_code_path: Optional[str] = None """The default workspace file system path of the source code from which app deployments are created. This field tracks the workspace source code path of the last active deployment.""" - + description: Optional[str] = None """The description of the app.""" - + effective_budget_policy_id: Optional[str] = None - + effective_user_api_scopes: Optional[List[str]] = None """The effective api scopes granted to the user access token.""" - + id: Optional[str] = None """The unique identifier of the app.""" - + oauth2_app_client_id: Optional[str] = None - + oauth2_app_integration_id: Optional[str] = None - + pending_deployment: Optional[AppDeployment] = None """The pending deployment of the app. A deployment is considered pending when it is being prepared for deployment to the app compute.""" - + resources: Optional[List[AppResource]] = None """Resources for the app.""" - + service_principal_client_id: Optional[str] = None - + service_principal_id: Optional[int] = None - + service_principal_name: Optional[str] = None - + update_time: Optional[str] = None """The update time of the app. Formatted timestamp in ISO 8601.""" - + updater: Optional[str] = None """The email of the user that last updated the app.""" - + url: Optional[str] = None """The URL of the app once it is deployed.""" - + user_api_scopes: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the App into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict() - if self.app_status: body['app_status'] = self.app_status.as_dict() - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.compute_status: body['compute_status'] = self.compute_status.as_dict() - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.default_source_code_path is not None: body['default_source_code_path'] = self.default_source_code_path - if self.description is not None: body['description'] = self.description - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.effective_user_api_scopes: body['effective_user_api_scopes'] = [v for v in self.effective_user_api_scopes] - if self.id is not None: body['id'] = self.id - if self.name is not None: body['name'] = self.name - if self.oauth2_app_client_id is not None: body['oauth2_app_client_id'] = self.oauth2_app_client_id - if self.oauth2_app_integration_id is not None: body['oauth2_app_integration_id'] = self.oauth2_app_integration_id - if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict() - if self.resources: body['resources'] = [v.as_dict() for v in self.resources] - if self.service_principal_client_id is not None: body['service_principal_client_id'] = 
self.service_principal_client_id - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.update_time is not None: body['update_time'] = self.update_time - if self.updater is not None: body['updater'] = self.updater - if self.url is not None: body['url'] = self.url - if self.user_api_scopes: body['user_api_scopes'] = [v for v in self.user_api_scopes] + if self.active_deployment: + body["active_deployment"] = self.active_deployment.as_dict() + if self.app_status: + body["app_status"] = self.app_status.as_dict() + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_status: + body["compute_status"] = self.compute_status.as_dict() + if self.create_time is not None: + body["create_time"] = self.create_time + if self.creator is not None: + body["creator"] = self.creator + if self.default_source_code_path is not None: + body["default_source_code_path"] = self.default_source_code_path + if self.description is not None: + body["description"] = self.description + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_user_api_scopes: + body["effective_user_api_scopes"] = [v for v in self.effective_user_api_scopes] + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + if self.oauth2_app_client_id is not None: + body["oauth2_app_client_id"] = self.oauth2_app_client_id + if self.oauth2_app_integration_id is not None: + body["oauth2_app_integration_id"] = self.oauth2_app_integration_id + if self.pending_deployment: + body["pending_deployment"] = self.pending_deployment.as_dict() + if self.resources: + body["resources"] = [v.as_dict() for v in self.resources] + if self.service_principal_client_id is not None: + body["service_principal_client_id"] = self.service_principal_client_id + if self.service_principal_id is not None: + body["service_principal_id"] = self.service_principal_id + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.update_time is not None: + body["update_time"] = self.update_time + if self.updater is not None: + body["updater"] = self.updater + if self.url is not None: + body["url"] = self.url + if self.user_api_scopes: + body["user_api_scopes"] = [v for v in self.user_api_scopes] return body def as_shallow_dict(self) -> dict: """Serializes the App into a shallow dictionary of its immediate attributes.""" body = {} - if self.active_deployment: body['active_deployment'] = self.active_deployment - if self.app_status: body['app_status'] = self.app_status - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.compute_status: body['compute_status'] = self.compute_status - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.default_source_code_path is not None: body['default_source_code_path'] = self.default_source_code_path - if self.description is not None: body['description'] = self.description - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.effective_user_api_scopes: body['effective_user_api_scopes'] = self.effective_user_api_scopes - if self.id is not None: 
body['id'] = self.id - if self.name is not None: body['name'] = self.name - if self.oauth2_app_client_id is not None: body['oauth2_app_client_id'] = self.oauth2_app_client_id - if self.oauth2_app_integration_id is not None: body['oauth2_app_integration_id'] = self.oauth2_app_integration_id - if self.pending_deployment: body['pending_deployment'] = self.pending_deployment - if self.resources: body['resources'] = self.resources - if self.service_principal_client_id is not None: body['service_principal_client_id'] = self.service_principal_client_id - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.update_time is not None: body['update_time'] = self.update_time - if self.updater is not None: body['updater'] = self.updater - if self.url is not None: body['url'] = self.url - if self.user_api_scopes: body['user_api_scopes'] = self.user_api_scopes + if self.active_deployment: + body["active_deployment"] = self.active_deployment + if self.app_status: + body["app_status"] = self.app_status + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.compute_status: + body["compute_status"] = self.compute_status + if self.create_time is not None: + body["create_time"] = self.create_time + if self.creator is not None: + body["creator"] = self.creator + if self.default_source_code_path is not None: + body["default_source_code_path"] = self.default_source_code_path + if self.description is not None: + body["description"] = self.description + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.effective_user_api_scopes: + body["effective_user_api_scopes"] = self.effective_user_api_scopes + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + if self.oauth2_app_client_id is not None: + body["oauth2_app_client_id"] = self.oauth2_app_client_id + if self.oauth2_app_integration_id is not None: + body["oauth2_app_integration_id"] = self.oauth2_app_integration_id + if self.pending_deployment: + body["pending_deployment"] = self.pending_deployment + if self.resources: + body["resources"] = self.resources + if self.service_principal_client_id is not None: + body["service_principal_client_id"] = self.service_principal_client_id + if self.service_principal_id is not None: + body["service_principal_id"] = self.service_principal_id + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.update_time is not None: + body["update_time"] = self.update_time + if self.updater is not None: + body["updater"] = self.updater + if self.url is not None: + body["url"] = self.url + if self.user_api_scopes: + body["user_api_scopes"] = self.user_api_scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> App: """Deserializes the App from a dictionary.""" - return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment), app_status=_from_dict(d, 'app_status', ApplicationStatus), budget_policy_id=d.get('budget_policy_id', None), compute_status=_from_dict(d, 'compute_status', ComputeStatus), create_time=d.get('create_time', None), creator=d.get('creator', None), default_source_code_path=d.get('default_source_code_path', None), description=d.get('description', None), 
effective_budget_policy_id=d.get('effective_budget_policy_id', None), effective_user_api_scopes=d.get('effective_user_api_scopes', None), id=d.get('id', None), name=d.get('name', None), oauth2_app_client_id=d.get('oauth2_app_client_id', None), oauth2_app_integration_id=d.get('oauth2_app_integration_id', None), pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment), resources=_repeated_dict(d, 'resources', AppResource), service_principal_client_id=d.get('service_principal_client_id', None), service_principal_id=d.get('service_principal_id', None), service_principal_name=d.get('service_principal_name', None), update_time=d.get('update_time', None), updater=d.get('updater', None), url=d.get('url', None), user_api_scopes=d.get('user_api_scopes', None)) - - + return cls( + active_deployment=_from_dict(d, "active_deployment", AppDeployment), + app_status=_from_dict(d, "app_status", ApplicationStatus), + budget_policy_id=d.get("budget_policy_id", None), + compute_status=_from_dict(d, "compute_status", ComputeStatus), + create_time=d.get("create_time", None), + creator=d.get("creator", None), + default_source_code_path=d.get("default_source_code_path", None), + description=d.get("description", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + effective_user_api_scopes=d.get("effective_user_api_scopes", None), + id=d.get("id", None), + name=d.get("name", None), + oauth2_app_client_id=d.get("oauth2_app_client_id", None), + oauth2_app_integration_id=d.get("oauth2_app_integration_id", None), + pending_deployment=_from_dict(d, "pending_deployment", AppDeployment), + resources=_repeated_dict(d, "resources", AppResource), + service_principal_client_id=d.get("service_principal_client_id", None), + service_principal_id=d.get("service_principal_id", None), + service_principal_name=d.get("service_principal_name", None), + update_time=d.get("update_time", None), + updater=d.get("updater", None), + url=d.get("url", None), + user_api_scopes=d.get("user_api_scopes", None), + ) @dataclass class AppAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[AppPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the AppAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AppAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is 
not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppAccessControlRequest: """Deserializes the AppAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', AppPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", AppPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class AppAccessControlResponse: all_permissions: Optional[List[AppPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the AppAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AppAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
AppAccessControlResponse: """Deserializes the AppAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', AppPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", AppPermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class AppDeployment: create_time: Optional[str] = None """The creation time of the deployment. Formatted timestamp in ISO 8601.""" - + creator: Optional[str] = None """The email of the user that created the deployment.""" - + deployment_artifacts: Optional[AppDeploymentArtifacts] = None """The deployment artifacts for an app.""" - + deployment_id: Optional[str] = None """The unique id of the deployment.""" - + mode: Optional[AppDeploymentMode] = None """The mode in which the deployment will manage the source code.""" - + source_code_path: Optional[str] = None """The workspace file system path of the source code used to create the app deployment. This is different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. The former refers to the original source code location of the app in the workspace during deployment creation, whereas the latter provides a system-generated stable snapshotted source code path used by the deployment.""" - + status: Optional[AppDeploymentStatus] = None """Status and status message of the deployment.""" - + update_time: Optional[str] = None """The update time of the deployment.
Formatted timestamp in ISO 8601.""" - + def as_dict(self) -> dict: """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict() - if self.deployment_id is not None: body['deployment_id'] = self.deployment_id - if self.mode is not None: body['mode'] = self.mode.value - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - if self.status: body['status'] = self.status.as_dict() - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.creator is not None: + body["creator"] = self.creator + if self.deployment_artifacts: + body["deployment_artifacts"] = self.deployment_artifacts.as_dict() + if self.deployment_id is not None: + body["deployment_id"] = self.deployment_id + if self.mode is not None: + body["mode"] = self.mode.value + if self.source_code_path is not None: + body["source_code_path"] = self.source_code_path + if self.status: + body["status"] = self.status.as_dict() + if self.update_time is not None: + body["update_time"] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the AppDeployment into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts - if self.deployment_id is not None: body['deployment_id'] = self.deployment_id - if self.mode is not None: body['mode'] = self.mode - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - if self.status: body['status'] = self.status - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.creator is not None: + body["creator"] = self.creator + if self.deployment_artifacts: + body["deployment_artifacts"] = self.deployment_artifacts + if self.deployment_id is not None: + body["deployment_id"] = self.deployment_id + if self.mode is not None: + body["mode"] = self.mode + if self.source_code_path is not None: + body["source_code_path"] = self.source_code_path + if self.status: + body["status"] = self.status + if self.update_time is not None: + body["update_time"] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppDeployment: """Deserializes the AppDeployment from a dictionary.""" - return cls(create_time=d.get('create_time', None), creator=d.get('creator', None), deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts), deployment_id=d.get('deployment_id', None), mode=_enum(d, 'mode', AppDeploymentMode), source_code_path=d.get('source_code_path', None), status=_from_dict(d, 'status', AppDeploymentStatus), update_time=d.get('update_time', None)) - - + return cls( + create_time=d.get("create_time", None), + creator=d.get("creator", None), + deployment_artifacts=_from_dict(d, "deployment_artifacts", AppDeploymentArtifacts), + deployment_id=d.get("deployment_id", None), + mode=_enum(d, "mode", AppDeploymentMode), + source_code_path=d.get("source_code_path", None), + status=_from_dict(d,
"status", AppDeploymentStatus), + update_time=d.get("update_time", None), + ) @dataclass class AppDeploymentArtifacts: source_code_path: Optional[str] = None """The snapshotted workspace file system path of the source code loaded by the deployed app.""" - + def as_dict(self) -> dict: """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body.""" body = {} - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + if self.source_code_path is not None: + body["source_code_path"] = self.source_code_path return body def as_shallow_dict(self) -> dict: """Serializes the AppDeploymentArtifacts into a shallow dictionary of its immediate attributes.""" body = {} - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + if self.source_code_path is not None: + body["source_code_path"] = self.source_code_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppDeploymentArtifacts: """Deserializes the AppDeploymentArtifacts from a dictionary.""" - return cls(source_code_path=d.get('source_code_path', None)) - - + return cls(source_code_path=d.get("source_code_path", None)) class AppDeploymentMode(Enum): - - - AUTO_SYNC = 'AUTO_SYNC' - SNAPSHOT = 'SNAPSHOT' + + AUTO_SYNC = "AUTO_SYNC" + SNAPSHOT = "SNAPSHOT" + class AppDeploymentState(Enum): - - - CANCELLED = 'CANCELLED' - FAILED = 'FAILED' - IN_PROGRESS = 'IN_PROGRESS' - SUCCEEDED = 'SUCCEEDED' + + CANCELLED = "CANCELLED" + FAILED = "FAILED" + IN_PROGRESS = "IN_PROGRESS" + SUCCEEDED = "SUCCEEDED" + @dataclass class AppDeploymentStatus: message: Optional[str] = None """Message corresponding with the deployment state.""" - + state: Optional[AppDeploymentState] = None """State of the deployment.""" - + def as_dict(self) -> dict: """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state.value + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the AppDeploymentStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppDeploymentStatus: """Deserializes the AppDeploymentStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState)) - - + return cls(message=d.get("message", None), state=_enum(d, "state", AppDeploymentState)) @dataclass class AppPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[AppPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the AppPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + 
body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the AppPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermission: """Deserializes the AppPermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', AppPermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", AppPermissionLevel), + ) class AppPermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = 'CAN_MANAGE' - CAN_USE = 'CAN_USE' + + CAN_MANAGE = "CAN_MANAGE" + CAN_USE = "CAN_USE" + @dataclass class AppPermissions: access_control_list: Optional[List[AppAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AppPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the AppPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermissions: """Deserializes the AppPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class AppPermissionsDescription: description: 
Optional[str] = None - + permission_level: Optional[AppPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the AppPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the AppPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsDescription: """Deserializes the AppPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', AppPermissionLevel)) - - + return cls( + description=d.get("description", None), permission_level=_enum(d, "permission_level", AppPermissionLevel) + ) @dataclass class AppPermissionsRequest: access_control_list: Optional[List[AppAccessControlRequest]] = None - + app_name: Optional[str] = None """The app for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the AppPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.app_name is not None: body['app_name'] = self.app_name + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.app_name is not None: + body["app_name"] = self.app_name return body def as_shallow_dict(self) -> dict: """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.app_name is not None: body['app_name'] = self.app_name + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.app_name is not None: + body["app_name"] = self.app_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsRequest: """Deserializes the AppPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlRequest), app_name=d.get('app_name', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlRequest), + app_name=d.get("app_name", None), + ) @dataclass class AppResource: name: str """Name of the App Resource.""" - + description: Optional[str] = None """Description of the App Resource.""" - + job: Optional[AppResourceJob] = None - + secret: Optional[AppResourceSecret] = None - + serving_endpoint: Optional[AppResourceServingEndpoint] = None - + sql_warehouse: Optional[AppResourceSqlWarehouse] = None - + uc_securable: Optional[AppResourceUcSecurable] = None - + def as_dict(self) -> dict: """Serializes the 
AppResource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.job: body['job'] = self.job.as_dict() - if self.name is not None: body['name'] = self.name - if self.secret: body['secret'] = self.secret.as_dict() - if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint.as_dict() - if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse.as_dict() - if self.uc_securable: body['uc_securable'] = self.uc_securable.as_dict() + if self.description is not None: + body["description"] = self.description + if self.job: + body["job"] = self.job.as_dict() + if self.name is not None: + body["name"] = self.name + if self.secret: + body["secret"] = self.secret.as_dict() + if self.serving_endpoint: + body["serving_endpoint"] = self.serving_endpoint.as_dict() + if self.sql_warehouse: + body["sql_warehouse"] = self.sql_warehouse.as_dict() + if self.uc_securable: + body["uc_securable"] = self.uc_securable.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AppResource into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.job: body['job'] = self.job - if self.name is not None: body['name'] = self.name - if self.secret: body['secret'] = self.secret - if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint - if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse - if self.uc_securable: body['uc_securable'] = self.uc_securable + if self.description is not None: + body["description"] = self.description + if self.job: + body["job"] = self.job + if self.name is not None: + body["name"] = self.name + if self.secret: + body["secret"] = self.secret + if self.serving_endpoint: + body["serving_endpoint"] = self.serving_endpoint + if self.sql_warehouse: + body["sql_warehouse"] = self.sql_warehouse + if self.uc_securable: + body["uc_securable"] = self.uc_securable return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResource: """Deserializes the AppResource from a dictionary.""" - return cls(description=d.get('description', None), job=_from_dict(d, 'job', AppResourceJob), name=d.get('name', None), secret=_from_dict(d, 'secret', AppResourceSecret), serving_endpoint=_from_dict(d, 'serving_endpoint', AppResourceServingEndpoint), sql_warehouse=_from_dict(d, 'sql_warehouse', AppResourceSqlWarehouse), uc_securable=_from_dict(d, 'uc_securable', AppResourceUcSecurable)) - - + return cls( + description=d.get("description", None), + job=_from_dict(d, "job", AppResourceJob), + name=d.get("name", None), + secret=_from_dict(d, "secret", AppResourceSecret), + serving_endpoint=_from_dict(d, "serving_endpoint", AppResourceServingEndpoint), + sql_warehouse=_from_dict(d, "sql_warehouse", AppResourceSqlWarehouse), + uc_securable=_from_dict(d, "uc_securable", AppResourceUcSecurable), + ) @dataclass class AppResourceJob: id: str """Id of the job to grant permission on.""" - + permission: AppResourceJobJobPermission """Permissions to grant on the Job. 
Supported permissions are: "CAN_MANAGE", "IS_OWNER", "CAN_MANAGE_RUN", "CAN_VIEW".""" - + def as_dict(self) -> dict: """Serializes the AppResourceJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.permission is not None: body['permission'] = self.permission.value + if self.id is not None: + body["id"] = self.id + if self.permission is not None: + body["permission"] = self.permission.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.permission is not None: body['permission'] = self.permission + if self.id is not None: + body["id"] = self.id + if self.permission is not None: + body["permission"] = self.permission return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceJob: """Deserializes the AppResourceJob from a dictionary.""" - return cls(id=d.get('id', None), permission=_enum(d, 'permission', AppResourceJobJobPermission)) - - + return cls(id=d.get("id", None), permission=_enum(d, "permission", AppResourceJobJobPermission)) class AppResourceJobJobPermission(Enum): - - - CAN_MANAGE = 'CAN_MANAGE' - CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' - CAN_VIEW = 'CAN_VIEW' - IS_OWNER = 'IS_OWNER' + + CAN_MANAGE = "CAN_MANAGE" + CAN_MANAGE_RUN = "CAN_MANAGE_RUN" + CAN_VIEW = "CAN_VIEW" + IS_OWNER = "IS_OWNER" + @dataclass class AppResourceSecret: scope: str """Scope of the secret to grant permission on.""" - + key: str """Key of the secret to grant permission on.""" - + permission: AppResourceSecretSecretPermission """Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission must be one of: "READ", "WRITE", "MANAGE".""" - + def as_dict(self) -> dict: """Serializes the AppResourceSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.permission is not None: body['permission'] = self.permission.value - if self.scope is not None: body['scope'] = self.scope + if self.key is not None: + body["key"] = self.key + if self.permission is not None: + body["permission"] = self.permission.value + if self.scope is not None: + body["scope"] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.permission is not None: body['permission'] = self.permission - if self.scope is not None: body['scope'] = self.scope + if self.key is not None: + body["key"] = self.key + if self.permission is not None: + body["permission"] = self.permission + if self.scope is not None: + body["scope"] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceSecret: """Deserializes the AppResourceSecret from a dictionary.""" - return cls(key=d.get('key', None), permission=_enum(d, 'permission', AppResourceSecretSecretPermission), scope=d.get('scope', None)) - - + return cls( + key=d.get("key", None), + permission=_enum(d, "permission", AppResourceSecretSecretPermission), + scope=d.get("scope", None), + ) class AppResourceSecretSecretPermission(Enum): """Permission to grant on the secret scope. 
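These generated dataclasses all follow one serialization convention: as_dict() recursively converts nested dataclasses and enums into JSON-ready values, as_shallow_dict() keeps the original Python objects, and from_dict() inverts as_dict(). A minimal round trip with the AppResourceSecret defined above (scope and key are placeholders):

secret = AppResourceSecret(
    scope="app-secrets",
    key="api-token",
    permission=AppResourceSecretSecretPermission.READ,
)
# as_dict() emits the enum's string value; as_shallow_dict() would keep the member.
assert secret.as_dict() == {"key": "api-token", "permission": "READ", "scope": "app-secrets"}
assert AppResourceSecret.from_dict(secret.as_dict()) == secret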
Supported permissions are: "READ", "WRITE", "MANAGE".""" - - MANAGE = 'MANAGE' - READ = 'READ' - WRITE = 'WRITE' + + MANAGE = "MANAGE" + READ = "READ" + WRITE = "WRITE" + @dataclass class AppResourceServingEndpoint: name: str """Name of the serving endpoint to grant permission on.""" - + permission: AppResourceServingEndpointServingEndpointPermission """Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE", "CAN_QUERY", "CAN_VIEW".""" - + def as_dict(self) -> dict: """Serializes the AppResourceServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.permission is not None: body['permission'] = self.permission.value + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceServingEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.permission is not None: body['permission'] = self.permission + if self.name is not None: + body["name"] = self.name + if self.permission is not None: + body["permission"] = self.permission return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceServingEndpoint: """Deserializes the AppResourceServingEndpoint from a dictionary.""" - return cls(name=d.get('name', None), permission=_enum(d, 'permission', AppResourceServingEndpointServingEndpointPermission)) - - + return cls( + name=d.get("name", None), + permission=_enum(d, "permission", AppResourceServingEndpointServingEndpointPermission), + ) class AppResourceServingEndpointServingEndpointPermission(Enum): - - - CAN_MANAGE = 'CAN_MANAGE' - CAN_QUERY = 'CAN_QUERY' - CAN_VIEW = 'CAN_VIEW' + + CAN_MANAGE = "CAN_MANAGE" + CAN_QUERY = "CAN_QUERY" + CAN_VIEW = "CAN_VIEW" + @dataclass class AppResourceSqlWarehouse: id: str """Id of the SQL warehouse to grant permission on.""" - + permission: AppResourceSqlWarehouseSqlWarehousePermission """Permission to grant on the SQL warehouse. 
Supported permissions are: "CAN_MANAGE", "CAN_USE", "IS_OWNER".""" - + def as_dict(self) -> dict: """Serializes the AppResourceSqlWarehouse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.permission is not None: body['permission'] = self.permission.value + if self.id is not None: + body["id"] = self.id + if self.permission is not None: + body["permission"] = self.permission.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceSqlWarehouse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.permission is not None: body['permission'] = self.permission + if self.id is not None: + body["id"] = self.id + if self.permission is not None: + body["permission"] = self.permission return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceSqlWarehouse: """Deserializes the AppResourceSqlWarehouse from a dictionary.""" - return cls(id=d.get('id', None), permission=_enum(d, 'permission', AppResourceSqlWarehouseSqlWarehousePermission)) - - + return cls( + id=d.get("id", None), permission=_enum(d, "permission", AppResourceSqlWarehouseSqlWarehousePermission) + ) class AppResourceSqlWarehouseSqlWarehousePermission(Enum): - - - CAN_MANAGE = 'CAN_MANAGE' - CAN_USE = 'CAN_USE' - IS_OWNER = 'IS_OWNER' + + CAN_MANAGE = "CAN_MANAGE" + CAN_USE = "CAN_USE" + IS_OWNER = "IS_OWNER" + @dataclass class AppResourceUcSecurable: securable_full_name: str - + securable_type: AppResourceUcSecurableUcSecurableType - + permission: AppResourceUcSecurableUcSecurablePermission - + def as_dict(self) -> dict: """Serializes the AppResourceUcSecurable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission is not None: body['permission'] = self.permission.value - if self.securable_full_name is not None: body['securable_full_name'] = self.securable_full_name - if self.securable_type is not None: body['securable_type'] = self.securable_type.value + if self.permission is not None: + body["permission"] = self.permission.value + if self.securable_full_name is not None: + body["securable_full_name"] = self.securable_full_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type.value return body def as_shallow_dict(self) -> dict: """Serializes the AppResourceUcSecurable into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission is not None: body['permission'] = self.permission - if self.securable_full_name is not None: body['securable_full_name'] = self.securable_full_name - if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.permission is not None: + body["permission"] = self.permission + if self.securable_full_name is not None: + body["securable_full_name"] = self.securable_full_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AppResourceUcSecurable: """Deserializes the AppResourceUcSecurable from a dictionary.""" - return cls(permission=_enum(d, 'permission', AppResourceUcSecurableUcSecurablePermission), securable_full_name=d.get('securable_full_name', None), securable_type=_enum(d, 'securable_type', AppResourceUcSecurableUcSecurableType)) - - + return cls( + permission=_enum(d, "permission", AppResourceUcSecurableUcSecurablePermission), + securable_full_name=d.get("securable_full_name", None), + 
securable_type=_enum(d, "securable_type", AppResourceUcSecurableUcSecurableType), + ) class AppResourceUcSecurableUcSecurablePermission(Enum): - - - READ_VOLUME = 'READ_VOLUME' - WRITE_VOLUME = 'WRITE_VOLUME' + + READ_VOLUME = "READ_VOLUME" + WRITE_VOLUME = "WRITE_VOLUME" + class AppResourceUcSecurableUcSecurableType(Enum): - - - VOLUME = 'VOLUME' + + VOLUME = "VOLUME" + class ApplicationState(Enum): - - - CRASHED = 'CRASHED' - DEPLOYING = 'DEPLOYING' - RUNNING = 'RUNNING' - UNAVAILABLE = 'UNAVAILABLE' + + CRASHED = "CRASHED" + DEPLOYING = "DEPLOYING" + RUNNING = "RUNNING" + UNAVAILABLE = "UNAVAILABLE" + @dataclass class ApplicationStatus: message: Optional[str] = None """Application status message""" - + state: Optional[ApplicationState] = None """State of the application.""" - + def as_dict(self) -> dict: """Serializes the ApplicationStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state.value + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the ApplicationStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApplicationStatus: """Deserializes the ApplicationStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', ApplicationState)) - - + return cls(message=d.get("message", None), state=_enum(d, "state", ApplicationState)) class ComputeState(Enum): - - - ACTIVE = 'ACTIVE' - DELETING = 'DELETING' - ERROR = 'ERROR' - STARTING = 'STARTING' - STOPPED = 'STOPPED' - STOPPING = 'STOPPING' - UPDATING = 'UPDATING' + + ACTIVE = "ACTIVE" + DELETING = "DELETING" + ERROR = "ERROR" + STARTING = "STARTING" + STOPPED = "STOPPED" + STOPPING = "STOPPING" + UPDATING = "UPDATING" + @dataclass class ComputeStatus: message: Optional[str] = None """Compute status message""" - + state: Optional[ComputeState] = None """State of the app compute.""" - + def as_dict(self) -> dict: """Serializes the ComputeStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state.value + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the ComputeStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComputeStatus: """Deserializes the ComputeStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState)) - - - - - - - - - - - - - - - - - + return cls(message=d.get("message", None), state=_enum(d, "state", ComputeState)) @dataclass class 
GetAppPermissionLevelsResponse: permission_levels: Optional[List[AppPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetAppPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetAppPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetAppPermissionLevelsResponse: """Deserializes the GetAppPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', AppPermissionsDescription)) - - - - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", AppPermissionsDescription)) @dataclass class ListAppDeploymentsResponse: app_deployments: Optional[List[AppDeployment]] = None """Deployment history of the app.""" - + next_page_token: Optional[str] = None """Pagination token to request the next page of apps.""" - + def as_dict(self) -> dict: """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.app_deployments: + body["app_deployments"] = [v.as_dict() for v in self.app_deployments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListAppDeploymentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_deployments: body['app_deployments'] = self.app_deployments - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.app_deployments: + body["app_deployments"] = self.app_deployments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAppDeploymentsResponse: """Deserializes the ListAppDeploymentsResponse from a dictionary.""" - return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + app_deployments=_repeated_dict(d, "app_deployments", AppDeployment), + next_page_token=d.get("next_page_token", None), + ) @dataclass class ListAppsResponse: apps: Optional[List[App]] = None - + next_page_token: Optional[str] = None """Pagination token to request the next page of apps.""" - + def as_dict(self) -> dict: """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: body['apps'] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: 
"""Serializes the ListAppsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: body['apps'] = self.apps - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = self.apps + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAppsResponse: """Deserializes the ListAppsResponse from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None)) - - + return cls(apps=_repeated_dict(d, "apps", App), next_page_token=d.get("next_page_token", None)) @dataclass class StartAppRequest: name: Optional[str] = None """The name of the app.""" - - - @dataclass class StopAppRequest: name: Optional[str] = None """The name of the app.""" - - - - - - - - class AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_app_active(self, name: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None) -> App: - deadline = time.time() + timeout.total_seconds() - target_states = (ComputeState.ACTIVE, ) - failure_states = (ComputeState.ERROR, ComputeState.STOPPED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.compute_status.state - status_message = f'current status: {status}' - if poll.compute_status: - status_message = poll.compute_status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach ACTIVE, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_get_deployment_app_succeeded(self, app_name: str, deployment_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: - deadline = time.time() + timeout.total_seconds() - target_states = (AppDeploymentState.SUCCEEDED, ) - failure_states = (AppDeploymentState.FAILED, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) - status = poll.status.state - status_message = f'current status: {status}' - if poll.status: - status_message = poll.status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"app_name={app_name}, deployment_id={deployment_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_get_app_stopped(self, name: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None) -> App: - deadline = time.time() + timeout.total_seconds() - target_states = (ComputeState.STOPPED, ) - failure_states = (ComputeState.ERROR, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.compute_status.state - status_message = f'current status: {status}' - if poll.compute_status: - status_message = poll.compute_status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach STOPPED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - - def create(self - , app: App - , * - , no_compute: Optional[bool] = None) -> Wait[App]: + + def wait_get_app_active( + self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None + ) -> App: + deadline = time.time() + timeout.total_seconds() + target_states = (ComputeState.ACTIVE,) + failure_states = ( + ComputeState.ERROR, + ComputeState.STOPPED, + ) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.compute_status.state + status_message = f"current status: {status}" + if poll.compute_status: + status_message = poll.compute_status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach ACTIVE, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def wait_get_deployment_app_succeeded( + self, + app_name: str, + deployment_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[AppDeployment], None]] = None, + ) -> AppDeployment: + deadline = time.time() + timeout.total_seconds() + target_states = (AppDeploymentState.SUCCEEDED,) + failure_states = (AppDeploymentState.FAILED,) + status_message = "polling..." 
+ attempt = 1 + while time.time() < deadline: + poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) + status = poll.status.state + status_message = f"current status: {status}" + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach SUCCEEDED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"app_name={app_name}, deployment_id={deployment_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def wait_get_app_stopped( + self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[App], None]] = None + ) -> App: + deadline = time.time() + timeout.total_seconds() + target_states = (ComputeState.STOPPED,) + failure_states = (ComputeState.ERROR,) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.compute_status.state + status_message = f"current status: {status}" + if poll.compute_status: + status_message = poll.compute_status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach STOPPED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create(self, app: App, *, no_compute: Optional[bool] = None) -> Wait[App]: """Create an app. - + Creates a new app. - + :param app: :class:`App` :param no_compute: bool (optional) If true, the app will not be started after creation. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. """ body = app.as_dict() query = {} - if no_compute is not None: query['no_compute'] = no_compute - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.0/apps', query=query, body=body - - , headers=headers - ) - return Wait(self.wait_get_app_active - , response = App.from_dict(op_response) - , name=op_response['name']) - - - def create_and_wait(self - , app: App - , * - , no_compute: Optional[bool] = None, - timeout=timedelta(minutes=20)) -> App: + if no_compute is not None: + query["no_compute"] = no_compute + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers) + return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"]) + + def create_and_wait(self, app: App, *, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)) -> App: return self.create(app=app, no_compute=no_compute).result(timeout=timeout) - - - - def delete(self - , name: str - ) -> App: + def delete(self, name: str) -> App: """Delete an app. - + Deletes an app. - + :param name: str The name of the app. 
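Together, create and wait_get_app_active implement the SDK's long-running-operation pattern: create returns a Wait immediately, and result() — which create_and_wait calls for you — polls get() with a jittered backoff capped at roughly 10s per attempt until the compute reaches ACTIVE. A sketch, assuming a configured client that exposes the service as `w.apps`:

from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
waiter = w.apps.create(app=App(name="my-app"))
# Blocks until ComputeState.ACTIVE, or raises OperationFailed / TimeoutError.
app = waiter.result(timeout=timedelta(minutes=30))
print(app.url)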
- + :returns: :class:`App` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE',f'/api/2.0/apps/{name}' - - , headers=headers - ) - return App.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("DELETE", f"/api/2.0/apps/{name}", headers=headers) + return App.from_dict(res) - def deploy(self - , app_name: str, app_deployment: AppDeployment - ) -> Wait[AppDeployment]: + def deploy(self, app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment]: """Create an app deployment. - + Creates an app deployment for the app with the supplied name. - + :param app_name: str The name of the app. :param app_deployment: :class:`AppDeployment` - + :returns: Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. """ body = app_deployment.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/apps/{app_name}/deployments', body=body - - , headers=headers - ) - return Wait(self.wait_get_deployment_app_succeeded - , response = AppDeployment.from_dict(op_response) - , app_name=app_name, deployment_id=op_response['deployment_id']) - - - def deploy_and_wait(self - , app_name: str, app_deployment: AppDeployment - , - timeout=timedelta(minutes=20)) -> AppDeployment: + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/apps/{app_name}/deployments", body=body, headers=headers) + return Wait( + self.wait_get_deployment_app_succeeded, + response=AppDeployment.from_dict(op_response), + app_name=app_name, + deployment_id=op_response["deployment_id"], + ) + + def deploy_and_wait( + self, app_name: str, app_deployment: AppDeployment, timeout=timedelta(minutes=20) + ) -> AppDeployment: return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout) - - - - def get(self - , name: str - ) -> App: + def get(self, name: str) -> App: """Get an app. - + Retrieves information for the app with the supplied name. - + :param name: str The name of the app. - + :returns: :class:`App` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/apps/{name}' - - , headers=headers - ) - return App.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/apps/{name}", headers=headers) + return App.from_dict(res) - def get_deployment(self - , app_name: str, deployment_id: str - ) -> AppDeployment: + def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: """Get an app deployment. - + Retrieves information for the app deployment with the supplied name and deployment id. - + :param app_name: str The name of the app. :param deployment_id: str The unique id of the deployment. 
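`create` and `deploy` return a `Wait` wrapper immediately; calling `.result()` runs the matching waiter. A sketch chaining app creation with a first deployment; the `source_code_path` field is assumed from the generated `AppDeployment` model:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App, AppDeployment

w = WorkspaceClient()

app = w.apps.create(app=App(name="my-app")).result()  # blocks until ACTIVE
deployment = w.apps.deploy_and_wait(                  # blocks until SUCCEEDED
    app_name=app.name,
    app_deployment=AppDeployment(source_code_path="/Workspace/Users/me@example.com/my-app"),
)
print(deployment.deployment_id, deployment.status.state)
```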
- + :returns: :class:`AppDeployment` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/apps/{app_name}/deployments/{deployment_id}' - - , headers=headers - ) - return AppDeployment.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/apps/{app_name}/deployments/{deployment_id}", headers=headers) + return AppDeployment.from_dict(res) - def get_permission_levels(self - , app_name: str - ) -> GetAppPermissionLevelsResponse: + def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse: """Get app permission levels. - + Gets the permission levels that a user can have on an object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`GetAppPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/apps/{app_name}/permissionLevels' - - , headers=headers - ) - return GetAppPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/apps/{app_name}/permissionLevels", headers=headers) + return GetAppPermissionLevelsResponse.from_dict(res) - def get_permissions(self - , app_name: str - ) -> AppPermissions: + def get_permissions(self, app_name: str) -> AppPermissions: """Get app permissions. - + Gets the permissions of an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`AppPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/apps/{app_name}' - - , headers=headers - ) - return AppPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: + res = self._api.do("GET", f"/api/2.0/permissions/apps/{app_name}", headers=headers) + return AppPermissions.from_dict(res) + + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: """List apps. - + Lists all apps in the workspace. - + :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. 
- + :returns: Iterator over :class:`App` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/apps', query=query - - , headers=headers - ) - if 'apps' in json: - for v in json['apps']: - yield App.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def list_deployments(self - , app_name: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AppDeployment]: + json = self._api.do("GET", "/api/2.0/apps", query=query, headers=headers) + if "apps" in json: + for v in json["apps"]: + yield App.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_deployments( + self, app_name: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[AppDeployment]: """List app deployments. - + Lists all app deployments for the app with the supplied name. - + :param app_name: str The name of the app. :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. - + :returns: Iterator over :class:`AppDeployment` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/apps/{app_name}/deployments', query=query - - , headers=headers - ) - if 'app_deployments' in json: - for v in json['app_deployments']: - yield AppDeployment.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def set_permissions(self - , app_name: str - , * - , access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: + json = self._api.do("GET", f"/api/2.0/apps/{app_name}/deployments", query=query, headers=headers) + if "app_deployments" in json: + for v in json["app_deployments"]: + yield AppDeployment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def set_permissions( + self, app_name: str, *, access_control_list: Optional[List[AppAccessControlRequest]] = None + ) -> AppPermissions: """Set app permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. 
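`list` and `list_deployments` wrap token pagination in a generator: each request carries `page_token`, and iteration ends when the response has no (or an empty) `next_page_token`. Usage is a plain for-loop:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# page_size only bounds items per request; the generator walks every page.
for app in w.apps.list(page_size=50):
    print(app.name)

for dep in w.apps.list_deployments(app_name="my-app"):
    print(dep.deployment_id)
```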
:param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/apps/{app_name}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/permissions/apps/{app_name}", body=body, headers=headers) return AppPermissions.from_dict(res) - - - - - def start(self - , name: str - ) -> Wait[App]: + def start(self, name: str) -> Wait[App]: """Start an app. - + Start the last active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. """ - - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/apps/{name}/start' - - , headers=headers - ) - return Wait(self.wait_get_app_active - , response = App.from_dict(op_response) - , name=op_response['name']) - - - def start_and_wait(self - , name: str - , - timeout=timedelta(minutes=20)) -> App: + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/apps/{name}/start", headers=headers) + return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response["name"]) + + def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App: return self.start(name=name).result(timeout=timeout) - - - - def stop(self - , name: str - ) -> Wait[App]: + def stop(self, name: str) -> Wait[App]: """Stop an app. - + Stops the active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_stopped for more details. """ - - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/apps/{name}/stop' - - , headers=headers - ) - return Wait(self.wait_get_app_stopped - , response = App.from_dict(op_response) - , name=op_response['name']) - - - def stop_and_wait(self - , name: str - , - timeout=timedelta(minutes=20)) -> App: + + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/apps/{name}/stop", headers=headers) + return Wait(self.wait_get_app_stopped, response=App.from_dict(op_response), name=op_response["name"]) + + def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App: return self.stop(name=name).result(timeout=timeout) - - - - def update(self - , name: str, app: App - ) -> App: + def update(self, name: str, app: App) -> App: """Update an app. - + Updates the app with the supplied name. - + :param name: str The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace. 
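`start` and `stop` follow the same long-running pattern, and the `*_and_wait` variants simply forward to `.result(timeout=...)`. A sketch:

```python
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

w.apps.stop_and_wait(name="my-app")                                  # waits for STOPPED
w.apps.start_and_wait(name="my-app", timeout=timedelta(minutes=10))  # waits for ACTIVE
```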
:param app: :class:`App` - + :returns: :class:`App` """ body = app.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/apps/{name}', body=body - - , headers=headers - ) - return App.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PATCH", f"/api/2.0/apps/{name}", body=body, headers=headers) + return App.from_dict(res) - def update_permissions(self - , app_name: str - , * - , access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: + def update_permissions( + self, app_name: str, *, access_control_list: Optional[List[AppAccessControlRequest]] = None + ) -> AppPermissions: """Update app permissions. - + Updates the permissions on an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/apps/{app_name}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/permissions/apps/{app_name}", body=body, headers=headers) return AppPermissions.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index a09c58758..4dc535891 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -1,191 +1,231 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, BinaryIO, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from ._internal import _enum, _from_dict, _repeated_dict -_LOG = logging.getLogger('databricks.sdk') +_LOG = logging.getLogger("databricks.sdk") from databricks.sdk.service import compute # all definitions in this file are in alphabetical order + @dataclass class ActionConfiguration: action_configuration_id: Optional[str] = None """Databricks action configuration ID.""" - + action_type: Optional[ActionConfigurationType] = None """The type of the action.""" - + target: Optional[str] = None """Target for the action. 
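Note the verbs on the two permission writers: `set_permissions` issues a PUT that replaces the whole ACL (omitted direct grants are dropped), while `update_permissions` issues a PATCH that only amends it. A sketch, with `AppPermissionLevel.CAN_USE` assumed from the generated enum:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import AppAccessControlRequest, AppPermissionLevel

w = WorkspaceClient()

acl = [
    AppAccessControlRequest(
        user_name="someone@example.com",
        permission_level=AppPermissionLevel.CAN_USE,
    )
]
w.apps.set_permissions(app_name="my-app", access_control_list=acl)     # replace ACL
w.apps.update_permissions(app_name="my-app", access_control_list=acl)  # amend ACL
```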
For example, an email address.""" - + def as_dict(self) -> dict: """Serializes the ActionConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_configuration_id is not None: body['action_configuration_id'] = self.action_configuration_id - if self.action_type is not None: body['action_type'] = self.action_type.value - if self.target is not None: body['target'] = self.target + if self.action_configuration_id is not None: + body["action_configuration_id"] = self.action_configuration_id + if self.action_type is not None: + body["action_type"] = self.action_type.value + if self.target is not None: + body["target"] = self.target return body def as_shallow_dict(self) -> dict: """Serializes the ActionConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_configuration_id is not None: body['action_configuration_id'] = self.action_configuration_id - if self.action_type is not None: body['action_type'] = self.action_type - if self.target is not None: body['target'] = self.target + if self.action_configuration_id is not None: + body["action_configuration_id"] = self.action_configuration_id + if self.action_type is not None: + body["action_type"] = self.action_type + if self.target is not None: + body["target"] = self.target return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ActionConfiguration: """Deserializes the ActionConfiguration from a dictionary.""" - return cls(action_configuration_id=d.get('action_configuration_id', None), action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None)) - - + return cls( + action_configuration_id=d.get("action_configuration_id", None), + action_type=_enum(d, "action_type", ActionConfigurationType), + target=d.get("target", None), + ) class ActionConfigurationType(Enum): - - - EMAIL_NOTIFICATION = 'EMAIL_NOTIFICATION' + + EMAIL_NOTIFICATION = "EMAIL_NOTIFICATION" + @dataclass class AlertConfiguration: action_configurations: Optional[List[ActionConfiguration]] = None """Configured actions for this alert. These define what happens when an alert enters a triggered state.""" - + alert_configuration_id: Optional[str] = None """Databricks alert configuration ID.""" - + quantity_threshold: Optional[str] = None """The threshold for the budget alert to determine if it is in a triggered state. The number is evaluated based on `quantity_type`.""" - + quantity_type: Optional[AlertConfigurationQuantityType] = None """The way to calculate cost for this budget alert. 
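Every generated dataclass follows the same wire contract: `as_dict` serializes recursively and emits enums via `.value`, `as_shallow_dict` leaves nested objects unserialized, and `from_dict` inverts `as_dict`. A round-trip sketch:

```python
from databricks.sdk.service.billing import ActionConfiguration, ActionConfigurationType

cfg = ActionConfiguration(
    action_type=ActionConfigurationType.EMAIL_NOTIFICATION,
    target="alerts@example.com",
)
wire = cfg.as_dict()  # None-valued fields are omitted entirely
assert wire == {"action_type": "EMAIL_NOTIFICATION", "target": "alerts@example.com"}
assert ActionConfiguration.from_dict(wire) == cfg  # dataclasses compare field-wise
```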
This is what `quantity_threshold` is measured in.""" - + time_period: Optional[AlertConfigurationTimePeriod] = None """The time window of usage data for the budget.""" - + trigger_type: Optional[AlertConfigurationTriggerType] = None """The evaluation method to determine when this budget alert is in a triggered state.""" - + def as_dict(self) -> dict: """Serializes the AlertConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_configurations: body['action_configurations'] = [v.as_dict() for v in self.action_configurations] - if self.alert_configuration_id is not None: body['alert_configuration_id'] = self.alert_configuration_id - if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold - if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value - if self.time_period is not None: body['time_period'] = self.time_period.value - if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value + if self.action_configurations: + body["action_configurations"] = [v.as_dict() for v in self.action_configurations] + if self.alert_configuration_id is not None: + body["alert_configuration_id"] = self.alert_configuration_id + if self.quantity_threshold is not None: + body["quantity_threshold"] = self.quantity_threshold + if self.quantity_type is not None: + body["quantity_type"] = self.quantity_type.value + if self.time_period is not None: + body["time_period"] = self.time_period.value + if self.trigger_type is not None: + body["trigger_type"] = self.trigger_type.value return body def as_shallow_dict(self) -> dict: """Serializes the AlertConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_configurations: body['action_configurations'] = self.action_configurations - if self.alert_configuration_id is not None: body['alert_configuration_id'] = self.alert_configuration_id - if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold - if self.quantity_type is not None: body['quantity_type'] = self.quantity_type - if self.time_period is not None: body['time_period'] = self.time_period - if self.trigger_type is not None: body['trigger_type'] = self.trigger_type + if self.action_configurations: + body["action_configurations"] = self.action_configurations + if self.alert_configuration_id is not None: + body["alert_configuration_id"] = self.alert_configuration_id + if self.quantity_threshold is not None: + body["quantity_threshold"] = self.quantity_threshold + if self.quantity_type is not None: + body["quantity_type"] = self.quantity_type + if self.time_period is not None: + body["time_period"] = self.time_period + if self.trigger_type is not None: + body["trigger_type"] = self.trigger_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertConfiguration: """Deserializes the AlertConfiguration from a dictionary.""" - return cls(action_configurations=_repeated_dict(d, 'action_configurations', ActionConfiguration), alert_configuration_id=d.get('alert_configuration_id', None), quantity_threshold=d.get('quantity_threshold', None), quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) - - + return cls( + action_configurations=_repeated_dict(d, "action_configurations", ActionConfiguration), + 
alert_configuration_id=d.get("alert_configuration_id", None), + quantity_threshold=d.get("quantity_threshold", None), + quantity_type=_enum(d, "quantity_type", AlertConfigurationQuantityType), + time_period=_enum(d, "time_period", AlertConfigurationTimePeriod), + trigger_type=_enum(d, "trigger_type", AlertConfigurationTriggerType), + ) class AlertConfigurationQuantityType(Enum): - - - LIST_PRICE_DOLLARS_USD = 'LIST_PRICE_DOLLARS_USD' + + LIST_PRICE_DOLLARS_USD = "LIST_PRICE_DOLLARS_USD" + class AlertConfigurationTimePeriod(Enum): - - - MONTH = 'MONTH' + + MONTH = "MONTH" + class AlertConfigurationTriggerType(Enum): - - - CUMULATIVE_SPENDING_EXCEEDED = 'CUMULATIVE_SPENDING_EXCEEDED' + + CUMULATIVE_SPENDING_EXCEEDED = "CUMULATIVE_SPENDING_EXCEEDED" + @dataclass class BudgetConfiguration: account_id: Optional[str] = None """Databricks account ID.""" - + alert_configurations: Optional[List[AlertConfiguration]] = None """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one alert configuration.""" - + budget_configuration_id: Optional[str] = None """Databricks budget configuration ID.""" - + create_time: Optional[int] = None """Creation time of this budget configuration.""" - + display_name: Optional[str] = None """Human-readable name of budget configuration. Max Length: 128""" - + filter: Optional[BudgetConfigurationFilter] = None """Configured filters for this budget. These are applied to your account's usage to limit the scope of what is considered for this budget. Leave empty to include all usage for this account. All provided filters must be matched for usage to be included.""" - + update_time: Optional[int] = None """Update time of this budget configuration.""" - + def as_dict(self) -> dict: """Serializes the BudgetConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.alert_configurations: body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] - if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id - if self.create_time is not None: body['create_time'] = self.create_time - if self.display_name is not None: body['display_name'] = self.display_name - if self.filter: body['filter'] = self.filter.as_dict() - if self.update_time is not None: body['update_time'] = self.update_time + if self.account_id is not None: + body["account_id"] = self.account_id + if self.alert_configurations: + body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: + body["budget_configuration_id"] = self.budget_configuration_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.display_name is not None: + body["display_name"] = self.display_name + if self.filter: + body["filter"] = self.filter.as_dict() + if self.update_time is not None: + body["update_time"] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.alert_configurations: body['alert_configurations'] = self.alert_configurations - if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id - if self.create_time is not None: body['create_time'] = self.create_time - if self.display_name 
is not None: body['display_name'] = self.display_name - if self.filter: body['filter'] = self.filter - if self.update_time is not None: body['update_time'] = self.update_time + if self.account_id is not None: + body["account_id"] = self.account_id + if self.alert_configurations: + body["alert_configurations"] = self.alert_configurations + if self.budget_configuration_id is not None: + body["budget_configuration_id"] = self.budget_configuration_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.display_name is not None: + body["display_name"] = self.display_name + if self.filter: + body["filter"] = self.filter + if self.update_time is not None: + body["update_time"] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfiguration: """Deserializes the BudgetConfiguration from a dictionary.""" - return cls(account_id=d.get('account_id', None), alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), budget_configuration_id=d.get('budget_configuration_id', None), create_time=d.get('create_time', None), display_name=d.get('display_name', None), filter=_from_dict(d, 'filter', BudgetConfigurationFilter), update_time=d.get('update_time', None)) - - + return cls( + account_id=d.get("account_id", None), + alert_configurations=_repeated_dict(d, "alert_configurations", AlertConfiguration), + budget_configuration_id=d.get("budget_configuration_id", None), + create_time=d.get("create_time", None), + display_name=d.get("display_name", None), + filter=_from_dict(d, "filter", BudgetConfigurationFilter), + update_time=d.get("update_time", None), + ) @dataclass @@ -194,164 +234,186 @@ class BudgetConfigurationFilter: """A list of tag keys and values that will limit the budget to usage that includes those specific custom tags. 
Tags are case-sensitive and should be entered exactly as they appear in your usage data.""" - + workspace_id: Optional[BudgetConfigurationFilterWorkspaceIdClause] = None """If provided, usage must match with the provided Databricks workspace IDs.""" - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict() + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.workspace_id: + body["workspace_id"] = self.workspace_id.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfigurationFilter into a shallow dictionary of its immediate attributes.""" body = {} - if self.tags: body['tags'] = self.tags - if self.workspace_id: body['workspace_id'] = self.workspace_id + if self.tags: + body["tags"] = self.tags + if self.workspace_id: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilter: """Deserializes the BudgetConfigurationFilter from a dictionary.""" - return cls(tags=_repeated_dict(d, 'tags', BudgetConfigurationFilterTagClause), workspace_id=_from_dict(d, 'workspace_id', BudgetConfigurationFilterWorkspaceIdClause)) - - + return cls( + tags=_repeated_dict(d, "tags", BudgetConfigurationFilterTagClause), + workspace_id=_from_dict(d, "workspace_id", BudgetConfigurationFilterWorkspaceIdClause), + ) @dataclass class BudgetConfigurationFilterClause: operator: Optional[BudgetConfigurationFilterOperator] = None - + values: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilterClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.operator is not None: body['operator'] = self.operator.value - if self.values: body['values'] = [v for v in self.values] + if self.operator is not None: + body["operator"] = self.operator.value + if self.values: + body["values"] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfigurationFilterClause into a shallow dictionary of its immediate attributes.""" body = {} - if self.operator is not None: body['operator'] = self.operator - if self.values: body['values'] = self.values + if self.operator is not None: + body["operator"] = self.operator + if self.values: + body["values"] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilterClause: """Deserializes the BudgetConfigurationFilterClause from a dictionary.""" - return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), values=d.get('values', None)) - - + return cls(operator=_enum(d, "operator", BudgetConfigurationFilterOperator), values=d.get("values", None)) class BudgetConfigurationFilterOperator(Enum): - - - IN = 'IN' + + IN = "IN" + @dataclass class BudgetConfigurationFilterTagClause: key: Optional[str] = None - + value: Optional[BudgetConfigurationFilterClause] = None - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilterTagClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value: body['value'] = self.value.as_dict() + if self.key is not None: + body["key"] = self.key + if self.value: + body["value"] = self.value.as_dict() return body def as_shallow_dict(self) -> 
dict: """Serializes the BudgetConfigurationFilterTagClause into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilterTagClause: """Deserializes the BudgetConfigurationFilterTagClause from a dictionary.""" - return cls(key=d.get('key', None), value=_from_dict(d, 'value', BudgetConfigurationFilterClause)) - - + return cls(key=d.get("key", None), value=_from_dict(d, "value", BudgetConfigurationFilterClause)) @dataclass class BudgetConfigurationFilterWorkspaceIdClause: operator: Optional[BudgetConfigurationFilterOperator] = None - + values: Optional[List[int]] = None - + def as_dict(self) -> dict: """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.operator is not None: body['operator'] = self.operator.value - if self.values: body['values'] = [v for v in self.values] + if self.operator is not None: + body["operator"] = self.operator.value + if self.values: + body["values"] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a shallow dictionary of its immediate attributes.""" body = {} - if self.operator is not None: body['operator'] = self.operator - if self.values: body['values'] = self.values + if self.operator is not None: + body["operator"] = self.operator + if self.values: + body["values"] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetConfigurationFilterWorkspaceIdClause: """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary.""" - return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), values=d.get('values', None)) - - + return cls(operator=_enum(d, "operator", BudgetConfigurationFilterOperator), values=d.get("values", None)) @dataclass class BudgetPolicy: """Contains the BudgetPolicy details.""" - + binding_workspace_ids: Optional[List[int]] = None """List of workspaces that this budget policy will be exclusively bound to. An empty binding implies that this budget policy is open to any workspace in the account.""" - + custom_tags: Optional[List[compute.CustomPolicyTag]] = None """A list of tags defined by the customer. At most 20 entries are allowed per policy.""" - + policy_id: Optional[str] = None """The Id of the policy. This field is generated by Databricks and globally unique.""" - + policy_name: Optional[str] = None """The name of the policy. - Must be unique among active policies. - Can contain only characters from the ISO 8859-1 (latin1) set. 
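The filter clauses compose conjunctively: `workspace_id` and every tag clause must all match, and the only operator today is `IN`. A sketch scoping a budget to two workspaces and one tag value (the IDs are hypothetical):

```python
from databricks.sdk.service.billing import (
    BudgetConfigurationFilter,
    BudgetConfigurationFilterClause,
    BudgetConfigurationFilterOperator,
    BudgetConfigurationFilterTagClause,
    BudgetConfigurationFilterWorkspaceIdClause,
)

scope = BudgetConfigurationFilter(
    workspace_id=BudgetConfigurationFilterWorkspaceIdClause(
        operator=BudgetConfigurationFilterOperator.IN,
        values=[1111111111111, 2222222222222],  # hypothetical workspace IDs
    ),
    tags=[
        BudgetConfigurationFilterTagClause(
            key="cost-center",  # tag keys are case-sensitive
            value=BudgetConfigurationFilterClause(
                operator=BudgetConfigurationFilterOperator.IN, values=["eng"]
            ),
        )
    ],
)
```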
- Can't start with reserved keywords such as `databricks:default-policy`.""" - + def as_dict(self) -> dict: """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.binding_workspace_ids: body['binding_workspace_ids'] = [v for v in self.binding_workspace_ids] - if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.policy_name is not None: body['policy_name'] = self.policy_name + if self.binding_workspace_ids: + body["binding_workspace_ids"] = [v for v in self.binding_workspace_ids] + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.policy_name is not None: + body["policy_name"] = self.policy_name return body def as_shallow_dict(self) -> dict: """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.binding_workspace_ids: body['binding_workspace_ids'] = self.binding_workspace_ids - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.policy_name is not None: body['policy_name'] = self.policy_name + if self.binding_workspace_ids: + body["binding_workspace_ids"] = self.binding_workspace_ids + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.policy_name is not None: + body["policy_name"] = self.policy_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BudgetPolicy: """Deserializes the BudgetPolicy from a dictionary.""" - return cls(binding_workspace_ids=d.get('binding_workspace_ids', None), custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag), policy_id=d.get('policy_id', None), policy_name=d.get('policy_name', None)) - - + return cls( + binding_workspace_ids=d.get("binding_workspace_ids", None), + custom_tags=_repeated_dict(d, "custom_tags", compute.CustomPolicyTag), + policy_id=d.get("policy_id", None), + policy_name=d.get("policy_name", None), + ) @dataclass @@ -359,128 +421,147 @@ class CreateBillingUsageDashboardRequest: dashboard_type: Optional[UsageDashboardType] = None """Workspace level usage dashboard shows usage data for the specified workspace ID. 
Global level usage dashboard shows usage data for all workspaces in the account.""" - + workspace_id: Optional[int] = None """The workspace ID of the workspace in which the usage dashboard is created.""" - + def as_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type.value - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.dashboard_type is not None: + body["dashboard_type"] = self.dashboard_type.value + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.dashboard_type is not None: + body["dashboard_type"] = self.dashboard_type + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBillingUsageDashboardRequest: """Deserializes the CreateBillingUsageDashboardRequest from a dictionary.""" - return cls(dashboard_type=_enum(d, 'dashboard_type', UsageDashboardType), workspace_id=d.get('workspace_id', None)) - - + return cls( + dashboard_type=_enum(d, "dashboard_type", UsageDashboardType), workspace_id=d.get("workspace_id", None) + ) @dataclass class CreateBillingUsageDashboardResponse: dashboard_id: Optional[str] = None """The unique id of the usage dashboard.""" - + def as_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBillingUsageDashboardResponse: """Deserializes the CreateBillingUsageDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None)) - - + return cls(dashboard_id=d.get("dashboard_id", None)) @dataclass class CreateBudgetConfigurationBudget: account_id: Optional[str] = None """Databricks account ID.""" - + alert_configurations: Optional[List[CreateBudgetConfigurationBudgetAlertConfigurations]] = None """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one alert configuration.""" - + display_name: Optional[str] = None """Human-readable name of budget configuration. Max Length: 128""" - + filter: Optional[BudgetConfigurationFilter] = None """Configured filters for this budget. These are applied to your account's usage to limit the scope of what is considered for this budget. Leave empty to include all usage for this account. 
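A sketch creating a workspace-scoped usage dashboard through the account client; the `usage_dashboards` attribute and the `UsageDashboardType` member names are assumed from the generated account API:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import UsageDashboardType

a = AccountClient()  # assumes account-level auth is configured

resp = a.usage_dashboards.create(
    dashboard_type=UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890,  # hypothetical workspace ID
)
print(resp.dashboard_id)
```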
All provided filters must be matched for usage to be included.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.alert_configurations: body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] - if self.display_name is not None: body['display_name'] = self.display_name - if self.filter: body['filter'] = self.filter.as_dict() + if self.account_id is not None: + body["account_id"] = self.account_id + if self.alert_configurations: + body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.filter: + body["filter"] = self.filter.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.alert_configurations: body['alert_configurations'] = self.alert_configurations - if self.display_name is not None: body['display_name'] = self.display_name - if self.filter: body['filter'] = self.filter + if self.account_id is not None: + body["account_id"] = self.account_id + if self.alert_configurations: + body["alert_configurations"] = self.alert_configurations + if self.display_name is not None: + body["display_name"] = self.display_name + if self.filter: + body["filter"] = self.filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationBudget: """Deserializes the CreateBudgetConfigurationBudget from a dictionary.""" - return cls(account_id=d.get('account_id', None), alert_configurations=_repeated_dict(d, 'alert_configurations', CreateBudgetConfigurationBudgetAlertConfigurations), display_name=d.get('display_name', None), filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) - - + return cls( + account_id=d.get("account_id", None), + alert_configurations=_repeated_dict( + d, "alert_configurations", CreateBudgetConfigurationBudgetAlertConfigurations + ), + display_name=d.get("display_name", None), + filter=_from_dict(d, "filter", BudgetConfigurationFilter), + ) @dataclass class CreateBudgetConfigurationBudgetActionConfigurations: action_type: Optional[ActionConfigurationType] = None """The type of the action.""" - + target: Optional[str] = None """Target for the action. 
For example, an email address.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_type is not None: body['action_type'] = self.action_type.value - if self.target is not None: body['target'] = self.target + if self.action_type is not None: + body["action_type"] = self.action_type.value + if self.target is not None: + body["target"] = self.target return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_type is not None: body['action_type'] = self.action_type - if self.target is not None: body['target'] = self.target + if self.action_type is not None: + body["action_type"] = self.action_type + if self.target is not None: + body["target"] = self.target return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationBudgetActionConfigurations: """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary.""" - return cls(action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None)) - - + return cls(action_type=_enum(d, "action_type", ActionConfigurationType), target=d.get("target", None)) @dataclass @@ -488,138 +569,156 @@ class CreateBudgetConfigurationBudgetAlertConfigurations: action_configurations: Optional[List[CreateBudgetConfigurationBudgetActionConfigurations]] = None """Configured actions for this alert. These define what happens when an alert enters a triggered state.""" - + quantity_threshold: Optional[str] = None """The threshold for the budget alert to determine if it is in a triggered state. The number is evaluated based on `quantity_type`.""" - + quantity_type: Optional[AlertConfigurationQuantityType] = None """The way to calculate cost for this budget alert. 
This is what `quantity_threshold` is measured in.""" - + time_period: Optional[AlertConfigurationTimePeriod] = None """The time window of usage data for the budget.""" - + trigger_type: Optional[AlertConfigurationTriggerType] = None """The evaluation method to determine when this budget alert is in a triggered state.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action_configurations: body['action_configurations'] = [v.as_dict() for v in self.action_configurations] - if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold - if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value - if self.time_period is not None: body['time_period'] = self.time_period.value - if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value + if self.action_configurations: + body["action_configurations"] = [v.as_dict() for v in self.action_configurations] + if self.quantity_threshold is not None: + body["quantity_threshold"] = self.quantity_threshold + if self.quantity_type is not None: + body["quantity_type"] = self.quantity_type.value + if self.time_period is not None: + body["time_period"] = self.time_period.value + if self.trigger_type is not None: + body["trigger_type"] = self.trigger_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a shallow dictionary of its immediate attributes.""" body = {} - if self.action_configurations: body['action_configurations'] = self.action_configurations - if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold - if self.quantity_type is not None: body['quantity_type'] = self.quantity_type - if self.time_period is not None: body['time_period'] = self.time_period - if self.trigger_type is not None: body['trigger_type'] = self.trigger_type + if self.action_configurations: + body["action_configurations"] = self.action_configurations + if self.quantity_threshold is not None: + body["quantity_threshold"] = self.quantity_threshold + if self.quantity_type is not None: + body["quantity_type"] = self.quantity_type + if self.time_period is not None: + body["time_period"] = self.time_period + if self.trigger_type is not None: + body["trigger_type"] = self.trigger_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationBudgetAlertConfigurations: """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary.""" - return cls(action_configurations=_repeated_dict(d, 'action_configurations', CreateBudgetConfigurationBudgetActionConfigurations), quantity_threshold=d.get('quantity_threshold', None), quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) - - + return cls( + action_configurations=_repeated_dict( + d, "action_configurations", CreateBudgetConfigurationBudgetActionConfigurations + ), + quantity_threshold=d.get("quantity_threshold", None), + quantity_type=_enum(d, "quantity_type", AlertConfigurationQuantityType), + time_period=_enum(d, "time_period", AlertConfigurationTimePeriod), + trigger_type=_enum(d, "trigger_type", AlertConfigurationTriggerType), + ) @dataclass class CreateBudgetConfigurationRequest: budget: 
CreateBudgetConfigurationBudget """Properties of the new budget configuration.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: body['budget'] = self.budget.as_dict() + if self.budget: + body["budget"] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: body['budget'] = self.budget + if self.budget: + body["budget"] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationRequest: """Deserializes the CreateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', CreateBudgetConfigurationBudget)) - - + return cls(budget=_from_dict(d, "budget", CreateBudgetConfigurationBudget)) @dataclass class CreateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None """The created budget configuration.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: body['budget'] = self.budget.as_dict() + if self.budget: + body["budget"] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: body['budget'] = self.budget + if self.budget: + body["budget"] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationResponse: """Deserializes the CreateBudgetConfigurationResponse from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) - - + return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) @dataclass class CreateBudgetPolicyRequest: """A request to create a BudgetPolicy.""" - + policy: Optional[BudgetPolicy] = None """The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must be provided, custom_tags may need to be provided depending on the cloud provider. All other fields are optional.""" - + request_id: Optional[str] = None """A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is recommended. 
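Assembling a budget means nesting these request dataclasses; note that a budget must carry exactly one alert configuration. A sketch, with the account-level `a.budgets.create(...)` call assumed from the generated API:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import (
    ActionConfigurationType,
    AlertConfigurationQuantityType,
    AlertConfigurationTimePeriod,
    AlertConfigurationTriggerType,
    CreateBudgetConfigurationBudget,
    CreateBudgetConfigurationBudgetActionConfigurations,
    CreateBudgetConfigurationBudgetAlertConfigurations,
)

budget = CreateBudgetConfigurationBudget(
    display_name="team-x-monthly",
    alert_configurations=[  # budgets must have exactly one alert configuration
        CreateBudgetConfigurationBudgetAlertConfigurations(
            quantity_threshold="1000",  # evaluated in quantity_type units
            quantity_type=AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
            time_period=AlertConfigurationTimePeriod.MONTH,
            trigger_type=AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
            action_configurations=[
                CreateBudgetConfigurationBudgetActionConfigurations(
                    action_type=ActionConfigurationType.EMAIL_NOTIFICATION,
                    target="finops@example.com",
                )
            ],
        )
    ],
)

a = AccountClient()
created = a.budgets.create(budget=budget).budget  # create() call assumed
```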
This request is only idempotent if a `request_id` is provided.""" - + def as_dict(self) -> dict: """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.policy: body['policy'] = self.policy.as_dict() - if self.request_id is not None: body['request_id'] = self.request_id + if self.policy: + body["policy"] = self.policy.as_dict() + if self.request_id is not None: + body["request_id"] = self.request_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.policy: body['policy'] = self.policy - if self.request_id is not None: body['request_id'] = self.request_id + if self.policy: + body["policy"] = self.policy + if self.request_id is not None: + body["request_id"] = self.request_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetPolicyRequest: """Deserializes the CreateBudgetPolicyRequest from a dictionary.""" - return cls(policy=_from_dict(d, 'policy', BudgetPolicy), request_id=d.get('request_id', None)) - - + return cls(policy=_from_dict(d, "policy", BudgetPolicy), request_id=d.get("request_id", None)) @dataclass class CreateLogDeliveryConfigurationParams: """* Log Delivery Configuration""" - + log_type: LogType """Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure @@ -629,7 +728,7 @@ class CreateLogDeliveryConfigurationParams: [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + output_format: OutputFormat """The file type of log delivery. * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated values) format is supported. For the schema, see the [View @@ -639,39 +738,39 @@ class CreateLogDeliveryConfigurationParams: [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html""" - + credentials_id: str """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page. See [Configure billable usage delivery]. [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + storage_configuration_id: str """The ID for a method:storage/create that represents the S3 bucket with bucket policy as described in the main billable usage documentation page. See [Configure billable usage delivery]. [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + config_name: Optional[str] = None """The optional human-readable name of the log delivery configuration. Defaults to empty.""" - + delivery_path_prefix: Optional[str] = None """The optional delivery path prefix within Amazon S3 storage. Defaults to empty, which means that logs are delivered to the root of the bucket. This must be a valid S3 object key. 
This must not start or end with a slash character.""" - + delivery_start_time: Optional[str] = None """This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. BILLABLE_USAGE logs are not available for usage before March 2019 (2019-03).""" - + status: Optional[LogDeliveryConfigStatus] = None """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.""" - + workspace_ids_filter: Optional[List[int]] = None """Optional filter that specifies workspace IDs to deliver logs for. By default the workspace filter is empty and log delivery applies at the account level, delivering workspace-level logs @@ -683,44 +782,67 @@ class CreateLogDeliveryConfigurationParams: new workspaces created in the future, and delivery won't include account level logs. For some types of Databricks deployments there is only one workspace per account ID, so this field is unnecessary.""" - + def as_dict(self) -> dict: """Serializes the CreateLogDeliveryConfigurationParams into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config_name is not None: body['config_name'] = self.config_name - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix - if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time - if self.log_type is not None: body['log_type'] = self.log_type.value - if self.output_format is not None: body['output_format'] = self.output_format.value - if self.status is not None: body['status'] = self.status.value - if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id - if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter] + if self.config_name is not None: + body["config_name"] = self.config_name + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.delivery_path_prefix is not None: + body["delivery_path_prefix"] = self.delivery_path_prefix + if self.delivery_start_time is not None: + body["delivery_start_time"] = self.delivery_start_time + if self.log_type is not None: + body["log_type"] = self.log_type.value + if self.output_format is not None: + body["output_format"] = self.output_format.value + if self.status is not None: + body["status"] = self.status.value + if self.storage_configuration_id is not None: + body["storage_configuration_id"] = self.storage_configuration_id + if self.workspace_ids_filter: + body["workspace_ids_filter"] = [v for v in self.workspace_ids_filter] return body def as_shallow_dict(self) -> dict: """Serializes the CreateLogDeliveryConfigurationParams into a shallow dictionary of its immediate attributes.""" body = {} - if self.config_name is not None: body['config_name'] = self.config_name - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix - if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time - if 
self.log_type is not None: body['log_type'] = self.log_type - if self.output_format is not None: body['output_format'] = self.output_format - if self.status is not None: body['status'] = self.status - if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id - if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter + if self.config_name is not None: + body["config_name"] = self.config_name + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.delivery_path_prefix is not None: + body["delivery_path_prefix"] = self.delivery_path_prefix + if self.delivery_start_time is not None: + body["delivery_start_time"] = self.delivery_start_time + if self.log_type is not None: + body["log_type"] = self.log_type + if self.output_format is not None: + body["output_format"] = self.output_format + if self.status is not None: + body["status"] = self.status + if self.storage_configuration_id is not None: + body["storage_configuration_id"] = self.storage_configuration_id + if self.workspace_ids_filter: + body["workspace_ids_filter"] = self.workspace_ids_filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateLogDeliveryConfigurationParams: """Deserializes the CreateLogDeliveryConfigurationParams from a dictionary.""" - return cls(config_name=d.get('config_name', None), credentials_id=d.get('credentials_id', None), delivery_path_prefix=d.get('delivery_path_prefix', None), delivery_start_time=d.get('delivery_start_time', None), log_type=_enum(d, 'log_type', LogType), output_format=_enum(d, 'output_format', OutputFormat), status=_enum(d, 'status', LogDeliveryConfigStatus), storage_configuration_id=d.get('storage_configuration_id', None), workspace_ids_filter=d.get('workspace_ids_filter', None)) - - - - - + return cls( + config_name=d.get("config_name", None), + credentials_id=d.get("credentials_id", None), + delivery_path_prefix=d.get("delivery_path_prefix", None), + delivery_start_time=d.get("delivery_start_time", None), + log_type=_enum(d, "log_type", LogType), + output_format=_enum(d, "output_format", OutputFormat), + status=_enum(d, "status", LogDeliveryConfigStatus), + storage_configuration_id=d.get("storage_configuration_id", None), + workspace_ids_filter=d.get("workspace_ids_filter", None), + ) @dataclass @@ -739,11 +861,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteBudgetConfigurationResponse: """Deserializes the DeleteBudgetConfigurationResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -762,8 +879,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - class DeliveryStatus(Enum): @@ -774,174 +889,170 @@ class DeliveryStatus(Enum): latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon.
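The constraints spelled out in the docstrings translate directly into the request object: `BILLABLE_USAGE` requires `CSV`, the path prefix must not start or end with a slash, and the credentials/storage IDs come from the provisioning APIs. A sketch, with the `a.log_delivery.create(...)` call and its parameter name assumed from the generated account API:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import (
    CreateLogDeliveryConfigurationParams,
    LogType,
    OutputFormat,
)

params = CreateLogDeliveryConfigurationParams(
    log_type=LogType.BILLABLE_USAGE,
    output_format=OutputFormat.CSV,           # BILLABLE_USAGE supports CSV only
    credentials_id="<credentials-id>",        # from the credentials API
    storage_configuration_id="<storage-id>",  # from the storage API
    delivery_path_prefix="billing-logs",      # no leading/trailing slash
)

a = AccountClient()
a.log_delivery.create(log_delivery_configuration=params)  # parameter name assumed
```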
`NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.""" - - CREATED = 'CREATED' - NOT_FOUND = 'NOT_FOUND' - SUCCEEDED = 'SUCCEEDED' - SYSTEM_FAILURE = 'SYSTEM_FAILURE' - USER_FAILURE = 'USER_FAILURE' - + CREATED = "CREATED" + NOT_FOUND = "NOT_FOUND" + SUCCEEDED = "SUCCEEDED" + SYSTEM_FAILURE = "SYSTEM_FAILURE" + USER_FAILURE = "USER_FAILURE" @dataclass class DownloadResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the DownloadResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DownloadResponse: """Deserializes the DownloadResponse from a dictionary.""" - return cls(contents=d.get('contents', None)) - - + return cls(contents=d.get("contents", None)) @dataclass class Filter: """Structured representation of a filter to be applied to a list of policies. All specified filters will be applied in conjunction.""" - + creator_user_id: Optional[int] = None """The policy creator user id to be filtered on. If unspecified, all policies will be returned.""" - + creator_user_name: Optional[str] = None """The policy creator user name to be filtered on. If unspecified, all policies will be returned.""" - + policy_name: Optional[str] = None """The partial name of policies to be filtered on. 
If unspecified, all policies will be returned.""" - + def as_dict(self) -> dict: """Serializes the Filter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.policy_name is not None: body['policy_name'] = self.policy_name + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.policy_name is not None: + body["policy_name"] = self.policy_name return body def as_shallow_dict(self) -> dict: """Serializes the Filter into a shallow dictionary of its immediate attributes.""" body = {} - if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.policy_name is not None: body['policy_name'] = self.policy_name + if self.creator_user_id is not None: + body["creator_user_id"] = self.creator_user_id + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.policy_name is not None: + body["policy_name"] = self.policy_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Filter: """Deserializes the Filter from a dictionary.""" - return cls(creator_user_id=d.get('creator_user_id', None), creator_user_name=d.get('creator_user_name', None), policy_name=d.get('policy_name', None)) - - - - - + return cls( + creator_user_id=d.get("creator_user_id", None), + creator_user_name=d.get("creator_user_name", None), + policy_name=d.get("policy_name", None), + ) @dataclass class GetBillingUsageDashboardResponse: dashboard_id: Optional[str] = None """The unique id of the usage dashboard.""" - + dashboard_url: Optional[str] = None """The URL of the usage dashboard.""" - + def as_dict(self) -> dict: """Serializes the GetBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.dashboard_url is not None: + body["dashboard_url"] = self.dashboard_url return body def as_shallow_dict(self) -> dict: """Serializes the GetBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.dashboard_url is not None: + body["dashboard_url"] = self.dashboard_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetBillingUsageDashboardResponse: """Deserializes the GetBillingUsageDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), dashboard_url=d.get('dashboard_url', None)) - - - - - + return cls(dashboard_id=d.get("dashboard_id", None), dashboard_url=d.get("dashboard_url", None)) @dataclass class GetBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None - + def as_dict(self) -> dict: """Serializes the GetBudgetConfigurationResponse into a dictionary suitable for use as a JSON 
request body.""" body = {} - if self.budget: body['budget'] = self.budget.as_dict() + if self.budget: + body["budget"] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: body['budget'] = self.budget + if self.budget: + body["budget"] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetBudgetConfigurationResponse: """Deserializes the GetBudgetConfigurationResponse from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) - - - - - + return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) @dataclass class GetLogDeliveryConfigurationResponse: log_delivery_configuration: Optional[LogDeliveryConfiguration] = None """The fetched log delivery configuration""" - + def as_dict(self) -> dict: """Serializes the GetLogDeliveryConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict() + if self.log_delivery_configuration: + body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetLogDeliveryConfigurationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration + if self.log_delivery_configuration: + body["log_delivery_configuration"] = self.log_delivery_configuration return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLogDeliveryConfigurationResponse: """Deserializes the GetLogDeliveryConfigurationResponse from a dictionary.""" - return cls(log_delivery_configuration=_from_dict(d, 'log_delivery_configuration', LogDeliveryConfiguration)) - - - - - + return cls(log_delivery_configuration=_from_dict(d, "log_delivery_configuration", LogDeliveryConfiguration)) @dataclass class LimitConfig: """The limit configuration of the policy. Limit configuration provide a budget policy level cost control by enforcing the limit.""" - + def as_dict(self) -> dict: """Serializes the LimitConfig into a dictionary suitable for use as a JSON request body.""" body = {} @@ -956,100 +1067,102 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LimitConfig: """Deserializes the LimitConfig from a dictionary.""" return cls() - - - - - @dataclass class ListBudgetConfigurationsResponse: budgets: Optional[List[BudgetConfiguration]] = None - + next_page_token: Optional[str] = None """Token which can be sent as `page_token` to retrieve the next page of results. 
If this field is omitted, there are no subsequent budgets.""" - + def as_dict(self) -> dict: """Serializes the ListBudgetConfigurationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.budgets: + body["budgets"] = [v.as_dict() for v in self.budgets] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListBudgetConfigurationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budgets: body['budgets'] = self.budgets - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.budgets: + body["budgets"] = self.budgets + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListBudgetConfigurationsResponse: """Deserializes the ListBudgetConfigurationsResponse from a dictionary.""" - return cls(budgets=_repeated_dict(d, 'budgets', BudgetConfiguration), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + budgets=_repeated_dict(d, "budgets", BudgetConfiguration), next_page_token=d.get("next_page_token", None) + ) @dataclass class ListBudgetPoliciesResponse: """A list of policies.""" - + next_page_token: Optional[str] = None """A token that can be sent as `page_token` to retrieve the next page. If this field is omitted, there are no subsequent pages.""" - + policies: Optional[List[BudgetPolicy]] = None - + previous_page_token: Optional[str] = None """A token that can be sent as `page_token` to retrieve the previous page. 
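The as_dict/as_shallow_dict/from_dict triple being reformatted throughout this hunk is the one serialization contract every generated dataclass in billing.py follows: None-valued fields are dropped on the way out, and from_dict tolerates their absence on the way in. A minimal round-trip sketch, illustrative only (the field values are made up; Filter is the dataclass defined earlier in this file):

    from databricks.sdk.service.billing import Filter

    f = Filter(creator_user_name="someone@example.com", policy_name="team-")
    d = f.as_dict()
    assert "creator_user_id" not in d  # None-valued fields are omitted from the request body
    assert Filter.from_dict(d) == f    # dataclass equality survives the round trip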
If this field is omitted, there are no previous pages.""" - + def as_dict(self) -> dict: """Serializes the ListBudgetPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policies: body['policies'] = [v.as_dict() for v in self.policies] - if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListBudgetPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policies: body['policies'] = self.policies - if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies + if self.previous_page_token is not None: + body["previous_page_token"] = self.previous_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListBudgetPoliciesResponse: """Deserializes the ListBudgetPoliciesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), policies=_repeated_dict(d, 'policies', BudgetPolicy), previous_page_token=d.get('previous_page_token', None)) - + return cls( + next_page_token=d.get("next_page_token", None), + policies=_repeated_dict(d, "policies", BudgetPolicy), + previous_page_token=d.get("previous_page_token", None), + ) +class LogDeliveryConfigStatus(Enum): + """* Log Delivery Status + `ENABLED`: Log delivery is enabled. `DISABLED`: Log delivery is disabled.""" + DISABLED = "DISABLED" + ENABLED = "ENABLED" -class LogDeliveryConfigStatus(Enum): - """* Log Delivery Status - - `ENABLED`: All dependencies have executed and succeeded `DISABLED`: At least one dependency has - succeeded""" - - DISABLED = 'DISABLED' - ENABLED = 'ENABLED' - @dataclass class LogDeliveryConfiguration: """* Log Delivery Configuration""" - + log_type: LogType """Log delivery type. Supported values are: * `BILLABLE_USAGE` — Configure [billable usage log delivery]. For the CSV schema, see the [View billable usage]. * `AUDIT_LOGS` — Configure @@ -1059,7 +1172,7 @@ class LogDeliveryConfiguration: [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html [audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + output_format: OutputFormat """The file type of log delivery. * If `log_type` is `BILLABLE_USAGE`, this value must be `CSV`. Only the CSV (comma-separated values) format is supported.
For the schema, see the [View @@ -1069,54 +1182,54 @@ class LogDeliveryConfiguration: [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html""" - + account_id: str """Databricks account ID.""" - + credentials_id: str """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page. See [Configure billable usage delivery]. [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + storage_configuration_id: str """The ID for a method:storage/create that represents the S3 bucket with bucket policy as described in the main billable usage documentation page. See [Configure billable usage delivery]. [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html""" - + config_id: Optional[str] = None """The unique UUID of log delivery configuration""" - + config_name: Optional[str] = None """The optional human-readable name of the log delivery configuration. Defaults to empty.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when the log delivery configuration was created.""" - + delivery_path_prefix: Optional[str] = None """The optional delivery path prefix within Amazon S3 storage. Defaults to empty, which means that logs are delivered to the root of the bucket. This must be a valid S3 object key. This must not start or end with a slash character.""" - + delivery_start_time: Optional[str] = None """This field applies only if log_type is BILLABLE_USAGE. This is the optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. BILLABLE_USAGE logs are not available for usage before March 2019 (2019-03).""" - + log_delivery_status: Optional[LogDeliveryStatus] = None """The LogDeliveryStatus of this log delivery configuration""" - + status: Optional[LogDeliveryConfigStatus] = None """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.""" - + update_time: Optional[int] = None """Time in epoch milliseconds when the log delivery configuration was updated.""" - + workspace_ids_filter: Optional[List[int]] = None """Optional filter that specifies workspace IDs to deliver logs for. By default the workspace filter is empty and log delivery applies at the account level, delivering workspace-level logs @@ -1128,51 +1241,92 @@ class LogDeliveryConfiguration: new workspaces created in the future, and delivery won't include account level logs. 
For some types of Databricks deployments there is only one workspace per account ID, so this field is unnecessary.""" - + def as_dict(self) -> dict: """Serializes the LogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.config_id is not None: body['config_id'] = self.config_id - if self.config_name is not None: body['config_name'] = self.config_name - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix - if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time - if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status.as_dict() - if self.log_type is not None: body['log_type'] = self.log_type.value - if self.output_format is not None: body['output_format'] = self.output_format.value - if self.status is not None: body['status'] = self.status.value - if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id - if self.update_time is not None: body['update_time'] = self.update_time - if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter] + if self.account_id is not None: + body["account_id"] = self.account_id + if self.config_id is not None: + body["config_id"] = self.config_id + if self.config_name is not None: + body["config_name"] = self.config_name + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.delivery_path_prefix is not None: + body["delivery_path_prefix"] = self.delivery_path_prefix + if self.delivery_start_time is not None: + body["delivery_start_time"] = self.delivery_start_time + if self.log_delivery_status: + body["log_delivery_status"] = self.log_delivery_status.as_dict() + if self.log_type is not None: + body["log_type"] = self.log_type.value + if self.output_format is not None: + body["output_format"] = self.output_format.value + if self.status is not None: + body["status"] = self.status.value + if self.storage_configuration_id is not None: + body["storage_configuration_id"] = self.storage_configuration_id + if self.update_time is not None: + body["update_time"] = self.update_time + if self.workspace_ids_filter: + body["workspace_ids_filter"] = [v for v in self.workspace_ids_filter] return body def as_shallow_dict(self) -> dict: """Serializes the LogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.config_id is not None: body['config_id'] = self.config_id - if self.config_name is not None: body['config_name'] = self.config_name - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix - if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time - if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status - if self.log_type is not None: body['log_type'] = self.log_type - if 
self.output_format is not None: body['output_format'] = self.output_format - if self.status is not None: body['status'] = self.status - if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id - if self.update_time is not None: body['update_time'] = self.update_time - if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter + if self.account_id is not None: + body["account_id"] = self.account_id + if self.config_id is not None: + body["config_id"] = self.config_id + if self.config_name is not None: + body["config_name"] = self.config_name + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.delivery_path_prefix is not None: + body["delivery_path_prefix"] = self.delivery_path_prefix + if self.delivery_start_time is not None: + body["delivery_start_time"] = self.delivery_start_time + if self.log_delivery_status: + body["log_delivery_status"] = self.log_delivery_status + if self.log_type is not None: + body["log_type"] = self.log_type + if self.output_format is not None: + body["output_format"] = self.output_format + if self.status is not None: + body["status"] = self.status + if self.storage_configuration_id is not None: + body["storage_configuration_id"] = self.storage_configuration_id + if self.update_time is not None: + body["update_time"] = self.update_time + if self.workspace_ids_filter: + body["workspace_ids_filter"] = self.workspace_ids_filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogDeliveryConfiguration: """Deserializes the LogDeliveryConfiguration from a dictionary.""" - return cls(account_id=d.get('account_id', None), config_id=d.get('config_id', None), config_name=d.get('config_name', None), creation_time=d.get('creation_time', None), credentials_id=d.get('credentials_id', None), delivery_path_prefix=d.get('delivery_path_prefix', None), delivery_start_time=d.get('delivery_start_time', None), log_delivery_status=_from_dict(d, 'log_delivery_status', LogDeliveryStatus), log_type=_enum(d, 'log_type', LogType), output_format=_enum(d, 'output_format', OutputFormat), status=_enum(d, 'status', LogDeliveryConfigStatus), storage_configuration_id=d.get('storage_configuration_id', None), update_time=d.get('update_time', None), workspace_ids_filter=d.get('workspace_ids_filter', None)) - - + return cls( + account_id=d.get("account_id", None), + config_id=d.get("config_id", None), + config_name=d.get("config_name", None), + creation_time=d.get("creation_time", None), + credentials_id=d.get("credentials_id", None), + delivery_path_prefix=d.get("delivery_path_prefix", None), + delivery_start_time=d.get("delivery_start_time", None), + log_delivery_status=_from_dict(d, "log_delivery_status", LogDeliveryStatus), + log_type=_enum(d, "log_type", LogType), + output_format=_enum(d, "output_format", OutputFormat), + status=_enum(d, "status", LogDeliveryConfigStatus), + storage_configuration_id=d.get("storage_configuration_id", None), + update_time=d.get("update_time", None), + workspace_ids_filter=d.get("workspace_ids_filter", None), + ) @dataclass @@ -1185,54 +1339,67 @@ class LogDeliveryStatus: latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon.
* `NOT_FOUND`: The log delivery status is unavailable because the configuration has been disabled since the release of this feature, or there are no workspaces in the account.""" - + message: str """Informative message about the latest log delivery attempt. If the log delivery fails with USER_FAILURE, error details will be provided for fixing misconfigurations in cloud permissions.""" - + last_attempt_time: Optional[str] = None """The UTC time for the latest log delivery attempt.""" - + last_successful_attempt_time: Optional[str] = None """The UTC time for the latest successful log delivery.""" - + def as_dict(self) -> dict: """Serializes the LogDeliveryStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time - if self.last_successful_attempt_time is not None: body['last_successful_attempt_time'] = self.last_successful_attempt_time - if self.message is not None: body['message'] = self.message - if self.status is not None: body['status'] = self.status.value + if self.last_attempt_time is not None: + body["last_attempt_time"] = self.last_attempt_time + if self.last_successful_attempt_time is not None: + body["last_successful_attempt_time"] = self.last_successful_attempt_time + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the LogDeliveryStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time - if self.last_successful_attempt_time is not None: body['last_successful_attempt_time'] = self.last_successful_attempt_time - if self.message is not None: body['message'] = self.message - if self.status is not None: body['status'] = self.status + if self.last_attempt_time is not None: + body["last_attempt_time"] = self.last_attempt_time + if self.last_successful_attempt_time is not None: + body["last_successful_attempt_time"] = self.last_successful_attempt_time + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogDeliveryStatus: """Deserializes the LogDeliveryStatus from a dictionary.""" - return cls(last_attempt_time=d.get('last_attempt_time', None), last_successful_attempt_time=d.get('last_successful_attempt_time', None), message=d.get('message', None), status=_enum(d, 'status', DeliveryStatus)) - - + return cls( + last_attempt_time=d.get("last_attempt_time", None), + last_successful_attempt_time=d.get("last_successful_attempt_time", None), + message=d.get("message", None), + status=_enum(d, "status", DeliveryStatus), + ) class LogType(Enum): """* Log Delivery Type""" - - AUDIT_LOGS = 'AUDIT_LOGS' - BILLABLE_USAGE = 'BILLABLE_USAGE' + + AUDIT_LOGS = "AUDIT_LOGS" + BILLABLE_USAGE = "BILLABLE_USAGE" + class OutputFormat(Enum): """* Log Delivery Output Format""" - - CSV = 'CSV' - JSON = 'JSON' + + CSV = "CSV" + JSON = "JSON" + @dataclass class PatchStatusResponse: @@ -1250,307 +1417,319 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PatchStatusResponse: """Deserializes the PatchStatusResponse from a dictionary.""" return cls() - - @dataclass class SortSpec: descending: Optional[bool] = None """Whether to sort in descending order.""" - + field: Optional[SortSpecField] = None """The field to
sort by""" - + def as_dict(self) -> dict: """Serializes the SortSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.descending is not None: body['descending'] = self.descending - if self.field is not None: body['field'] = self.field.value + if self.descending is not None: + body["descending"] = self.descending + if self.field is not None: + body["field"] = self.field.value return body def as_shallow_dict(self) -> dict: """Serializes the SortSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.descending is not None: body['descending'] = self.descending - if self.field is not None: body['field'] = self.field + if self.descending is not None: + body["descending"] = self.descending + if self.field is not None: + body["field"] = self.field return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SortSpec: """Deserializes the SortSpec from a dictionary.""" - return cls(descending=d.get('descending', None), field=_enum(d, 'field', SortSpecField)) - - + return cls(descending=d.get("descending", None), field=_enum(d, "field", SortSpecField)) class SortSpecField(Enum): - - - POLICY_NAME = 'POLICY_NAME' + + POLICY_NAME = "POLICY_NAME" + @dataclass class UpdateBudgetConfigurationBudget: account_id: Optional[str] = None """Databricks account ID.""" - + alert_configurations: Optional[List[AlertConfiguration]] = None """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one alert configuration.""" - + budget_configuration_id: Optional[str] = None """Databricks budget configuration ID.""" - + display_name: Optional[str] = None """Human-readable name of budget configuration. Max Length: 128""" - + filter: Optional[BudgetConfigurationFilter] = None """Configured filters for this budget. These are applied to your account's usage to limit the scope of what is considered for this budget. Leave empty to include all usage for this account. 
All provided filters must be matched for usage to be included.""" - + def as_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.alert_configurations: body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] - if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.filter: body['filter'] = self.filter.as_dict() + if self.account_id is not None: + body["account_id"] = self.account_id + if self.alert_configurations: + body["alert_configurations"] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: + body["budget_configuration_id"] = self.budget_configuration_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.filter: + body["filter"] = self.filter.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.alert_configurations: body['alert_configurations'] = self.alert_configurations - if self.budget_configuration_id is not None: body['budget_configuration_id'] = self.budget_configuration_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.filter: body['filter'] = self.filter + if self.account_id is not None: + body["account_id"] = self.account_id + if self.alert_configurations: + body["alert_configurations"] = self.alert_configurations + if self.budget_configuration_id is not None: + body["budget_configuration_id"] = self.budget_configuration_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.filter: + body["filter"] = self.filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationBudget: """Deserializes the UpdateBudgetConfigurationBudget from a dictionary.""" - return cls(account_id=d.get('account_id', None), alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), budget_configuration_id=d.get('budget_configuration_id', None), display_name=d.get('display_name', None), filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) - - + return cls( + account_id=d.get("account_id", None), + alert_configurations=_repeated_dict(d, "alert_configurations", AlertConfiguration), + budget_configuration_id=d.get("budget_configuration_id", None), + display_name=d.get("display_name", None), + filter=_from_dict(d, "filter", BudgetConfigurationFilter), + ) @dataclass class UpdateBudgetConfigurationRequest: budget: UpdateBudgetConfigurationBudget """The updated budget. 
This will overwrite the budget specified by the budget ID.""" - + budget_id: Optional[str] = None """The Databricks budget configuration ID.""" - + def as_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: body['budget'] = self.budget.as_dict() - if self.budget_id is not None: body['budget_id'] = self.budget_id + if self.budget: + body["budget"] = self.budget.as_dict() + if self.budget_id is not None: + body["budget_id"] = self.budget_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: body['budget'] = self.budget - if self.budget_id is not None: body['budget_id'] = self.budget_id + if self.budget: + body["budget"] = self.budget + if self.budget_id is not None: + body["budget_id"] = self.budget_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationRequest: """Deserializes the UpdateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', UpdateBudgetConfigurationBudget), budget_id=d.get('budget_id', None)) - - + return cls(budget=_from_dict(d, "budget", UpdateBudgetConfigurationBudget), budget_id=d.get("budget_id", None)) @dataclass class UpdateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None """The updated budget.""" - + def as_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: body['budget'] = self.budget.as_dict() + if self.budget: + body["budget"] = self.budget.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget: body['budget'] = self.budget + if self.budget: + body["budget"] = self.budget return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationResponse: """Deserializes the UpdateBudgetConfigurationResponse from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) - - - - - + return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) @dataclass class UpdateLogDeliveryConfigurationStatusRequest: """* Update Log Delivery Configuration""" - + status: LogDeliveryConfigStatus """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. 
Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.""" - + log_delivery_configuration_id: Optional[str] = None """The log delivery configuration id of customer""" - + def as_dict(self) -> dict: """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configuration_id is not None: body['log_delivery_configuration_id'] = self.log_delivery_configuration_id - if self.status is not None: body['status'] = self.status.value + if self.log_delivery_configuration_id is not None: + body["log_delivery_configuration_id"] = self.log_delivery_configuration_id + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_delivery_configuration_id is not None: body['log_delivery_configuration_id'] = self.log_delivery_configuration_id - if self.status is not None: body['status'] = self.status + if self.log_delivery_configuration_id is not None: + body["log_delivery_configuration_id"] = self.log_delivery_configuration_id + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateLogDeliveryConfigurationStatusRequest: """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" - return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None), status=_enum(d, 'status', LogDeliveryConfigStatus)) - - + return cls( + log_delivery_configuration_id=d.get("log_delivery_configuration_id", None), + status=_enum(d, "status", LogDeliveryConfigStatus), + ) class UsageDashboardType(Enum): - - - USAGE_DASHBOARD_TYPE_GLOBAL = 'USAGE_DASHBOARD_TYPE_GLOBAL' - USAGE_DASHBOARD_TYPE_WORKSPACE = 'USAGE_DASHBOARD_TYPE_WORKSPACE' + + USAGE_DASHBOARD_TYPE_GLOBAL = "USAGE_DASHBOARD_TYPE_GLOBAL" + USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE" + @dataclass class WrappedCreateLogDeliveryConfiguration: """* Properties of the new log delivery configuration.""" - + log_delivery_configuration: CreateLogDeliveryConfigurationParams """* Log Delivery Configuration""" - + def as_dict(self) -> dict: """Serializes the WrappedCreateLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict() + if self.log_delivery_configuration: + body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration + if self.log_delivery_configuration: + body["log_delivery_configuration"] = self.log_delivery_configuration return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WrappedCreateLogDeliveryConfiguration: """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary.""" - return cls(log_delivery_configuration=_from_dict(d, 'log_delivery_configuration', CreateLogDeliveryConfigurationParams)) - - + return cls( + log_delivery_configuration=_from_dict(d, "log_delivery_configuration", 
CreateLogDeliveryConfigurationParams) + ) @dataclass class WrappedLogDeliveryConfiguration: log_delivery_configuration: Optional[LogDeliveryConfiguration] = None """The created log delivery configuration""" - + def as_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict() + if self.log_delivery_configuration: + body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_delivery_configuration: body['log_delivery_configuration'] = self.log_delivery_configuration + if self.log_delivery_configuration: + body["log_delivery_configuration"] = self.log_delivery_configuration return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WrappedLogDeliveryConfiguration: """Deserializes the WrappedLogDeliveryConfiguration from a dictionary.""" - return cls(log_delivery_configuration=_from_dict(d, 'log_delivery_configuration', LogDeliveryConfiguration)) - - + return cls(log_delivery_configuration=_from_dict(d, "log_delivery_configuration", LogDeliveryConfiguration)) @dataclass class WrappedLogDeliveryConfigurations: log_delivery_configurations: Optional[List[LogDeliveryConfiguration]] = None """Log delivery configurations were returned successfully.""" - + next_page_token: Optional[str] = None """Token which can be sent as `page_token` to retrieve the next page of results. If this field is omitted, there are no subsequent pages.""" - + def as_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfigurations into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configurations: body['log_delivery_configurations'] = [v.as_dict() for v in self.log_delivery_configurations] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.log_delivery_configurations: + body["log_delivery_configurations"] = [v.as_dict() for v in self.log_delivery_configurations] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the WrappedLogDeliveryConfigurations into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_delivery_configurations: body['log_delivery_configurations'] = self.log_delivery_configurations - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.log_delivery_configurations: + body["log_delivery_configurations"] = self.log_delivery_configurations + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WrappedLogDeliveryConfigurations: """Deserializes the WrappedLogDeliveryConfigurations from a dictionary.""" - return cls(log_delivery_configurations=_repeated_dict(d, 'log_delivery_configurations', LogDeliveryConfiguration), next_page_token=d.get('next_page_token', None)) - - - - + return cls( + log_delivery_configurations=_repeated_dict(d, "log_delivery_configurations", LogDeliveryConfiguration), + next_page_token=d.get("next_page_token", None), + ) class BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range.
This feature works with all account types.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def download(self - , start_month: str, end_month: str - , * - , personal_data: Optional[bool] = None) -> DownloadResponse: + def download(self, start_month: str, end_month: str, *, personal_data: Optional[bool] = None) -> DownloadResponse: """Return billable usage logs. - + Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see [CSV file schema]. Note that this method might take multiple minutes to complete. - + **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. - + [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema - + :param start_month: str Format: `YYYY-MM`. First month to return billable usage logs for. This field is required. :param end_month: str @@ -1559,46 +1738,38 @@ def download(self Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators. Handle this information with care. Defaults to false. - + :returns: :class:`DownloadResponse` """ - + query = {} - if end_month is not None: query['end_month'] = end_month - if personal_data is not None: query['personal_data'] = personal_data - if start_month is not None: query['start_month'] = start_month - headers = {'Accept': 'text/plain',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/usage/download', query=query - - , headers=headers - , raw=True) + if end_month is not None: + query["end_month"] = end_month + if personal_data is not None: + query["personal_data"] = personal_data + if start_month is not None: + query["start_month"] = start_month + headers = { + "Accept": "text/plain", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/usage/download", query=query, headers=headers, raw=True + ) return DownloadResponse.from_dict(res) - - + class BudgetPolicyAPI: """A service that serves REST APIs for budget policies""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , policy: Optional[BudgetPolicy] = None, request_id: Optional[str] = None) -> BudgetPolicy: + def create(self, *, policy: Optional[BudgetPolicy] = None, request_id: Optional[str] = None) -> BudgetPolicy: """Create a budget policy. - + Creates a new policy. - + :param policy: :class:`BudgetPolicy` (optional) The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must be provided, custom_tags may need to be provided depending on the cloud provider. All other fields are :param request_id: str (optional) A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is recommended. This request is only idempotent if a `request_id` is provided.
- + :returns: :class:`BudgetPolicy` """ body = {} - if policy is not None: body['policy'] = policy.as_dict() - if request_id is not None: body['request_id'] = request_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/accounts/{self._api.account_id}/budget-policies', body=body - - , headers=headers - ) + if policy is not None: + body["policy"] = policy.as_dict() + if request_id is not None: + body["request_id"] = request_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.1/accounts/{self._api.account_id}/budget-policies", body=body, headers=headers + ) return BudgetPolicy.from_dict(res) - - - - - def delete(self - , policy_id: str - ): + def delete(self, policy_id: str): """Delete a budget policy. - + Deletes a policy. - + :param policy_id: str The Id of the policy. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}", headers=headers) - def get(self - , policy_id: str - ) -> BudgetPolicy: + def get(self, policy_id: str) -> BudgetPolicy: """Get a budget policy. - + Retrieves a policy by its ID. - + :param policy_id: str The Id of the policy. - + :returns: :class:`BudgetPolicy` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}' - - , headers=headers - ) - return BudgetPolicy.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , filter_by: Optional[Filter] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, sort_spec: Optional[SortSpec] = None) -> Iterator[BudgetPolicy]: + res = self._api.do( + "GET", f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}", headers=headers + ) + return BudgetPolicy.from_dict(res) + + def list( + self, + *, + filter_by: Optional[Filter] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sort_spec: Optional[SortSpec] = None, + ) -> Iterator[BudgetPolicy]: """List policies. - + Lists all policies. Policies are returned in ascending alphabetical order of their names. - + :param filter_by: :class:`Filter` (optional) A filter to apply to the list of policies. :param page_size: int (optional) @@ -1690,323 +1852,280 @@ def list(self :param page_token: str (optional) A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the subsequent page. If unspecified, the first page will be returned. - + When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the call that provided the page token. :param sort_spec: :class:`SortSpec` (optional) The sort specification.
- + :returns: Iterator over :class:`BudgetPolicy` """ - - query = {} - if filter_by is not None: query['filter_by'] = filter_by.as_dict() - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - if sort_spec is not None: query['sort_spec'] = sort_spec.as_dict() - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budget-policies', query=query - - , headers=headers - ) - if 'policies' in json: - for v in json['policies']: - yield BudgetPolicy.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if filter_by is not None: + query["filter_by"] = filter_by.as_dict() + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if sort_spec is not None: + query["sort_spec"] = sort_spec.as_dict() + headers = { + "Accept": "application/json", + } - def update(self - , policy_id: str, policy: BudgetPolicy - , * - , limit_config: Optional[LimitConfig] = None) -> BudgetPolicy: + while True: + json = self._api.do( + "GET", f"/api/2.1/accounts/{self._api.account_id}/budget-policies", query=query, headers=headers + ) + if "policies" in json: + for v in json["policies"]: + yield BudgetPolicy.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, policy_id: str, policy: BudgetPolicy, *, limit_config: Optional[LimitConfig] = None + ) -> BudgetPolicy: """Update a budget policy. - + Updates a policy. - + :param policy_id: str The Id of the policy. This field is generated by Databricks and globally unique. :param policy: :class:`BudgetPolicy` Contains the BudgetPolicy details. :param limit_config: :class:`LimitConfig` (optional) DEPRECATED. This is a redundant field, as LimitConfig is part of the BudgetPolicy - + :returns: :class:`BudgetPolicy` """ body = policy.as_dict() query = {} - if limit_config is not None: query['limit_config'] = limit_config.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}', query=query, body=body - - , headers=headers - ) + if limit_config is not None: + query["limit_config"] = limit_config.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}", + query=query, + body=body, + headers=headers, + ) return BudgetPolicy.from_dict(res) - - + class BudgetsAPI: """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your account. You can set up budgets to either track account-wide spending, or apply filters to track the spending of specific teams, projects, or workspaces.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , budget: CreateBudgetConfigurationBudget - ) -> CreateBudgetConfigurationResponse: + def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse: """Create new budget. - + Create a new budget configuration for an account. For full details, see https://docs.databricks.com/en/admin/account-settings/budgets.html.
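The BudgetPolicyAPI above reduces to a conventional CRUD-plus-pagination client. A hypothetical usage sketch; the AccountClient entry point and its budget_policy attribute are assumptions not shown in this diff, and BudgetPolicy's policy_name/policy_id fields follow the create() docstring above:

    from databricks.sdk import AccountClient
    from databricks.sdk.service.billing import BudgetPolicy, Filter, SortSpec, SortSpecField

    a = AccountClient()  # account host and credentials resolved from the environment

    # policy_id is left empty on create; the service generates it.
    created = a.budget_policy.create(policy=BudgetPolicy(policy_name="team-x-serverless"))

    # list() is a generator that follows next_page_token across pages transparently.
    for p in a.budget_policy.list(
        filter_by=Filter(policy_name="team-x"),
        sort_spec=SortSpec(field=SortSpecField.POLICY_NAME),
    ):
        print(p.policy_id, p.policy_name)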
- + :param budget: :class:`CreateBudgetConfigurationBudget` Properties of the new budget configuration. - + :returns: :class:`CreateBudgetConfigurationResponse` """ body = {} - if budget is not None: body['budget'] = budget.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/accounts/{self._api.account_id}/budgets', body=body - - , headers=headers - ) - return CreateBudgetConfigurationResponse.from_dict(res) + if budget is not None: + body["budget"] = budget.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.1/accounts/{self._api.account_id}/budgets", body=body, headers=headers) + return CreateBudgetConfigurationResponse.from_dict(res) - def delete(self - , budget_id: str - ): + def delete(self, budget_id: str): """Delete budget. - + Deletes a budget configuration for an account. Both account and budget configuration are specified by ID. This cannot be undone. - + :param budget_id: str The Databricks budget configuration ID. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , budget_id: str - ) -> GetBudgetConfigurationResponse: + self._api.do("DELETE", f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}", headers=headers) + + def get(self, budget_id: str) -> GetBudgetConfigurationResponse: """Get budget. - + Gets a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The budget configuration ID - + :returns: :class:`GetBudgetConfigurationResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}' - - , headers=headers - ) - return GetBudgetConfigurationResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}", headers=headers) + return GetBudgetConfigurationResponse.from_dict(res) - def list(self - - , * - , page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]: + def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]: """Get all budgets. - + Gets all budgets associated with this account. - + :param page_token: str (optional) A page token received from a previous get all budget configurations call. This token can be used to retrieve the subsequent page. Requests first page if absent. 
- + :returns: Iterator over :class:`BudgetConfiguration` """ - - query = {} - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.1/accounts/{self._api.account_id}/budgets', query=query - - , headers=headers - ) - if 'budgets' in json: - for v in json['budgets']: - yield BudgetConfiguration.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , budget_id: str, budget: UpdateBudgetConfigurationBudget - ) -> UpdateBudgetConfigurationResponse: + while True: + json = self._api.do( + "GET", f"/api/2.1/accounts/{self._api.account_id}/budgets", query=query, headers=headers + ) + if "budgets" in json: + for v in json["budgets"]: + yield BudgetConfiguration.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse: """Modify budget. - + Updates a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The Databricks budget configuration ID. :param budget: :class:`UpdateBudgetConfigurationBudget` The updated budget. This will overwrite the budget specified by the budget ID. - + :returns: :class:`UpdateBudgetConfigurationResponse` """ body = {} - if budget is not None: body['budget'] = budget.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', body=body - - , headers=headers - ) + if budget is not None: + body["budget"] = budget.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", f"/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}", body=body, headers=headers + ) return UpdateBudgetConfigurationResponse.from_dict(res) - - + class LogDeliveryAPI: """These APIs manage Log delivery configurations for this account. Log delivery configs enable you to configure the delivery of the specified type of logs to your storage account.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , log_delivery_configuration: CreateLogDeliveryConfigurationParams - ) -> WrappedLogDeliveryConfiguration: + def create( + self, log_delivery_configuration: CreateLogDeliveryConfigurationParams + ) -> WrappedLogDeliveryConfiguration: """Create a new log delivery configuration. - + Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket). - + For full details, including the required IAM role policies and bucket policies, see [Deliver and access billable usage logs] or [Configure audit logging]. 
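As a concrete illustration of the payload this method wraps, a sketch built from the params class defined earlier in this file; the credential and storage IDs are placeholders for objects created through the Credentials and Storage APIs referenced above:

    from databricks.sdk.service.billing import CreateLogDeliveryConfigurationParams, LogType, OutputFormat

    params = CreateLogDeliveryConfigurationParams(
        log_type=LogType.BILLABLE_USAGE,
        output_format=OutputFormat.CSV,  # BILLABLE_USAGE supports CSV only
        credentials_id="<credential-object-id>",  # placeholder
        storage_configuration_id="<storage-configuration-id>",  # placeholder
        config_name="usage-logs",
        delivery_start_time="2024-01",  # optional YYYY-MM start month
    )
    body = {"log_delivery_configuration": params.as_dict()}  # what create() POSTs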
- + **Note**: There is a limit on the number of log delivery configurations available per account (each limit applies separately to each log type including billable usage and audit logs). You can create a maximum of two enabled account-level delivery configurations (configurations without a workspace filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per workspace for each log type, which means that the same workspace ID can occur in the workspace filter for no more than two delivery configurations per log type. - + You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery configuration](:method:LogDelivery/PatchStatus)). - + [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - + :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` * Log Delivery Configuration - + :returns: :class:`WrappedLogDeliveryConfiguration` """ body = {} - if log_delivery_configuration is not None: body['log_delivery_configuration'] = log_delivery_configuration.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/log-delivery', body=body - - , headers=headers - ) - return WrappedLogDeliveryConfiguration.from_dict(res) + if log_delivery_configuration is not None: + body["log_delivery_configuration"] = log_delivery_configuration.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/log-delivery", body=body, headers=headers) + return WrappedLogDeliveryConfiguration.from_dict(res) - def get(self - , log_delivery_configuration_id: str - ) -> GetLogDeliveryConfigurationResponse: + def get(self, log_delivery_configuration_id: str) -> GetLogDeliveryConfigurationResponse: """Get log delivery configuration. - + Gets a Databricks log delivery configuration object for an account, both specified by ID. - + :param log_delivery_configuration_id: str The log delivery configuration ID of the customer - + :returns: :class:`GetLogDeliveryConfigurationResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}' - - , headers=headers - ) - return GetLogDeliveryConfigurationResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}", + headers=headers, + ) + return GetLogDeliveryConfigurationResponse.from_dict(res) - def list(self - - , * - , credentials_id: Optional[str] = None, page_token: Optional[str] = None, status: Optional[LogDeliveryConfigStatus] = None, storage_configuration_id: Optional[str] = None) -> Iterator[LogDeliveryConfiguration]: + def list( + self, + *, + credentials_id: Optional[str] = None, + page_token: Optional[str] = None, + status: Optional[LogDeliveryConfigStatus] = None, + storage_configuration_id: Optional[str] = None, + ) -> Iterator[LogDeliveryConfiguration]: """Get all log delivery configurations. - + Gets all Databricks log delivery configurations associated with an account specified by ID. 
- + :param credentials_id: str (optional) The credentials ID to filter the search results with :param page_token: str (optional) @@ -2016,46 +2135,42 @@ def list(self The log delivery status to filter the search results with :param storage_configuration_id: str (optional) The storage configuration ID to filter the search results with - + :returns: Iterator over :class:`LogDeliveryConfiguration` """ - - query = {} - if credentials_id is not None: query['credentials_id'] = credentials_id - if page_token is not None: query['page_token'] = page_token - if status is not None: query['status'] = status.value - if storage_configuration_id is not None: query['storage_configuration_id'] = storage_configuration_id - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/log-delivery', query=query - - , headers=headers - ) - if 'log_delivery_configurations' in json: - for v in json['log_delivery_configurations']: - yield LogDeliveryConfiguration.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if credentials_id is not None: + query["credentials_id"] = credentials_id + if page_token is not None: + query["page_token"] = page_token + if status is not None: + query["status"] = status.value + if storage_configuration_id is not None: + query["storage_configuration_id"] = storage_configuration_id + headers = { + "Accept": "application/json", + } - def patch_status(self - , log_delivery_configuration_id: str, status: LogDeliveryConfigStatus - ): + while True: + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/log-delivery", query=query, headers=headers + ) + if "log_delivery_configurations" in json: + for v in json["log_delivery_configurations"]: + yield LogDeliveryConfiguration.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryConfigStatus): """Enable or disable log delivery configuration. - + Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create). - + :param log_delivery_configuration_id: str The log delivery configuration ID of the customer :param status: :class:`LogDeliveryConfigStatus` @@ -2063,95 +2178,85 @@ def patch_status(self to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. 
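# Sketch of the disable-instead-of-delete flow described above: configurations
# cannot be deleted, only disabled. The configuration ID is a placeholder;
# assumes a configured AccountClient.
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import LogDeliveryConfigStatus

a = AccountClient()
a.log_delivery.patch_status("<log-delivery-config-id>", LogDeliveryConfigStatus.DISABLED)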
- - + + """ body = {} - if status is not None: body['status'] = status.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}', body=body - - , headers=headers - ) - + if status is not None: + body["status"] = status.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}", + body=body, + headers=headers, + ) + - - class UsageDashboardsAPI: """These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost drivers.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None) -> CreateBillingUsageDashboardResponse: + def create( + self, *, dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None + ) -> CreateBillingUsageDashboardResponse: """Create new usage dashboard. - + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. - + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. - + :returns: :class:`CreateBillingUsageDashboardResponse` """ body = {} - if dashboard_type is not None: body['dashboard_type'] = dashboard_type.value - if workspace_id is not None: body['workspace_id'] = workspace_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/dashboard', body=body - - , headers=headers - ) - return CreateBillingUsageDashboardResponse.from_dict(res) + if dashboard_type is not None: + body["dashboard_type"] = dashboard_type.value + if workspace_id is not None: + body["workspace_id"] = workspace_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/dashboard", body=body, headers=headers) + return CreateBillingUsageDashboardResponse.from_dict(res) - def get(self - - , * - , dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None) -> GetBillingUsageDashboardResponse: + def get( + self, *, dashboard_type: Optional[UsageDashboardType] = None, workspace_id: Optional[int] = None + ) -> GetBillingUsageDashboardResponse: """Get usage dashboard. - + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. - + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. 
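# Illustrative only: creating a workspace-level usage dashboard with the
# create() method defined above. The workspace ID is a placeholder; assumes a
# configured AccountClient.
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import UsageDashboardType

a = AccountClient()
resp = a.usage_dashboards.create(
    dashboard_type=UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890,  # placeholder workspace ID
)
print(resp.dashboard_id)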
- + :returns: :class:`GetBillingUsageDashboardResponse` """ - + query = {} - if dashboard_type is not None: query['dashboard_type'] = dashboard_type.value - if workspace_id is not None: query['workspace_id'] = workspace_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/dashboard', query=query - - , headers=headers - ) + if dashboard_type is not None: + query["dashboard_type"] = dashboard_type.value + if workspace_id is not None: + query["workspace_id"] = workspace_id + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/dashboard", query=query, headers=headers) return GetBillingUsageDashboardResponse.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 22be23b7a..f1819bf54 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -1,358 +1,403 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Callable, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AccountsCreateMetastore: metastore_info: Optional[CreateMetastore] = None - + def as_dict(self) -> dict: """Serializes the AccountsCreateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() + if self.metastore_info: + body["metastore_info"] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_info: body['metastore_info'] = self.metastore_info + if self.metastore_info: + body["metastore_info"] = self.metastore_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastore: """Deserializes the AccountsCreateMetastore from a dictionary.""" - return cls(metastore_info=_from_dict(d, 'metastore_info', CreateMetastore)) - - + return cls(metastore_info=_from_dict(d, "metastore_info", CreateMetastore)) @dataclass class AccountsCreateMetastoreAssignment: metastore_assignment: Optional[CreateMetastoreAssignment] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + workspace_id: Optional[int] = None """Workspace ID.""" - + def as_dict(self) -> dict: """Serializes the AccountsCreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if 
self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.metastore_assignment: + body["metastore_assignment"] = self.metastore_assignment.as_dict() + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.metastore_assignment: + body["metastore_assignment"] = self.metastore_assignment + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreAssignment: """Deserializes the AccountsCreateMetastoreAssignment from a dictionary.""" - return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', CreateMetastoreAssignment), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None)) - - + return cls( + metastore_assignment=_from_dict(d, "metastore_assignment", CreateMetastoreAssignment), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), + ) @dataclass class AccountsCreateStorageCredential: credential_info: Optional[CreateStorageCredential] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + def as_dict(self) -> dict: """Serializes the AccountsCreateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_info: body['credential_info'] = self.credential_info.as_dict() - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.credential_info: + body["credential_info"] = self.credential_info.as_dict() + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_info: body['credential_info'] = self.credential_info - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.credential_info: + body["credential_info"] = self.credential_info + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateStorageCredential: """Deserializes the AccountsCreateStorageCredential from a dictionary.""" - return cls(credential_info=_from_dict(d, 'credential_info', CreateStorageCredential), metastore_id=d.get('metastore_id', None)) - - + return cls( + credential_info=_from_dict(d, "credential_info", CreateStorageCredential), + metastore_id=d.get("metastore_id", None), + ) @dataclass class AccountsMetastoreAssignment: metastore_assignment: Optional[MetastoreAssignment] = None - + def as_dict(self) -> dict: """Serializes the AccountsMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() + if self.metastore_assignment: + body["metastore_assignment"] = 
self.metastore_assignment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment + if self.metastore_assignment: + body["metastore_assignment"] = self.metastore_assignment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreAssignment: """Deserializes the AccountsMetastoreAssignment from a dictionary.""" - return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', MetastoreAssignment)) - - + return cls(metastore_assignment=_from_dict(d, "metastore_assignment", MetastoreAssignment)) @dataclass class AccountsMetastoreInfo: metastore_info: Optional[MetastoreInfo] = None - + def as_dict(self) -> dict: """Serializes the AccountsMetastoreInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() + if self.metastore_info: + body["metastore_info"] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_info: body['metastore_info'] = self.metastore_info + if self.metastore_info: + body["metastore_info"] = self.metastore_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsMetastoreInfo: """Deserializes the AccountsMetastoreInfo from a dictionary.""" - return cls(metastore_info=_from_dict(d, 'metastore_info', MetastoreInfo)) - - + return cls(metastore_info=_from_dict(d, "metastore_info", MetastoreInfo)) @dataclass class AccountsStorageCredentialInfo: credential_info: Optional[StorageCredentialInfo] = None - + def as_dict(self) -> dict: """Serializes the AccountsStorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_info: body['credential_info'] = self.credential_info.as_dict() + if self.credential_info: + body["credential_info"] = self.credential_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_info: body['credential_info'] = self.credential_info + if self.credential_info: + body["credential_info"] = self.credential_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: """Deserializes the AccountsStorageCredentialInfo from a dictionary.""" - return cls(credential_info=_from_dict(d, 'credential_info', StorageCredentialInfo)) - - + return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) @dataclass class AccountsUpdateMetastore: metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + metastore_info: Optional[UpdateMetastore] = None - + def as_dict(self) -> dict: """Serializes the AccountsUpdateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict() + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.metastore_info: + body["metastore_info"] = self.metastore_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AccountsUpdateMetastore into a shallow 
dictionary of its immediate attributes.""" body = {} - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.metastore_info: body['metastore_info'] = self.metastore_info + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.metastore_info: + body["metastore_info"] = self.metastore_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastore: """Deserializes the AccountsUpdateMetastore from a dictionary.""" - return cls(metastore_id=d.get('metastore_id', None), metastore_info=_from_dict(d, 'metastore_info', UpdateMetastore)) - - + return cls( + metastore_id=d.get("metastore_id", None), metastore_info=_from_dict(d, "metastore_info", UpdateMetastore) + ) @dataclass class AccountsUpdateMetastoreAssignment: metastore_assignment: Optional[UpdateMetastoreAssignment] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + workspace_id: Optional[int] = None """Workspace ID.""" - + def as_dict(self) -> dict: """Serializes the AccountsUpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict() - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.metastore_assignment: + body["metastore_assignment"] = self.metastore_assignment.as_dict() + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.metastore_assignment: + body["metastore_assignment"] = self.metastore_assignment + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreAssignment: """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary.""" - return cls(metastore_assignment=_from_dict(d, 'metastore_assignment', UpdateMetastoreAssignment), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None)) - - + return cls( + metastore_assignment=_from_dict(d, "metastore_assignment", UpdateMetastoreAssignment), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), + ) @dataclass class AccountsUpdateStorageCredential: credential_info: Optional[UpdateStorageCredential] = None - + metastore_id: Optional[str] = None """Unity Catalog metastore ID""" - + storage_credential_name: Optional[str] = None """Name of the storage credential.""" - + def as_dict(self) -> dict: """Serializes the AccountsUpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_info: body['credential_info'] = self.credential_info.as_dict() - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.storage_credential_name is not None: body['storage_credential_name'] 
= self.storage_credential_name + if self.credential_info: + body["credential_info"] = self.credential_info.as_dict() + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name return body def as_shallow_dict(self) -> dict: """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_info: body['credential_info'] = self.credential_info - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name + if self.credential_info: + body["credential_info"] = self.credential_info + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateStorageCredential: """Deserializes the AccountsUpdateStorageCredential from a dictionary.""" - return cls(credential_info=_from_dict(d, 'credential_info', UpdateStorageCredential), metastore_id=d.get('metastore_id', None), storage_credential_name=d.get('storage_credential_name', None)) - - + return cls( + credential_info=_from_dict(d, "credential_info", UpdateStorageCredential), + metastore_id=d.get("metastore_id", None), + storage_credential_name=d.get("storage_credential_name", None), + ) @dataclass class ArtifactAllowlistInfo: artifact_matchers: Optional[List[ArtifactMatcher]] = None """A list of allowed artifact match patterns.""" - + created_at: Optional[int] = None """Time at which this artifact allowlist was set, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of the user who set the artifact allowlist.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + def as_dict(self) -> dict: """Serializes the ArtifactAllowlistInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_matchers: body['artifact_matchers'] = [v.as_dict() for v in self.artifact_matchers] - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.artifact_matchers: + body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers] + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id return body def as_shallow_dict(self) -> dict: """Serializes the ArtifactAllowlistInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id + if self.artifact_matchers: + body["artifact_matchers"] = self.artifact_matchers + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.metastore_id is not None: + body["metastore_id"] = 
self.metastore_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ArtifactAllowlistInfo: """Deserializes the ArtifactAllowlistInfo from a dictionary.""" - return cls(artifact_matchers=_repeated_dict(d, 'artifact_matchers', ArtifactMatcher), created_at=d.get('created_at', None), created_by=d.get('created_by', None), metastore_id=d.get('metastore_id', None)) - - + return cls( + artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + metastore_id=d.get("metastore_id", None), + ) @dataclass class ArtifactMatcher: artifact: str """The artifact path or maven coordinate""" - + match_type: MatchType """The pattern matching type of the artifact""" - + def as_dict(self) -> dict: """Serializes the ArtifactMatcher into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact is not None: body['artifact'] = self.artifact - if self.match_type is not None: body['match_type'] = self.match_type.value + if self.artifact is not None: + body["artifact"] = self.artifact + if self.match_type is not None: + body["match_type"] = self.match_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ArtifactMatcher into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact is not None: body['artifact'] = self.artifact - if self.match_type is not None: body['match_type'] = self.match_type + if self.artifact is not None: + body["artifact"] = self.artifact + if self.match_type is not None: + body["match_type"] = self.match_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ArtifactMatcher: """Deserializes the ArtifactMatcher from a dictionary.""" - return cls(artifact=d.get('artifact', None), match_type=_enum(d, 'match_type', MatchType)) - - + return cls(artifact=d.get("artifact", None), match_type=_enum(d, "match_type", MatchType)) class ArtifactType(Enum): """The artifact type""" - - INIT_SCRIPT = 'INIT_SCRIPT' - LIBRARY_JAR = 'LIBRARY_JAR' - LIBRARY_MAVEN = 'LIBRARY_MAVEN' + + INIT_SCRIPT = "INIT_SCRIPT" + LIBRARY_JAR = "LIBRARY_JAR" + LIBRARY_MAVEN = "LIBRARY_MAVEN" + @dataclass class AssignResponse: @@ -370,182 +415,209 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AssignResponse: """Deserializes the AssignResponse from a dictionary.""" return cls() - - @dataclass class AwsCredentials: """AWS temporary credentials for API authentication. 
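# Every dataclass in this module follows the same as_dict()/from_dict()
# serialization pattern shown above; a quick round-trip sketch (illustrative,
# using the ArtifactMatcher class defined earlier in this file):
from databricks.sdk.service.catalog import ArtifactMatcher, MatchType

m = ArtifactMatcher(artifact="org.apache.spark:spark-avro_2.12", match_type=MatchType.PREFIX_MATCH)
assert ArtifactMatcher.from_dict(m.as_dict()) == m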
Read more at https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" - + access_key_id: Optional[str] = None """The access key ID that identifies the temporary credentials.""" - + access_point: Optional[str] = None """The Amazon Resource Name (ARN) of the S3 access point for temporary credentials related to the external location.""" - + secret_access_key: Optional[str] = None """The secret access key that can be used to sign AWS API requests.""" - + session_token: Optional[str] = None """The token that users must pass to AWS API to use the temporary credentials.""" - + def as_dict(self) -> dict: """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_key_id is not None: body['access_key_id'] = self.access_key_id - if self.access_point is not None: body['access_point'] = self.access_point - if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key - if self.session_token is not None: body['session_token'] = self.session_token + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.access_point is not None: + body["access_point"] = self.access_point + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token return body def as_shallow_dict(self) -> dict: """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_key_id is not None: body['access_key_id'] = self.access_key_id - if self.access_point is not None: body['access_point'] = self.access_point - if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key - if self.session_token is not None: body['session_token'] = self.session_token + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.access_point is not None: + body["access_point"] = self.access_point + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsCredentials: """Deserializes the AwsCredentials from a dictionary.""" - return cls(access_key_id=d.get('access_key_id', None), access_point=d.get('access_point', None), secret_access_key=d.get('secret_access_key', None), session_token=d.get('session_token', None)) - - + return cls( + access_key_id=d.get("access_key_id", None), + access_point=d.get("access_point", None), + secret_access_key=d.get("secret_access_key", None), + session_token=d.get("session_token", None), + ) @dataclass class AwsIamRole: """The AWS IAM role configuration""" - + external_id: Optional[str] = None """The external ID used in role assumption to prevent the confused deputy problem.""" - + role_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials.""" - + unity_catalog_iam_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. 
This is the identity that is going to assume the AWS IAM role.""" - + def as_dict(self) -> dict: """Serializes the AwsIamRole into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_id is not None: body['external_id'] = self.external_id - if self.role_arn is not None: body['role_arn'] = self.role_arn - if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn + if self.external_id is not None: + body["external_id"] = self.external_id + if self.role_arn is not None: + body["role_arn"] = self.role_arn + if self.unity_catalog_iam_arn is not None: + body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn return body def as_shallow_dict(self) -> dict: """Serializes the AwsIamRole into a shallow dictionary of its immediate attributes.""" body = {} - if self.external_id is not None: body['external_id'] = self.external_id - if self.role_arn is not None: body['role_arn'] = self.role_arn - if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn + if self.external_id is not None: + body["external_id"] = self.external_id + if self.role_arn is not None: + body["role_arn"] = self.role_arn + if self.unity_catalog_iam_arn is not None: + body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsIamRole: """Deserializes the AwsIamRole from a dictionary.""" - return cls(external_id=d.get('external_id', None), role_arn=d.get('role_arn', None), unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None)) - - + return cls( + external_id=d.get("external_id", None), + role_arn=d.get("role_arn", None), + unity_catalog_iam_arn=d.get("unity_catalog_iam_arn", None), + ) @dataclass class AwsIamRoleRequest: role_arn: str """The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access.""" - + def as_dict(self) -> dict: """Serializes the AwsIamRoleRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.role_arn is not None: + body["role_arn"] = self.role_arn return body def as_shallow_dict(self) -> dict: """Serializes the AwsIamRoleRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.role_arn is not None: + body["role_arn"] = self.role_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsIamRoleRequest: """Deserializes the AwsIamRoleRequest from a dictionary.""" - return cls(role_arn=d.get('role_arn', None)) - - + return cls(role_arn=d.get("role_arn", None)) @dataclass class AwsIamRoleResponse: role_arn: str """The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access.""" - + external_id: Optional[str] = None """The external ID used in role assumption to prevent the confused deputy problem.""" - + unity_catalog_iam_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. 
This is the identity that is going to assume the AWS IAM role.""" - + def as_dict(self) -> dict: """Serializes the AwsIamRoleResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_id is not None: body['external_id'] = self.external_id - if self.role_arn is not None: body['role_arn'] = self.role_arn - if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn + if self.external_id is not None: + body["external_id"] = self.external_id + if self.role_arn is not None: + body["role_arn"] = self.role_arn + if self.unity_catalog_iam_arn is not None: + body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn return body def as_shallow_dict(self) -> dict: """Serializes the AwsIamRoleResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.external_id is not None: body['external_id'] = self.external_id - if self.role_arn is not None: body['role_arn'] = self.role_arn - if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn + if self.external_id is not None: + body["external_id"] = self.external_id + if self.role_arn is not None: + body["role_arn"] = self.role_arn + if self.unity_catalog_iam_arn is not None: + body["unity_catalog_iam_arn"] = self.unity_catalog_iam_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsIamRoleResponse: """Deserializes the AwsIamRoleResponse from a dictionary.""" - return cls(external_id=d.get('external_id', None), role_arn=d.get('role_arn', None), unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None)) - - + return cls( + external_id=d.get("external_id", None), + role_arn=d.get("role_arn", None), + unity_catalog_iam_arn=d.get("unity_catalog_iam_arn", None), + ) @dataclass class AwsSqsQueue: managed_resource_id: Optional[str] = None """Unique identifier included in the name of file events managed cloud resources.""" - + queue_url: Optional[str] = None """The SQS queue URL in the format https://sqs.{region}.amazonaws.com/{account id}/{queue name} REQUIRED for provided_sqs.""" - + def as_dict(self) -> dict: """Serializes the AwsSqsQueue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id - if self.queue_url is not None: body['queue_url'] = self.queue_url + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.queue_url is not None: + body["queue_url"] = self.queue_url return body def as_shallow_dict(self) -> dict: """Serializes the AwsSqsQueue into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id - if self.queue_url is not None: body['queue_url'] = self.queue_url + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.queue_url is not None: + body["queue_url"] = self.queue_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsSqsQueue: """Deserializes the AwsSqsQueue from a dictionary.""" - return cls(managed_resource_id=d.get('managed_resource_id', None), queue_url=d.get('queue_url', None)) - - + return cls(managed_resource_id=d.get("managed_resource_id", None), queue_url=d.get("queue_url", None)) @dataclass @@ -553,73 +625,81 @@ class AzureActiveDirectoryToken: """Azure Active Directory token, essentially the OAuth token for Azure 
Service Principal or Managed Identity. Read more at https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" - + aad_token: Optional[str] = None """Opaque token that contains claims that you can use in Azure Active Directory to access cloud services.""" - + def as_dict(self) -> dict: """Serializes the AzureActiveDirectoryToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aad_token is not None: body['aad_token'] = self.aad_token + if self.aad_token is not None: + body["aad_token"] = self.aad_token return body def as_shallow_dict(self) -> dict: """Serializes the AzureActiveDirectoryToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.aad_token is not None: body['aad_token'] = self.aad_token + if self.aad_token is not None: + body["aad_token"] = self.aad_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureActiveDirectoryToken: """Deserializes the AzureActiveDirectoryToken from a dictionary.""" - return cls(aad_token=d.get('aad_token', None)) - - + return cls(aad_token=d.get("aad_token", None)) @dataclass class AzureManagedIdentity: """The Azure managed identity configuration.""" - + access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`.""" - + credential_id: Optional[str] = None """The Databricks internal ID that represents this managed identity. This field is only used to persist the credential_id once it is fetched from the credentials manager - as we only use the protobuf serializer to store credentials, this ID gets persisted to the database.""" - + managed_identity_id: Optional[str] = None """The Azure resource ID of the managed identity. Use the format `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}` This is only available for user-assigned identities. For system-assigned identities, the access_connector_id is used to identify the identity. 
If this field is not provided, then we assume the AzureManagedIdentity is using the system-assigned identity.""" - + def as_dict(self) -> dict: """Serializes the AzureManagedIdentity into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id + if self.access_connector_id is not None: + body["access_connector_id"] = self.access_connector_id + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.managed_identity_id is not None: + body["managed_identity_id"] = self.managed_identity_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureManagedIdentity into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id + if self.access_connector_id is not None: + body["access_connector_id"] = self.access_connector_id + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.managed_identity_id is not None: + body["managed_identity_id"] = self.managed_identity_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentity: """Deserializes the AzureManagedIdentity from a dictionary.""" - return cls(access_connector_id=d.get('access_connector_id', None), credential_id=d.get('credential_id', None), managed_identity_id=d.get('managed_identity_id', None)) - - + return cls( + access_connector_id=d.get("access_connector_id", None), + credential_id=d.get("credential_id", None), + managed_identity_id=d.get("managed_identity_id", None), + ) @dataclass @@ -627,34 +707,39 @@ class AzureManagedIdentityRequest: access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.""" - + managed_identity_id: Optional[str] = None """The Azure resource ID of the managed identity. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}. This is only available for user-assigned identities. For system-assigned identities, the access_connector_id is used to identify the identity. 
If this field is not provided, then we assume the AzureManagedIdentity is for a system-assigned identity.""" - + def as_dict(self) -> dict: """Serializes the AzureManagedIdentityRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id - if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id + if self.access_connector_id is not None: + body["access_connector_id"] = self.access_connector_id + if self.managed_identity_id is not None: + body["managed_identity_id"] = self.managed_identity_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureManagedIdentityRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id - if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id + if self.access_connector_id is not None: + body["access_connector_id"] = self.access_connector_id + if self.managed_identity_id is not None: + body["managed_identity_id"] = self.managed_identity_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentityRequest: """Deserializes the AzureManagedIdentityRequest from a dictionary.""" - return cls(access_connector_id=d.get('access_connector_id', None), managed_identity_id=d.get('managed_identity_id', None)) - - + return cls( + access_connector_id=d.get("access_connector_id", None), + managed_identity_id=d.get("managed_identity_id", None), + ) @dataclass @@ -662,150 +747,174 @@ class AzureManagedIdentityResponse: access_connector_id: str """The Azure resource ID of the Azure Databricks Access Connector. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}.""" - + credential_id: Optional[str] = None """The Databricks internal ID that represents this managed identity.""" - + managed_identity_id: Optional[str] = None """The Azure resource ID of the managed identity. Use the format /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}. This is only available for user-assigned identities. For system-assigned identities, the access_connector_id is used to identify the identity. 
If this field is not provided, then we assume the AzureManagedIdentity is for a system-assigned identity.""" - + def as_dict(self) -> dict: """Serializes the AzureManagedIdentityResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id + if self.access_connector_id is not None: + body["access_connector_id"] = self.access_connector_id + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.managed_identity_id is not None: + body["managed_identity_id"] = self.managed_identity_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureManagedIdentityResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id + if self.access_connector_id is not None: + body["access_connector_id"] = self.access_connector_id + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.managed_identity_id is not None: + body["managed_identity_id"] = self.managed_identity_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureManagedIdentityResponse: """Deserializes the AzureManagedIdentityResponse from a dictionary.""" - return cls(access_connector_id=d.get('access_connector_id', None), credential_id=d.get('credential_id', None), managed_identity_id=d.get('managed_identity_id', None)) - - + return cls( + access_connector_id=d.get("access_connector_id", None), + credential_id=d.get("credential_id", None), + managed_identity_id=d.get("managed_identity_id", None), + ) @dataclass class AzureQueueStorage: managed_resource_id: Optional[str] = None """Unique identifier included in the name of file events managed cloud resources.""" - + queue_url: Optional[str] = None """The AQS queue url in the format https://{storage account}.queue.core.windows.net/{queue name} REQUIRED for provided_aqs.""" - + resource_group: Optional[str] = None """The resource group for the queue, event grid subscription, and external location storage account. ONLY REQUIRED for locations with a service principal storage credential""" - + subscription_id: Optional[str] = None """OPTIONAL: The subscription id for the queue, event grid subscription, and external location storage account. 
REQUIRED for locations with a service principal storage credential""" - + def as_dict(self) -> dict: """Serializes the AzureQueueStorage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id - if self.queue_url is not None: body['queue_url'] = self.queue_url - if self.resource_group is not None: body['resource_group'] = self.resource_group - if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.queue_url is not None: + body["queue_url"] = self.queue_url + if self.resource_group is not None: + body["resource_group"] = self.resource_group + if self.subscription_id is not None: + body["subscription_id"] = self.subscription_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureQueueStorage into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id - if self.queue_url is not None: body['queue_url'] = self.queue_url - if self.resource_group is not None: body['resource_group'] = self.resource_group - if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.queue_url is not None: + body["queue_url"] = self.queue_url + if self.resource_group is not None: + body["resource_group"] = self.resource_group + if self.subscription_id is not None: + body["subscription_id"] = self.subscription_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureQueueStorage: """Deserializes the AzureQueueStorage from a dictionary.""" - return cls(managed_resource_id=d.get('managed_resource_id', None), queue_url=d.get('queue_url', None), resource_group=d.get('resource_group', None), subscription_id=d.get('subscription_id', None)) - - + return cls( + managed_resource_id=d.get("managed_resource_id", None), + queue_url=d.get("queue_url", None), + resource_group=d.get("resource_group", None), + subscription_id=d.get("subscription_id", None), + ) @dataclass class AzureServicePrincipal: """The Azure service principal configuration. 
Only applicable when purpose is **STORAGE**.""" - + directory_id: str """The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.""" - + application_id: str """The application ID of the application registration within the referenced AAD tenant.""" - + client_secret: str """The client secret generated for the above app ID in AAD.""" - + def as_dict(self) -> dict: """Serializes the AzureServicePrincipal into a dictionary suitable for use as a JSON request body.""" body = {} - if self.application_id is not None: body['application_id'] = self.application_id - if self.client_secret is not None: body['client_secret'] = self.client_secret - if self.directory_id is not None: body['directory_id'] = self.directory_id + if self.application_id is not None: + body["application_id"] = self.application_id + if self.client_secret is not None: + body["client_secret"] = self.client_secret + if self.directory_id is not None: + body["directory_id"] = self.directory_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureServicePrincipal into a shallow dictionary of its immediate attributes.""" body = {} - if self.application_id is not None: body['application_id'] = self.application_id - if self.client_secret is not None: body['client_secret'] = self.client_secret - if self.directory_id is not None: body['directory_id'] = self.directory_id + if self.application_id is not None: + body["application_id"] = self.application_id + if self.client_secret is not None: + body["client_secret"] = self.client_secret + if self.directory_id is not None: + body["directory_id"] = self.directory_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureServicePrincipal: """Deserializes the AzureServicePrincipal from a dictionary.""" - return cls(application_id=d.get('application_id', None), client_secret=d.get('client_secret', None), directory_id=d.get('directory_id', None)) - - + return cls( + application_id=d.get("application_id", None), + client_secret=d.get("client_secret", None), + directory_id=d.get("directory_id", None), + ) @dataclass class AzureUserDelegationSas: """Azure temporary credentials for API authentication. 
Read more at https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas""" - + sas_token: Optional[str] = None """The signed URI (SAS Token) used to access blob services for a given path""" - + def as_dict(self) -> dict: """Serializes the AzureUserDelegationSas into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sas_token is not None: body['sas_token'] = self.sas_token + if self.sas_token is not None: + body["sas_token"] = self.sas_token return body def as_shallow_dict(self) -> dict: """Serializes the AzureUserDelegationSas into a shallow dictionary of its immediate attributes.""" body = {} - if self.sas_token is not None: body['sas_token'] = self.sas_token + if self.sas_token is not None: + body["sas_token"] = self.sas_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureUserDelegationSas: """Deserializes the AzureUserDelegationSas from a dictionary.""" - return cls(sas_token=d.get('sas_token', None)) - - - - - + return cls(sas_token=d.get("sas_token", None)) @dataclass @@ -824,8 +933,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelRefreshResponse: """Deserializes the CancelRefreshResponse from a dictionary.""" return cls() - - @dataclass @@ -833,612 +940,830 @@ class CatalogInfo: browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_type: Optional[CatalogType] = None """The type of the catalog.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + connection_name: Optional[str] = None """The name of the connection to an external data source.""" - + created_at: Optional[int] = None """Time at which this catalog was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of catalog creator.""" - + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None """Whether predictive optimization should be enabled for this object and objects under it.""" - + full_name: Optional[str] = None """The full name of the catalog. Corresponds with the name field.""" - + isolation_mode: Optional[CatalogIsolationMode] = None """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """Name of catalog.""" - - options: Optional[Dict[str,str]] = None + + options: Optional[Dict[str, str]] = None """A map of key-value properties attached to the securable.""" - + owner: Optional[str] = None """Username of current owner of catalog.""" - - properties: Optional[Dict[str,str]] = None + + properties: Optional[Dict[str, str]] = None """A map of key-value properties attached to the securable.""" - + provider_name: Optional[str] = None """The name of delta sharing provider. 
@@ -833,612 +940,830 @@ class CatalogInfo:
     browse_only: Optional[bool] = None
     """Indicates whether the principal is limited to retrieving metadata for the associated object
     through the BROWSE privilege when include_browse is enabled in the request."""
-
+
     catalog_type: Optional[CatalogType] = None
     """The type of the catalog."""
-
+
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     connection_name: Optional[str] = None
     """The name of the connection to an external data source."""
-
+
     created_at: Optional[int] = None
     """Time at which this catalog was created, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of catalog creator."""
-
+
     effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None
-
+
     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
     """Whether predictive optimization should be enabled for this object and objects under it."""
-
+
     full_name: Optional[str] = None
     """The full name of the catalog. Corresponds with the name field."""
-
+
     isolation_mode: Optional[CatalogIsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of
     workspaces."""
-
+
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
-
+
     name: Optional[str] = None
     """Name of catalog."""
-
-    options: Optional[Dict[str,str]] = None
+
+    options: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     owner: Optional[str] = None
     """Username of current owner of catalog."""
-
-    properties: Optional[Dict[str,str]] = None
+
+    properties: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     provider_name: Optional[str] = None
     """The name of delta sharing provider.

     A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server."""
-
+
     provisioning_info: Optional[ProvisioningInfo] = None
     """Status of an asynchronously provisioned resource."""
-
+
     securable_type: Optional[SecurableType] = None
     """The type of Unity Catalog securable."""
-
+
     share_name: Optional[str] = None
     """The name of the share under the share provider."""
-
+
     storage_location: Optional[str] = None
     """Storage Location URL (full path) for managed tables within catalog."""
-
+
     storage_root: Optional[str] = None
     """Storage root URL for managed tables within catalog."""
-
+
     updated_at: Optional[int] = None
     """Time at which this catalog was last modified, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of user who last modified catalog."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CatalogInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type.value
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict()
-        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
-        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_type is not None:
+            body["catalog_type"] = self.catalog_type.value
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.effective_predictive_optimization_flag:
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
+        if self.enable_predictive_optimization is not None:
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info.as_dict()
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type.value
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CatalogInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
-        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_type is not None:
+            body["catalog_type"] = self.catalog_type
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.effective_predictive_optimization_flag:
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CatalogInfo:
         """Deserializes the CatalogInfo from a dictionary."""
-        return cls(browse_only=d.get('browse_only', None), catalog_type=_enum(d, 'catalog_type', CatalogType), comment=d.get('comment', None), connection_name=d.get('connection_name', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), effective_predictive_optimization_flag=_from_dict(d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), full_name=d.get('full_name', None), isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), options=d.get('options', None), owner=d.get('owner', None), properties=d.get('properties', None), provider_name=d.get('provider_name', None), provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo), securable_type=_enum(d, 'securable_type', SecurableType), share_name=d.get('share_name', None), storage_location=d.get('storage_location', None), storage_root=d.get('storage_root', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
-
-
+        return cls(
+            browse_only=d.get("browse_only", None),
+            catalog_type=_enum(d, "catalog_type", CatalogType),
+            comment=d.get("comment", None),
+            connection_name=d.get("connection_name", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            effective_predictive_optimization_flag=_from_dict(
+                d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag
+            ),
+            enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization),
+            full_name=d.get("full_name", None),
+            isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+            provider_name=d.get("provider_name", None),
+            provisioning_info=_from_dict(d, "provisioning_info", ProvisioningInfo),
+            securable_type=_enum(d, "securable_type", SecurableType),
+            share_name=d.get("share_name", None),
+            storage_location=d.get("storage_location", None),
+            storage_root=d.get("storage_root", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )
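Enum-typed fields such as `catalog_type` cross the wire as plain strings and are decoded by the `_enum` helper, so a trimmed API payload round-trips like this (illustrative catalog name):

    from databricks.sdk.service.catalog import CatalogInfo, CatalogType

    info = CatalogInfo.from_dict({"name": "main", "catalog_type": "MANAGED_CATALOG"})
    assert info.catalog_type is CatalogType.MANAGED_CATALOG
    assert info.as_dict() == {"catalog_type": "MANAGED_CATALOG", "name": "main"}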
 class CatalogIsolationMode(Enum):
-
-
-    ISOLATED = 'ISOLATED'
-    OPEN = 'OPEN'
+
+    ISOLATED = "ISOLATED"
+    OPEN = "OPEN"
+

 class CatalogType(Enum):
     """The type of the catalog."""
-
-    DELTASHARING_CATALOG = 'DELTASHARING_CATALOG'
-    FOREIGN_CATALOG = 'FOREIGN_CATALOG'
-    INTERNAL_CATALOG = 'INTERNAL_CATALOG'
-    MANAGED_CATALOG = 'MANAGED_CATALOG'
-    MANAGED_ONLINE_CATALOG = 'MANAGED_ONLINE_CATALOG'
-    SYSTEM_CATALOG = 'SYSTEM_CATALOG'
-    UNKNOWN_CATALOG_TYPE = 'UNKNOWN_CATALOG_TYPE'
+
+    DELTASHARING_CATALOG = "DELTASHARING_CATALOG"
+    FOREIGN_CATALOG = "FOREIGN_CATALOG"
+    INTERNAL_CATALOG = "INTERNAL_CATALOG"
+    MANAGED_CATALOG = "MANAGED_CATALOG"
+    MANAGED_ONLINE_CATALOG = "MANAGED_ONLINE_CATALOG"
+    SYSTEM_CATALOG = "SYSTEM_CATALOG"
+    UNKNOWN_CATALOG_TYPE = "UNKNOWN_CATALOG_TYPE"
+

 @dataclass
 class CloudflareApiToken:
     access_key_id: str
     """The Cloudflare access key id of the token."""
-
+
     secret_access_key: str
     """The secret access token generated for the access key id"""
-
+
     account_id: str
     """The account id associated with the API token."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CloudflareApiToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CloudflareApiToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.access_key_id is not None:
+            body["access_key_id"] = self.access_key_id
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.secret_access_key is not None:
+            body["secret_access_key"] = self.secret_access_key
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CloudflareApiToken:
         """Deserializes the CloudflareApiToken from a dictionary."""
-        return cls(access_key_id=d.get('access_key_id', None), account_id=d.get('account_id', None), secret_access_key=d.get('secret_access_key', None))
-
-
+        return cls(
+            access_key_id=d.get("access_key_id", None),
+            account_id=d.get("account_id", None),
+            secret_access_key=d.get("secret_access_key", None),
+        )
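Unlike the all-optional response models above, `CloudflareApiToken` declares its three fields without defaults, so they are required at construction time (placeholder R2-style values):

    from databricks.sdk.service.catalog import CloudflareApiToken

    token = CloudflareApiToken(
        access_key_id="example-access-key",
        secret_access_key="example-secret-key",
        account_id="example-account",
    )
    assert token.as_dict()["account_id"] == "example-account"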
 @dataclass
 class ColumnInfo:
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     mask: Optional[ColumnMask] = None
-
+
     name: Optional[str] = None
     """Name of Column."""
-
+
     nullable: Optional[bool] = None
     """Whether field may be Null (default: true)."""
-
+
     partition_index: Optional[int] = None
     """Partition index for column."""
-
+
     position: Optional[int] = None
     """Ordinal position of column (starting at position 0)."""
-
+
     type_interval_type: Optional[str] = None
     """Format of IntervalType."""
-
+
     type_json: Optional[str] = None
     """Full data type specification, JSON-serialized."""
-
+
     type_name: Optional[ColumnTypeName] = None
-
+
     type_precision: Optional[int] = None
     """Digits of precision; required for DecimalTypes."""
-
+
     type_scale: Optional[int] = None
     """Digits to right of decimal; Required for DecimalTypes."""
-
+
     type_text: Optional[str] = None
     """Full data type specification as SQL/catalogString text."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.mask: body['mask'] = self.mask.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.nullable is not None: body['nullable'] = self.nullable
-        if self.partition_index is not None: body['partition_index'] = self.partition_index
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name.value
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.mask:
+            body["mask"] = self.mask.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.nullable is not None:
+            body["nullable"] = self.nullable
+        if self.partition_index is not None:
+            body["partition_index"] = self.partition_index
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name.value
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.mask: body['mask'] = self.mask
-        if self.name is not None: body['name'] = self.name
-        if self.nullable is not None: body['nullable'] = self.nullable
-        if self.partition_index is not None: body['partition_index'] = self.partition_index
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.mask:
+            body["mask"] = self.mask
+        if self.name is not None:
+            body["name"] = self.name
+        if self.nullable is not None:
+            body["nullable"] = self.nullable
+        if self.partition_index is not None:
+            body["partition_index"] = self.partition_index
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
-        return cls(comment=d.get('comment', None), mask=_from_dict(d, 'mask', ColumnMask), name=d.get('name', None), nullable=d.get('nullable', None), partition_index=d.get('partition_index', None), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_json=d.get('type_json', None), type_name=_enum(d, 'type_name', ColumnTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            mask=_from_dict(d, "mask", ColumnMask),
+            name=d.get("name", None),
+            nullable=d.get("nullable", None),
+            partition_index=d.get("partition_index", None),
+            position=d.get("position", None),
+            type_interval_type=d.get("type_interval_type", None),
+            type_json=d.get("type_json", None),
+            type_name=_enum(d, "type_name", ColumnTypeName),
+            type_precision=d.get("type_precision", None),
+            type_scale=d.get("type_scale", None),
+            type_text=d.get("type_text", None),
+        )


 @dataclass
 class ColumnMask:
     function_name: Optional[str] = None
     """The full name of the column mask SQL UDF."""
-
+
     using_column_names: Optional[List[str]] = None
     """The list of additional table columns to be passed as input to the column mask function. The
     first arg of the mask function should be of the type of the column being masked and the types
     of the rest of the args should match the types of columns in 'using_column_names'."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ColumnMask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names]
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.using_column_names:
+            body["using_column_names"] = [v for v in self.using_column_names]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ColumnMask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.using_column_names: body['using_column_names'] = self.using_column_names
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.using_column_names:
+            body["using_column_names"] = self.using_column_names
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ColumnMask:
         """Deserializes the ColumnMask from a dictionary."""
-        return cls(function_name=d.get('function_name', None), using_column_names=d.get('using_column_names', None))
-
-
+        return cls(function_name=d.get("function_name", None), using_column_names=d.get("using_column_names", None))


 class ColumnTypeName(Enum):
-
-
-    ARRAY = 'ARRAY'
-    BINARY = 'BINARY'
-    BOOLEAN = 'BOOLEAN'
-    BYTE = 'BYTE'
-    CHAR = 'CHAR'
-    DATE = 'DATE'
-    DECIMAL = 'DECIMAL'
-    DOUBLE = 'DOUBLE'
-    FLOAT = 'FLOAT'
-    GEOGRAPHY = 'GEOGRAPHY'
-    GEOMETRY = 'GEOMETRY'
-    INT = 'INT'
-    INTERVAL = 'INTERVAL'
-    LONG = 'LONG'
-    MAP = 'MAP'
-    NULL = 'NULL'
-    SHORT = 'SHORT'
-    STRING = 'STRING'
-    STRUCT = 'STRUCT'
-    TABLE_TYPE = 'TABLE_TYPE'
-    TIMESTAMP = 'TIMESTAMP'
-    TIMESTAMP_NTZ = 'TIMESTAMP_NTZ'
-    USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
-    VARIANT = 'VARIANT'
+
+    ARRAY = "ARRAY"
+    BINARY = "BINARY"
+    BOOLEAN = "BOOLEAN"
+    BYTE = "BYTE"
+    CHAR = "CHAR"
+    DATE = "DATE"
+    DECIMAL = "DECIMAL"
+    DOUBLE = "DOUBLE"
+    FLOAT = "FLOAT"
+    GEOGRAPHY = "GEOGRAPHY"
+    GEOMETRY = "GEOMETRY"
+    INT = "INT"
+    INTERVAL = "INTERVAL"
+    LONG = "LONG"
+    MAP = "MAP"
+    NULL = "NULL"
+    SHORT = "SHORT"
+    STRING = "STRING"
+    STRUCT = "STRUCT"
+    TABLE_TYPE = "TABLE_TYPE"
+    TIMESTAMP = "TIMESTAMP"
+    TIMESTAMP_NTZ = "TIMESTAMP_NTZ"
+    USER_DEFINED_TYPE = "USER_DEFINED_TYPE"
+    VARIANT = "VARIANT"
+
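Message-typed members serialize recursively: `as_dict` calls `mask.as_dict()`, while `as_shallow_dict` keeps the nested object as-is. A sketch with a hypothetical masking UDF name:

    from databricks.sdk.service.catalog import ColumnInfo, ColumnMask, ColumnTypeName

    col = ColumnInfo(
        name="email",
        type_name=ColumnTypeName.STRING,
        mask=ColumnMask(function_name="main.security.mask_email"),
    )
    assert col.as_dict()["mask"] == {"function_name": "main.security.mask_email"}
    assert col.as_shallow_dict()["mask"] is col.mask  # shallow: same object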
 @dataclass
 class ConnectionInfo:
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     connection_id: Optional[str] = None
     """Unique identifier of the Connection."""
-
+
     connection_type: Optional[ConnectionType] = None
     """The type of connection."""
-
+
     created_at: Optional[int] = None
     """Time at which this connection was created, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of connection creator."""
-
+
     credential_type: Optional[CredentialType] = None
     """The type of credential."""
-
+
     full_name: Optional[str] = None
     """Full name of connection."""
-
+
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
-
+
     name: Optional[str] = None
     """Name of the connection."""
-
-    options: Optional[Dict[str,str]] = None
+
+    options: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     owner: Optional[str] = None
     """Username of current owner of the connection."""
-
-    properties: Optional[Dict[str,str]] = None
+
+    properties: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     provisioning_info: Optional[ProvisioningInfo] = None
     """Status of an asynchronously provisioned resource."""
-
+
     read_only: Optional[bool] = None
     """If the connection is read only."""
-
+
     securable_type: Optional[SecurableType] = None
     """The type of Unity Catalog securable."""
-
+
     updated_at: Optional[int] = None
     """Time at which this connection was updated, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of user who last modified connection."""
-
+
     url: Optional[str] = None
     """URL of the remote data source, extracted from options."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ConnectionInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_id is not None: body['connection_id'] = self.connection_id
-        if self.connection_type is not None: body['connection_type'] = self.connection_type.value
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.credential_type is not None: body['credential_type'] = self.credential_type.value
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.url is not None: body['url'] = self.url
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_id is not None:
+            body["connection_id"] = self.connection_id
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type.value
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.credential_type is not None:
+            body["credential_type"] = self.credential_type.value
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info.as_dict()
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type.value
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.url is not None:
+            body["url"] = self.url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ConnectionInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_id is not None: body['connection_id'] = self.connection_id
-        if self.connection_type is not None: body['connection_type'] = self.connection_type
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.credential_type is not None: body['credential_type'] = self.credential_type
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
-        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.securable_type is not None: body['securable_type'] = self.securable_type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.url is not None: body['url'] = self.url
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_id is not None:
+            body["connection_id"] = self.connection_id
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.credential_type is not None:
+            body["credential_type"] = self.credential_type
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provisioning_info:
+            body["provisioning_info"] = self.provisioning_info
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.securable_type is not None:
+            body["securable_type"] = self.securable_type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.url is not None:
+            body["url"] = self.url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo:
         """Deserializes the ConnectionInfo from a dictionary."""
-        return cls(comment=d.get('comment', None), connection_id=d.get('connection_id', None), connection_type=_enum(d, 'connection_type', ConnectionType), created_at=d.get('created_at', None), created_by=d.get('created_by', None), credential_type=_enum(d, 'credential_type', CredentialType), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), options=d.get('options', None), owner=d.get('owner', None), properties=d.get('properties', None), provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo), read_only=d.get('read_only', None), securable_type=_enum(d, 'securable_type', SecurableType), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), url=d.get('url', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            connection_id=d.get("connection_id", None),
+            connection_type=_enum(d, "connection_type", ConnectionType),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            credential_type=_enum(d, "credential_type", CredentialType),
+            full_name=d.get("full_name", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+            provisioning_info=_from_dict(d, "provisioning_info", ProvisioningInfo),
+            read_only=d.get("read_only", None),
+            securable_type=_enum(d, "securable_type", SecurableType),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            url=d.get("url", None),
+        )


 class ConnectionType(Enum):
     """Next Id: 31"""
-
-    BIGQUERY = 'BIGQUERY'
-    DATABRICKS = 'DATABRICKS'
-    GA4_RAW_DATA = 'GA4_RAW_DATA'
-    GLUE = 'GLUE'
-    HIVE_METASTORE = 'HIVE_METASTORE'
-    HTTP = 'HTTP'
-    MYSQL = 'MYSQL'
-    ORACLE = 'ORACLE'
-    POSTGRESQL = 'POSTGRESQL'
-    POWER_BI = 'POWER_BI'
-    REDSHIFT = 'REDSHIFT'
-    SALESFORCE = 'SALESFORCE'
-    SALESFORCE_DATA_CLOUD = 'SALESFORCE_DATA_CLOUD'
-    SERVICENOW = 'SERVICENOW'
-    SNOWFLAKE = 'SNOWFLAKE'
-    SQLDW = 'SQLDW'
-    SQLSERVER = 'SQLSERVER'
-    TERADATA = 'TERADATA'
-    UNKNOWN_CONNECTION_TYPE = 'UNKNOWN_CONNECTION_TYPE'
-    WORKDAY_RAAS = 'WORKDAY_RAAS'
+
+    BIGQUERY = "BIGQUERY"
+    DATABRICKS = "DATABRICKS"
+    GA4_RAW_DATA = "GA4_RAW_DATA"
+    GLUE = "GLUE"
+    HIVE_METASTORE = "HIVE_METASTORE"
+    HTTP = "HTTP"
+    MYSQL = "MYSQL"
+    ORACLE = "ORACLE"
+    POSTGRESQL = "POSTGRESQL"
+    POWER_BI = "POWER_BI"
+    REDSHIFT = "REDSHIFT"
+    SALESFORCE = "SALESFORCE"
+    SALESFORCE_DATA_CLOUD = "SALESFORCE_DATA_CLOUD"
+    SERVICENOW = "SERVICENOW"
+    SNOWFLAKE = "SNOWFLAKE"
+    SQLDW = "SQLDW"
+    SQLSERVER = "SQLSERVER"
+    TERADATA = "TERADATA"
+    UNKNOWN_CONNECTION_TYPE = "UNKNOWN_CONNECTION_TYPE"
+    WORKDAY_RAAS = "WORKDAY_RAAS"
+
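Map-valued fields such as `options` pass through serialization untouched (note the plain truthiness check, which also drops an empty dict). A sketch with made-up connection options:

    from databricks.sdk.service.catalog import ConnectionInfo, ConnectionType

    conn = ConnectionInfo.from_dict(
        {"name": "pg", "connection_type": "POSTGRESQL", "options": {"host": "db.example.com"}}
    )
    assert conn.connection_type is ConnectionType.POSTGRESQL
    assert conn.as_dict()["options"] == {"host": "db.example.com"}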
 @dataclass
 class ContinuousUpdateStatus:
     """Detailed status of an online table. Shown if the online table is in the
     ONLINE_CONTINUOUS_UPDATE or the ONLINE_UPDATING_PIPELINE_RESOURCES state."""
-
+
     initial_pipeline_sync_progress: Optional[PipelineProgress] = None
     """Progress of the initial data synchronization."""
-
+
     last_processed_commit_version: Optional[int] = None
     """The last source table Delta version that was synced to the online table. Note that this
     Delta version may not be completely synced to the online table yet."""
-
+
     timestamp: Optional[str] = None
     """The timestamp of the last time any data was synchronized from the source table to the online
     table."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ContinuousUpdateStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
-        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.initial_pipeline_sync_progress:
+            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict()
+        if self.last_processed_commit_version is not None:
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ContinuousUpdateStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
-        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.initial_pipeline_sync_progress:
+            body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress
+        if self.last_processed_commit_version is not None:
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus:
         """Deserializes the ContinuousUpdateStatus from a dictionary."""
-        return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress), last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None))
-
-
+        return cls(
+            initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress),
+            last_processed_commit_version=d.get("last_processed_commit_version", None),
+            timestamp=d.get("timestamp", None),
+        )


 @dataclass
 class CreateCatalog:
     name: str
     """Name of catalog."""
-
+
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     connection_name: Optional[str] = None
     """The name of the connection to an external data source."""
-
-    options: Optional[Dict[str,str]] = None
+
+    options: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
-    properties: Optional[Dict[str,str]] = None
+
+    properties: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     provider_name: Optional[str] = None
     """The name of delta sharing provider.

     A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server."""
-
+
     share_name: Optional[str] = None
     """The name of the share under the share provider."""
-
+
     storage_root: Optional[str] = None
     """Storage root URL for managed tables within catalog."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateCatalog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_name is not None: body['connection_name'] = self.connection_name
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.provider_name is not None: body['provider_name'] = self.provider_name
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_name is not None:
+            body["connection_name"] = self.connection_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.provider_name is not None:
+            body["provider_name"] = self.provider_name
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateCatalog:
         """Deserializes the CreateCatalog from a dictionary."""
-        return cls(comment=d.get('comment', None), connection_name=d.get('connection_name', None), name=d.get('name', None), options=d.get('options', None), properties=d.get('properties', None), provider_name=d.get('provider_name', None), share_name=d.get('share_name', None), storage_root=d.get('storage_root', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            connection_name=d.get("connection_name", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            properties=d.get("properties", None),
+            provider_name=d.get("provider_name", None),
+            share_name=d.get("share_name", None),
+            storage_root=d.get("storage_root", None),
+        )
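On the request side the same omission rule keeps bodies minimal: only the fields you set are sent. For example (hypothetical catalog):

    from databricks.sdk.service.catalog import CreateCatalog

    req = CreateCatalog(name="analytics", comment="Team analytics catalog")
    assert req.as_dict() == {"comment": "Team analytics catalog", "name": "analytics"}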
 @dataclass
 class CreateConnection:
     name: str
     """Name of the connection."""
-
+
     connection_type: ConnectionType
     """The type of connection."""
-
-    options: Dict[str,str]
+
+    options: Dict[str, str]
     """A map of key-value properties attached to the securable."""
-
+
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
-    properties: Optional[Dict[str,str]] = None
+
+    properties: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     read_only: Optional[bool] = None
     """If the connection is read only."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateConnection into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_type is not None: body['connection_type'] = self.connection_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateConnection into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.connection_type is not None: body['connection_type'] = self.connection_type
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.properties: body['properties'] = self.properties
-        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.connection_type is not None:
+            body["connection_type"] = self.connection_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.properties:
+            body["properties"] = self.properties
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateConnection:
         """Deserializes the CreateConnection from a dictionary."""
-        return cls(comment=d.get('comment', None), connection_type=_enum(d, 'connection_type', ConnectionType), name=d.get('name', None), options=d.get('options', None), properties=d.get('properties', None), read_only=d.get('read_only', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            connection_type=_enum(d, "connection_type", ConnectionType),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            properties=d.get("properties", None),
+            read_only=d.get("read_only", None),
+        )


 @dataclass
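`CreateConnection` takes the enum member itself and emits its `.value` when serialized (placeholder host and port options):

    from databricks.sdk.service.catalog import ConnectionType, CreateConnection

    req = CreateConnection(
        name="pg",
        connection_type=ConnectionType.POSTGRESQL,
        options={"host": "db.example.com", "port": "5432"},
    )
    assert req.as_dict()["connection_type"] == "POSTGRESQL"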
@@ -1446,295 +1771,413 @@ class CreateCredentialRequest:
     name: str
     """The credential name. The name must be unique among storage and service credentials within
     the metastore."""
-
+
     aws_iam_role: Optional[AwsIamRole] = None
     """The AWS IAM role configuration"""
-
+
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
-
+
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
-
+
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
-
+
     purpose: Optional[CredentialPurpose] = None
     """Indicates the purpose of the credential."""
-
+
     read_only: Optional[bool] = None
     """Whether the credential is usable only for read operations. Only applicable when purpose is
     **STORAGE**."""
-
+
     skip_validation: Optional[bool] = None
     """Optional. Supplying true to this argument skips validation of the created set of credentials."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.purpose is not None: body['purpose'] = self.purpose.value
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.purpose is not None:
+            body["purpose"] = self.purpose.value
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.comment is not None: body['comment'] = self.comment
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.name is not None: body['name'] = self.name
-        if self.purpose is not None: body['purpose'] = self.purpose
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.name is not None:
+            body["name"] = self.name
+        if self.purpose is not None:
+            body["purpose"] = self.purpose
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), name=d.get('name', None), purpose=_enum(d, 'purpose', CredentialPurpose), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None))
-
-
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            comment=d.get("comment", None),
+            databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount),
+            name=d.get("name", None),
+            purpose=_enum(d, "purpose", CredentialPurpose),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+        )
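The credential request follows a one-of pattern: populate exactly one cloud block and leave the rest `None` so they never reach the wire. A sketch assuming the usual `role_arn` field on `AwsIamRole` and a `STORAGE` member on `CredentialPurpose` (placeholder ARN):

    from databricks.sdk.service.catalog import AwsIamRole, CreateCredentialRequest, CredentialPurpose

    req = CreateCredentialRequest(
        name="prod-storage-cred",
        aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/uc-access"),
        purpose=CredentialPurpose.STORAGE,
    )
    assert "azure_service_principal" not in req.as_dict()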
 @dataclass
 class CreateExternalLocation:
     name: str
     """Name of the external location."""
-
+
     url: str
     """Path URL of the external location."""
-
+
     credential_name: str
     """Name of the storage credential used with this location."""
-
+
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     enable_file_events: Optional[bool] = None
     """[Create:OPT Update:OPT] Whether to enable file events on this external location."""
-
+
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
-
+
     fallback: Optional[bool] = None
     """Indicates whether fallback mode is enabled for this external location. When fallback mode is
     enabled, the access to the location falls back to cluster credentials if UC credentials are
     not sufficient."""
-
+
     file_event_queue: Optional[FileEventQueue] = None
     """[Create:OPT Update:OPT] File event queue settings."""
-
+
     read_only: Optional[bool] = None
     """Indicates whether the external location is read-only."""
-
+
     skip_validation: Optional[bool] = None
     """Skips validation of the storage credential associated with the external location."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events
-        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.file_event_queue: body['file_event_queue'] = self.file_event_queue.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
-        if self.url is not None: body['url'] = self.url
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details.as_dict()
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
+        if self.url is not None:
+            body["url"] = self.url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events
-        if self.encryption_details: body['encryption_details'] = self.encryption_details
-        if self.fallback is not None: body['fallback'] = self.fallback
-        if self.file_event_queue: body['file_event_queue'] = self.file_event_queue
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
-        if self.url is not None: body['url'] = self.url
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details
+        if self.fallback is not None:
+            body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
+        if self.url is not None:
+            body["url"] = self.url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateExternalLocation:
         """Deserializes the CreateExternalLocation from a dictionary."""
-        return cls(comment=d.get('comment', None), credential_name=d.get('credential_name', None), enable_file_events=d.get('enable_file_events', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), fallback=d.get('fallback', None), file_event_queue=_from_dict(d, 'file_event_queue', FileEventQueue), name=d.get('name', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None), url=d.get('url', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            credential_name=d.get("credential_name", None),
+            enable_file_events=d.get("enable_file_events", None),
+            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
+            fallback=d.get("fallback", None),
+            file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue),
+            name=d.get("name", None),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+            url=d.get("url", None),
+        )
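Only `name`, `url`, and `credential_name` lack defaults here; everything else is opt-in (illustrative bucket and credential names):

    from databricks.sdk.service.catalog import CreateExternalLocation

    req = CreateExternalLocation(
        name="landing",
        url="s3://acme-landing/raw",
        credential_name="prod-storage-cred",
        read_only=True,
    )
    assert req.as_dict()["read_only"] is True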
 @dataclass
 class CreateFunction:
     name: str
     """Name of function, relative to parent schema."""
-
+
     catalog_name: str
     """Name of parent catalog."""
-
+
     schema_name: str
     """Name of parent schema relative to its parent catalog."""
-
+
     input_params: FunctionParameterInfos
-
+
     data_type: ColumnTypeName
     """Scalar function return data type."""
-
+
     full_data_type: str
     """Pretty printed function data type."""
-
+
     routine_body: CreateFunctionRoutineBody
     """Function language. When **EXTERNAL** is used, the language of the routine function should be
     specified in the __external_language__ field, and the __return_params__ of the function cannot
     be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
     **NO_SQL**."""
-
+
     routine_definition: str
     """Function body."""
-
+
     parameter_style: CreateFunctionParameterStyle
     """Function parameter style. **S** is the value for SQL."""
-
+
     is_deterministic: bool
     """Whether the function is deterministic."""
-
+
     sql_data_access: CreateFunctionSqlDataAccess
     """Function SQL data access."""
-
+
     is_null_call: bool
     """Function null call."""
-
+
     security_type: CreateFunctionSecurityType
     """Function security type."""
-
+
     specific_name: str
     """Specific name of the function; Reserved for future use."""
-
+
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     external_language: Optional[str] = None
     """External function language."""
-
+
     external_name: Optional[str] = None
     """External function name."""
-
+
     properties: Optional[str] = None
     """JSON-serialized key-value pair map, encoded (escaped) as a string."""
-
+
     return_params: Optional[FunctionParameterInfos] = None
     """Table function return parameters."""
-
+
     routine_dependencies: Optional[DependencyList] = None
     """Function dependencies."""
-
+
     sql_path: Optional[str] = None
     """List of schemes whose objects can be referenced without qualification."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateFunction into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.data_type is not None: body['data_type'] = self.data_type.value
-        if self.external_language is not None: body['external_language'] = self.external_language
-        if self.external_name is not None: body['external_name'] = self.external_name
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.input_params: body['input_params'] = self.input_params.as_dict()
-        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
-        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value
-        if self.properties is not None: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = self.return_params.as_dict()
-        if self.routine_body is not None: body['routine_body'] = self.routine_body.value
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict()
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.security_type is not None: body['security_type'] = self.security_type.value
-        if self.specific_name is not None: body['specific_name'] = self.specific_name
-        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access.value
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.data_type is not None:
+            body["data_type"] = self.data_type.value
+        if self.external_language is not None:
+            body["external_language"] = self.external_language
+        if self.external_name is not None:
+            body["external_name"] = self.external_name
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.input_params:
+            body["input_params"] = self.input_params.as_dict()
+        if self.is_deterministic is not None:
+            body["is_deterministic"] = self.is_deterministic
+        if self.is_null_call is not None:
+            body["is_null_call"] = self.is_null_call
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_style is not None:
+            body["parameter_style"] = self.parameter_style.value
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.return_params:
+            body["return_params"] = self.return_params.as_dict()
+        if self.routine_body is not None:
+            body["routine_body"] = self.routine_body.value
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.routine_dependencies:
+            body["routine_dependencies"] = self.routine_dependencies.as_dict()
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.security_type is not None:
+            body["security_type"] = self.security_type.value
+        if self.specific_name is not None:
+            body["specific_name"] = self.specific_name
+        if self.sql_data_access is not None:
+            body["sql_data_access"] = self.sql_data_access.value
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFunction into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.data_type is not None: body['data_type'] = self.data_type
-        if self.external_language is not None: body['external_language'] = self.external_language
-        if self.external_name is not None: body['external_name'] = self.external_name
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.input_params: body['input_params'] = self.input_params
-        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
-        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
-        if self.properties is not None: body['properties'] = self.properties
-        if self.return_params: body['return_params'] = self.return_params
-        if self.routine_body is not None: body['routine_body'] = self.routine_body
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.security_type is not None: body['security_type'] = self.security_type
-        if self.specific_name is not None: body['specific_name'] = self.specific_name
-        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.data_type is not None:
+            body["data_type"] = self.data_type
+        if self.external_language is not None:
+            body["external_language"] = self.external_language
+        if self.external_name is not None:
+            body["external_name"] = self.external_name
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.input_params:
+            body["input_params"] = self.input_params
+        if self.is_deterministic is not None:
+            body["is_deterministic"] = self.is_deterministic
+        if self.is_null_call is not None:
+            body["is_null_call"] = self.is_null_call
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_style is not None:
+            body["parameter_style"] = self.parameter_style
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.return_params:
+            body["return_params"] = self.return_params
+        if self.routine_body is not None:
+            body["routine_body"] = self.routine_body
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.routine_dependencies:
+            body["routine_dependencies"] = self.routine_dependencies
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.security_type is not None:
+            body["security_type"] = self.security_type
+        if self.specific_name is not None:
+            body["specific_name"] = self.specific_name
+        if self.sql_data_access is not None:
+            body["sql_data_access"] = self.sql_data_access
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateFunction:
         """Deserializes the CreateFunction from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), data_type=_enum(d, 'data_type', ColumnTypeName), external_language=d.get('external_language', None), external_name=d.get('external_name', None), full_data_type=d.get('full_data_type', None), input_params=_from_dict(d, 'input_params', FunctionParameterInfos), is_deterministic=d.get('is_deterministic', None), is_null_call=d.get('is_null_call', None), name=d.get('name', None), parameter_style=_enum(d, 'parameter_style', CreateFunctionParameterStyle), properties=d.get('properties', None), return_params=_from_dict(d, 'return_params', FunctionParameterInfos), routine_body=_enum(d, 'routine_body', CreateFunctionRoutineBody), routine_definition=d.get('routine_definition', None), routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList), schema_name=d.get('schema_name', None), security_type=_enum(d, 'security_type', CreateFunctionSecurityType), specific_name=d.get('specific_name', None), sql_data_access=_enum(d, 'sql_data_access', CreateFunctionSqlDataAccess), sql_path=d.get('sql_path', None))
-
-
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            data_type=_enum(d, "data_type", ColumnTypeName),
+            external_language=d.get("external_language", None),
+            external_name=d.get("external_name", None),
+            full_data_type=d.get("full_data_type", None),
+            input_params=_from_dict(d, "input_params", FunctionParameterInfos),
+            is_deterministic=d.get("is_deterministic", None),
+            is_null_call=d.get("is_null_call", None),
+            name=d.get("name", None),
+            parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle),
+            properties=d.get("properties", None),
+            return_params=_from_dict(d, "return_params", FunctionParameterInfos),
+            routine_body=_enum(d, "routine_body", CreateFunctionRoutineBody),
+            routine_definition=d.get("routine_definition", None),
+            routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList),
+            schema_name=d.get("schema_name", None),
+            security_type=_enum(d, "security_type", CreateFunctionSecurityType),
+            specific_name=d.get("specific_name", None),
+            sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess),
+            sql_path=d.get("sql_path", None),
+        )
self.parameter_style is not None: + body["parameter_style"] = self.parameter_style + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params + if self.routine_body is not None: + body["routine_body"] = self.routine_body + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access + if self.sql_path is not None: + body["sql_path"] = self.sql_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateFunction: """Deserializes the CreateFunction from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), data_type=_enum(d, 'data_type', ColumnTypeName), external_language=d.get('external_language', None), external_name=d.get('external_name', None), full_data_type=d.get('full_data_type', None), input_params=_from_dict(d, 'input_params', FunctionParameterInfos), is_deterministic=d.get('is_deterministic', None), is_null_call=d.get('is_null_call', None), name=d.get('name', None), parameter_style=_enum(d, 'parameter_style', CreateFunctionParameterStyle), properties=d.get('properties', None), return_params=_from_dict(d, 'return_params', FunctionParameterInfos), routine_body=_enum(d, 'routine_body', CreateFunctionRoutineBody), routine_definition=d.get('routine_definition', None), routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList), schema_name=d.get('schema_name', None), security_type=_enum(d, 'security_type', CreateFunctionSecurityType), specific_name=d.get('specific_name', None), sql_data_access=_enum(d, 'sql_data_access', CreateFunctionSqlDataAccess), sql_path=d.get('sql_path', None)) - - + return cls( + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + data_type=_enum(d, "data_type", ColumnTypeName), + external_language=d.get("external_language", None), + external_name=d.get("external_name", None), + full_data_type=d.get("full_data_type", None), + input_params=_from_dict(d, "input_params", FunctionParameterInfos), + is_deterministic=d.get("is_deterministic", None), + is_null_call=d.get("is_null_call", None), + name=d.get("name", None), + parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle), + properties=d.get("properties", None), + return_params=_from_dict(d, "return_params", FunctionParameterInfos), + routine_body=_enum(d, "routine_body", CreateFunctionRoutineBody), + routine_definition=d.get("routine_definition", None), + routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), + schema_name=d.get("schema_name", None), + security_type=_enum(d, "security_type", CreateFunctionSecurityType), + specific_name=d.get("specific_name", None), + sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess), + sql_path=d.get("sql_path", None), + ) class CreateFunctionParameterStyle(Enum): """Function parameter style. 
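[Editor's note: the serializers above drop None fields and convert enums via `.value`. A minimal sketch of the round trip, with illustrative values only, assuming the module imports as `databricks.sdk.service.catalog`:]

    from databricks.sdk.service.catalog import (
        ColumnTypeName, CreateFunction, CreateFunctionParameterStyle,
        CreateFunctionRoutineBody, CreateFunctionSecurityType,
        CreateFunctionSqlDataAccess, FunctionParameterInfos,
    )

    fn = CreateFunction(
        name="add_one", catalog_name="main", schema_name="default",
        input_params=FunctionParameterInfos(parameters=[]),
        data_type=ColumnTypeName.INT, full_data_type="int",
        routine_body=CreateFunctionRoutineBody.SQL, routine_definition="x + 1",
        parameter_style=CreateFunctionParameterStyle.S,
        is_deterministic=True, sql_data_access=CreateFunctionSqlDataAccess.CONTAINS_SQL,
        is_null_call=False, security_type=CreateFunctionSecurityType.DEFINER,
        specific_name="add_one",
    )
    body = fn.as_dict()                      # enums serialized as their string values
    assert CreateFunction.from_dict(body).name == "add_one"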

 class CreateFunctionParameterStyle(Enum):
     """Function parameter style.

     **S** is the value for SQL."""
-
-    S = 'S'
+
+    S = "S"
+

 @dataclass
 class CreateFunctionRequest:
     function_info: CreateFunction
     """Partial __FunctionInfo__ specifying the function to be created."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateFunctionRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_info: body['function_info'] = self.function_info.as_dict()
+        if self.function_info:
+            body["function_info"] = self.function_info.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateFunctionRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_info: body['function_info'] = self.function_info
+        if self.function_info:
+            body["function_info"] = self.function_info
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateFunctionRequest:
         """Deserializes the CreateFunctionRequest from a dictionary."""
-        return cls(function_info=_from_dict(d, 'function_info', CreateFunction))
-
-
+        return cls(function_info=_from_dict(d, "function_info", CreateFunction))


 class CreateFunctionRoutineBody(Enum):
@@ -1742,235 +2185,302 @@ class CreateFunctionRoutineBody(Enum):
     specified in the __external_language__ field, and the __return_params__ of the function cannot
     be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be
     **NO_SQL**."""
-
-    EXTERNAL = 'EXTERNAL'
-    SQL = 'SQL'
+
+    EXTERNAL = "EXTERNAL"
+    SQL = "SQL"
+

 class CreateFunctionSecurityType(Enum):
     """The security type of the function."""
-
-    DEFINER = 'DEFINER'
+
+    DEFINER = "DEFINER"
+

 class CreateFunctionSqlDataAccess(Enum):
     """Function SQL data access."""
-
-    CONTAINS_SQL = 'CONTAINS_SQL'
-    NO_SQL = 'NO_SQL'
-    READS_SQL_DATA = 'READS_SQL_DATA'
+
+    CONTAINS_SQL = "CONTAINS_SQL"
+    NO_SQL = "NO_SQL"
+    READS_SQL_DATA = "READS_SQL_DATA"
+

 @dataclass
 class CreateMetastore:
     name: str
     """The user-specified name of the metastore."""
-
+
     region: Optional[str] = None
     """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`)."""
-
+
     storage_root: Optional[str] = None
     """The storage root URL for metastore"""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.name is not None:
+            body["name"] = self.name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.region is not None: body['region'] = self.region
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.name is not None:
+            body["name"] = self.name
+        if self.region is not None:
+            body["region"] = self.region
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore:
         """Deserializes the CreateMetastore from a dictionary."""
-        return cls(name=d.get('name', None), region=d.get('region', None), storage_root=d.get('storage_root', None))
-
-
+        return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None))


 @dataclass
 class CreateMetastoreAssignment:
     metastore_id: str
     """The unique ID of the metastore."""
-
+
     default_catalog_name: str
     """The name of the default catalog in the metastore. This field is deprecated. Please use "Default
     Namespace API" to configure the default catalog for a Databricks workspace."""
-
+
     workspace_id: Optional[int] = None
     """A workspace ID."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateMetastoreAssignment:
         """Deserializes the CreateMetastoreAssignment from a dictionary."""
-        return cls(default_catalog_name=d.get('default_catalog_name', None), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None))
-
-
+        return cls(
+            default_catalog_name=d.get("default_catalog_name", None),
+            metastore_id=d.get("metastore_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )

 @dataclass
 class CreateMonitor:
     assets_dir: str
     """The directory to store monitoring assets (e.g. dashboard, metric tables)."""
-
+
     output_schema_name: str
     """Schema where output metric tables are created."""
-
+
     baseline_table_name: Optional[str] = None
     """Name of the baseline table from which drift metrics are computed. Columns in the monitored
     table should also be present in the baseline table."""
-
+
     custom_metrics: Optional[List[MonitorMetric]] = None
     """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived
     metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across
     time windows)."""
-
+
     data_classification_config: Optional[MonitorDataClassificationConfig] = None
     """The data classification config for the monitor."""
-
+
     inference_log: Optional[MonitorInferenceLog] = None
     """Configuration for monitoring inference logs."""
-
+
     notifications: Optional[MonitorNotifications] = None
     """The notification settings for the monitor."""
-
+
     schedule: Optional[MonitorCronSchedule] = None
     """The schedule for automatically updating and refreshing metric tables."""
-
+
     skip_builtin_dashboard: Optional[bool] = None
     """Whether to skip creating a default dashboard summarizing data quality metrics."""
-
+
     slicing_exprs: Optional[List[str]] = None
     """List of column expressions to slice data with for targeted analysis. The data is grouped by
     each expression independently, resulting in a separate slice for each predicate and its
     complements. For high-cardinality columns, only the top 100 unique values by frequency will
     generate slices."""
-
+
     snapshot: Optional[MonitorSnapshot] = None
     """Configuration for monitoring snapshot tables."""
-
+
     table_name: Optional[str] = None
     """Full name of the table."""
-
+
     time_series: Optional[MonitorTimeSeries] = None
     """Configuration for monitoring time series tables."""
-
+
     warehouse_id: Optional[str] = None
     """Optional argument to specify the warehouse for dashboard creation. If not specified, the first
     running warehouse will be used."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateMonitor into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics]
-        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config.as_dict()
-        if self.inference_log: body['inference_log'] = self.inference_log.as_dict()
-        if self.notifications: body['notifications'] = self.notifications.as_dict()
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.schedule: body['schedule'] = self.schedule.as_dict()
-        if self.skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
-        if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs]
-        if self.snapshot: body['snapshot'] = self.snapshot.as_dict()
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series.as_dict()
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.assets_dir is not None:
+            body["assets_dir"] = self.assets_dir
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics]
+        if self.data_classification_config:
+            body["data_classification_config"] = self.data_classification_config.as_dict()
+        if self.inference_log:
+            body["inference_log"] = self.inference_log.as_dict()
+        if self.notifications:
+            body["notifications"] = self.notifications.as_dict()
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.schedule:
+            body["schedule"] = self.schedule.as_dict()
+        if self.skip_builtin_dashboard is not None:
+            body["skip_builtin_dashboard"] = self.skip_builtin_dashboard
+        if self.slicing_exprs:
+            body["slicing_exprs"] = [v for v in self.slicing_exprs]
+        if self.snapshot:
+            body["snapshot"] = self.snapshot.as_dict()
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series.as_dict()
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateMonitor into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
-        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
-        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
-        if self.data_classification_config: body['data_classification_config'] = self.data_classification_config
-        if self.inference_log: body['inference_log'] = self.inference_log
-        if self.notifications: body['notifications'] = self.notifications
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.schedule: body['schedule'] = self.schedule
-        if self.skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
-        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
-        if self.snapshot: body['snapshot'] = self.snapshot
-        if self.table_name is not None: body['table_name'] = self.table_name
-        if self.time_series: body['time_series'] = self.time_series
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.assets_dir is not None:
+            body["assets_dir"] = self.assets_dir
+        if self.baseline_table_name is not None:
+            body["baseline_table_name"] = self.baseline_table_name
+        if self.custom_metrics:
+            body["custom_metrics"] = self.custom_metrics
+        if self.data_classification_config:
+            body["data_classification_config"] = self.data_classification_config
+        if self.inference_log:
+            body["inference_log"] = self.inference_log
+        if self.notifications:
+            body["notifications"] = self.notifications
+        if self.output_schema_name is not None:
+            body["output_schema_name"] = self.output_schema_name
+        if self.schedule:
+            body["schedule"] = self.schedule
+        if self.skip_builtin_dashboard is not None:
+            body["skip_builtin_dashboard"] = self.skip_builtin_dashboard
+        if self.slicing_exprs:
+            body["slicing_exprs"] = self.slicing_exprs
+        if self.snapshot:
+            body["snapshot"] = self.snapshot
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
+        if self.time_series:
+            body["time_series"] = self.time_series
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateMonitor:
         """Deserializes the CreateMonitor from a dictionary."""
-        return cls(assets_dir=d.get('assets_dir', None), baseline_table_name=d.get('baseline_table_name', None), custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric), data_classification_config=_from_dict(d, 'data_classification_config', MonitorDataClassificationConfig), inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog), notifications=_from_dict(d, 'notifications', MonitorNotifications), output_schema_name=d.get('output_schema_name', None), schedule=_from_dict(d, 'schedule', MonitorCronSchedule), skip_builtin_dashboard=d.get('skip_builtin_dashboard', None), slicing_exprs=d.get('slicing_exprs', None), snapshot=_from_dict(d, 'snapshot', MonitorSnapshot), table_name=d.get('table_name', None), time_series=_from_dict(d, 'time_series', MonitorTimeSeries), warehouse_id=d.get('warehouse_id', None))
-
-
-
-
-
+        return cls(
+            assets_dir=d.get("assets_dir", None),
+            baseline_table_name=d.get("baseline_table_name", None),
+            custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric),
+            data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig),
+            inference_log=_from_dict(d, "inference_log", MonitorInferenceLog),
+            notifications=_from_dict(d, "notifications", MonitorNotifications),
+            output_schema_name=d.get("output_schema_name", None),
+            schedule=_from_dict(d, "schedule", MonitorCronSchedule),
+            skip_builtin_dashboard=d.get("skip_builtin_dashboard", None),
+            slicing_exprs=d.get("slicing_exprs", None),
+            snapshot=_from_dict(d, "snapshot", MonitorSnapshot),
+            table_name=d.get("table_name", None),
+            time_series=_from_dict(d, "time_series", MonitorTimeSeries),
+            warehouse_id=d.get("warehouse_id", None),
+        )
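[Editor's note: a sketch of assembling a monitor request with a time-series profile; the table, schema, and path names are placeholders:]

    from databricks.sdk.service.catalog import CreateMonitor, MonitorTimeSeries

    monitor = CreateMonitor(
        assets_dir="/Workspace/Users/someone@example.com/monitors",
        output_schema_name="main.monitoring",
        table_name="main.default.events",
        time_series=MonitorTimeSeries(timestamp_col="event_ts", granularities=["1 day"]),
        slicing_exprs=["region", "plan_tier"],
    )
    body = monitor.as_dict()   # nested configs serialized via their own as_dict()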

 @dataclass
 class CreateRegisteredModelRequest:
     catalog_name: str
     """The name of the catalog where the schema and the registered model reside"""
-
+
     schema_name: str
     """The name of the schema where the registered model resides"""
-
+
     name: str
     """The name of the registered model"""
-
+
     comment: Optional[str] = None
     """The comment attached to the registered model"""
-
+
     storage_location: Optional[str] = None
     """The storage location on the cloud under which model version data files are stored"""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateRegisteredModelRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateRegisteredModelRequest:
         """Deserializes the CreateRegisteredModelRequest from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), name=d.get('name', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None))
-
-
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            schema_name=d.get("schema_name", None),
+            storage_location=d.get("storage_location", None),
+        )


 @dataclass
@@ -1989,434 +2499,558 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
         return cls()
-
-

 @dataclass
 class CreateSchema:
     name: str
     """Name of schema, relative to parent catalog."""
-
+
     catalog_name: str
     """Name of parent catalog."""
-
+
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
-    properties: Optional[Dict[str,str]] = None
+
+    properties: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     storage_root: Optional[str] = None
     """Storage root URL for managed tables within schema."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateSchema into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.properties: body['properties'] = self.properties
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.properties:
+            body["properties"] = self.properties
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateSchema into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.properties: body['properties'] = self.properties
-        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.properties:
+            body["properties"] = self.properties
+        if self.storage_root is not None:
+            body["storage_root"] = self.storage_root
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateSchema:
         """Deserializes the CreateSchema from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), name=d.get('name', None), properties=d.get('properties', None), storage_root=d.get('storage_root', None))
-
-
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            properties=d.get("properties", None),
+            storage_root=d.get("storage_root", None),
+        )
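[Editor's note: `properties` passes through as a plain string map, so the round trip preserves it verbatim. A minimal sketch with illustrative names:]

    from databricks.sdk.service.catalog import CreateSchema

    schema = CreateSchema(
        name="bronze",
        catalog_name="main",
        comment="Raw ingestion tables",
        properties={"owner_team": "data-eng"},
    )
    assert CreateSchema.from_dict(schema.as_dict()).properties == {"owner_team": "data-eng"}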

 @dataclass
 class CreateStorageCredential:
     name: str
     """The credential name. The name must be unique within the metastore."""
-
+
     aws_iam_role: Optional[AwsIamRoleRequest] = None
     """The AWS IAM role configuration."""
-
+
     azure_managed_identity: Optional[AzureManagedIdentityRequest] = None
     """The Azure managed identity configuration."""
-
+
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration."""
-
+
     cloudflare_api_token: Optional[CloudflareApiToken] = None
     """The Cloudflare API token configuration."""
-
+
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None
     """The Databricks managed GCP service account configuration."""
-
+
     read_only: Optional[bool] = None
     """Whether the storage credential is only usable for read operations."""
-
+
     skip_validation: Optional[bool] = None
     """Supplying true to this argument skips validation of the created credential."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
-        if self.comment is not None: body['comment'] = self.comment
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.name is not None: body['name'] = self.name
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.name is not None:
+            body["name"] = self.name
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.skip_validation is not None:
+            body["skip_validation"] = self.skip_validation
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential:
         """Deserializes the CreateStorageCredential from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityRequest), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountRequest), name=d.get('name', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None))
-
-
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
+            comment=d.get("comment", None),
+            databricks_gcp_service_account=_from_dict(
+                d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest
+            ),
+            name=d.get("name", None),
+            read_only=d.get("read_only", None),
+            skip_validation=d.get("skip_validation", None),
+        )
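[Editor's note: typically only one cloud-specific block is populated per credential. A sketch with an AWS IAM role; the role ARN is illustrative:]

    from databricks.sdk.service.catalog import AwsIamRoleRequest, CreateStorageCredential

    cred = CreateStorageCredential(
        name="prod-data-access",
        aws_iam_role=AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/uc-access"),
        read_only=True,
        skip_validation=False,
    )
    body = cred.as_dict()   # {"name": ..., "aws_iam_role": {"role_arn": ...}, "read_only": True, ...}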

 @dataclass
 class CreateTableConstraint:
     full_name_arg: str
     """The full name of the table referenced by the constraint."""
-
+
     constraint: TableConstraint
     """A table constraint, as defined by *one* of the following fields being set:
     __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateTableConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.constraint: body['constraint'] = self.constraint.as_dict()
-        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
+        if self.constraint:
+            body["constraint"] = self.constraint.as_dict()
+        if self.full_name_arg is not None:
+            body["full_name_arg"] = self.full_name_arg
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.constraint: body['constraint'] = self.constraint
-        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
+        if self.constraint:
+            body["constraint"] = self.constraint
+        if self.full_name_arg is not None:
+            body["full_name_arg"] = self.full_name_arg
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateTableConstraint:
         """Deserializes the CreateTableConstraint from a dictionary."""
-        return cls(constraint=_from_dict(d, 'constraint', TableConstraint), full_name_arg=d.get('full_name_arg', None))
-
-
+        return cls(constraint=_from_dict(d, "constraint", TableConstraint), full_name_arg=d.get("full_name_arg", None))


 @dataclass
 class CreateVolumeRequestContent:
     catalog_name: str
     """The name of the catalog where the schema and the volume are"""
-
+
     schema_name: str
     """The name of the schema where the volume is"""
-
+
     name: str
     """The name of the volume"""
-
+
     volume_type: VolumeType
     """The type of the volume. An external volume is located in the specified external location. A
     managed volume is located in the default location which is specified by the parent schema, or
     the parent catalog, or the Metastore. [Learn more]

     [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""
-
+
     comment: Optional[str] = None
     """The comment attached to the volume"""
-
+
     storage_location: Optional[str] = None
     """The storage location on the cloud"""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateVolumeRequestContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.volume_type is not None: body['volume_type'] = self.volume_type.value
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.volume_type is not None:
+            body["volume_type"] = self.volume_type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.volume_type is not None: body['volume_type'] = self.volume_type
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.volume_type is not None:
+            body["volume_type"] = self.volume_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateVolumeRequestContent:
         """Deserializes the CreateVolumeRequestContent from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), name=d.get('name', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None), volume_type=_enum(d, 'volume_type', VolumeType))
-
-
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            schema_name=d.get("schema_name", None),
+            storage_location=d.get("storage_location", None),
+            volume_type=_enum(d, "volume_type", VolumeType),
+        )

 @dataclass
 class CredentialInfo:
     aws_iam_role: Optional[AwsIamRole] = None
     """The AWS IAM role configuration"""
-
+
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
-
+
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
-
+
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+
     created_at: Optional[int] = None
     """Time at which this credential was created, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of credential creator."""
-
+
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
-
+
     full_name: Optional[str] = None
     """The full name of the credential."""
-
+
     id: Optional[str] = None
     """The unique identifier of the credential."""
-
+
     isolation_mode: Optional[IsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of
     workspaces."""
-
+
     metastore_id: Optional[str] = None
     """Unique identifier of the parent metastore."""
-
+
     name: Optional[str] = None
     """The credential name. The name must be unique among storage and service credentials within the
     metastore."""
-
+
     owner: Optional[str] = None
     """Username of current owner of credential."""
-
+
     purpose: Optional[CredentialPurpose] = None
     """Indicates the purpose of the credential."""
-
+
     read_only: Optional[bool] = None
     """Whether the credential is usable only for read operations. Only applicable when purpose is
     **STORAGE**."""
-
+
     updated_at: Optional[int] = None
     """Time at which this credential was last modified, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of user who last modified the credential."""
-
+
     used_for_managed_storage: Optional[bool] = None
     """Whether this credential is the current metastore's root storage credential. Only applicable
     when purpose is **STORAGE**."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.purpose is not None: body['purpose'] = self.purpose.value
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.used_for_managed_storage is not None: body['used_for_managed_storage'] = self.used_for_managed_storage
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.purpose is not None:
+            body["purpose"] = self.purpose.value
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.purpose is not None: body['purpose'] = self.purpose
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.used_for_managed_storage is not None: body['used_for_managed_storage'] = self.used_for_managed_storage
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.purpose is not None:
+            body["purpose"] = self.purpose
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), full_name=d.get('full_name', None), id=d.get('id', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), purpose=_enum(d, 'purpose', CredentialPurpose), read_only=d.get('read_only', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), used_for_managed_storage=d.get('used_for_managed_storage', None))
-
-
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount),
+            full_name=d.get("full_name", None),
+            id=d.get("id", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            purpose=_enum(d, "purpose", CredentialPurpose),
+            read_only=d.get("read_only", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            used_for_managed_storage=d.get("used_for_managed_storage", None),
+        )


 class CredentialPurpose(Enum):
-
-
-    SERVICE = 'SERVICE'
-    STORAGE = 'STORAGE'
+
+    SERVICE = "SERVICE"
+    STORAGE = "STORAGE"
+

 class CredentialType(Enum):
     """Next Id: 12"""
-
-    BEARER_TOKEN = 'BEARER_TOKEN'
-    OAUTH_ACCESS_TOKEN = 'OAUTH_ACCESS_TOKEN'
-    OAUTH_M2M = 'OAUTH_M2M'
-    OAUTH_REFRESH_TOKEN = 'OAUTH_REFRESH_TOKEN'
-    OAUTH_RESOURCE_OWNER_PASSWORD = 'OAUTH_RESOURCE_OWNER_PASSWORD'
-    OAUTH_U2M = 'OAUTH_U2M'
-    OAUTH_U2M_MAPPING = 'OAUTH_U2M_MAPPING'
-    OIDC_TOKEN = 'OIDC_TOKEN'
-    PEM_PRIVATE_KEY = 'PEM_PRIVATE_KEY'
-    SERVICE_CREDENTIAL = 'SERVICE_CREDENTIAL'
-    UNKNOWN_CREDENTIAL_TYPE = 'UNKNOWN_CREDENTIAL_TYPE'
-    USERNAME_PASSWORD = 'USERNAME_PASSWORD'
+
+    BEARER_TOKEN = "BEARER_TOKEN"
+    OAUTH_ACCESS_TOKEN = "OAUTH_ACCESS_TOKEN"
+    OAUTH_M2M = "OAUTH_M2M"
+    OAUTH_REFRESH_TOKEN = "OAUTH_REFRESH_TOKEN"
+    OAUTH_RESOURCE_OWNER_PASSWORD = "OAUTH_RESOURCE_OWNER_PASSWORD"
+    OAUTH_U2M = "OAUTH_U2M"
+    OAUTH_U2M_MAPPING = "OAUTH_U2M_MAPPING"
+    OIDC_TOKEN = "OIDC_TOKEN"
+    PEM_PRIVATE_KEY = "PEM_PRIVATE_KEY"
+    SERVICE_CREDENTIAL = "SERVICE_CREDENTIAL"
+    UNKNOWN_CREDENTIAL_TYPE = "UNKNOWN_CREDENTIAL_TYPE"
+    USERNAME_PASSWORD = "USERNAME_PASSWORD"
+

 @dataclass
 class CredentialValidationResult:
     message: Optional[str] = None
     """Error message that is present when the result is not **PASS**."""
-
+
     result: Optional[ValidateCredentialResult] = None
     """The results of the tested operation."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.result is not None: body['result'] = self.result.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.result is not None:
+            body["result"] = self.result.value
        return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.message is not None: body['message'] = self.message
-        if self.result is not None: body['result'] = self.result
+        if self.message is not None:
+            body["message"] = self.message
+        if self.result is not None:
+            body["result"] = self.result
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CredentialValidationResult:
         """Deserializes the CredentialValidationResult from a dictionary."""
-        return cls(message=d.get('message', None), result=_enum(d, 'result', ValidateCredentialResult))
-
-
+        return cls(message=d.get("message", None), result=_enum(d, "result", ValidateCredentialResult))
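[Editor's note: `message` is only meaningful when `result` is not PASS. A hedged sketch of scanning validation results, assuming a list obtained from a validation call:]

    from typing import List
    from databricks.sdk.service.catalog import CredentialValidationResult, ValidateCredentialResult

    def failures(results: List[CredentialValidationResult]) -> List[str]:
        """Collect human-readable messages for any non-PASS checks."""
        return [
            r.message or "<no message>"
            for r in results
            if r.result is not ValidateCredentialResult.PASS
        ]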

 class DataSourceFormat(Enum):
     """Data source format"""
-
-    AVRO = 'AVRO'
-    BIGQUERY_FORMAT = 'BIGQUERY_FORMAT'
-    CSV = 'CSV'
-    DATABRICKS_FORMAT = 'DATABRICKS_FORMAT'
-    DELTA = 'DELTA'
-    DELTASHARING = 'DELTASHARING'
-    HIVE_CUSTOM = 'HIVE_CUSTOM'
-    HIVE_SERDE = 'HIVE_SERDE'
-    JSON = 'JSON'
-    MYSQL_FORMAT = 'MYSQL_FORMAT'
-    NETSUITE_FORMAT = 'NETSUITE_FORMAT'
-    ORC = 'ORC'
-    PARQUET = 'PARQUET'
-    POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT'
-    REDSHIFT_FORMAT = 'REDSHIFT_FORMAT'
-    SALESFORCE_FORMAT = 'SALESFORCE_FORMAT'
-    SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT'
-    SQLDW_FORMAT = 'SQLDW_FORMAT'
-    SQLSERVER_FORMAT = 'SQLSERVER_FORMAT'
-    TEXT = 'TEXT'
-    UNITY_CATALOG = 'UNITY_CATALOG'
-    VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT'
-    WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT'
+
+    AVRO = "AVRO"
+    BIGQUERY_FORMAT = "BIGQUERY_FORMAT"
+    CSV = "CSV"
+    DATABRICKS_FORMAT = "DATABRICKS_FORMAT"
+    DELTA = "DELTA"
+    DELTASHARING = "DELTASHARING"
+    HIVE_CUSTOM = "HIVE_CUSTOM"
+    HIVE_SERDE = "HIVE_SERDE"
+    JSON = "JSON"
+    MYSQL_FORMAT = "MYSQL_FORMAT"
+    NETSUITE_FORMAT = "NETSUITE_FORMAT"
+    ORC = "ORC"
+    PARQUET = "PARQUET"
+    POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT"
+    REDSHIFT_FORMAT = "REDSHIFT_FORMAT"
+    SALESFORCE_FORMAT = "SALESFORCE_FORMAT"
+    SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT"
+    SQLDW_FORMAT = "SQLDW_FORMAT"
+    SQLSERVER_FORMAT = "SQLSERVER_FORMAT"
+    TEXT = "TEXT"
+    UNITY_CATALOG = "UNITY_CATALOG"
+    VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT"
+    WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT"
+

 @dataclass
 class DatabricksGcpServiceAccount:
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
-
+
     credential_id: Optional[str] = None
     """The Databricks internal ID that represents this managed identity. This field is only used to
     persist the credential_id once it is fetched from the credentials manager - as we only use the
     protobuf serializer to store credentials, this ID gets persisted to the database"""
-
+
     email: Optional[str] = None
     """The email of the service account."""
-
+
     private_key_id: Optional[str] = None
     """The ID that represents the private key for this Service Account"""
-
+
     def as_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
-        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
+        if self.private_key_id is not None:
+            body["private_key_id"] = self.private_key_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
-        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
+        if self.private_key_id is not None:
+            body["private_key_id"] = self.private_key_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccount:
         """Deserializes the DatabricksGcpServiceAccount from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None), email=d.get('email', None), private_key_id=d.get('private_key_id', None))
-
-
+        return cls(
+            credential_id=d.get("credential_id", None),
+            email=d.get("email", None),
+            private_key_id=d.get("private_key_id", None),
+        )


 @dataclass
@@ -2435,50 +3069,38 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountRequest:
         """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary."""
         return cls()
-
-

 @dataclass
 class DatabricksGcpServiceAccountResponse:
     credential_id: Optional[str] = None
     """The Databricks internal ID that represents this service account. This is an output-only field."""
-
+
     email: Optional[str] = None
     """The email of the service account. This is an output-only field."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.email is not None:
+            body["email"] = self.email
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse:
         """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None), email=d.get('email', None))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return cls(credential_id=d.get("credential_id", None), email=d.get("email", None))


 @dataclass
@@ -2497,17 +3119,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse:
         """Deserializes the DeleteAliasResponse from a dictionary."""
         return cls()
-
-
-
-
-
-
-
-
-
-
-


 @dataclass
@@ -2526,29 +3137,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialResponse:
         """Deserializes the DeleteCredentialResponse from a dictionary."""
         return cls()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-


 @dataclass
@@ -2567,120 +3155,104 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
         return cls()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-

 @dataclass
 class DeltaRuntimePropertiesKvPairs:
     """Properties pertaining to the current state of the delta table as given by the commit server.
     This does not contain **delta.*** (input) properties in __TableInfo.properties__."""
-
-    delta_runtime_properties: Dict[str,str]
+
+    delta_runtime_properties: Dict[str, str]
     """A map of key-value properties attached to the securable."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DeltaRuntimePropertiesKvPairs into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
+        if self.delta_runtime_properties:
+            body["delta_runtime_properties"] = self.delta_runtime_properties
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
+        if self.delta_runtime_properties:
+            body["delta_runtime_properties"] = self.delta_runtime_properties
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DeltaRuntimePropertiesKvPairs:
         """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary."""
-        return cls(delta_runtime_properties=d.get('delta_runtime_properties', None))
-
-
+        return cls(delta_runtime_properties=d.get("delta_runtime_properties", None))


 class DeltaSharingScopeEnum(Enum):
-
-
-    INTERNAL = 'INTERNAL'
-    INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL'
+
+    INTERNAL = "INTERNAL"
+    INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL"
+

 @dataclass
 class Dependency:
     """A dependency of a SQL object. Either the __table__ field or the __function__ field must be
     defined."""
-
+
     function: Optional[FunctionDependency] = None
     """A function that is dependent on a SQL object."""
-
+
     table: Optional[TableDependency] = None
     """A table that is dependent on a SQL object."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Dependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function: body['function'] = self.function.as_dict()
-        if self.table: body['table'] = self.table.as_dict()
+        if self.function:
+            body["function"] = self.function.as_dict()
+        if self.table:
+            body["table"] = self.table.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Dependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function: body['function'] = self.function
-        if self.table: body['table'] = self.table
+        if self.function:
+            body["function"] = self.function
+        if self.table:
+            body["table"] = self.table
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Dependency:
         """Deserializes the Dependency from a dictionary."""
-        return cls(function=_from_dict(d, 'function', FunctionDependency), table=_from_dict(d, 'table', TableDependency))
-
-
+        return cls(
+            function=_from_dict(d, "function", FunctionDependency), table=_from_dict(d, "table", TableDependency)
+        )


 @dataclass
 class DependencyList:
     """A list of dependencies."""
-
+
     dependencies: Optional[List[Dependency]] = None
     """Array of dependencies."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DependencyList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies]
+        if self.dependencies:
+            body["dependencies"] = [v.as_dict() for v in self.dependencies]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DependencyList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dependencies: body['dependencies'] = self.dependencies
+        if self.dependencies:
+            body["dependencies"] = self.dependencies
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DependencyList:
         """Deserializes the DependencyList from a dictionary."""
-        return cls(dependencies=_repeated_dict(d, 'dependencies', Dependency))
-
-
-
-
+        return cls(dependencies=_repeated_dict(d, "dependencies", Dependency))


 @dataclass
@@ -2699,8 +3271,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DisableResponse:
         """Deserializes the DisableResponse from a dictionary."""
         return cls()
-
-


 @dataclass
@@ -2708,183 +3278,215 @@ class EffectivePermissionsList:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     privilege_assignments: Optional[List[EffectivePrivilegeAssignment]] = None
     """The privileges conveyed to each principal (either directly or via inheritance)"""
-
+
     def as_dict(self) -> dict:
         """Serializes the EffectivePermissionsList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.privilege_assignments:
+            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.privilege_assignments:
+            body["privilege_assignments"] = self.privilege_assignments
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> EffectivePermissionsList:
         """Deserializes the EffectivePermissionsList from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), privilege_assignments=_repeated_dict(d, 'privilege_assignments', EffectivePrivilegeAssignment))
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            privilege_assignments=_repeated_dict(d, "privilege_assignments", EffectivePrivilegeAssignment),
+        )
If there was no inheritance, this field is left blank.""" - + def as_dict(self) -> dict: """Serializes the EffectivePredictiveOptimizationFlag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name - if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type.value - if self.value is not None: body['value'] = self.value.value + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type.value + if self.value is not None: + body["value"] = self.value.value return body def as_shallow_dict(self) -> dict: """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name - if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type - if self.value is not None: body['value'] = self.value + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag: """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary.""" - return cls(inherited_from_name=d.get('inherited_from_name', None), inherited_from_type=_enum(d, 'inherited_from_type', EffectivePredictiveOptimizationFlagInheritedFromType), value=_enum(d, 'value', EnablePredictiveOptimization)) - - + return cls( + inherited_from_name=d.get("inherited_from_name", None), + inherited_from_type=_enum(d, "inherited_from_type", EffectivePredictiveOptimizationFlagInheritedFromType), + value=_enum(d, "value", EnablePredictiveOptimization), + ) class EffectivePredictiveOptimizationFlagInheritedFromType(Enum): """The type of the object from which the flag was inherited. If there was no inheritance, this field is left blank.""" - - CATALOG = 'CATALOG' - SCHEMA = 'SCHEMA' + + CATALOG = "CATALOG" + SCHEMA = "SCHEMA" + @dataclass class EffectivePrivilege: inherited_from_name: Optional[str] = None """The full name of the object that conveys this privilege via inheritance. This field is omitted when privilege is not inherited (it's assigned to the securable itself).""" - + inherited_from_type: Optional[SecurableType] = None """The type of the object that conveys this privilege via inheritance. 
This field is omitted when privilege is not inherited (it's assigned to the securable itself).""" - + privilege: Optional[Privilege] = None """The privilege assigned to the principal.""" - + def as_dict(self) -> dict: """Serializes the EffectivePrivilege into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name - if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type.value - if self.privilege is not None: body['privilege'] = self.privilege.value + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type.value + if self.privilege is not None: + body["privilege"] = self.privilege.value return body def as_shallow_dict(self) -> dict: """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name - if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type - if self.privilege is not None: body['privilege'] = self.privilege + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type + if self.privilege is not None: + body["privilege"] = self.privilege return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilege: """Deserializes the EffectivePrivilege from a dictionary.""" - return cls(inherited_from_name=d.get('inherited_from_name', None), inherited_from_type=_enum(d, 'inherited_from_type', SecurableType), privilege=_enum(d, 'privilege', Privilege)) - - + return cls( + inherited_from_name=d.get("inherited_from_name", None), + inherited_from_type=_enum(d, "inherited_from_type", SecurableType), + privilege=_enum(d, "privilege", Privilege), + ) @dataclass class EffectivePrivilegeAssignment: principal: Optional[str] = None """The principal (user email address or group name).""" - + privileges: Optional[List[EffectivePrivilege]] = None """The privileges conveyed to the principal (either directly or via inheritance).""" - + def as_dict(self) -> dict: """Serializes the EffectivePrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.privileges: body['privileges'] = [v.as_dict() for v in self.privileges] + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = [v.as_dict() for v in self.privileges] return body def as_shallow_dict(self) -> dict: """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.privileges: body['privileges'] = self.privileges + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = self.privileges return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilegeAssignment: """Deserializes the EffectivePrivilegeAssignment from a dictionary.""" - return cls(principal=d.get('principal', None), privileges=_repeated_dict(d, 'privileges', EffectivePrivilege)) - - + return 
cls(principal=d.get("principal", None), privileges=_repeated_dict(d, "privileges", EffectivePrivilege)) class EnablePredictiveOptimization(Enum): - - - DISABLE = 'DISABLE' - ENABLE = 'ENABLE' - INHERIT = 'INHERIT' + + DISABLE = "DISABLE" + ENABLE = "ENABLE" + INHERIT = "INHERIT" + @dataclass class EnableRequest: catalog_name: Optional[str] = None """The catalog in which the system schema is to be enabled.""" - + metastore_id: Optional[str] = None """The metastore ID under which the system schema lives.""" - + schema_name: Optional[str] = None """Full name of the system schema.""" - + def as_dict(self) -> dict: """Serializes the EnableRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableRequest: """Deserializes the EnableRequest from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), metastore_id=d.get('metastore_id', None), schema_name=d.get('schema_name', None)) - - + return cls( + catalog_name=d.get("catalog_name", None), + metastore_id=d.get("metastore_id", None), + schema_name=d.get("schema_name", None), + ) @dataclass @@ -2903,38 +3505,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: """Deserializes the EnableResponse from a dictionary.""" return cls() - - @dataclass class EncryptionDetails: """Encryption options that apply to clients connecting to cloud storage.""" - + sse_encryption_details: Optional[SseEncryptionDetails] = None """Server-Side Encryption properties for clients communicating with AWS S3.""" - + def as_dict(self) -> dict: """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details.as_dict() + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: """Deserializes the EncryptionDetails from a dictionary.""" - return cls(sse_encryption_details=_from_dict(d, 'sse_encryption_details', SseEncryptionDetails)) - - - - - + return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails))
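A minimal usage sketch (not part of the generated module; the identifiers are hypothetical): as_dict() serializes only the fields that are set, so a partially populated EnableRequest produces a minimal JSON body, and from_dict() is its inverse.

def _enable_request_sketch() -> None:
    # catalog_name is left as None, so it is omitted from the serialized body.
    req = EnableRequest(metastore_id="abc-123", schema_name="access")
    assert req.as_dict() == {"metastore_id": "abc-123", "schema_name": "access"}
    # Round-trip: from_dict() reconstructs an equal dataclass instance.
    assert EnableRequest.from_dict(req.as_dict()) == req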
@dataclass @@ -2942,259 +3539,345 @@ class ExternalLocationInfo: browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this external location was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of external location creator.""" - + credential_id: Optional[str] = None """Unique ID of the location's storage credential.""" - + credential_name: Optional[str] = None """Name of the storage credential used with this location.""" - + enable_file_events: Optional[bool] = None """[Create:OPT Update:OPT] Whether to enable file events on this external location.""" - + encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" - + fallback: Optional[bool] = None """Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient.""" - + file_event_queue: Optional[FileEventQueue] = None """[Create:OPT Update:OPT] File event queue settings.""" - + isolation_mode: Optional[IsolationMode] = None - + metastore_id: Optional[str] = None """Unique identifier of metastore hosting the external location.""" - + name: Optional[str] = None """Name of the external location.""" - + owner: Optional[str] = None """The owner of the external location.""" - + read_only: Optional[bool] = None """Indicates whether the external location is read-only.""" - + updated_at: Optional[int] = None """Time at which this external location was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified the external location.""" - + url: Optional[str] = None """Path URL of the external location.""" - + def as_dict(self) -> dict: """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.credential_name is not None: body['credential_name'] = self.credential_name - if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events - if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() - if self.fallback is not None: body['fallback'] = self.fallback - if self.file_event_queue: body['file_event_queue'] = self.file_event_queue.as_dict() - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.updated_at
is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by - if self.url is not None: body['url'] = self.url + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + body["enable_file_events"] = self.enable_file_events + if self.encryption_details: + body["encryption_details"] = self.encryption_details.as_dict() + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue.as_dict() + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.credential_name is not None: body['credential_name'] = self.credential_name - if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events - if self.encryption_details: body['encryption_details'] = self.encryption_details - if self.fallback is not None: body['fallback'] = self.fallback - if self.file_event_queue: body['file_event_queue'] = self.file_event_queue - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by - if self.url is not None: body['url'] = self.url + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + body["enable_file_events"] = self.enable_file_events + if 
self.encryption_details: + body["encryption_details"] = self.encryption_details + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo: """Deserializes the ExternalLocationInfo from a dictionary.""" - return cls(browse_only=d.get('browse_only', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), credential_id=d.get('credential_id', None), credential_name=d.get('credential_name', None), enable_file_events=d.get('enable_file_events', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), fallback=d.get('fallback', None), file_event_queue=_from_dict(d, 'file_event_queue', FileEventQueue), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), url=d.get('url', None)) - - + return cls( + browse_only=d.get("browse_only", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + credential_id=d.get("credential_id", None), + credential_name=d.get("credential_name", None), + enable_file_events=d.get("enable_file_events", None), + encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), + fallback=d.get("fallback", None), + file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + url=d.get("url", None), + ) @dataclass class FailedStatus: """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the ONLINE_PIPELINE_FAILED state.""" - + last_processed_commit_version: Optional[int] = None """The last source table Delta version that was synced to the online table. Note that this Delta version may only be partially synced to the online table. Only populated if the table is still online and available for serving.""" - + timestamp: Optional[str] = None """The timestamp of the last time any data was synchronized from the source table to the online table. 
Only populated if the table is still online and available for serving.""" - + def as_dict(self) -> dict: """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the FailedStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FailedStatus: """Deserializes the FailedStatus from a dictionary.""" - return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None)) - - + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=d.get("timestamp", None), + ) @dataclass class FileEventQueue: managed_aqs: Optional[AzureQueueStorage] = None - + managed_pubsub: Optional[GcpPubsub] = None - + managed_sqs: Optional[AwsSqsQueue] = None - + provided_aqs: Optional[AzureQueueStorage] = None - + provided_pubsub: Optional[GcpPubsub] = None - + provided_sqs: Optional[AwsSqsQueue] = None - + def as_dict(self) -> dict: """Serializes the FileEventQueue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_aqs: body['managed_aqs'] = self.managed_aqs.as_dict() - if self.managed_pubsub: body['managed_pubsub'] = self.managed_pubsub.as_dict() - if self.managed_sqs: body['managed_sqs'] = self.managed_sqs.as_dict() - if self.provided_aqs: body['provided_aqs'] = self.provided_aqs.as_dict() - if self.provided_pubsub: body['provided_pubsub'] = self.provided_pubsub.as_dict() - if self.provided_sqs: body['provided_sqs'] = self.provided_sqs.as_dict() + if self.managed_aqs: + body["managed_aqs"] = self.managed_aqs.as_dict() + if self.managed_pubsub: + body["managed_pubsub"] = self.managed_pubsub.as_dict() + if self.managed_sqs: + body["managed_sqs"] = self.managed_sqs.as_dict() + if self.provided_aqs: + body["provided_aqs"] = self.provided_aqs.as_dict() + if self.provided_pubsub: + body["provided_pubsub"] = self.provided_pubsub.as_dict() + if self.provided_sqs: + body["provided_sqs"] = self.provided_sqs.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the FileEventQueue into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_aqs: body['managed_aqs'] = self.managed_aqs - if self.managed_pubsub: body['managed_pubsub'] = self.managed_pubsub - if self.managed_sqs: body['managed_sqs'] = self.managed_sqs - if self.provided_aqs: body['provided_aqs'] = self.provided_aqs - if self.provided_pubsub: body['provided_pubsub'] = self.provided_pubsub - if self.provided_sqs: body['provided_sqs'] = self.provided_sqs + if self.managed_aqs: + body["managed_aqs"] = self.managed_aqs + if 
self.managed_pubsub: + body["managed_pubsub"] = self.managed_pubsub + if self.managed_sqs: + body["managed_sqs"] = self.managed_sqs + if self.provided_aqs: + body["provided_aqs"] = self.provided_aqs + if self.provided_pubsub: + body["provided_pubsub"] = self.provided_pubsub + if self.provided_sqs: + body["provided_sqs"] = self.provided_sqs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileEventQueue: """Deserializes the FileEventQueue from a dictionary.""" - return cls(managed_aqs=_from_dict(d, 'managed_aqs', AzureQueueStorage), managed_pubsub=_from_dict(d, 'managed_pubsub', GcpPubsub), managed_sqs=_from_dict(d, 'managed_sqs', AwsSqsQueue), provided_aqs=_from_dict(d, 'provided_aqs', AzureQueueStorage), provided_pubsub=_from_dict(d, 'provided_pubsub', GcpPubsub), provided_sqs=_from_dict(d, 'provided_sqs', AwsSqsQueue)) - - + return cls( + managed_aqs=_from_dict(d, "managed_aqs", AzureQueueStorage), + managed_pubsub=_from_dict(d, "managed_pubsub", GcpPubsub), + managed_sqs=_from_dict(d, "managed_sqs", AwsSqsQueue), + provided_aqs=_from_dict(d, "provided_aqs", AzureQueueStorage), + provided_pubsub=_from_dict(d, "provided_pubsub", GcpPubsub), + provided_sqs=_from_dict(d, "provided_sqs", AwsSqsQueue), + ) @dataclass class ForeignKeyConstraint: name: str """The name of the constraint.""" - + child_columns: List[str] """Column names for this constraint.""" - + parent_table: str """The full name of the parent table.""" - + parent_columns: List[str] """Column names for this constraint.""" - + def as_dict(self) -> dict: """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.child_columns: body['child_columns'] = [v for v in self.child_columns] - if self.name is not None: body['name'] = self.name - if self.parent_columns: body['parent_columns'] = [v for v in self.parent_columns] - if self.parent_table is not None: body['parent_table'] = self.parent_table + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] + if self.name is not None: + body["name"] = self.name + if self.parent_columns: + body["parent_columns"] = [v for v in self.parent_columns] + if self.parent_table is not None: + body["parent_table"] = self.parent_table return body def as_shallow_dict(self) -> dict: """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.child_columns: body['child_columns'] = self.child_columns - if self.name is not None: body['name'] = self.name - if self.parent_columns: body['parent_columns'] = self.parent_columns - if self.parent_table is not None: body['parent_table'] = self.parent_table + if self.child_columns: + body["child_columns"] = self.child_columns + if self.name is not None: + body["name"] = self.name + if self.parent_columns: + body["parent_columns"] = self.parent_columns + if self.parent_table is not None: + body["parent_table"] = self.parent_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: """Deserializes the ForeignKeyConstraint from a dictionary.""" - return cls(child_columns=d.get('child_columns', None), name=d.get('name', None), parent_columns=d.get('parent_columns', None), parent_table=d.get('parent_table', None)) - - + return cls( + child_columns=d.get("child_columns", None), + name=d.get("name", None), + parent_columns=d.get("parent_columns", None), + parent_table=d.get("parent_table", None), + )
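A brief illustrative sketch (the table and column names are hypothetical): child and parent columns are matched positionally, and list-valued fields serialize as plain JSON arrays.

def _foreign_key_sketch() -> dict:
    fk = ForeignKeyConstraint(
        name="fk_orders_customer",
        child_columns=["customer_id"],
        parent_table="main.sales.customers",
        parent_columns=["id"],
    )
    # All four fields are set, so all four keys appear in the request body.
    return fk.as_dict()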
SQL object.""" - + function_full_name: str """Full name of the dependent function, in the form of __catalog_name__.__schema_name__.__function_name__.""" - + def as_dict(self) -> dict: """Serializes the FunctionDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_full_name is not None: body['function_full_name'] = self.function_full_name + if self.function_full_name is not None: + body["function_full_name"] = self.function_full_name return body def as_shallow_dict(self) -> dict: """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_full_name is not None: body['function_full_name'] = self.function_full_name + if self.function_full_name is not None: + body["function_full_name"] = self.function_full_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionDependency: """Deserializes the FunctionDependency from a dictionary.""" - return cls(function_full_name=d.get('function_full_name', None)) - - + return cls(function_full_name=d.get("function_full_name", None)) @dataclass @@ -3202,464 +3885,606 @@ class FunctionInfo: browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """Name of parent catalog.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this function was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of function creator.""" - + data_type: Optional[ColumnTypeName] = None """Scalar function return data type.""" - + external_language: Optional[str] = None """External function language.""" - + external_name: Optional[str] = None """External function name.""" - + full_data_type: Optional[str] = None """Pretty printed function data type.""" - + full_name: Optional[str] = None """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" - + function_id: Optional[str] = None """Id of Function, relative to parent schema.""" - + input_params: Optional[FunctionParameterInfos] = None - + is_deterministic: Optional[bool] = None """Whether the function is deterministic.""" - + is_null_call: Optional[bool] = None """Function null call.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """Name of function, relative to parent schema.""" - + owner: Optional[str] = None """Username of current owner of function.""" - + parameter_style: Optional[FunctionInfoParameterStyle] = None """Function parameter style. **S** is the value for SQL.""" - + properties: Optional[str] = None """JSON-serialized key-value pair map, encoded (escaped) as a string.""" - + return_params: Optional[FunctionParameterInfos] = None """Table function return parameters.""" - + routine_body: Optional[FunctionInfoRoutineBody] = None """Function language. 
@dataclass @@ -3202,464 +3885,606 @@ class FunctionInfo: browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """Name of parent catalog.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this function was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of function creator.""" - + data_type: Optional[ColumnTypeName] = None """Scalar function return data type.""" - + external_language: Optional[str] = None """External function language.""" - + external_name: Optional[str] = None """External function name.""" - + full_data_type: Optional[str] = None """Pretty printed function data type.""" - + full_name: Optional[str] = None """Full name of function, in the form of __catalog_name__.__schema_name__.__function_name__.""" - + function_id: Optional[str] = None """Id of Function, relative to parent schema.""" - + input_params: Optional[FunctionParameterInfos] = None - + is_deterministic: Optional[bool] = None """Whether the function is deterministic.""" - + is_null_call: Optional[bool] = None """Function null call.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """Name of function, relative to parent schema.""" - + owner: Optional[str] = None """Username of current owner of function.""" - + parameter_style: Optional[FunctionInfoParameterStyle] = None """Function parameter style. **S** is the value for SQL.""" - + properties: Optional[str] = None """JSON-serialized key-value pair map, encoded (escaped) as a string.""" - + return_params: Optional[FunctionParameterInfos] = None """Table function return parameters.""" - + routine_body: Optional[FunctionInfoRoutineBody] = None """Function language. When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**.""" - + routine_definition: Optional[str] = None """Function body.""" - + routine_dependencies: Optional[DependencyList] = None """Function dependencies.""" - + schema_name: Optional[str] = None """Name of parent schema relative to its parent catalog.""" - + security_type: Optional[FunctionInfoSecurityType] = None """Function security type.""" - + specific_name: Optional[str] = None """Specific name of the function; Reserved for future use.""" - + sql_data_access: Optional[FunctionInfoSqlDataAccess] = None """Function SQL data access.""" - + sql_path: Optional[str] = None """List of schemes whose objects can be referenced without qualification.""" - + updated_at: Optional[int] = None """Time at which this function was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified function.""" - + def as_dict(self) -> dict: """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.data_type is not None: body['data_type'] = self.data_type.value - if self.external_language is not None: body['external_language'] = self.external_language - if self.external_name is not None: body['external_name'] = self.external_name - if self.full_data_type is not None: body['full_data_type'] = self.full_data_type - if self.full_name is not None: body['full_name'] = self.full_name - if self.function_id is not None: body['function_id'] = self.function_id - if self.input_params: body['input_params'] = self.input_params.as_dict() - if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic - if self.is_null_call is not None: body['is_null_call'] = self.is_null_call - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.parameter_style is not None: body['parameter_style'] = self.parameter_style.value - if self.properties is not None: body['properties'] = self.properties - if self.return_params: body['return_params'] = self.return_params.as_dict() - if self.routine_body is not None: body['routine_body'] = self.routine_body.value - if self.routine_definition is not None: body['routine_definition'] = self.routine_definition - if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies.as_dict() - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.security_type is not None: body['security_type'] = self.security_type.value - if self.specific_name is not None: body['specific_name'] = self.specific_name - if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access.value - if self.sql_path is not None: body['sql_path'] = self.sql_path - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] =
self.updated_by + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_type is not None: + body["data_type"] = self.data_type.value + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function_id is not None: + body["function_id"] = self.function_id + if self.input_params: + body["input_params"] = self.input_params.as_dict() + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not None: + body["is_null_call"] = self.is_null_call + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style.value + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params.as_dict() + if self.routine_body is not None: + body["routine_body"] = self.routine_body.value + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies.as_dict() + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type.value + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access.value + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.data_type is not None: body['data_type'] = self.data_type - if self.external_language is not None: body['external_language'] = self.external_language - if self.external_name is not None: body['external_name'] = self.external_name - if self.full_data_type is not None: body['full_data_type'] = self.full_data_type - if self.full_name is not None: body['full_name'] = self.full_name - if self.function_id is not None: body['function_id'] = self.function_id - if self.input_params: body['input_params'] = self.input_params - if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic - if self.is_null_call is not None: 
body['is_null_call'] = self.is_null_call - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.parameter_style is not None: body['parameter_style'] = self.parameter_style - if self.properties is not None: body['properties'] = self.properties - if self.return_params: body['return_params'] = self.return_params - if self.routine_body is not None: body['routine_body'] = self.routine_body - if self.routine_definition is not None: body['routine_definition'] = self.routine_definition - if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.security_type is not None: body['security_type'] = self.security_type - if self.specific_name is not None: body['specific_name'] = self.specific_name - if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access - if self.sql_path is not None: body['sql_path'] = self.sql_path - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_type is not None: + body["data_type"] = self.data_type + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function_id is not None: + body["function_id"] = self.function_id + if self.input_params: + body["input_params"] = self.input_params + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not None: + body["is_null_call"] = self.is_null_call + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params + if self.routine_body is not None: + body["routine_body"] = self.routine_body + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by 
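# Note: unlike as_dict() above, this shallow variant leaves nested values
# (input_params, return_params, routine_dependencies) as dataclass instances and
# enum-typed fields as enum members, rather than recursively serializing them.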
return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: """Deserializes the FunctionInfo from a dictionary.""" - return cls(browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_type=_enum(d, 'data_type', ColumnTypeName), external_language=d.get('external_language', None), external_name=d.get('external_name', None), full_data_type=d.get('full_data_type', None), full_name=d.get('full_name', None), function_id=d.get('function_id', None), input_params=_from_dict(d, 'input_params', FunctionParameterInfos), is_deterministic=d.get('is_deterministic', None), is_null_call=d.get('is_null_call', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), parameter_style=_enum(d, 'parameter_style', FunctionInfoParameterStyle), properties=d.get('properties', None), return_params=_from_dict(d, 'return_params', FunctionParameterInfos), routine_body=_enum(d, 'routine_body', FunctionInfoRoutineBody), routine_definition=d.get('routine_definition', None), routine_dependencies=_from_dict(d, 'routine_dependencies', DependencyList), schema_name=d.get('schema_name', None), security_type=_enum(d, 'security_type', FunctionInfoSecurityType), specific_name=d.get('specific_name', None), sql_data_access=_enum(d, 'sql_data_access', FunctionInfoSqlDataAccess), sql_path=d.get('sql_path', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + data_type=_enum(d, "data_type", ColumnTypeName), + external_language=d.get("external_language", None), + external_name=d.get("external_name", None), + full_data_type=d.get("full_data_type", None), + full_name=d.get("full_name", None), + function_id=d.get("function_id", None), + input_params=_from_dict(d, "input_params", FunctionParameterInfos), + is_deterministic=d.get("is_deterministic", None), + is_null_call=d.get("is_null_call", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + parameter_style=_enum(d, "parameter_style", FunctionInfoParameterStyle), + properties=d.get("properties", None), + return_params=_from_dict(d, "return_params", FunctionParameterInfos), + routine_body=_enum(d, "routine_body", FunctionInfoRoutineBody), + routine_definition=d.get("routine_definition", None), + routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), + schema_name=d.get("schema_name", None), + security_type=_enum(d, "security_type", FunctionInfoSecurityType), + specific_name=d.get("specific_name", None), + sql_data_access=_enum(d, "sql_data_access", FunctionInfoSqlDataAccess), + sql_path=d.get("sql_path", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) class FunctionInfoParameterStyle(Enum): """Function parameter style. **S** is the value for SQL.""" - - S = 'S' + + S = "S" + class FunctionInfoRoutineBody(Enum): """Function language. 
When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**.""" - - EXTERNAL = 'EXTERNAL' - SQL = 'SQL' + + EXTERNAL = "EXTERNAL" + SQL = "SQL" + class FunctionInfoSecurityType(Enum): """The security type of the function.""" - - DEFINER = 'DEFINER' + + DEFINER = "DEFINER" + class FunctionInfoSqlDataAccess(Enum): """Function SQL data access.""" - - CONTAINS_SQL = 'CONTAINS_SQL' - NO_SQL = 'NO_SQL' - READS_SQL_DATA = 'READS_SQL_DATA' + + CONTAINS_SQL = "CONTAINS_SQL" + NO_SQL = "NO_SQL" + READS_SQL_DATA = "READS_SQL_DATA" + @dataclass class FunctionParameterInfo: name: str """Name of parameter.""" - + type_text: str """Full data type spec, SQL/catalogString text.""" - + type_name: ColumnTypeName - + position: int """Ordinal position of column (starting at position 0).""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + parameter_default: Optional[str] = None """Default value of the parameter.""" - + parameter_mode: Optional[FunctionParameterMode] = None """The mode of the function parameter.""" - + parameter_type: Optional[FunctionParameterType] = None """The type of function parameter.""" - + type_interval_type: Optional[str] = None """Format of IntervalType.""" - + type_json: Optional[str] = None """Full data type spec, JSON-serialized.""" - + type_precision: Optional[int] = None """Digits of precision; required on Create for DecimalTypes.""" - + type_scale: Optional[int] = None """Digits to the right of the decimal; required on Create for DecimalTypes.""" - + def as_dict(self) -> dict: """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.parameter_default is not None: body['parameter_default'] = self.parameter_default - if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode.value - if self.parameter_type is not None: body['parameter_type'] = self.parameter_type.value - if self.position is not None: body['position'] = self.position - if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type - if self.type_json is not None: body['type_json'] = self.type_json - if self.type_name is not None: body['type_name'] = self.type_name.value - if self.type_precision is not None: body['type_precision'] = self.type_precision - if self.type_scale is not None: body['type_scale'] = self.type_scale - if self.type_text is not None: body['type_text'] = self.type_text + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.parameter_default is not None: + body["parameter_default"] = self.parameter_default + if self.parameter_mode is not None: + body["parameter_mode"] = self.parameter_mode.value + if self.parameter_type is not None: + body["parameter_type"] = self.parameter_type.value + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] = self.type_interval_type + if self.type_json is not None: + body["type_json"] = self.type_json + if self.type_name is not None: + body["type_name"] = self.type_name.value + if self.type_precision is not None: + body["type_precision"] =
self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text return body def as_shallow_dict(self) -> dict: """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.parameter_default is not None: body['parameter_default'] = self.parameter_default - if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode - if self.parameter_type is not None: body['parameter_type'] = self.parameter_type - if self.position is not None: body['position'] = self.position - if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type - if self.type_json is not None: body['type_json'] = self.type_json - if self.type_name is not None: body['type_name'] = self.type_name - if self.type_precision is not None: body['type_precision'] = self.type_precision - if self.type_scale is not None: body['type_scale'] = self.type_scale - if self.type_text is not None: body['type_text'] = self.type_text + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.parameter_default is not None: + body["parameter_default"] = self.parameter_default + if self.parameter_mode is not None: + body["parameter_mode"] = self.parameter_mode + if self.parameter_type is not None: + body["parameter_type"] = self.parameter_type + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] = self.type_interval_type + if self.type_json is not None: + body["type_json"] = self.type_json + if self.type_name is not None: + body["type_name"] = self.type_name + if self.type_precision is not None: + body["type_precision"] = self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: """Deserializes the FunctionParameterInfo from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), parameter_default=d.get('parameter_default', None), parameter_mode=_enum(d, 'parameter_mode', FunctionParameterMode), parameter_type=_enum(d, 'parameter_type', FunctionParameterType), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_json=d.get('type_json', None), type_name=_enum(d, 'type_name', ColumnTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None)) - - + return cls( + comment=d.get("comment", None), + name=d.get("name", None), + parameter_default=d.get("parameter_default", None), + parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode), + parameter_type=_enum(d, "parameter_type", FunctionParameterType), + position=d.get("position", None), + type_interval_type=d.get("type_interval_type", None), + type_json=d.get("type_json", None), + type_name=_enum(d, "type_name", ColumnTypeName), + type_precision=d.get("type_precision", None), + type_scale=d.get("type_scale", None), + type_text=d.get("type_text", None), + ) @dataclass class FunctionParameterInfos: parameters: Optional[List[FunctionParameterInfo]] = None """The array of __FunctionParameterInfo__ 


 @dataclass
 class FunctionParameterInfos:
     parameters: Optional[List[FunctionParameterInfo]] = None
     """The array of __FunctionParameterInfo__ definitions of the function's parameters."""
-
+
     def as_dict(self) -> dict:
         """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos:
         """Deserializes the FunctionParameterInfos from a dictionary."""
-        return cls(parameters=_repeated_dict(d, 'parameters', FunctionParameterInfo))
-
-


 class FunctionParameterMode(Enum):
     """The mode of the function parameter."""
-
-    IN = 'IN'
+
+    IN = "IN"
+

 class FunctionParameterType(Enum):
     """The type of function parameter."""
-
-    COLUMN = 'COLUMN'
-    PARAM = 'PARAM'
+
+    COLUMN = "COLUMN"
+    PARAM = "PARAM"
+

 @dataclass
 class GcpOauthToken:
     """GCP temporary credentials for API authentication. Read more at
     https://developers.google.com/identity/protocols/oauth2/service-account"""
-
+
     oauth_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
+        if self.oauth_token is not None:
+            body["oauth_token"] = self.oauth_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
+        if self.oauth_token is not None:
+            body["oauth_token"] = self.oauth_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GcpOauthToken:
         """Deserializes the GcpOauthToken from a dictionary."""
-        return cls(oauth_token=d.get('oauth_token', None))
-
-


 @dataclass
 class GcpPubsub:
     managed_resource_id: Optional[str] = None
     """Unique identifier included in the name of file events managed cloud resources."""
-
+
     subscription_name: Optional[str] = None
     """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription
     name} REQUIRED for provided_pubsub."""
-
+
     def as_dict(self) -> dict:
         """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id
-        if self.subscription_name is not None: body['subscription_name'] = self.subscription_name
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.subscription_name is not None:
+            body["subscription_name"] = self.subscription_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GcpPubsub into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.managed_resource_id is not None: body['managed_resource_id'] = self.managed_resource_id
-        if self.subscription_name is not None: body['subscription_name'] = self.subscription_name
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.subscription_name is not None:
+            body["subscription_name"] = self.subscription_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub:
         """Deserializes the GcpPubsub from a dictionary."""
-        return cls(managed_resource_id=d.get('managed_resource_id', None), subscription_name=d.get('subscription_name', None))
-
-
+        return cls(
+            managed_resource_id=d.get("managed_resource_id", None), subscription_name=d.get("subscription_name", None)
+        )


 @dataclass
 class GenerateTemporaryServiceCredentialAzureOptions:
     """The Azure cloud options to customize the requested temporary credential"""
-
+
     resources: Optional[List[str]] = None
     """The resources to which the temporary Azure credential should apply. These resources are the
     scopes that are passed to the token provider (see
     https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.resources: body['resources'] = [v for v in self.resources]
+        if self.resources:
+            body["resources"] = [v for v in self.resources]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.resources: body['resources'] = self.resources
+        if self.resources:
+            body["resources"] = self.resources
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialAzureOptions:
         """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary."""
-        return cls(resources=d.get('resources', None))
-
-


 @dataclass
 class GenerateTemporaryServiceCredentialGcpOptions:
     """The GCP cloud options to customize the requested temporary credential"""
-
+
     scopes: Optional[List[str]] = None
     """The scopes to which the temporary GCP credential should apply.
     These resources are the scopes that are passed to the token provider (see
     https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.scopes: body['scopes'] = self.scopes
+        if self.scopes:
+            body["scopes"] = self.scopes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialGcpOptions:
         """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary."""
-        return cls(scopes=d.get('scopes', None))
-
-


 @dataclass
 class GenerateTemporaryServiceCredentialRequest:
     credential_name: str
     """The name of the service credential used to generate a temporary credential"""
-
+
     azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
     """The Azure cloud options to customize the requested temporary credential"""
-
+
     gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None
     """The GCP cloud options to customize the requested temporary credential"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.azure_options: body['azure_options'] = self.azure_options.as_dict()
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.gcp_options: body['gcp_options'] = self.gcp_options.as_dict()
+        if self.azure_options:
+            body["azure_options"] = self.azure_options.as_dict()
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.gcp_options:
+            body["gcp_options"] = self.gcp_options.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.azure_options: body['azure_options'] = self.azure_options
-        if self.credential_name is not None: body['credential_name'] = self.credential_name
-        if self.gcp_options: body['gcp_options'] = self.gcp_options
+        if self.azure_options:
+            body["azure_options"] = self.azure_options
+        if self.credential_name is not None:
+            body["credential_name"] = self.credential_name
+        if self.gcp_options:
+            body["gcp_options"] = self.gcp_options
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialRequest:
         """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary."""
-        return cls(azure_options=_from_dict(d, 'azure_options', GenerateTemporaryServiceCredentialAzureOptions), credential_name=d.get('credential_name', None), gcp_options=_from_dict(d, 'gcp_options', GenerateTemporaryServiceCredentialGcpOptions))
-
-
+        return cls(
+            azure_options=_from_dict(d, "azure_options", GenerateTemporaryServiceCredentialAzureOptions),
+            credential_name=d.get("credential_name", None),
+            gcp_options=_from_dict(d, "gcp_options", GenerateTemporaryServiceCredentialGcpOptions),
+        )
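The request above bundles cloud-specific options; only the block that matches the credential's cloud is meaningful. A sketch of building the body by hand, with a made-up credential name (the resource URI shown is the standard Azure Storage audience):

    from databricks.sdk.service.catalog import (
        GenerateTemporaryServiceCredentialAzureOptions,
        GenerateTemporaryServiceCredentialRequest,
    )

    req = GenerateTemporaryServiceCredentialRequest(
        credential_name="my-service-credential",  # hypothetical credential name
        azure_options=GenerateTemporaryServiceCredentialAzureOptions(
            resources=["https://storage.azure.com/"]
        ),
    )
    # Nested option objects are serialized recursively by as_dict().
    assert req.as_dict()["azure_options"] == {"resources": ["https://storage.azure.com/"]}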
 @dataclass
@@ -3668,30 +4493,32 @@ class GenerateTemporaryTableCredentialRequest:
     """The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
     specified, the credentials returned will have write permissions, otherwise, it will be read
     only."""
-
+
     table_id: Optional[str] = None
     """UUID of the table to read or write."""
-
+
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.operation is not None: body['operation'] = self.operation.value
-        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.operation is not None:
+            body["operation"] = self.operation.value
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.operation is not None: body['operation'] = self.operation
-        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.operation is not None:
+            body["operation"] = self.operation
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialRequest:
         """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
-        return cls(operation=_enum(d, 'operation', TableOperation), table_id=d.get('table_id', None))
-
-
+        return cls(operation=_enum(d, "operation", TableOperation), table_id=d.get("table_id", None))


 @dataclass
@@ -3699,250 +4526,284 @@ class GenerateTemporaryTableCredentialResponse:
     aws_temp_credentials: Optional[AwsCredentials] = None
     """AWS temporary credentials for API authentication. Read more at
     https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
-
+
     azure_aad: Optional[AzureActiveDirectoryToken] = None
     """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or
     Managed Identity. Read more at
     https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
-
+
     azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None
     """Azure temporary credentials for API authentication. Read more at
     https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
-
+
     expiration_time: Optional[int] = None
     """Server time when the credential will expire, in epoch milliseconds. The API client is advised
     to cache the credential given this expiration time."""
-
+
     gcp_oauth_token: Optional[GcpOauthToken] = None
     """GCP temporary credentials for API authentication. Read more at
     https://developers.google.com/identity/protocols/oauth2/service-account"""
-
+
     r2_temp_credentials: Optional[R2Credentials] = None
     """R2 temporary credentials for API authentication.
     Read more at https://developers.cloudflare.com/r2/api/s3/tokens/."""
-
+
     url: Optional[str] = None
     """The URL of the storage path accessible by the temporary credential."""
-
+
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
-        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
-        if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
-        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials.as_dict()
-        if self.url is not None: body['url'] = self.url
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad.as_dict()
+        if self.azure_user_delegation_sas:
+            body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict()
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict()
+        if self.r2_temp_credentials:
+            body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
-        if self.azure_aad: body['azure_aad'] = self.azure_aad
-        if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
-        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials
-        if self.url is not None: body['url'] = self.url
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad
+        if self.azure_user_delegation_sas:
+            body["azure_user_delegation_sas"] = self.azure_user_delegation_sas
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token
+        if self.r2_temp_credentials:
+            body["r2_temp_credentials"] = self.r2_temp_credentials
+        if self.url is not None:
+            body["url"] = self.url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialResponse:
         """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
-        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials), azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken), azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas', AzureUserDelegationSas), expiration_time=d.get('expiration_time', None), gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken), r2_temp_credentials=_from_dict(d, 'r2_temp_credentials', R2Credentials), url=d.get('url', None))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return cls(
+            aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials),
+            azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken),
+            azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas),
+            expiration_time=d.get("expiration_time", None),
+            gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken),
+            r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials),
+            url=d.get("url", None),
+        )
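Per the expiration_time docstring above, callers are expected to cache these short-lived credentials until the given epoch-millisecond deadline rather than re-requesting them per operation. A minimal caching sketch; the fetch() callable standing in for the actual credential API call is hypothetical:

    import time

    _cached = None  # holds a GenerateTemporaryTableCredentialResponse

    def table_credential(fetch):
        """Return the cached credential, refreshing via fetch() as it nears expiry."""
        global _cached
        now_ms = int(time.time() * 1000)
        # Refresh a minute early so in-flight requests do not race the deadline.
        if _cached is None or (_cached.expiration_time or 0) - 60_000 <= now_ms:
            _cached = fetch()
        return _cached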
None """Time at which the metastore was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified the metastore.""" - + def as_dict(self) -> dict: """Serializes the GetMetastoreSummaryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cloud is not None: body['cloud'] = self.cloud - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds - if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value - if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled - if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version - if self.region is not None: body['region'] = self.region - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.cloud is not None: + body["cloud"] = self.cloud + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope.value + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if 
self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.cloud is not None: body['cloud'] = self.cloud - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds - if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope - if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled - if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version - if self.region is not None: body['region'] = self.region - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.cloud is not None: + body["cloud"] = self.cloud + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + 
body["privilege_model_version"] = self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetMetastoreSummaryResponse: """Deserializes the GetMetastoreSummaryResponse from a dictionary.""" - return cls(cloud=d.get('cloud', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), default_data_access_config_id=d.get('default_data_access_config_id', None), delta_sharing_organization_name=d.get('delta_sharing_organization_name', None), delta_sharing_recipient_token_lifetime_in_seconds=d.get('delta_sharing_recipient_token_lifetime_in_seconds', None), delta_sharing_scope=_enum(d, 'delta_sharing_scope', DeltaSharingScopeEnum), external_access_enabled=d.get('external_access_enabled', None), global_metastore_id=d.get('global_metastore_id', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), privilege_model_version=d.get('privilege_model_version', None), region=d.get('region', None), storage_root=d.get('storage_root', None), storage_root_credential_id=d.get('storage_root_credential_id', None), storage_root_credential_name=d.get('storage_root_credential_name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - - - - - - - + return cls( + cloud=d.get("cloud", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + default_data_access_config_id=d.get("default_data_access_config_id", None), + delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), + delta_sharing_recipient_token_lifetime_in_seconds=d.get( + "delta_sharing_recipient_token_lifetime_in_seconds", None + ), + delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), + external_access_enabled=d.get("external_access_enabled", None), + global_metastore_id=d.get("global_metastore_id", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + privilege_model_version=d.get("privilege_model_version", None), + region=d.get("region", None), + storage_root=d.get("storage_root", None), + storage_root_credential_id=d.get("storage_root_credential_id", None), + storage_root_credential_name=d.get("storage_root_credential_name", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass @@ -3950,412 +4811,395 @@ class GetPermissionsResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 


 @dataclass
@@ -3950,412 +4811,395 @@ class GetPermissionsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     privilege_assignments: Optional[List[PrivilegeAssignment]] = None
     """The privileges assigned to each principal"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetPermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.privilege_assignments:
+            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetPermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.privilege_assignments:
+            body["privilege_assignments"] = self.privilege_assignments
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetPermissionsResponse:
         """Deserializes the GetPermissionsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment))
-
-
-
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment),
+        )


 @dataclass
 class GetQuotaResponse:
     quota_info: Optional[QuotaInfo] = None
     """The returned QuotaInfo."""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.quota_info: body['quota_info'] = self.quota_info.as_dict()
+        if self.quota_info:
+            body["quota_info"] = self.quota_info.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.quota_info: body['quota_info'] = self.quota_info
+        if self.quota_info:
+            body["quota_info"] = self.quota_info
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetQuotaResponse:
         """Deserializes the GetQuotaResponse from a dictionary."""
-        return cls(quota_info=_from_dict(d, 'quota_info', QuotaInfo))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo))


 @dataclass
 class GetWorkspaceBindingsResponse:
     bindings: Optional[List[WorkspaceBinding]] = None
     """List of workspace bindings"""
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.bindings:
+            body["bindings"] = [v.as_dict() for v in self.bindings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bindings: body['bindings'] = self.bindings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.bindings:
+            body["bindings"] = self.bindings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceBindingsResponse:
         """Deserializes the GetWorkspaceBindingsResponse from a dictionary."""
-        return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding), next_page_token=d.get('next_page_token', None))
-
-
+        return cls(
+            bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None)
+        )


 class IsolationMode(Enum):
-
-
-    ISOLATION_MODE_ISOLATED = 'ISOLATION_MODE_ISOLATED'
-    ISOLATION_MODE_OPEN = 'ISOLATION_MODE_OPEN'
-
+    ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED"
+    ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN"


 @dataclass
 class ListAccountMetastoreAssignmentsResponse:
     """The list of workspaces to which the given metastore is assigned."""
-
+
     workspace_ids: Optional[List[int]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListAccountMetastoreAssignmentsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.workspace_ids: body['workspace_ids'] = [v for v in self.workspace_ids]
+        if self.workspace_ids:
+            body["workspace_ids"] = [v for v in self.workspace_ids]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.workspace_ids: body['workspace_ids'] = self.workspace_ids
+        if self.workspace_ids:
+            body["workspace_ids"] = self.workspace_ids
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse:
         """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary."""
-        return cls(workspace_ids=d.get('workspace_ids', None))
-
-
-
-
-
+        return cls(workspace_ids=d.get("workspace_ids", None))


 @dataclass
 class ListAccountStorageCredentialsResponse:
     storage_credentials: Optional[List[StorageCredentialInfo]] = None
     """An array of metastore storage credentials."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.storage_credentials: body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
+        if self.storage_credentials:
+            body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        if self.storage_credentials:
+            body["storage_credentials"] = self.storage_credentials
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAccountStorageCredentialsResponse:
         """Deserializes the ListAccountStorageCredentialsResponse from a dictionary."""
-        return cls(storage_credentials=_repeated_dict(d, 'storage_credentials', StorageCredentialInfo))
-
-
-
-
-
+        return cls(storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo))


 @dataclass
 class ListCatalogsResponse:
     catalogs: Optional[List[CatalogInfo]] = None
     """An array of catalog information objects."""
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalogs: body['catalogs'] = [v.as_dict() for v in self.catalogs]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.catalogs:
+            body["catalogs"] = [v.as_dict() for v in self.catalogs]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalogs: body['catalogs'] = self.catalogs
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.catalogs:
+            body["catalogs"] = self.catalogs
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListCatalogsResponse:
         """Deserializes the ListCatalogsResponse from a dictionary."""
-        return cls(catalogs=_repeated_dict(d, 'catalogs', CatalogInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(catalogs=_repeated_dict(d, "catalogs", CatalogInfo), next_page_token=d.get("next_page_token", None))
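All of the List*Response shapes in this file share the pagination contract spelled out in their docstrings: keep passing next_page_token back as page_token until it comes back absent. A generic drain loop against the raw wire shape; api_get, a callable mapping query parameters to a decoded JSON dict, is hypothetical (the high-level SDK list() methods already run this loop internally):

    from databricks.sdk.service.catalog import ListCatalogsResponse

    def all_catalogs(api_get):
        token = None
        while True:
            params = {"page_token": token} if token else {}
            resp = ListCatalogsResponse.from_dict(api_get(params))
            for catalog in resp.catalogs or []:
                yield catalog
            token = resp.next_page_token
            if not token:  # an absent token means this was the last page
                return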


 @dataclass
 class ListConnectionsResponse:
     connections: Optional[List[ConnectionInfo]] = None
     """An array of connection information objects."""
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListConnectionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.connections: body['connections'] = [v.as_dict() for v in self.connections]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.connections:
+            body["connections"] = [v.as_dict() for v in self.connections]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connections: body['connections'] = self.connections
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.connections:
+            body["connections"] = self.connections
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListConnectionsResponse:
         """Deserializes the ListConnectionsResponse from a dictionary."""
-        return cls(connections=_repeated_dict(d, 'connections', ConnectionInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            connections=_repeated_dict(d, "connections", ConnectionInfo), next_page_token=d.get("next_page_token", None)
+        )


 @dataclass
 class ListCredentialsResponse:
     credentials: Optional[List[CredentialInfo]] = None
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.credentials:
+            body["credentials"] = [v.as_dict() for v in self.credentials]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credentials: body['credentials'] = self.credentials
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.credentials:
+            body["credentials"] = self.credentials
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse:
         """Deserializes the ListCredentialsResponse from a dictionary."""
-        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            credentials=_repeated_dict(d, "credentials", CredentialInfo), next_page_token=d.get("next_page_token", None)
+        )


 @dataclass
 class ListExternalLocationsResponse:
     external_locations: Optional[List[ExternalLocationInfo]] = None
     """An array of external locations."""
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListExternalLocationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.external_locations: body['external_locations'] = [v.as_dict() for v in self.external_locations]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.external_locations:
+            body["external_locations"] = [v.as_dict() for v in self.external_locations]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.external_locations: body['external_locations'] = self.external_locations
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.external_locations:
+            body["external_locations"] = self.external_locations
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListExternalLocationsResponse:
         """Deserializes the ListExternalLocationsResponse from a dictionary."""
-        return cls(external_locations=_repeated_dict(d, 'external_locations', ExternalLocationInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            external_locations=_repeated_dict(d, "external_locations", ExternalLocationInfo),
+            next_page_token=d.get("next_page_token", None),
+        )


 @dataclass
 class ListFunctionsResponse:
     functions: Optional[List[FunctionInfo]] = None
     """An array of function information objects."""
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListFunctionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.functions: body['functions'] = [v.as_dict() for v in self.functions]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.functions:
+            body["functions"] = [v.as_dict() for v in self.functions]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.functions: body['functions'] = self.functions
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.functions:
+            body["functions"] = self.functions
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListFunctionsResponse:
         """Deserializes the ListFunctionsResponse from a dictionary."""
-        return cls(functions=_repeated_dict(d, 'functions', FunctionInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            functions=_repeated_dict(d, "functions", FunctionInfo), next_page_token=d.get("next_page_token", None)
+        )


 @dataclass
 class ListMetastoresResponse:
     metastores: Optional[List[MetastoreInfo]] = None
     """An array of metastore information objects."""
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results.
     Absent if there are no more pages. __page_token__ should be set to this value for the next
     request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListMetastoresResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.metastores: body['metastores'] = [v.as_dict() for v in self.metastores]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.metastores:
+            body["metastores"] = [v.as_dict() for v in self.metastores]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.metastores: body['metastores'] = self.metastores
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.metastores:
+            body["metastores"] = self.metastores
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListMetastoresResponse:
         """Deserializes the ListMetastoresResponse from a dictionary."""
-        return cls(metastores=_repeated_dict(d, 'metastores', MetastoreInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            metastores=_repeated_dict(d, "metastores", MetastoreInfo), next_page_token=d.get("next_page_token", None)
+        )


 @dataclass
 class ListModelVersionsResponse:
     model_versions: Optional[List[ModelVersionInfo]] = None
-
+
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListModelVersionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.model_versions:
+            body["model_versions"] = [v.as_dict() for v in self.model_versions]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.model_versions: body['model_versions'] = self.model_versions
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.model_versions:
+            body["model_versions"] = self.model_versions
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse:
         """Deserializes the ListModelVersionsResponse from a dictionary."""
-        return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersionInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            model_versions=_repeated_dict(d, "model_versions", ModelVersionInfo),
+            next_page_token=d.get("next_page_token", None),
+        )


 @dataclass
@@ -4363,36 +5207,32 @@ class ListQuotasResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request."""
-
+
     quotas: Optional[List[QuotaInfo]] = None
     """An array of returned QuotaInfos."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.quotas:
+            body["quotas"] = [v.as_dict() for v in self.quotas]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.quotas: body['quotas'] = self.quotas
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.quotas:
+            body["quotas"] = self.quotas
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListQuotasResponse:
         """Deserializes the ListQuotasResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), quotas=_repeated_dict(d, 'quotas', QuotaInfo))
-
-
-
-
-
-
-
-
+        return cls(next_page_token=d.get("next_page_token", None), quotas=_repeated_dict(d, "quotas", QuotaInfo))


 @dataclass
@@ -4400,32 +5240,34 @@ class ListRegisteredModelsResponse:
     next_page_token: Optional[str] = None
     """Opaque token for pagination. Omitted if there are no more results. page_token should be set to
     this value for fetching the next page."""
-
+
     registered_models: Optional[List[RegisteredModelInfo]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListRegisteredModelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = [v.as_dict() for v in self.registered_models]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.registered_models: body['registered_models'] = self.registered_models
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.registered_models:
+            body["registered_models"] = self.registered_models
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListRegisteredModelsResponse:
         """Deserializes the ListRegisteredModelsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), registered_models=_repeated_dict(d, 'registered_models', RegisteredModelInfo))
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            registered_models=_repeated_dict(d, "registered_models", RegisteredModelInfo),
+        )


 @dataclass
@@ -4433,33 +5275,32 @@ class ListSchemasResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     schemas: Optional[List[SchemaInfo]] = None
     """An array of schema information objects."""
-
+
    def as_dict(self) -> dict:
         """Serializes the ListSchemasResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = [v.as_dict() for v in self.schemas]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = self.schemas
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = self.schemas
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListSchemasResponse:
         """Deserializes the ListSchemasResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), schemas=_repeated_dict(d, 'schemas', SchemaInfo))
-
-
-
-
-
+        return cls(next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SchemaInfo))


 @dataclass
@@ -4467,35 +5308,34 @@ class ListStorageCredentialsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     storage_credentials: Optional[List[StorageCredentialInfo]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListStorageCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.storage_credentials: body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.storage_credentials:
+            body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.storage_credentials:
+            body["storage_credentials"] = self.storage_credentials
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListStorageCredentialsResponse:
         """Deserializes the ListStorageCredentialsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), storage_credentials=_repeated_dict(d, 'storage_credentials', StorageCredentialInfo))
-
-
-
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo),
+        )


 @dataclass
@@ -4503,30 +5343,34 @@ class ListSystemSchemasResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results.
     Absent if there are no more pages. __page_token__ should be set to this value for the next
     request (for the next page of results)."""
-
+
     schemas: Optional[List[SystemSchemaInfo]] = None
     """An array of system schema information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = [v.as_dict() for v in self.schemas]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.schemas: body['schemas'] = self.schemas
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.schemas:
+            body["schemas"] = self.schemas
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListSystemSchemasResponse:
         """Deserializes the ListSystemSchemasResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo))
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SystemSchemaInfo)
+        )


 @dataclass
@@ -4534,33 +5378,32 @@ class ListTableSummariesResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     tables: Optional[List[TableSummary]] = None
     """List of table summaries."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListTableSummariesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = [v.as_dict() for v in self.tables]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = self.tables
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = self.tables
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListTableSummariesResponse:
         """Deserializes the ListTableSummariesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), tables=_repeated_dict(d, 'tables', TableSummary))
-
-
-
-
-
+        return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableSummary))


 @dataclass
@@ -4568,33 +5411,32 @@ class ListTablesResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     tables: Optional[List[TableInfo]] = None
     """An array of table information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListTablesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = [v.as_dict() for v in self.tables]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.tables: body['tables'] = self.tables
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.tables:
+            body["tables"] = self.tables
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListTablesResponse:
         """Deserializes the ListTablesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), tables=_repeated_dict(d, 'tables', TableInfo))
-
-
-
-
-
+        return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableInfo))


 @dataclass
@@ -4603,313 +5445,443 @@ class ListVolumesResponseContent:
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request to retrieve the next page of
     results."""
-
+
     volumes: Optional[List[VolumeInfo]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.volumes:
+            body["volumes"] = [v.as_dict() for v in self.volumes]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.volumes: body['volumes'] = self.volumes
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.volumes:
+            body["volumes"] = self.volumes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent:
         """Deserializes the ListVolumesResponseContent from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), volumes=_repeated_dict(d, 'volumes', VolumeInfo))
-
-
+        return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo))


 class MatchType(Enum):
     """The artifact pattern matching type"""
-
-    PREFIX_MATCH = 'PREFIX_MATCH'
+
+    PREFIX_MATCH = "PREFIX_MATCH"
+

 @dataclass
 class MetastoreAssignment:
     workspace_id: int
     """The unique ID of the Databricks workspace."""
-
+
     metastore_id: str
     """The unique ID of the metastore."""
-
+
     default_catalog_name: Optional[str] = None
     """The name of the default catalog in the metastore."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.default_catalog_name is not None:
+            body["default_catalog_name"] = self.default_catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MetastoreAssignment:
         """Deserializes the MetastoreAssignment from a dictionary."""
-        return cls(default_catalog_name=d.get('default_catalog_name', None), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None))
-
-
+        return cls(
+            default_catalog_name=d.get("default_catalog_name", None),
+            metastore_id=d.get("metastore_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
storage_root.""" - + storage_root_credential_name: Optional[str] = None """Name of the storage credential to access the metastore storage_root.""" - + updated_at: Optional[int] = None """Time at which the metastore was last modified, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified the metastore.""" - + def as_dict(self) -> dict: """Serializes the MetastoreInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cloud is not None: body['cloud'] = self.cloud - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds - if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value - if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled - if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version - if self.region is not None: body['region'] = self.region - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.cloud is not None: + body["cloud"] = self.cloud + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope.value + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = 
self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cloud is not None: body['cloud'] = self.cloud - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.default_data_access_config_id is not None: body['default_data_access_config_id'] = self.default_data_access_config_id - if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds - if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope - if self.external_access_enabled is not None: body['external_access_enabled'] = self.external_access_enabled - if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version - if self.region is not None: body['region'] = self.region - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id - if self.storage_root_credential_name is not None: body['storage_root_credential_name'] = self.storage_root_credential_name - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.cloud is not None: + body["cloud"] = self.cloud + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if 
self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MetastoreInfo: """Deserializes the MetastoreInfo from a dictionary.""" - return cls(cloud=d.get('cloud', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), default_data_access_config_id=d.get('default_data_access_config_id', None), delta_sharing_organization_name=d.get('delta_sharing_organization_name', None), delta_sharing_recipient_token_lifetime_in_seconds=d.get('delta_sharing_recipient_token_lifetime_in_seconds', None), delta_sharing_scope=_enum(d, 'delta_sharing_scope', DeltaSharingScopeEnum), external_access_enabled=d.get('external_access_enabled', None), global_metastore_id=d.get('global_metastore_id', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), privilege_model_version=d.get('privilege_model_version', None), region=d.get('region', None), storage_root=d.get('storage_root', None), storage_root_credential_id=d.get('storage_root_credential_id', None), storage_root_credential_name=d.get('storage_root_credential_name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + cloud=d.get("cloud", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + default_data_access_config_id=d.get("default_data_access_config_id", None), + delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), + delta_sharing_recipient_token_lifetime_in_seconds=d.get( + "delta_sharing_recipient_token_lifetime_in_seconds", None + ), + delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), + external_access_enabled=d.get("external_access_enabled", None), + global_metastore_id=d.get("global_metastore_id", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + privilege_model_version=d.get("privilege_model_version", None), + region=d.get("region", None), + storage_root=d.get("storage_root", None), + storage_root_credential_id=d.get("storage_root_credential_id", None), + storage_root_credential_name=d.get("storage_root_credential_name", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class ModelVersionInfo: aliases: Optional[List[RegisteredModelAlias]] = None """List of aliases associated with the model version""" - + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """The name of the catalog containing the model version""" - + comment: Optional[str] = None """The comment attached to the model version""" 
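One property worth keeping in mind while reading these field blocks: every dataclass in this rewrite is meant to round-trip through its dict form, since as_dict omits None-valued fields and from_dict tolerates their absence. A small self-contained check against MetastoreAssignment (defined above), using illustrative values only:

    assignment = MetastoreAssignment(
        workspace_id=123456,  # illustrative values, not from any real workspace
        metastore_id="11111111-2222-3333-4444-555555555555",
        default_catalog_name="main",
    )
    body = assignment.as_dict()  # JSON-ready request body; None fields are dropped
    assert MetastoreAssignment.from_dict(body) == assignment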
- + created_at: Optional[int] = None - + created_by: Optional[str] = None """The identifier of the user who created the model version""" - + id: Optional[str] = None """The unique identifier of the model version""" - + metastore_id: Optional[str] = None """The unique identifier of the metastore containing the model version""" - + model_name: Optional[str] = None """The name of the parent registered model of the model version, relative to parent schema""" - + model_version_dependencies: Optional[DependencyList] = None """Model version dependencies, for feature-store packaged models""" - + run_id: Optional[str] = None """MLflow run ID used when creating the model version, if ``source`` was generated by an experiment run stored in an MLflow tracking server""" - + run_workspace_id: Optional[int] = None """ID of the Databricks workspace containing the MLflow run that generated this model version, if applicable""" - + schema_name: Optional[str] = None """The name of the schema containing the model version, relative to parent catalog""" - + source: Optional[str] = None """URI indicating the location of the source artifacts (files) for the model version""" - + status: Optional[ModelVersionInfoStatus] = None """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION status, then move to READY status once the model version files are uploaded and the model version is finalized. Only model versions in READY status can be loaded for inference or served.""" - + storage_location: Optional[str] = None """The storage location on the cloud under which model version data files are stored""" - + updated_at: Optional[int] = None - + updated_by: Optional[str] = None """The identifier of the user who updated the model version last time""" - + version: Optional[int] = None """Integer model version number, used to reference the model version in API requests.""" - + def as_dict(self) -> dict: """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.id is not None: body['id'] = self.id - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version_dependencies: body['model_version_dependencies'] = self.model_version_dependencies.as_dict() - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.source is not None: body['source'] = self.source - if self.status is not None: body['status'] = self.status.value - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by - if self.version is not None: body['version'] = self.version + if self.aliases: + body["aliases"] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: + 
body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.id is not None: + body["id"] = self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version_dependencies: + body["model_version_dependencies"] = self.model_version_dependencies.as_dict() + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_workspace_id is not None: + body["run_workspace_id"] = self.run_workspace_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status.value + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.aliases: body['aliases'] = self.aliases - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.id is not None: body['id'] = self.id - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version_dependencies: body['model_version_dependencies'] = self.model_version_dependencies - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.source is not None: body['source'] = self.source - if self.status is not None: body['status'] = self.status - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by - if self.version is not None: body['version'] = self.version + if self.aliases: + body["aliases"] = self.aliases + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.id is not None: + body["id"] = self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version_dependencies: + body["model_version_dependencies"] = self.model_version_dependencies + if self.run_id is not None: + 
body["run_id"] = self.run_id + if self.run_workspace_id is not None: + body["run_workspace_id"] = self.run_workspace_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: """Deserializes the ModelVersionInfo from a dictionary.""" - return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), id=d.get('id', None), metastore_id=d.get('metastore_id', None), model_name=d.get('model_name', None), model_version_dependencies=_from_dict(d, 'model_version_dependencies', DependencyList), run_id=d.get('run_id', None), run_workspace_id=d.get('run_workspace_id', None), schema_name=d.get('schema_name', None), source=d.get('source', None), status=_enum(d, 'status', ModelVersionInfoStatus), storage_location=d.get('storage_location', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), version=d.get('version', None)) - - + return cls( + aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + id=d.get("id", None), + metastore_id=d.get("metastore_id", None), + model_name=d.get("model_name", None), + model_version_dependencies=_from_dict(d, "model_version_dependencies", DependencyList), + run_id=d.get("run_id", None), + run_workspace_id=d.get("run_workspace_id", None), + schema_name=d.get("schema_name", None), + source=d.get("source", None), + status=_enum(d, "status", ModelVersionInfoStatus), + storage_location=d.get("storage_location", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + version=d.get("version", None), + ) class ModelVersionInfoStatus(Enum): """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION status, then move to READY status once the model version files are uploaded and the model version is finalized. Only model versions in READY status can be loaded for inference or served.""" - - FAILED_REGISTRATION = 'FAILED_REGISTRATION' - PENDING_REGISTRATION = 'PENDING_REGISTRATION' - READY = 'READY' + + FAILED_REGISTRATION = "FAILED_REGISTRATION" + PENDING_REGISTRATION = "PENDING_REGISTRATION" + READY = "READY" + @dataclass class MonitorCronSchedule: @@ -4917,66 +5889,75 @@ class MonitorCronSchedule: """The expression that determines when to run the monitor. See [examples]. 
[examples]: https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - + timezone_id: str """The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression.""" - + pause_status: Optional[MonitorCronSchedulePauseStatus] = None """Read only field that indicates whether a schedule is paused or not.""" - + def as_dict(self) -> dict: """Serializes the MonitorCronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: body['pause_status'] = self.pause_status.value - if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: body['pause_status'] = self.pause_status - if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.pause_status is not None: + body["pause_status"] = self.pause_status + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorCronSchedule: """Deserializes the MonitorCronSchedule from a dictionary.""" - return cls(pause_status=_enum(d, 'pause_status', MonitorCronSchedulePauseStatus), quartz_cron_expression=d.get('quartz_cron_expression', None), timezone_id=d.get('timezone_id', None)) - - + return cls( + pause_status=_enum(d, "pause_status", MonitorCronSchedulePauseStatus), + quartz_cron_expression=d.get("quartz_cron_expression", None), + timezone_id=d.get("timezone_id", None), + ) class MonitorCronSchedulePauseStatus(Enum): """Read only field that indicates whether a schedule is paused or not.""" - - PAUSED = 'PAUSED' - UNPAUSED = 'UNPAUSED' + + PAUSED = "PAUSED" + UNPAUSED = "UNPAUSED" + @dataclass class MonitorDataClassificationConfig: enabled: Optional[bool] = None """Whether data classification is enabled.""" - + def as_dict(self) -> dict: """Serializes the MonitorDataClassificationConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body def as_shallow_dict(self) -> dict: """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorDataClassificationConfig: """Deserializes the MonitorDataClassificationConfig from a dictionary.""" - return cls(enabled=d.get('enabled', None)) - - + return cls(enabled=d.get("enabled", None)) @dataclass @@ -4984,25 +5965,25 @@ class MonitorDestination: email_addresses: Optional[List[str]] = None """The list of email addresses to send 
the notification to. A maximum of 5 email addresses is supported.""" - + def as_dict(self) -> dict: """Serializes the MonitorDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.email_addresses: body['email_addresses'] = [v for v in self.email_addresses] + if self.email_addresses: + body["email_addresses"] = [v for v in self.email_addresses] return body def as_shallow_dict(self) -> dict: """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.email_addresses: body['email_addresses'] = self.email_addresses + if self.email_addresses: + body["email_addresses"] = self.email_addresses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorDestination: """Deserializes the MonitorDestination from a dictionary.""" - return cls(email_addresses=d.get('email_addresses', None)) - - + return cls(email_addresses=d.get("email_addresses", None)) @dataclass @@ -5013,215 +5994,290 @@ class MonitorInferenceLog: timestamp_col: str """Column that contains the timestamps of requests. The column must be one of the following: - A ``TimestampType`` column - A column whose values can be converted to timestamps through the pyspark ``to_timestamp`` [function]. [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" - + granularities: List[str] """Granularities for aggregating data into time windows based on their timestamp. Currently the following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.""" - + model_id_col: str """Column that contains the id of the model generating the predictions. Metrics will be computed per model id by default, and also across all model ids.""" - + problem_type: MonitorInferenceLogProblemType """Problem type the model aims to solve. Determines the type of model-quality metrics that will be computed.""" - + prediction_col: str """Column that contains the output/prediction from the model.""" - + label_col: Optional[str] = None """Optional column that contains the ground truth for the prediction.""" - + prediction_proba_col: Optional[str] = None """Optional column that contains the prediction probabilities for each class in a classification problem type. The values in this column should be a map, mapping each class label to the prediction probability for a given sample.
The map should be of PySpark MapType().""" - + def as_dict(self) -> dict: """Serializes the MonitorInferenceLog into a dictionary suitable for use as a JSON request body.""" body = {} - if self.granularities: body['granularities'] = [v for v in self.granularities] - if self.label_col is not None: body['label_col'] = self.label_col - if self.model_id_col is not None: body['model_id_col'] = self.model_id_col - if self.prediction_col is not None: body['prediction_col'] = self.prediction_col - if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col - if self.problem_type is not None: body['problem_type'] = self.problem_type.value - if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col + if self.granularities: + body["granularities"] = [v for v in self.granularities] + if self.label_col is not None: + body["label_col"] = self.label_col + if self.model_id_col is not None: + body["model_id_col"] = self.model_id_col + if self.prediction_col is not None: + body["prediction_col"] = self.prediction_col + if self.prediction_proba_col is not None: + body["prediction_proba_col"] = self.prediction_proba_col + if self.problem_type is not None: + body["problem_type"] = self.problem_type.value + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body def as_shallow_dict(self) -> dict: """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes.""" body = {} - if self.granularities: body['granularities'] = self.granularities - if self.label_col is not None: body['label_col'] = self.label_col - if self.model_id_col is not None: body['model_id_col'] = self.model_id_col - if self.prediction_col is not None: body['prediction_col'] = self.prediction_col - if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col - if self.problem_type is not None: body['problem_type'] = self.problem_type - if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col + if self.granularities: + body["granularities"] = self.granularities + if self.label_col is not None: + body["label_col"] = self.label_col + if self.model_id_col is not None: + body["model_id_col"] = self.model_id_col + if self.prediction_col is not None: + body["prediction_col"] = self.prediction_col + if self.prediction_proba_col is not None: + body["prediction_proba_col"] = self.prediction_proba_col + if self.problem_type is not None: + body["problem_type"] = self.problem_type + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorInferenceLog: """Deserializes the MonitorInferenceLog from a dictionary.""" - return cls(granularities=d.get('granularities', None), label_col=d.get('label_col', None), model_id_col=d.get('model_id_col', None), prediction_col=d.get('prediction_col', None), prediction_proba_col=d.get('prediction_proba_col', None), problem_type=_enum(d, 'problem_type', MonitorInferenceLogProblemType), timestamp_col=d.get('timestamp_col', None)) - - + return cls( + granularities=d.get("granularities", None), + label_col=d.get("label_col", None), + model_id_col=d.get("model_id_col", None), + prediction_col=d.get("prediction_col", None), + prediction_proba_col=d.get("prediction_proba_col", None), + problem_type=_enum(d, "problem_type", MonitorInferenceLogProblemType), + timestamp_col=d.get("timestamp_col", None), + ) class MonitorInferenceLogProblemType(Enum): """Problem 
type the model aims to solve. Determines the type of model-quality metrics that will be computed.""" - - PROBLEM_TYPE_CLASSIFICATION = 'PROBLEM_TYPE_CLASSIFICATION' - PROBLEM_TYPE_REGRESSION = 'PROBLEM_TYPE_REGRESSION' + + PROBLEM_TYPE_CLASSIFICATION = "PROBLEM_TYPE_CLASSIFICATION" + PROBLEM_TYPE_REGRESSION = "PROBLEM_TYPE_REGRESSION" + @dataclass class MonitorInfo: table_name: str """The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__.""" - + status: MonitorInfoStatus """The status of the monitor.""" - + monitor_version: str """The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted.""" - + profile_metrics_table_name: str """The full name of the profile metrics table. Format: __catalog_name__.__schema_name__.__table_name__.""" - + drift_metrics_table_name: str """The full name of the drift metrics table. Format: __catalog_name__.__schema_name__.__table_name__.""" - + assets_dir: Optional[str] = None """The directory to store monitoring assets (e.g. dashboard, metric tables).""" - + baseline_table_name: Optional[str] = None """Name of the baseline table from which drift metrics are computed. Columns in the monitored table should also be present in the baseline table.""" - + custom_metrics: Optional[List[MonitorMetric]] = None """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).""" - + dashboard_id: Optional[str] = None """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in PENDING state.""" - + data_classification_config: Optional[MonitorDataClassificationConfig] = None """The data classification config for the monitor.""" - + inference_log: Optional[MonitorInferenceLog] = None """Configuration for monitoring inference logs.""" - + latest_monitor_failure_msg: Optional[str] = None """The latest failure message of the monitor (if any).""" - + notifications: Optional[MonitorNotifications] = None """The notification settings for the monitor.""" - + output_schema_name: Optional[str] = None """Schema where output metric tables are created.""" - + schedule: Optional[MonitorCronSchedule] = None """The schedule for automatically updating and refreshing metric tables.""" - + slicing_exprs: Optional[List[str]] = None """List of column expressions to slice data with for targeted analysis. The data is grouped by each expression independently, resulting in a separate slice for each predicate and its complements.
For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" - + snapshot: Optional[MonitorSnapshot] = None """Configuration for monitoring snapshot tables.""" - + time_series: Optional[MonitorTimeSeries] = None """Configuration for monitoring time series tables.""" - + def as_dict(self) -> dict: """Serializes the MonitorInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assets_dir is not None: body['assets_dir'] = self.assets_dir - if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name - if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics] - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.data_classification_config: body['data_classification_config'] = self.data_classification_config.as_dict() - if self.drift_metrics_table_name is not None: body['drift_metrics_table_name'] = self.drift_metrics_table_name - if self.inference_log: body['inference_log'] = self.inference_log.as_dict() - if self.latest_monitor_failure_msg is not None: body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg - if self.monitor_version is not None: body['monitor_version'] = self.monitor_version - if self.notifications: body['notifications'] = self.notifications.as_dict() - if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name - if self.profile_metrics_table_name is not None: body['profile_metrics_table_name'] = self.profile_metrics_table_name - if self.schedule: body['schedule'] = self.schedule.as_dict() - if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs] - if self.snapshot: body['snapshot'] = self.snapshot.as_dict() - if self.status is not None: body['status'] = self.status.value - if self.table_name is not None: body['table_name'] = self.table_name - if self.time_series: body['time_series'] = self.time_series.as_dict() + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config.as_dict() + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.inference_log: + body["inference_log"] = self.inference_log.as_dict() + if self.latest_monitor_failure_msg is not None: + body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.notifications: + body["notifications"] = self.notifications.as_dict() + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.slicing_exprs: + body["slicing_exprs"] = [v for v in self.slicing_exprs] + if self.snapshot: + body["snapshot"] = self.snapshot.as_dict() + if self.status is not None: + body["status"] = self.status.value + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + 
body["time_series"] = self.time_series.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.assets_dir is not None: body['assets_dir'] = self.assets_dir - if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name - if self.custom_metrics: body['custom_metrics'] = self.custom_metrics - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.data_classification_config: body['data_classification_config'] = self.data_classification_config - if self.drift_metrics_table_name is not None: body['drift_metrics_table_name'] = self.drift_metrics_table_name - if self.inference_log: body['inference_log'] = self.inference_log - if self.latest_monitor_failure_msg is not None: body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg - if self.monitor_version is not None: body['monitor_version'] = self.monitor_version - if self.notifications: body['notifications'] = self.notifications - if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name - if self.profile_metrics_table_name is not None: body['profile_metrics_table_name'] = self.profile_metrics_table_name - if self.schedule: body['schedule'] = self.schedule - if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs - if self.snapshot: body['snapshot'] = self.snapshot - if self.status is not None: body['status'] = self.status - if self.table_name is not None: body['table_name'] = self.table_name - if self.time_series: body['time_series'] = self.time_series + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = self.custom_metrics + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.inference_log: + body["inference_log"] = self.inference_log + if self.latest_monitor_failure_msg is not None: + body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.notifications: + body["notifications"] = self.notifications + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule + if self.slicing_exprs: + body["slicing_exprs"] = self.slicing_exprs + if self.snapshot: + body["snapshot"] = self.snapshot + if self.status is not None: + body["status"] = self.status + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + body["time_series"] = self.time_series return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorInfo: """Deserializes the MonitorInfo from a dictionary.""" - return cls(assets_dir=d.get('assets_dir', None), baseline_table_name=d.get('baseline_table_name', None), custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric), dashboard_id=d.get('dashboard_id', None), data_classification_config=_from_dict(d, 
'data_classification_config', MonitorDataClassificationConfig), drift_metrics_table_name=d.get('drift_metrics_table_name', None), inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog), latest_monitor_failure_msg=d.get('latest_monitor_failure_msg', None), monitor_version=d.get('monitor_version', None), notifications=_from_dict(d, 'notifications', MonitorNotifications), output_schema_name=d.get('output_schema_name', None), profile_metrics_table_name=d.get('profile_metrics_table_name', None), schedule=_from_dict(d, 'schedule', MonitorCronSchedule), slicing_exprs=d.get('slicing_exprs', None), snapshot=_from_dict(d, 'snapshot', MonitorSnapshot), status=_enum(d, 'status', MonitorInfoStatus), table_name=d.get('table_name', None), time_series=_from_dict(d, 'time_series', MonitorTimeSeries)) - - + return cls( + assets_dir=d.get("assets_dir", None), + baseline_table_name=d.get("baseline_table_name", None), + custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), + dashboard_id=d.get("dashboard_id", None), + data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), + drift_metrics_table_name=d.get("drift_metrics_table_name", None), + inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), + latest_monitor_failure_msg=d.get("latest_monitor_failure_msg", None), + monitor_version=d.get("monitor_version", None), + notifications=_from_dict(d, "notifications", MonitorNotifications), + output_schema_name=d.get("output_schema_name", None), + profile_metrics_table_name=d.get("profile_metrics_table_name", None), + schedule=_from_dict(d, "schedule", MonitorCronSchedule), + slicing_exprs=d.get("slicing_exprs", None), + snapshot=_from_dict(d, "snapshot", MonitorSnapshot), + status=_enum(d, "status", MonitorInfoStatus), + table_name=d.get("table_name", None), + time_series=_from_dict(d, "time_series", MonitorTimeSeries), + ) class MonitorInfoStatus(Enum): """The status of the monitor.""" - - MONITOR_STATUS_ACTIVE = 'MONITOR_STATUS_ACTIVE' - MONITOR_STATUS_DELETE_PENDING = 'MONITOR_STATUS_DELETE_PENDING' - MONITOR_STATUS_ERROR = 'MONITOR_STATUS_ERROR' - MONITOR_STATUS_FAILED = 'MONITOR_STATUS_FAILED' - MONITOR_STATUS_PENDING = 'MONITOR_STATUS_PENDING' + + MONITOR_STATUS_ACTIVE = "MONITOR_STATUS_ACTIVE" + MONITOR_STATUS_DELETE_PENDING = "MONITOR_STATUS_DELETE_PENDING" + MONITOR_STATUS_ERROR = "MONITOR_STATUS_ERROR" + MONITOR_STATUS_FAILED = "MONITOR_STATUS_FAILED" + MONITOR_STATUS_PENDING = "MONITOR_STATUS_PENDING" + @dataclass class MonitorMetric: name: str """Name of the metric in the output tables.""" - + definition: str """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric definition]. [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition""" - + input_columns: List[str] """A list of column names in the input table the metric should be computed for. Can use ``":table"`` to indicate that the metric needs information from multiple columns.""" - + output_data_type: str """The output type of the custom metric.""" - + type: MonitorMetricType """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and @@ -5230,33 +6286,47 @@ class MonitorMetric: two consecutive time windows. 
- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" - + def as_dict(self) -> dict: """Serializes the MonitorMetric into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.input_columns: body['input_columns'] = [v for v in self.input_columns] - if self.name is not None: body['name'] = self.name - if self.output_data_type is not None: body['output_data_type'] = self.output_data_type - if self.type is not None: body['type'] = self.type.value + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = [v for v in self.input_columns] + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.input_columns: body['input_columns'] = self.input_columns - if self.name is not None: body['name'] = self.name - if self.output_data_type is not None: body['output_data_type'] = self.output_data_type - if self.type is not None: body['type'] = self.type + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = self.input_columns + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorMetric: """Deserializes the MonitorMetric from a dictionary.""" - return cls(definition=d.get('definition', None), input_columns=d.get('input_columns', None), name=d.get('name', None), output_data_type=d.get('output_data_type', None), type=_enum(d, 'type', MonitorMetricType)) - - + return cls( + definition=d.get("definition", None), + input_columns=d.get("input_columns", None), + name=d.get("name", None), + output_data_type=d.get("output_data_type", None), + type=_enum(d, "type", MonitorMetricType), + ) class MonitorMetricType(Enum): @@ -5267,129 +6337,154 @@ class MonitorMetricType(Enum): two consecutive time windows. 
- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" - - CUSTOM_METRIC_TYPE_AGGREGATE = 'CUSTOM_METRIC_TYPE_AGGREGATE' - CUSTOM_METRIC_TYPE_DERIVED = 'CUSTOM_METRIC_TYPE_DERIVED' - CUSTOM_METRIC_TYPE_DRIFT = 'CUSTOM_METRIC_TYPE_DRIFT' + + CUSTOM_METRIC_TYPE_AGGREGATE = "CUSTOM_METRIC_TYPE_AGGREGATE" + CUSTOM_METRIC_TYPE_DERIVED = "CUSTOM_METRIC_TYPE_DERIVED" + CUSTOM_METRIC_TYPE_DRIFT = "CUSTOM_METRIC_TYPE_DRIFT" + @dataclass class MonitorNotifications: on_failure: Optional[MonitorDestination] = None """Who to send notifications to on monitor failure.""" - + on_new_classification_tag_detected: Optional[MonitorDestination] = None """Who to send notifications to when new data classification tags are detected.""" - + def as_dict(self) -> dict: """Serializes the MonitorNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.on_failure: body['on_failure'] = self.on_failure.as_dict() - if self.on_new_classification_tag_detected: body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected.as_dict() + if self.on_failure: + body["on_failure"] = self.on_failure.as_dict() + if self.on_new_classification_tag_detected: + body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.on_failure: body['on_failure'] = self.on_failure - if self.on_new_classification_tag_detected: body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected + if self.on_failure: + body["on_failure"] = self.on_failure + if self.on_new_classification_tag_detected: + body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorNotifications: """Deserializes the MonitorNotifications from a dictionary.""" - return cls(on_failure=_from_dict(d, 'on_failure', MonitorDestination), on_new_classification_tag_detected=_from_dict(d, 'on_new_classification_tag_detected', MonitorDestination)) - - + return cls( + on_failure=_from_dict(d, "on_failure", MonitorDestination), + on_new_classification_tag_detected=_from_dict(d, "on_new_classification_tag_detected", MonitorDestination), + ) @dataclass class MonitorRefreshInfo: refresh_id: int """Unique id of the refresh operation.""" - + state: MonitorRefreshInfoState """The current state of the refresh.""" - + start_time_ms: int """Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC).""" - + end_time_ms: Optional[int] = None """Time at which refresh operation completed (milliseconds since 1/1/1970 UTC).""" - + message: Optional[str] = None """An optional message to give insight into the current state of the job (e.g. 
FAILURE messages).""" - + trigger: Optional[MonitorRefreshInfoTrigger] = None """The method by which the refresh was triggered.""" - + def as_dict(self) -> dict: """Serializes the MonitorRefreshInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms - if self.message is not None: body['message'] = self.message - if self.refresh_id is not None: body['refresh_id'] = self.refresh_id - if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms - if self.state is not None: body['state'] = self.state.value - if self.trigger is not None: body['trigger'] = self.trigger.value + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state.value + if self.trigger is not None: + body["trigger"] = self.trigger.value return body def as_shallow_dict(self) -> dict: """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms - if self.message is not None: body['message'] = self.message - if self.refresh_id is not None: body['refresh_id'] = self.refresh_id - if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms - if self.state is not None: body['state'] = self.state - if self.trigger is not None: body['trigger'] = self.trigger + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state + if self.trigger is not None: + body["trigger"] = self.trigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshInfo: """Deserializes the MonitorRefreshInfo from a dictionary.""" - return cls(end_time_ms=d.get('end_time_ms', None), message=d.get('message', None), refresh_id=d.get('refresh_id', None), start_time_ms=d.get('start_time_ms', None), state=_enum(d, 'state', MonitorRefreshInfoState), trigger=_enum(d, 'trigger', MonitorRefreshInfoTrigger)) - - + return cls( + end_time_ms=d.get("end_time_ms", None), + message=d.get("message", None), + refresh_id=d.get("refresh_id", None), + start_time_ms=d.get("start_time_ms", None), + state=_enum(d, "state", MonitorRefreshInfoState), + trigger=_enum(d, "trigger", MonitorRefreshInfoTrigger), + ) class MonitorRefreshInfoState(Enum): """The current state of the refresh.""" - - CANCELED = 'CANCELED' - FAILED = 'FAILED' - PENDING = 'PENDING' - RUNNING = 'RUNNING' - SUCCESS = 'SUCCESS' -class MonitorRefreshInfoTrigger(Enum): + CANCELED = "CANCELED" + FAILED = "FAILED" + PENDING = "PENDING" + RUNNING = "RUNNING" + SUCCESS = "SUCCESS" + + +class MonitorRefreshInfoTrigger(Enum): """The method by which the refresh was triggered.""" - - MANUAL = 'MANUAL' - SCHEDULE = 'SCHEDULE' + + MANUAL = "MANUAL" + SCHEDULE = "SCHEDULE" + @dataclass class MonitorRefreshListResponse: refreshes: Optional[List[MonitorRefreshInfo]] = None """List of refreshes.""" - + def as_dict(self) -> dict: """Serializes the MonitorRefreshListResponse into a dictionary 
suitable for use as a JSON request body.""" body = {} - if self.refreshes: body['refreshes'] = [v.as_dict() for v in self.refreshes] + if self.refreshes: + body["refreshes"] = [v.as_dict() for v in self.refreshes] return body def as_shallow_dict(self) -> dict: """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.refreshes: body['refreshes'] = self.refreshes + if self.refreshes: + body["refreshes"] = self.refreshes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshListResponse: """Deserializes the MonitorRefreshListResponse from a dictionary.""" - return cls(refreshes=_repeated_dict(d, 'refreshes', MonitorRefreshInfo)) - - + return cls(refreshes=_repeated_dict(d, "refreshes", MonitorRefreshInfo)) @dataclass @@ -5408,8 +6503,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> MonitorSnapshot: """Deserializes the MonitorSnapshot from a dictionary.""" return cls() - - @dataclass @@ -5420,112 +6513,128 @@ class MonitorTimeSeries: timestamp_col: str """Column that contains the timestamps of requests. The column must be one of the following: - A ``TimestampType`` column - A column whose values can be converted to timestamps through the pyspark ``to_timestamp`` [function]. [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" - + granularities: List[str] """Granularities for aggregating data into time windows based on their timestamp. Currently the following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.""" - + def as_dict(self) -> dict: """Serializes the MonitorTimeSeries into a dictionary suitable for use as a JSON request body.""" body = {} - if self.granularities: body['granularities'] = [v for v in self.granularities] - if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col + if self.granularities: + body["granularities"] = [v for v in self.granularities] + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body def as_shallow_dict(self) -> dict: """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes.""" body = {} - if self.granularities: body['granularities'] = self.granularities - if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col + if self.granularities: + body["granularities"] = self.granularities + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MonitorTimeSeries: """Deserializes the MonitorTimeSeries from a dictionary.""" - return cls(granularities=d.get('granularities', None), timestamp_col=d.get('timestamp_col', None)) - - + return cls(granularities=d.get("granularities", None), timestamp_col=d.get("timestamp_col", None)) @dataclass class NamedTableConstraint: name: str """The name of the constraint.""" - + def as_dict(self) -> dict: """Serializes the NamedTableConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NamedTableConstraint: """Deserializes the NamedTableConstraint from a dictionary.""" - return
cls(name=d.get('name', None)) - - + return cls(name=d.get("name", None)) @dataclass class OnlineTable: """Online Table information.""" - + name: Optional[str] = None """Full three-part (catalog, schema, table) name of the table.""" - + spec: Optional[OnlineTableSpec] = None """Specification of the online table.""" - + status: Optional[OnlineTableStatus] = None """Online Table data synchronization status""" - + table_serving_url: Optional[str] = None """Data serving REST API URL for this table""" - + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the online table entity in Unity Catalog. This is distinct from the state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline may be in "PROVISIONING" as it runs asynchronously).""" - + def as_dict(self) -> dict: """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.spec: body['spec'] = self.spec.as_dict() - if self.status: body['status'] = self.status.as_dict() - if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec.as_dict() + if self.status: + body["status"] = self.status.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body def as_shallow_dict(self) -> dict: """Serializes the OnlineTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.spec: body['spec'] = self.spec - if self.status: body['status'] = self.status - if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec + if self.status: + body["status"] = self.status + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OnlineTable: """Deserializes the OnlineTable from a dictionary.""" - return cls(name=d.get('name', None), spec=_from_dict(d, 'spec', OnlineTableSpec), status=_from_dict(d, 'status', OnlineTableStatus), table_serving_url=d.get('table_serving_url', None), unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state', ProvisioningInfoState)) - - + return cls( + name=d.get("name", None), + spec=_from_dict(d, "spec", OnlineTableSpec), + status=_from_dict(d, "status", OnlineTableStatus), + table_serving_url=d.get("table_serving_url", None), + unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), + ) @dataclass class OnlineTableSpec: """Specification of an online table.""" - + perform_full_copy: Optional[bool] = None """Whether to create a full-copy pipeline -- a 
pipeline that stops after creating a full copy of the source table upon initialization and does not process any change data feeds (CDFs) afterwards. @@ -5533,56 +6642,76 @@ class OnlineTableSpec: the source table and there are no incremental updates. This mode is useful for syncing views or tables without CDFs to online tables. Note that the full-copy pipeline only supports the "triggered" scheduling policy.""" - + pipeline_id: Optional[str] = None """ID of the associated pipeline. Generated by the server - cannot be set by the caller.""" - + primary_key_columns: Optional[List[str]] = None """Primary Key columns to be used for data insert/update in the destination.""" - + run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None """Pipeline runs continuously after generating the initial data.""" - + run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None """Pipeline stops after generating the initial data and can be triggered later (manually, through a cron job or through data triggers)""" - + source_table_full_name: Optional[str] = None """Three-part (catalog, schema, table) name of the source Delta table.""" - + timeseries_key: Optional[str] = None """Time series key to deduplicate (tie-break) rows with the same primary key.""" - + def as_dict(self) -> dict: """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.primary_key_columns: body['primary_key_columns'] = [v for v in self.primary_key_columns] - if self.run_continuously: body['run_continuously'] = self.run_continuously.as_dict() - if self.run_triggered: body['run_triggered'] = self.run_triggered.as_dict() - if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name - if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key + if self.perform_full_copy is not None: + body["perform_full_copy"] = self.perform_full_copy + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.primary_key_columns: + body["primary_key_columns"] = [v for v in self.primary_key_columns] + if self.run_continuously: + body["run_continuously"] = self.run_continuously.as_dict() + if self.run_triggered: + body["run_triggered"] = self.run_triggered.as_dict() + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] = self.timeseries_key return body def as_shallow_dict(self) -> dict: """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns - if self.run_continuously: body['run_continuously'] = self.run_continuously - if self.run_triggered: body['run_triggered'] = self.run_triggered - if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name - if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key + if self.perform_full_copy is not None: + body["perform_full_copy"] = self.perform_full_copy + if self.pipeline_id is not None: +
body["pipeline_id"] = self.pipeline_id + if self.primary_key_columns: + body["primary_key_columns"] = self.primary_key_columns + if self.run_continuously: + body["run_continuously"] = self.run_continuously + if self.run_triggered: + body["run_triggered"] = self.run_triggered + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] = self.timeseries_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpec: """Deserializes the OnlineTableSpec from a dictionary.""" - return cls(perform_full_copy=d.get('perform_full_copy', None), pipeline_id=d.get('pipeline_id', None), primary_key_columns=d.get('primary_key_columns', None), run_continuously=_from_dict(d, 'run_continuously', OnlineTableSpecContinuousSchedulingPolicy), run_triggered=_from_dict(d, 'run_triggered', OnlineTableSpecTriggeredSchedulingPolicy), source_table_full_name=d.get('source_table_full_name', None), timeseries_key=d.get('timeseries_key', None)) - - + return cls( + perform_full_copy=d.get("perform_full_copy", None), + pipeline_id=d.get("pipeline_id", None), + primary_key_columns=d.get("primary_key_columns", None), + run_continuously=_from_dict(d, "run_continuously", OnlineTableSpecContinuousSchedulingPolicy), + run_triggered=_from_dict(d, "run_triggered", OnlineTableSpecTriggeredSchedulingPolicy), + source_table_full_name=d.get("source_table_full_name", None), + timeseries_key=d.get("timeseries_key", None), + ) @dataclass @@ -5601,8 +6730,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecContinuousSchedulingPolicy: """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary.""" return cls() - - @dataclass @@ -5621,1397 +6748,1762 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecTriggeredSchedulingPolicy: """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary.""" return cls() - - class OnlineTableState(Enum): """The state of an online table.""" - - OFFLINE = 'OFFLINE' - OFFLINE_FAILED = 'OFFLINE_FAILED' - ONLINE = 'ONLINE' - ONLINE_CONTINUOUS_UPDATE = 'ONLINE_CONTINUOUS_UPDATE' - ONLINE_NO_PENDING_UPDATE = 'ONLINE_NO_PENDING_UPDATE' - ONLINE_PIPELINE_FAILED = 'ONLINE_PIPELINE_FAILED' - ONLINE_TRIGGERED_UPDATE = 'ONLINE_TRIGGERED_UPDATE' - ONLINE_UPDATING_PIPELINE_RESOURCES = 'ONLINE_UPDATING_PIPELINE_RESOURCES' - PROVISIONING = 'PROVISIONING' - PROVISIONING_INITIAL_SNAPSHOT = 'PROVISIONING_INITIAL_SNAPSHOT' - PROVISIONING_PIPELINE_RESOURCES = 'PROVISIONING_PIPELINE_RESOURCES' + + OFFLINE = "OFFLINE" + OFFLINE_FAILED = "OFFLINE_FAILED" + ONLINE = "ONLINE" + ONLINE_CONTINUOUS_UPDATE = "ONLINE_CONTINUOUS_UPDATE" + ONLINE_NO_PENDING_UPDATE = "ONLINE_NO_PENDING_UPDATE" + ONLINE_PIPELINE_FAILED = "ONLINE_PIPELINE_FAILED" + ONLINE_TRIGGERED_UPDATE = "ONLINE_TRIGGERED_UPDATE" + ONLINE_UPDATING_PIPELINE_RESOURCES = "ONLINE_UPDATING_PIPELINE_RESOURCES" + PROVISIONING = "PROVISIONING" + PROVISIONING_INITIAL_SNAPSHOT = "PROVISIONING_INITIAL_SNAPSHOT" + PROVISIONING_PIPELINE_RESOURCES = "PROVISIONING_PIPELINE_RESOURCES" + @dataclass class OnlineTableStatus: """Status of an online table.""" - + continuous_update_status: Optional[ContinuousUpdateStatus] = None """Detailed status of an online table. 
Shown if the online table is in the ONLINE_CONTINUOUS_UPDATE or the ONLINE_UPDATING_PIPELINE_RESOURCES state.""" - + detailed_state: Optional[OnlineTableState] = None """The state of the online table.""" - + failed_status: Optional[FailedStatus] = None """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the ONLINE_PIPELINE_FAILED state.""" - + message: Optional[str] = None """A text description of the current state of the online table.""" - + provisioning_status: Optional[ProvisioningStatus] = None """Detailed status of an online table. Shown if the online table is in the PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - + triggered_update_status: Optional[TriggeredUpdateStatus] = None """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE or the ONLINE_NO_PENDING_UPDATE state.""" - + def as_dict(self) -> dict: """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status.as_dict() - if self.detailed_state is not None: body['detailed_state'] = self.detailed_state.value - if self.failed_status: body['failed_status'] = self.failed_status.as_dict() - if self.message is not None: body['message'] = self.message - if self.provisioning_status: body['provisioning_status'] = self.provisioning_status.as_dict() - if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status.as_dict() + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status.as_dict() + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state.value + if self.failed_status: + body["failed_status"] = self.failed_status.as_dict() + if self.message is not None: + body["message"] = self.message + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status.as_dict() + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status - if self.detailed_state is not None: body['detailed_state'] = self.detailed_state - if self.failed_status: body['failed_status'] = self.failed_status - if self.message is not None: body['message'] = self.message - if self.provisioning_status: body['provisioning_status'] = self.provisioning_status - if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state + if self.failed_status: + body["failed_status"] = self.failed_status + if self.message is not None: + body["message"] = self.message + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus: """Deserializes the OnlineTableStatus from a dictionary.""" - return cls(continuous_update_status=_from_dict(d, 'continuous_update_status', 
ContinuousUpdateStatus), detailed_state=_enum(d, 'detailed_state', OnlineTableState), failed_status=_from_dict(d, 'failed_status', FailedStatus), message=d.get('message', None), provisioning_status=_from_dict(d, 'provisioning_status', ProvisioningStatus), triggered_update_status=_from_dict(d, 'triggered_update_status', TriggeredUpdateStatus)) - - + return cls( + continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus), + detailed_state=_enum(d, "detailed_state", OnlineTableState), + failed_status=_from_dict(d, "failed_status", FailedStatus), + message=d.get("message", None), + provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus), + triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus), + ) @dataclass class PermissionsChange: add: Optional[List[Privilege]] = None """The set of privileges to add.""" - + principal: Optional[str] = None """The principal whose privileges we are changing.""" - + remove: Optional[List[Privilege]] = None """The set of privileges to remove.""" - + def as_dict(self) -> dict: """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.add: body['add'] = [v.value for v in self.add] - if self.principal is not None: body['principal'] = self.principal - if self.remove: body['remove'] = [v.value for v in self.remove] + if self.add: + body["add"] = [v.value for v in self.add] + if self.principal is not None: + body["principal"] = self.principal + if self.remove: + body["remove"] = [v.value for v in self.remove] return body def as_shallow_dict(self) -> dict: """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes.""" body = {} - if self.add: body['add'] = self.add - if self.principal is not None: body['principal'] = self.principal - if self.remove: body['remove'] = self.remove + if self.add: + body["add"] = self.add + if self.principal is not None: + body["principal"] = self.principal + if self.remove: + body["remove"] = self.remove return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: """Deserializes the PermissionsChange from a dictionary.""" - return cls(add=_repeated_enum(d, 'add', Privilege), principal=d.get('principal', None), remove=_repeated_enum(d, 'remove', Privilege)) - - + return cls( + add=_repeated_enum(d, "add", Privilege), + principal=d.get("principal", None), + remove=_repeated_enum(d, "remove", Privilege), + ) @dataclass class PipelineProgress: """Progress information of the Online Table data synchronization pipeline.""" - + estimated_completion_time_seconds: Optional[float] = None """The estimated time remaining to complete this update in seconds.""" - + latest_version_currently_processing: Optional[int] = None """The source table Delta version that was last processed by the pipeline. The pipeline may not have completely processed this version yet.""" - + sync_progress_completion: Optional[float] = None """The completion ratio of this update. This is a number between 0 and 1.""" - + synced_row_count: Optional[int] = None """The number of rows that have been synced in this update.""" - + total_row_count: Optional[int] = None """The total number of rows that need to be synced in this update. 
This number may be an estimate.""" - + def as_dict(self) -> dict: """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body.""" body = {} - if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds - if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion - if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count - if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + if self.estimated_completion_time_seconds is not None: + body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count return body def as_shallow_dict(self) -> dict: """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes.""" body = {} - if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds - if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion - if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count - if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + if self.estimated_completion_time_seconds is not None: + body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress: """Deserializes the PipelineProgress from a dictionary.""" - return cls(estimated_completion_time_seconds=d.get('estimated_completion_time_seconds', None), latest_version_currently_processing=d.get('latest_version_currently_processing', None), sync_progress_completion=d.get('sync_progress_completion', None), synced_row_count=d.get('synced_row_count', None), total_row_count=d.get('total_row_count', None)) - - + return cls( + estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None), + latest_version_currently_processing=d.get("latest_version_currently_processing", None), + sync_progress_completion=d.get("sync_progress_completion", None), + synced_row_count=d.get("synced_row_count", None), + total_row_count=d.get("total_row_count", None), + ) @dataclass class PrimaryKeyConstraint: name: str """The 
name of the constraint.""" - + child_columns: List[str] """Column names for this constraint.""" - + timeseries_columns: Optional[List[str]] = None """Column names that represent a timeseries.""" - + def as_dict(self) -> dict: """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.child_columns: body['child_columns'] = [v for v in self.child_columns] - if self.name is not None: body['name'] = self.name - if self.timeseries_columns: body['timeseries_columns'] = [v for v in self.timeseries_columns] + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] + if self.name is not None: + body["name"] = self.name + if self.timeseries_columns: + body["timeseries_columns"] = [v for v in self.timeseries_columns] return body def as_shallow_dict(self) -> dict: """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.child_columns: body['child_columns'] = self.child_columns - if self.name is not None: body['name'] = self.name - if self.timeseries_columns: body['timeseries_columns'] = self.timeseries_columns + if self.child_columns: + body["child_columns"] = self.child_columns + if self.name is not None: + body["name"] = self.name + if self.timeseries_columns: + body["timeseries_columns"] = self.timeseries_columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: """Deserializes the PrimaryKeyConstraint from a dictionary.""" - return cls(child_columns=d.get('child_columns', None), name=d.get('name', None), timeseries_columns=d.get('timeseries_columns', None)) - - + return cls( + child_columns=d.get("child_columns", None), + name=d.get("name", None), + timeseries_columns=d.get("timeseries_columns", None), + ) class Privilege(Enum): - - - ACCESS = 'ACCESS' - ALL_PRIVILEGES = 'ALL_PRIVILEGES' - APPLY_TAG = 'APPLY_TAG' - BROWSE = 'BROWSE' - CREATE = 'CREATE' - CREATE_CATALOG = 'CREATE_CATALOG' - CREATE_CLEAN_ROOM = 'CREATE_CLEAN_ROOM' - CREATE_CONNECTION = 'CREATE_CONNECTION' - CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION' - CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE' - CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME' - CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG' - CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE' - CREATE_FUNCTION = 'CREATE_FUNCTION' - CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE' - CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW' - CREATE_MODEL = 'CREATE_MODEL' - CREATE_PROVIDER = 'CREATE_PROVIDER' - CREATE_RECIPIENT = 'CREATE_RECIPIENT' - CREATE_SCHEMA = 'CREATE_SCHEMA' - CREATE_SERVICE_CREDENTIAL = 'CREATE_SERVICE_CREDENTIAL' - CREATE_SHARE = 'CREATE_SHARE' - CREATE_STORAGE_CREDENTIAL = 'CREATE_STORAGE_CREDENTIAL' - CREATE_TABLE = 'CREATE_TABLE' - CREATE_VIEW = 'CREATE_VIEW' - CREATE_VOLUME = 'CREATE_VOLUME' - EXECUTE = 'EXECUTE' - EXECUTE_CLEAN_ROOM_TASK = 'EXECUTE_CLEAN_ROOM_TASK' - MANAGE = 'MANAGE' - MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST' - MODIFY = 'MODIFY' - MODIFY_CLEAN_ROOM = 'MODIFY_CLEAN_ROOM' - READ_FILES = 'READ_FILES' - READ_PRIVATE_FILES = 'READ_PRIVATE_FILES' - READ_VOLUME = 'READ_VOLUME' - REFRESH = 'REFRESH' - SELECT = 'SELECT' - SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION' - USAGE = 'USAGE' - USE_CATALOG = 'USE_CATALOG' - USE_CONNECTION = 'USE_CONNECTION' - USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS' - USE_PROVIDER = 'USE_PROVIDER' - USE_RECIPIENT = 'USE_RECIPIENT' - USE_SCHEMA = 'USE_SCHEMA' - USE_SHARE = 'USE_SHARE' - WRITE_FILES = 
'WRITE_FILES' - WRITE_PRIVATE_FILES = 'WRITE_PRIVATE_FILES' - WRITE_VOLUME = 'WRITE_VOLUME' + + ACCESS = "ACCESS" + ALL_PRIVILEGES = "ALL_PRIVILEGES" + APPLY_TAG = "APPLY_TAG" + BROWSE = "BROWSE" + CREATE = "CREATE" + CREATE_CATALOG = "CREATE_CATALOG" + CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM" + CREATE_CONNECTION = "CREATE_CONNECTION" + CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION" + CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE" + CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME" + CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG" + CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE" + CREATE_FUNCTION = "CREATE_FUNCTION" + CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE" + CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW" + CREATE_MODEL = "CREATE_MODEL" + CREATE_PROVIDER = "CREATE_PROVIDER" + CREATE_RECIPIENT = "CREATE_RECIPIENT" + CREATE_SCHEMA = "CREATE_SCHEMA" + CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL" + CREATE_SHARE = "CREATE_SHARE" + CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL" + CREATE_TABLE = "CREATE_TABLE" + CREATE_VIEW = "CREATE_VIEW" + CREATE_VOLUME = "CREATE_VOLUME" + EXECUTE = "EXECUTE" + EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" + MANAGE = "MANAGE" + MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" + MODIFY = "MODIFY" + MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM" + READ_FILES = "READ_FILES" + READ_PRIVATE_FILES = "READ_PRIVATE_FILES" + READ_VOLUME = "READ_VOLUME" + REFRESH = "REFRESH" + SELECT = "SELECT" + SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION" + USAGE = "USAGE" + USE_CATALOG = "USE_CATALOG" + USE_CONNECTION = "USE_CONNECTION" + USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS" + USE_PROVIDER = "USE_PROVIDER" + USE_RECIPIENT = "USE_RECIPIENT" + USE_SCHEMA = "USE_SCHEMA" + USE_SHARE = "USE_SHARE" + WRITE_FILES = "WRITE_FILES" + WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES" + WRITE_VOLUME = "WRITE_VOLUME" + @dataclass class PrivilegeAssignment: principal: Optional[str] = None """The principal (user email address or group name).""" - + privileges: Optional[List[Privilege]] = None """The privileges assigned to the principal.""" - + def as_dict(self) -> dict: """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.privileges: body['privileges'] = [v.value for v in self.privileges] + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = [v.value for v in self.privileges] return body def as_shallow_dict(self) -> dict: """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.privileges: body['privileges'] = self.privileges + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = self.privileges return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get('principal', None), privileges=_repeated_enum(d, 'privileges', Privilege)) - - + return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) @dataclass class ProvisioningInfo: """Status of an asynchronously provisioned resource.""" - + state: Optional[ProvisioningInfoState] = None """The provisioning state of the resource.""" - + def as_dict(self) -> dict: 
"""Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.state is not None: body['state'] = self.state.value + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.state is not None: body['state'] = self.state + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ProvisioningInfo: """Deserializes the ProvisioningInfo from a dictionary.""" - return cls(state=_enum(d, 'state', ProvisioningInfoState)) - - + return cls(state=_enum(d, "state", ProvisioningInfoState)) class ProvisioningInfoState(Enum): - - - ACTIVE = 'ACTIVE' - DEGRADED = 'DEGRADED' - DELETING = 'DELETING' - FAILED = 'FAILED' - PROVISIONING = 'PROVISIONING' - UPDATING = 'UPDATING' + + ACTIVE = "ACTIVE" + DEGRADED = "DEGRADED" + DELETING = "DELETING" + FAILED = "FAILED" + PROVISIONING = "PROVISIONING" + UPDATING = "UPDATING" + @dataclass class ProvisioningStatus: """Detailed status of an online table. Shown if the online table is in the PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - + initial_pipeline_sync_progress: Optional[PipelineProgress] = None """Details about initial data synchronization. Only populated when in the PROVISIONING_INITIAL_SNAPSHOT state.""" - + def as_dict(self) -> dict: """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict() + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ProvisioningStatus: """Deserializes the ProvisioningStatus from a dictionary.""" - return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress)) - - + return cls(initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress)) @dataclass class QuotaInfo: last_refreshed_at: Optional[int] = None """The timestamp that indicates when the quota count was last updated.""" - + parent_full_name: Optional[str] = None """Name of the parent resource. 
Returns metastore ID if the parent is a metastore.""" - + parent_securable_type: Optional[SecurableType] = None """The quota parent securable type.""" - + quota_count: Optional[int] = None """The current usage of the resource quota.""" - + quota_limit: Optional[int] = None """The current limit of the resource quota.""" - + quota_name: Optional[str] = None """The name of the quota.""" - + def as_dict(self) -> dict: """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at - if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name - if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type.value - if self.quota_count is not None: body['quota_count'] = self.quota_count - if self.quota_limit is not None: body['quota_limit'] = self.quota_limit - if self.quota_name is not None: body['quota_name'] = self.quota_name + if self.last_refreshed_at is not None: + body["last_refreshed_at"] = self.last_refreshed_at + if self.parent_full_name is not None: + body["parent_full_name"] = self.parent_full_name + if self.parent_securable_type is not None: + body["parent_securable_type"] = self.parent_securable_type.value + if self.quota_count is not None: + body["quota_count"] = self.quota_count + if self.quota_limit is not None: + body["quota_limit"] = self.quota_limit + if self.quota_name is not None: + body["quota_name"] = self.quota_name return body def as_shallow_dict(self) -> dict: """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at - if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name - if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type - if self.quota_count is not None: body['quota_count'] = self.quota_count - if self.quota_limit is not None: body['quota_limit'] = self.quota_limit - if self.quota_name is not None: body['quota_name'] = self.quota_name + if self.last_refreshed_at is not None: + body["last_refreshed_at"] = self.last_refreshed_at + if self.parent_full_name is not None: + body["parent_full_name"] = self.parent_full_name + if self.parent_securable_type is not None: + body["parent_securable_type"] = self.parent_securable_type + if self.quota_count is not None: + body["quota_count"] = self.quota_count + if self.quota_limit is not None: + body["quota_limit"] = self.quota_limit + if self.quota_name is not None: + body["quota_name"] = self.quota_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QuotaInfo: """Deserializes the QuotaInfo from a dictionary.""" - return cls(last_refreshed_at=d.get('last_refreshed_at', None), parent_full_name=d.get('parent_full_name', None), parent_securable_type=_enum(d, 'parent_securable_type', SecurableType), quota_count=d.get('quota_count', None), quota_limit=d.get('quota_limit', None), quota_name=d.get('quota_name', None)) - - + return cls( + last_refreshed_at=d.get("last_refreshed_at", None), + parent_full_name=d.get("parent_full_name", None), + parent_securable_type=_enum(d, "parent_securable_type", SecurableType), + quota_count=d.get("quota_count", None), + quota_limit=d.get("quota_limit", None), + quota_name=d.get("quota_name", None), + ) @dataclass class R2Credentials: """R2 temporary credentials for API 
authentication. Read more at https://developers.cloudflare.com/r2/api/s3/tokens/.""" - + access_key_id: Optional[str] = None """The access key ID that identifies the temporary credentials.""" - + secret_access_key: Optional[str] = None """The secret access key associated with the access key.""" - + session_token: Optional[str] = None """The generated JWT that users must pass to use the temporary credentials.""" - + def as_dict(self) -> dict: """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_key_id is not None: body['access_key_id'] = self.access_key_id - if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key - if self.session_token is not None: body['session_token'] = self.session_token + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token return body def as_shallow_dict(self) -> dict: """Serializes the R2Credentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_key_id is not None: body['access_key_id'] = self.access_key_id - if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key - if self.session_token is not None: body['session_token'] = self.session_token + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> R2Credentials: """Deserializes the R2Credentials from a dictionary.""" - return cls(access_key_id=d.get('access_key_id', None), secret_access_key=d.get('secret_access_key', None), session_token=d.get('session_token', None)) - - - - - + return cls( + access_key_id=d.get("access_key_id", None), + secret_access_key=d.get("secret_access_key", None), + session_token=d.get("session_token", None), + ) @dataclass class RegenerateDashboardRequest: table_name: Optional[str] = None """Full name of the table.""" - + warehouse_id: Optional[str] = None """Optional argument to specify the warehouse for dashboard regeneration. 
If not specified, the first running warehouse will be used.""" - + def as_dict(self) -> dict: """Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.table_name is not None: body['table_name'] = self.table_name - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.table_name is not None: + body["table_name"] = self.table_name + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.table_name is not None: body['table_name'] = self.table_name - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.table_name is not None: + body["table_name"] = self.table_name + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardRequest: """Deserializes the RegenerateDashboardRequest from a dictionary.""" - return cls(table_name=d.get('table_name', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls(table_name=d.get("table_name", None), warehouse_id=d.get("warehouse_id", None)) @dataclass class RegenerateDashboardResponse: dashboard_id: Optional[str] = None """Id of the regenerated monitoring dashboard.""" - + parent_folder: Optional[str] = None """The directory where the regenerated dashboard is stored.""" - + def as_dict(self) -> dict: """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.parent_folder is not None: body['parent_folder'] = self.parent_folder + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.parent_folder is not None: + body["parent_folder"] = self.parent_folder return body def as_shallow_dict(self) -> dict: """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.parent_folder is not None: body['parent_folder'] = self.parent_folder + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.parent_folder is not None: + body["parent_folder"] = self.parent_folder return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardResponse: """Deserializes the RegenerateDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), parent_folder=d.get('parent_folder', None)) - - + return cls(dashboard_id=d.get("dashboard_id", None), parent_folder=d.get("parent_folder", None)) @dataclass class RegisteredModelAlias: """Registered model alias.""" - + alias_name: Optional[str] = None """Name of the alias, e.g. 
'champion' or 'latest_stable'""" - + version_num: Optional[int] = None """Integer version number of the model version to which this alias points.""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alias_name is not None: body['alias_name'] = self.alias_name - if self.version_num is not None: body['version_num'] = self.version_num + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" body = {} - if self.alias_name is not None: body['alias_name'] = self.alias_name - if self.version_num is not None: body['version_num'] = self.version_num + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: """Deserializes the RegisteredModelAlias from a dictionary.""" - return cls(alias_name=d.get('alias_name', None), version_num=d.get('version_num', None)) - - + return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) @dataclass class RegisteredModelInfo: aliases: Optional[List[RegisteredModelAlias]] = None """List of aliases associated with the registered model""" - + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """The name of the catalog where the schema and the registered model reside""" - + comment: Optional[str] = None """The comment attached to the registered model""" - + created_at: Optional[int] = None """Creation timestamp of the registered model in milliseconds since the Unix epoch""" - + created_by: Optional[str] = None """The identifier of the user who created the registered model""" - + full_name: Optional[str] = None """The three-level (fully qualified) name of the registered model""" - + metastore_id: Optional[str] = None """The unique identifier of the metastore""" - + name: Optional[str] = None """The name of the registered model""" - + owner: Optional[str] = None """The identifier of the user who owns the registered model""" - + schema_name: Optional[str] = None """The name of the schema where the registered model resides""" - + storage_location: Optional[str] = None """The storage location on the cloud under which model version data files are stored""" - + updated_at: Optional[int] = None """Last-update timestamp of the registered model in milliseconds since the Unix epoch""" - + updated_by: Optional[str] = None """The identifier of the user who updated the registered model last time""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = 
self.created_by - if self.full_name is not None: body['full_name'] = self.full_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.aliases: + body["aliases"] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.aliases: body['aliases'] = self.aliases - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.full_name is not None: body['full_name'] = self.full_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.aliases: + body["aliases"] = self.aliases + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + 
if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: """Deserializes the RegisteredModelInfo from a dictionary.""" - return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - - - - + return cls( + aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + full_name=d.get("full_name", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + schema_name=d.get("schema_name", None), + storage_location=d.get("storage_location", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class SchemaInfo: """Next ID: 40""" - + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """Name of parent catalog.""" - + catalog_type: Optional[CatalogType] = None """The type of the parent catalog.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this schema was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of schema creator.""" - + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None """Whether predictive optimization should be enabled for this object and objects under it.""" - + full_name: Optional[str] = None """Full name of schema, in form of __catalog_name__.__schema_name__.""" - + metastore_id: Optional[str] = None """Unique identifier of parent metastore.""" - + name: Optional[str] = None """Name of schema, relative to parent catalog.""" - + owner: Optional[str] = None """Username of current owner of schema.""" - - properties: Optional[Dict[str,str]] = None + + properties: Optional[Dict[str, str]] = None """A map of key-value properties attached to the securable.""" - + schema_id: Optional[str] = None """The unique identifier of the schema.""" - + storage_location: Optional[str] = None """Storage location for managed tables within schema.""" - + storage_root: Optional[str] = None """Storage root URL for managed tables within schema.""" - + updated_at: Optional[int] = None """Time at which this schema was last updated, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of user who last modified schema.""" - + def as_dict(self) -> dict: """Serializes the SchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None:
body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.catalog_type is not None: body['catalog_type'] = self.catalog_type.value - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict() - if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value - if self.full_name is not None: body['full_name'] = self.full_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.properties: body['properties'] = self.properties - if self.schema_id is not None: body['schema_id'] = self.schema_id - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.catalog_type is not None: + body["catalog_type"] = self.catalog_type.value + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization.value + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.schema_id is not None: + body["schema_id"] = self.schema_id + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.catalog_type is not None: body['catalog_type'] = self.catalog_type - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag 
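[Reviewer note] The quoting and layout changes in these hunks are mechanical, so the as_dict/from_dict contract is unchanged and existing round-trip code keeps working. A minimal sketch, illustrative only and not part of the patch: it assumes the classes are importable from databricks.sdk.service.catalog as in the published SDK, and the field values are made up.

    from databricks.sdk.service.catalog import SchemaInfo

    # Only fields that are actually set are emitted into the request body.
    schema = SchemaInfo(catalog_name="main", name="sales", owner="ops@example.com")
    body = schema.as_dict()
    assert body == {"catalog_name": "main", "name": "sales", "owner": "ops@example.com"}

    # from_dict is the inverse, so the dataclass round-trips.
    assert SchemaInfo.from_dict(body) == schema
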
- if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization - if self.full_name is not None: body['full_name'] = self.full_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.properties: body['properties'] = self.properties - if self.schema_id is not None: body['schema_id'] = self.schema_id - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.catalog_type is not None: + body["catalog_type"] = self.catalog_type + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.schema_id is not None: + body["schema_id"] = self.schema_id + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo: """Deserializes the SchemaInfo from a dictionary.""" - return cls(browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), catalog_type=_enum(d, 'catalog_type', CatalogType), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), effective_predictive_optimization_flag=_from_dict(d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), properties=d.get('properties', None), schema_id=d.get('schema_id', None), storage_location=d.get('storage_location', None), storage_root=d.get('storage_root', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + catalog_type=_enum(d, "catalog_type", CatalogType), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + 
 class SecurableType(Enum):
     """The type of Unity Catalog securable."""
-
-    CATALOG = 'CATALOG'
-    CLEAN_ROOM = 'CLEAN_ROOM'
-    CONNECTION = 'CONNECTION'
-    CREDENTIAL = 'CREDENTIAL'
-    EXTERNAL_LOCATION = 'EXTERNAL_LOCATION'
-    EXTERNAL_METADATA = 'EXTERNAL_METADATA'
-    FUNCTION = 'FUNCTION'
-    METASTORE = 'METASTORE'
-    PIPELINE = 'PIPELINE'
-    PROVIDER = 'PROVIDER'
-    RECIPIENT = 'RECIPIENT'
-    SCHEMA = 'SCHEMA'
-    SHARE = 'SHARE'
-    STAGING_TABLE = 'STAGING_TABLE'
-    STORAGE_CREDENTIAL = 'STORAGE_CREDENTIAL'
-    TABLE = 'TABLE'
-    UNKNOWN_SECURABLE_TYPE = 'UNKNOWN_SECURABLE_TYPE'
-    VOLUME = 'VOLUME'
+
+    CATALOG = "CATALOG"
+    CLEAN_ROOM = "CLEAN_ROOM"
+    CONNECTION = "CONNECTION"
+    CREDENTIAL = "CREDENTIAL"
+    EXTERNAL_LOCATION = "EXTERNAL_LOCATION"
+    EXTERNAL_METADATA = "EXTERNAL_METADATA"
+    FUNCTION = "FUNCTION"
+    METASTORE = "METASTORE"
+    PIPELINE = "PIPELINE"
+    PROVIDER = "PROVIDER"
+    RECIPIENT = "RECIPIENT"
+    SCHEMA = "SCHEMA"
+    SHARE = "SHARE"
+    STAGING_TABLE = "STAGING_TABLE"
+    STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL"
+    TABLE = "TABLE"
+    UNKNOWN_SECURABLE_TYPE = "UNKNOWN_SECURABLE_TYPE"
+    VOLUME = "VOLUME"
+

 @dataclass
 class SetArtifactAllowlist:
     artifact_matchers: List[ArtifactMatcher]
     """A list of allowed artifact match patterns."""
-
+
     artifact_type: Optional[ArtifactType] = None
     """The artifact type of the allowlist."""
-
+
     created_at: Optional[int] = None
     """Time at which this artifact allowlist was set, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of the user who set the artifact allowlist."""
-
+
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
-
+
     def as_dict(self) -> dict:
         """Serializes the SetArtifactAllowlist into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.artifact_matchers: body['artifact_matchers'] = [v.as_dict() for v in self.artifact_matchers]
-        if self.artifact_type is not None: body['artifact_type'] = self.artifact_type.value
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.artifact_matchers:
+            body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers]
+        if self.artifact_type is not None:
+            body["artifact_type"] = self.artifact_type.value
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers
-        if self.artifact_type is not None: body['artifact_type'] = self.artifact_type
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.artifact_matchers:
+            body["artifact_matchers"] = self.artifact_matchers
+        if self.artifact_type is not None:
+            body["artifact_type"] = self.artifact_type
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SetArtifactAllowlist:
         """Deserializes the SetArtifactAllowlist from a dictionary."""
-        return cls(artifact_matchers=_repeated_dict(d, 'artifact_matchers', ArtifactMatcher), artifact_type=_enum(d, 'artifact_type', ArtifactType), created_at=d.get('created_at', None), created_by=d.get('created_by', None), metastore_id=d.get('metastore_id', None))
-
-
+        return cls(
+            artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher),
+            artifact_type=_enum(d, "artifact_type", ArtifactType),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            metastore_id=d.get("metastore_id", None),
+        )

 @dataclass
 class SetRegisteredModelAliasRequest:
     full_name: str
     """Full name of the registered model"""
-
+
     alias: str
     """The name of the alias"""
-
+
     version_num: int
     """The version number of the model version to which the alias points"""
-
+
     def as_dict(self) -> dict:
         """Serializes the SetRegisteredModelAliasRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alias is not None: body['alias'] = self.alias
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.version_num is not None: body['version_num'] = self.version_num
+        if self.alias is not None:
+            body["alias"] = self.alias
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.version_num is not None:
+            body["version_num"] = self.version_num
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alias is not None: body['alias'] = self.alias
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.version_num is not None: body['version_num'] = self.version_num
+        if self.alias is not None:
+            body["alias"] = self.alias
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.version_num is not None:
+            body["version_num"] = self.version_num
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SetRegisteredModelAliasRequest:
         """Deserializes the SetRegisteredModelAliasRequest from a dictionary."""
-        return cls(alias=d.get('alias', None), full_name=d.get('full_name', None), version_num=d.get('version_num', None))
-
-
+        return cls(
+            alias=d.get("alias", None), full_name=d.get("full_name", None), version_num=d.get("version_num", None)
+        )
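[Editor's note: as a usage sketch, the request dataclass converts directly into the JSON body the alias endpoint expects; the model name and version below are hypothetical.]

from databricks.sdk.service.catalog import SetRegisteredModelAliasRequest

# Point the "champion" alias at version 3 of a registered model.
req = SetRegisteredModelAliasRequest(full_name="main.ml.churn", alias="champion", version_num=3)
assert req.as_dict() == {"alias": "champion", "full_name": "main.ml.churn", "version_num": 3}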
 @dataclass
 class SseEncryptionDetails:
     """Server-Side Encryption properties for clients communicating with AWS s3."""
-
+
     algorithm: Optional[SseEncryptionDetailsAlgorithm] = None
     """Sets the value of the 'x-amz-server-side-encryption' header in S3 request."""
-
+
     aws_kms_key_arn: Optional[str] = None
     """Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS".
     Sets the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header."""
-
+
     def as_dict(self) -> dict:
         """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.algorithm is not None: body['algorithm'] = self.algorithm.value
-        if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
+        if self.algorithm is not None:
+            body["algorithm"] = self.algorithm.value
+        if self.aws_kms_key_arn is not None:
+            body["aws_kms_key_arn"] = self.aws_kms_key_arn
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.algorithm is not None: body['algorithm'] = self.algorithm
-        if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
+        if self.algorithm is not None:
+            body["algorithm"] = self.algorithm
+        if self.aws_kms_key_arn is not None:
+            body["aws_kms_key_arn"] = self.aws_kms_key_arn
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SseEncryptionDetails:
         """Deserializes the SseEncryptionDetails from a dictionary."""
-        return cls(algorithm=_enum(d, 'algorithm', SseEncryptionDetailsAlgorithm), aws_kms_key_arn=d.get('aws_kms_key_arn', None))
-
-
+        return cls(
+            algorithm=_enum(d, "algorithm", SseEncryptionDetailsAlgorithm),
+            aws_kms_key_arn=d.get("aws_kms_key_arn", None),
+        )

 class SseEncryptionDetailsAlgorithm(Enum):
-
-
-    AWS_SSE_KMS = 'AWS_SSE_KMS'
-    AWS_SSE_S3 = 'AWS_SSE_S3'
+
+    AWS_SSE_KMS = "AWS_SSE_KMS"
+    AWS_SSE_S3 = "AWS_SSE_S3"
+
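[Editor's note: a small illustration of how the enum and the KMS key ARN combine; the ARN is hypothetical. SSE-KMS carries a key ARN, while plain SSE-S3 would omit it.]

from databricks.sdk.service.catalog import SseEncryptionDetails, SseEncryptionDetailsAlgorithm

sse = SseEncryptionDetails(
    algorithm=SseEncryptionDetailsAlgorithm.AWS_SSE_KMS,
    aws_kms_key_arn="arn:aws:kms:us-west-2:123456789012:key/example",
)
# Enums serialize by .value, so the body carries the plain string "AWS_SSE_KMS".
assert sse.as_dict()["algorithm"] == "AWS_SSE_KMS"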
 @dataclass
 class StorageCredentialInfo:
     aws_iam_role: Optional[AwsIamRoleResponse] = None
     """The AWS IAM role configuration."""
-
+
     azure_managed_identity: Optional[AzureManagedIdentityResponse] = None
     """The Azure managed identity configuration."""
-
+
     azure_service_principal: Optional[AzureServicePrincipal] = None
     """The Azure service principal configuration."""
-
+
     cloudflare_api_token: Optional[CloudflareApiToken] = None
     """The Cloudflare API token configuration."""
-
+
     comment: Optional[str] = None
     """Comment associated with the credential."""
-
+
     created_at: Optional[int] = None
     """Time at which this Credential was created, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of credential creator."""
-
+
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None
     """The Databricks managed GCP service account configuration."""
-
+
     full_name: Optional[str] = None
     """The full name of the credential."""
-
+
     id: Optional[str] = None
     """The unique identifier of the credential."""
-
+
     isolation_mode: Optional[IsolationMode] = None
-
+
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
-
+
     name: Optional[str] = None
     """The credential name.
     The name must be unique within the metastore."""
-
+
     owner: Optional[str] = None
     """Username of current owner of credential."""
-
+
     read_only: Optional[bool] = None
     """Whether the storage credential is only usable for read operations."""
-
+
     updated_at: Optional[int] = None
     """Time at which this credential was last modified, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of user who last modified the credential."""
-
+
     used_for_managed_storage: Optional[bool] = None
     """Whether this credential is the current metastore's root storage credential."""
-
+
     def as_dict(self) -> dict:
         """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict()
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict()
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.used_for_managed_storage is not None: body['used_for_managed_storage'] = self.used_for_managed_storage
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal.as_dict()
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
-        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
-        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
-        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.id is not None: body['id'] = self.id
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.read_only is not None: body['read_only'] = self.read_only
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.used_for_managed_storage is not None: body['used_for_managed_storage'] = self.used_for_managed_storage
+        if self.aws_iam_role:
+            body["aws_iam_role"] = self.aws_iam_role
+        if self.azure_managed_identity:
+            body["azure_managed_identity"] = self.azure_managed_identity
+        if self.azure_service_principal:
+            body["azure_service_principal"] = self.azure_service_principal
+        if self.cloudflare_api_token:
+            body["cloudflare_api_token"] = self.cloudflare_api_token
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.databricks_gcp_service_account:
+            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.read_only is not None:
+            body["read_only"] = self.read_only
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body["used_for_managed_storage"] = self.used_for_managed_storage
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> StorageCredentialInfo:
         """Deserializes the StorageCredentialInfo from a dictionary."""
-        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleResponse), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityResponse), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountResponse), full_name=d.get('full_name', None), id=d.get('id', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), used_for_managed_storage=d.get('used_for_managed_storage', None))
-
-
+        return cls(
+            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleResponse),
+            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse),
+            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
+            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            databricks_gcp_service_account=_from_dict(
+                d, "databricks_gcp_service_account", DatabricksGcpServiceAccountResponse
+            ),
+            full_name=d.get("full_name", None),
+            id=d.get("id", None),
+            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            read_only=d.get("read_only", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            used_for_managed_storage=d.get("used_for_managed_storage", None),
+        )
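[Editor's note: a sketch of how nested blocks are materialized by _from_dict, assuming the nested AwsIamRoleResponse carries a role_arn field as elsewhere in this SDK; the ARN is hypothetical.]

from databricks.sdk.service.catalog import StorageCredentialInfo

info = StorageCredentialInfo.from_dict(
    {"name": "my-cred", "aws_iam_role": {"role_arn": "arn:aws:iam::123456789012:role/example"}}
)
# as_dict() re-serializes the nested dataclass; as_shallow_dict() keeps the object itself.
assert info.as_dict()["aws_iam_role"] == {"role_arn": "arn:aws:iam::123456789012:role/example"}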
 @dataclass
 class SystemSchemaInfo:
     schema: str
     """Name of the system schema."""
-
+
     state: str
     """The current state of enablement for the system schema. An empty string means the system schema
     is available and ready for opt-in.
     Possible values: AVAILABLE | ENABLE_INITIALIZED | ENABLE_COMPLETED | DISABLE_INITIALIZED |
     UNAVAILABLE"""
-
+
     def as_dict(self) -> dict:
         """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.schema is not None: body['schema'] = self.schema
-        if self.state is not None: body['state'] = self.state
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.state is not None:
+            body["state"] = self.state
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.schema is not None: body['schema'] = self.schema
-        if self.state is not None: body['state'] = self.state
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.state is not None:
+            body["state"] = self.state
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo:
         """Deserializes the SystemSchemaInfo from a dictionary."""
-        return cls(schema=d.get('schema', None), state=d.get('state', None))
-
-
+        return cls(schema=d.get("schema", None), state=d.get("state", None))
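[Editor's note: a quick sketch of how a client might interpret the state string, using the values listed in the docstring above; the payload is hypothetical.]

from databricks.sdk.service.catalog import SystemSchemaInfo

info = SystemSchemaInfo.from_dict({"schema": "billing", "state": "AVAILABLE"})
# Per the docstring, an empty state string also means the schema can be opted into.
can_enable = info.state in ("", "AVAILABLE")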
 @dataclass
 class TableConstraint:
     """A table constraint, as defined by *one* of the following fields being set:
     __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__."""
-
+
     foreign_key_constraint: Optional[ForeignKeyConstraint] = None
-
+
     named_table_constraint: Optional[NamedTableConstraint] = None
-
+
     primary_key_constraint: Optional[PrimaryKeyConstraint] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the TableConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint.as_dict()
-        if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint.as_dict()
-        if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint.as_dict()
+        if self.foreign_key_constraint:
+            body["foreign_key_constraint"] = self.foreign_key_constraint.as_dict()
+        if self.named_table_constraint:
+            body["named_table_constraint"] = self.named_table_constraint.as_dict()
+        if self.primary_key_constraint:
+            body["primary_key_constraint"] = self.primary_key_constraint.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableConstraint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint
-        if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint
-        if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint
+        if self.foreign_key_constraint:
+            body["foreign_key_constraint"] = self.foreign_key_constraint
+        if self.named_table_constraint:
+            body["named_table_constraint"] = self.named_table_constraint
+        if self.primary_key_constraint:
+            body["primary_key_constraint"] = self.primary_key_constraint
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableConstraint:
         """Deserializes the TableConstraint from a dictionary."""
-        return cls(foreign_key_constraint=_from_dict(d, 'foreign_key_constraint', ForeignKeyConstraint), named_table_constraint=_from_dict(d, 'named_table_constraint', NamedTableConstraint), primary_key_constraint=_from_dict(d, 'primary_key_constraint', PrimaryKeyConstraint))
-
-
+        return cls(
+            foreign_key_constraint=_from_dict(d, "foreign_key_constraint", ForeignKeyConstraint),
+            named_table_constraint=_from_dict(d, "named_table_constraint", NamedTableConstraint),
+            primary_key_constraint=_from_dict(d, "primary_key_constraint", PrimaryKeyConstraint),
+        )

 @dataclass
 class TableDependency:
     """A table that is dependent on a SQL object."""
-
+
     table_full_name: str
     """Full name of the dependent table, in the form of
     __catalog_name__.__schema_name__.__table_name__."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TableDependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
+        if self.table_full_name is not None:
+            body["table_full_name"] = self.table_full_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableDependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
+        if self.table_full_name is not None:
+            body["table_full_name"] = self.table_full_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableDependency:
         """Deserializes the TableDependency from a dictionary."""
-        return cls(table_full_name=d.get('table_full_name', None))
-
-
+        return cls(table_full_name=d.get("table_full_name", None))

 @dataclass
 class TableExistsResponse:
     table_exists: Optional[bool] = None
     """Whether the table exists or not."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TableExistsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.table_exists is not None: body['table_exists'] = self.table_exists
+        if self.table_exists is not None:
+            body["table_exists"] = self.table_exists
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.table_exists is not None: body['table_exists'] = self.table_exists
+        if self.table_exists is not None:
+            body["table_exists"] = self.table_exists
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableExistsResponse:
         """Deserializes the TableExistsResponse from a dictionary."""
-        return cls(table_exists=d.get('table_exists', None))
-
-
+        return cls(table_exists=d.get("table_exists", None))
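[Editor's note: because TableConstraint is a one-of, exactly one field should be populated, and only that branch appears in the serialized body. A sketch, assuming PrimaryKeyConstraint takes name and child_columns as elsewhere in this SDK; values hypothetical.]

from databricks.sdk.service.catalog import TableConstraint

tc = TableConstraint.from_dict({"primary_key_constraint": {"name": "pk", "child_columns": ["id"]}})
# The unset one-of branches are omitted from the request body entirely.
assert set(tc.as_dict()) == {"primary_key_constraint"}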
 @dataclass
 class TableInfo:
     access_point: Optional[str] = None
     """The AWS access point to use when accesing s3 for this external location."""
-
+
     browse_only: Optional[bool] = None
     """Indicates whether the principal is limited to retrieving metadata for the associated object
     through the BROWSE privilege when include_browse is enabled in the request."""
-
+
     catalog_name: Optional[str] = None
     """Name of parent catalog."""
-
+
     columns: Optional[List[ColumnInfo]] = None
     """The array of __ColumnInfo__ definitions of the table's columns."""
-
+
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     created_at: Optional[int] = None
     """Time at which this table was created, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of table creator."""
-
+
     data_access_configuration_id: Optional[str] = None
     """Unique ID of the Data Access Configuration to use with the table data."""
-
+
     data_source_format: Optional[DataSourceFormat] = None
     """Data source format"""
-
+
     deleted_at: Optional[int] = None
     """Time at which this table was deleted, in epoch milliseconds.
     Field is omitted if table is not deleted."""
-
+
     delta_runtime_properties_kvpairs: Optional[DeltaRuntimePropertiesKvPairs] = None
     """Information pertaining to current state of the delta table."""
-
+
     effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None
-
+
     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
-
+
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
-
+
     full_name: Optional[str] = None
     """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__"""
-
+
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
-
+
     name: Optional[str] = None
     """Name of table, relative to parent schema."""
-
+
     owner: Optional[str] = None
     """Username of current owner of table."""
-
+
     pipeline_id: Optional[str] = None
     """The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View,
     Streaming Table, etc.)."""
-
-    properties: Optional[Dict[str,str]] = None
+
+    properties: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     row_filter: Optional[TableRowFilter] = None
-
+
     schema_name: Optional[str] = None
     """Name of parent schema relative to its parent catalog."""
-
+
     sql_path: Optional[str] = None
     """List of schemes whose objects can be referenced without qualification."""
-
+
     storage_credential_name: Optional[str] = None
     """Name of the storage credential, when a storage credential is configured for use with this
     table."""
-
+
     storage_location: Optional[str] = None
     """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables)"""
-
+
     table_constraints: Optional[List[TableConstraint]] = None
     """List of table constraints.
     Note: this field is not set in the output of the __listTables__ API."""
-
+
     table_id: Optional[str] = None
     """The unique identifier of the table."""
-
+
     table_type: Optional[TableType] = None
-
+
     updated_at: Optional[int] = None
     """Time at which this table was last modified, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of user who last modified the table."""
-
+
     view_definition: Optional[str] = None
     """View definition SQL (when __table_type__ is **VIEW**, **MATERIALIZED_VIEW**, or
     **STREAMING_TABLE**)"""
-
+
     view_dependencies: Optional[DependencyList] = None
     """View dependencies (when table_type == **VIEW** or **MATERIALIZED_VIEW**, **STREAMING_TABLE**) -
     when DependencyList is None, the dependency is not provided; - when DependencyList is an empty
     list, the dependency is provided but is empty; - when DependencyList is not an empty list,
     dependencies are provided and recorded."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TableInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.data_access_configuration_id is not None: body['data_access_configuration_id'] = self.data_access_configuration_id
-        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format.value
-        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
-        if self.delta_runtime_properties_kvpairs: body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs.as_dict()
-        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag.as_dict()
-        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict()
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.properties: body['properties'] = self.properties
-        if self.row_filter: body['row_filter'] = self.row_filter.as_dict()
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
-        if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.table_constraints: body['table_constraints'] = [v.as_dict() for v in self.table_constraints]
-        if self.table_id is not None: body['table_id'] = self.table_id
-        if self.table_type is not None: body['table_type'] = self.table_type.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.view_definition is not None: body['view_definition'] = self.view_definition
-        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies.as_dict()
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.columns:
+            body["columns"] = [v.as_dict() for v in self.columns]
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.data_access_configuration_id is not None:
+            body["data_access_configuration_id"] = self.data_access_configuration_id
+        if self.data_source_format is not None:
+            body["data_source_format"] = self.data_source_format.value
+        if self.deleted_at is not None:
+            body["deleted_at"] = self.deleted_at
+        if self.delta_runtime_properties_kvpairs:
+            body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs.as_dict()
+        if self.effective_predictive_optimization_flag:
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
+        if self.enable_predictive_optimization is not None:
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details.as_dict()
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.properties:
+            body["properties"] = self.properties
+        if self.row_filter:
+            body["row_filter"] = self.row_filter.as_dict()
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
+        if self.storage_credential_name is not None:
+            body["storage_credential_name"] = self.storage_credential_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.table_constraints:
+            body["table_constraints"] = [v.as_dict() for v in self.table_constraints]
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
+        if self.table_type is not None:
+            body["table_type"] = self.table_type.value
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.view_definition is not None:
+            body["view_definition"] = self.view_definition
+        if self.view_dependencies:
+            body["view_dependencies"] = self.view_dependencies.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TableInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None: body['access_point'] = self.access_point
-        if self.browse_only is not None: body['browse_only'] = self.browse_only
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.columns: body['columns'] = self.columns
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.data_access_configuration_id is not None: body['data_access_configuration_id'] = self.data_access_configuration_id
-        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format
-        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
-        if self.delta_runtime_properties_kvpairs: body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs
-        if self.effective_predictive_optimization_flag: body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
-        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.encryption_details: body['encryption_details'] = self.encryption_details
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.properties: body['properties'] = self.properties
-        if self.row_filter: body['row_filter'] = self.row_filter
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.sql_path is not None: body['sql_path'] = self.sql_path
-        if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.table_constraints: body['table_constraints'] = self.table_constraints
-        if self.table_id is not None: body['table_id'] = self.table_id
-        if self.table_type is not None: body['table_type'] = self.table_type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.view_definition is not None: body['view_definition'] = self.view_definition
-        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies
+        if self.access_point is not None:
+            body["access_point"] = self.access_point
+        if self.browse_only is not None:
+            body["browse_only"] = self.browse_only
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.columns:
+            body["columns"] = self.columns
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.data_access_configuration_id is not None:
+            body["data_access_configuration_id"] = self.data_access_configuration_id
+        if self.data_source_format is not None:
+            body["data_source_format"] = self.data_source_format
+        if self.deleted_at is not None:
+            body["deleted_at"] = self.deleted_at
+        if self.delta_runtime_properties_kvpairs:
+            body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs
+        if self.effective_predictive_optimization_flag:
+            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.encryption_details:
+            body["encryption_details"] = self.encryption_details
+        if self.full_name is not None:
+            body["full_name"] = self.full_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.properties:
+            body["properties"] = self.properties
+        if self.row_filter:
+            body["row_filter"] = self.row_filter
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.sql_path is not None:
+            body["sql_path"] = self.sql_path
+        if self.storage_credential_name is not None:
+            body["storage_credential_name"] = self.storage_credential_name
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.table_constraints:
+            body["table_constraints"] = self.table_constraints
+        if self.table_id is not None:
+            body["table_id"] = self.table_id
+        if self.table_type is not None:
+            body["table_type"] = self.table_type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.view_definition is not None:
+            body["view_definition"] = self.view_definition
+        if self.view_dependencies:
+            body["view_dependencies"] = self.view_dependencies
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TableInfo:
         """Deserializes the TableInfo from a dictionary."""
-        return cls(access_point=d.get('access_point', None), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), columns=_repeated_dict(d, 'columns', ColumnInfo), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_access_configuration_id=d.get('data_access_configuration_id', None), data_source_format=_enum(d, 'data_source_format', DataSourceFormat), deleted_at=d.get('deleted_at', None), delta_runtime_properties_kvpairs=_from_dict(d, 'delta_runtime_properties_kvpairs', DeltaRuntimePropertiesKvPairs), effective_predictive_optimization_flag=_from_dict(d, 'effective_predictive_optimization_flag', EffectivePredictiveOptimizationFlag), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), pipeline_id=d.get('pipeline_id', None), properties=d.get('properties', None), row_filter=_from_dict(d, 'row_filter', TableRowFilter), schema_name=d.get('schema_name', None), sql_path=d.get('sql_path', None), storage_credential_name=d.get('storage_credential_name', None), storage_location=d.get('storage_location', None), table_constraints=_repeated_dict(d, 'table_constraints', TableConstraint), table_id=d.get('table_id', None), table_type=_enum(d, 'table_type', TableType), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), view_definition=d.get('view_definition', None), view_dependencies=_from_dict(d, 'view_dependencies', DependencyList))
-
-
+        return cls(
+            access_point=d.get("access_point", None),
+            browse_only=d.get("browse_only", None),
+            catalog_name=d.get("catalog_name", None),
+            columns=_repeated_dict(d, "columns", ColumnInfo),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            data_access_configuration_id=d.get("data_access_configuration_id", None),
+            data_source_format=_enum(d, "data_source_format", DataSourceFormat),
+            deleted_at=d.get("deleted_at", None),
+            delta_runtime_properties_kvpairs=_from_dict(
+                d, "delta_runtime_properties_kvpairs", DeltaRuntimePropertiesKvPairs
+            ),
+            effective_predictive_optimization_flag=_from_dict(
+                d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag
+            ),
+            enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization),
+            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
+            full_name=d.get("full_name", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            pipeline_id=d.get("pipeline_id", None),
+            properties=d.get("properties", None),
+            row_filter=_from_dict(d, "row_filter", TableRowFilter),
+            schema_name=d.get("schema_name", None),
+            sql_path=d.get("sql_path", None),
+            storage_credential_name=d.get("storage_credential_name", None),
+            storage_location=d.get("storage_location", None),
+            table_constraints=_repeated_dict(d, "table_constraints", TableConstraint),
+            table_id=d.get("table_id", None),
+            table_type=_enum(d, "table_type", TableType),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            view_definition=d.get("view_definition", None),
+            view_dependencies=_from_dict(d, "view_dependencies", DependencyList),
+        )
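[Editor's note: the contrast between the two serializers is easiest to see on a nested field; a minimal sketch with hypothetical values, using only classes defined in this file.]

from databricks.sdk.service.catalog import TableInfo, TableRowFilter

t = TableInfo(
    name="orders",
    row_filter=TableRowFilter(function_name="main.filters.by_region", input_column_names=["region"]),
)
assert isinstance(t.as_dict()["row_filter"], dict)  # deep: nested as_dict() applied
assert isinstance(t.as_shallow_dict()["row_filter"], TableRowFilter)  # shallow: object kept as-is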
"delta_runtime_properties_kvpairs", DeltaRuntimePropertiesKvPairs + ), + effective_predictive_optimization_flag=_from_dict( + d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag + ), + enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), + encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), + full_name=d.get("full_name", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + pipeline_id=d.get("pipeline_id", None), + properties=d.get("properties", None), + row_filter=_from_dict(d, "row_filter", TableRowFilter), + schema_name=d.get("schema_name", None), + sql_path=d.get("sql_path", None), + storage_credential_name=d.get("storage_credential_name", None), + storage_location=d.get("storage_location", None), + table_constraints=_repeated_dict(d, "table_constraints", TableConstraint), + table_id=d.get("table_id", None), + table_type=_enum(d, "table_type", TableType), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + view_definition=d.get("view_definition", None), + view_dependencies=_from_dict(d, "view_dependencies", DependencyList), + ) class TableOperation(Enum): - - - READ = 'READ' - READ_WRITE = 'READ_WRITE' + + READ = "READ" + READ_WRITE = "READ_WRITE" + @dataclass class TableRowFilter: function_name: str """The full name of the row filter SQL UDF.""" - + input_column_names: List[str] """The list of table columns to be passed as input to the row filter function. The column types should match the types of the filter function arguments.""" - + def as_dict(self) -> dict: """Serializes the TableRowFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_name is not None: body['function_name'] = self.function_name - if self.input_column_names: body['input_column_names'] = [v for v in self.input_column_names] + if self.function_name is not None: + body["function_name"] = self.function_name + if self.input_column_names: + body["input_column_names"] = [v for v in self.input_column_names] return body def as_shallow_dict(self) -> dict: """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_name is not None: body['function_name'] = self.function_name - if self.input_column_names: body['input_column_names'] = self.input_column_names + if self.function_name is not None: + body["function_name"] = self.function_name + if self.input_column_names: + body["input_column_names"] = self.input_column_names return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableRowFilter: """Deserializes the TableRowFilter from a dictionary.""" - return cls(function_name=d.get('function_name', None), input_column_names=d.get('input_column_names', None)) - - + return cls(function_name=d.get("function_name", None), input_column_names=d.get("input_column_names", None)) @dataclass class TableSummary: full_name: Optional[str] = None """The full name of the table.""" - + table_type: Optional[TableType] = None - + def as_dict(self) -> dict: """Serializes the TableSummary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.full_name is not None: body['full_name'] = self.full_name - if self.table_type is not None: body['table_type'] = self.table_type.value + if self.full_name is not None: + body["full_name"] = self.full_name + if self.table_type is not None: + body["table_type"] = self.table_type.value 
return body def as_shallow_dict(self) -> dict: """Serializes the TableSummary into a shallow dictionary of its immediate attributes.""" body = {} - if self.full_name is not None: body['full_name'] = self.full_name - if self.table_type is not None: body['table_type'] = self.table_type + if self.full_name is not None: + body["full_name"] = self.full_name + if self.table_type is not None: + body["table_type"] = self.table_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableSummary: """Deserializes the TableSummary from a dictionary.""" - return cls(full_name=d.get('full_name', None), table_type=_enum(d, 'table_type', TableType)) - - + return cls(full_name=d.get("full_name", None), table_type=_enum(d, "table_type", TableType)) class TableType(Enum): - - - EXTERNAL = 'EXTERNAL' - EXTERNAL_SHALLOW_CLONE = 'EXTERNAL_SHALLOW_CLONE' - FOREIGN = 'FOREIGN' - MANAGED = 'MANAGED' - MANAGED_SHALLOW_CLONE = 'MANAGED_SHALLOW_CLONE' - MATERIALIZED_VIEW = 'MATERIALIZED_VIEW' - STREAMING_TABLE = 'STREAMING_TABLE' - VIEW = 'VIEW' + + EXTERNAL = "EXTERNAL" + EXTERNAL_SHALLOW_CLONE = "EXTERNAL_SHALLOW_CLONE" + FOREIGN = "FOREIGN" + MANAGED = "MANAGED" + MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE" + MATERIALIZED_VIEW = "MATERIALIZED_VIEW" + STREAMING_TABLE = "STREAMING_TABLE" + VIEW = "VIEW" + @dataclass class TagKeyValue: key: Optional[str] = None """name of the tag""" - + value: Optional[str] = None """value of the tag associated with the key, could be optional""" - + def as_dict(self) -> dict: """Serializes the TagKeyValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the TagKeyValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TagKeyValue: """Deserializes the TagKeyValue from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass @@ -7019,87 +8511,103 @@ class TemporaryCredentials: aws_temp_credentials: Optional[AwsCredentials] = None """AWS temporary credentials for API authentication. Read more at https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.""" - + azure_aad: Optional[AzureActiveDirectoryToken] = None """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed Identity. Read more at https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token""" - + expiration_time: Optional[int] = None """Server time when the credential will expire, in epoch milliseconds. The API client is advised to cache the credential given this expiration time.""" - + gcp_oauth_token: Optional[GcpOauthToken] = None """GCP temporary credentials for API authentication. 
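[Editor's note: an illustrative consumer of these summaries; the listing payload and names are hypothetical.]

from databricks.sdk.service.catalog import TableSummary, TableType

rows = [{"full_name": "main.sales.orders", "table_type": "MANAGED"}]
summaries = [TableSummary.from_dict(r) for r in rows]
# _enum gives back a TableType member, so comparisons against the enum work directly.
views = [s.full_name for s in summaries if s.table_type == TableType.VIEW]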
@@ -7019,87 +8511,103 @@ class TemporaryCredentials:
     aws_temp_credentials: Optional[AwsCredentials] = None
     """AWS temporary credentials for API authentication. Read more at
     https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
-
+
     azure_aad: Optional[AzureActiveDirectoryToken] = None
     """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or
     Managed Identity. Read more at
     https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
-
+
     expiration_time: Optional[int] = None
     """Server time when the credential will expire, in epoch milliseconds. The API client is advised
     to cache the credential given this expiration time."""
-
+
     gcp_oauth_token: Optional[GcpOauthToken] = None
     """GCP temporary credentials for API authentication.
     Read more at https://developers.google.com/identity/protocols/oauth2/service-account"""
-
+
     def as_dict(self) -> dict:
         """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
-        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad.as_dict()
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
-        if self.azure_aad: body['azure_aad'] = self.azure_aad
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
+        if self.aws_temp_credentials:
+            body["aws_temp_credentials"] = self.aws_temp_credentials
+        if self.azure_aad:
+            body["azure_aad"] = self.azure_aad
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.gcp_oauth_token:
+            body["gcp_oauth_token"] = self.gcp_oauth_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TemporaryCredentials:
         """Deserializes the TemporaryCredentials from a dictionary."""
-        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials), azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken), expiration_time=d.get('expiration_time', None), gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken))
-
-
+        return cls(
+            aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials),
+            azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken),
+            expiration_time=d.get("expiration_time", None),
+            gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken),
+        )
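[Editor's note: the expiration_time docstring advises client-side caching; a minimal sketch of that advice. The helper and the skew threshold are hypothetical, not part of the SDK.]

import time

from databricks.sdk.service.catalog import TemporaryCredentials

def needs_refresh(creds: TemporaryCredentials, skew_ms: int = 60_000) -> bool:
    # Refresh slightly before the server-reported expiry to absorb clock skew.
    return creds.expiration_time is None or time.time() * 1000 >= creds.expiration_time - skew_ms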
 @dataclass
 class TriggeredUpdateStatus:
     """Detailed status of an online table. Shown if the online table is in the
     ONLINE_TRIGGERED_UPDATE or the ONLINE_NO_PENDING_UPDATE state."""
-
+
     last_processed_commit_version: Optional[int] = None
     """The last source table Delta version that was synced to the online table.
     Note that this Delta version may not be completely synced to the online table yet."""
-
+
     timestamp: Optional[str] = None
     """The timestamp of the last time any data was synchronized from the source table to the online
     table."""
-
+
     triggered_update_progress: Optional[PipelineProgress] = None
     """Progress of the active data synchronization pipeline."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress.as_dict()
+        if self.last_processed_commit_version is not None:
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.triggered_update_progress:
+            body["triggered_update_progress"] = self.triggered_update_progress.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version
-        if self.timestamp is not None: body['timestamp'] = self.timestamp
-        if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress
+        if self.last_processed_commit_version is not None:
+            body["last_processed_commit_version"] = self.last_processed_commit_version
+        if self.timestamp is not None:
+            body["timestamp"] = self.timestamp
+        if self.triggered_update_progress:
+            body["triggered_update_progress"] = self.triggered_update_progress
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TriggeredUpdateStatus:
         """Deserializes the TriggeredUpdateStatus from a dictionary."""
-        return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None), triggered_update_progress=_from_dict(d, 'triggered_update_progress', PipelineProgress))
-
-
-
-
+        return cls(
+            last_processed_commit_version=d.get("last_processed_commit_version", None),
+            timestamp=d.get("timestamp", None),
+            triggered_update_progress=_from_dict(d, "triggered_update_progress", PipelineProgress),
+        )

 @dataclass
@@ -7118,8 +8626,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse:
         """Deserializes the UnassignResponse from a dictionary."""
         return cls()
-
-

 @dataclass
@@ -7138,306 +8644,414 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse:
         """Deserializes the UpdateAssignmentResponse from a dictionary."""
         return cls()
-
-

 @dataclass
 class UpdateCatalog:
     comment: Optional[str] = None
     """User-provided free-form text description."""
-
+
     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
     """Whether predictive optimization should be enabled for this object and objects under it."""
-
+
     isolation_mode: Optional[CatalogIsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of
     workspaces."""
-
+
     name: Optional[str] = None
     """The name of the catalog."""
-
+
     new_name: Optional[str] = None
     """New name for the catalog."""
-
-    options: Optional[Dict[str,str]] = None
+
+    options: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     owner: Optional[str] = None
     """Username of current owner of catalog."""
-
-    properties: Optional[Dict[str,str]] = None
+
+    properties: Optional[Dict[str, str]] = None
     """A map of key-value properties attached to the securable."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateCatalog into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.enable_predictive_optimization is not None:
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization
-        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
-        if self.name is not None: body['name'] = self.name
-        if self.new_name is not None: body['new_name'] = self.new_name
-        if self.options: body['options'] = self.options
-        if self.owner is not None: body['owner'] = self.owner
-        if self.properties: body['properties'] = self.properties
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.enable_predictive_optimization is not None:
+            body["enable_predictive_optimization"] = self.enable_predictive_optimization
+        if self.isolation_mode is not None:
+            body["isolation_mode"] = self.isolation_mode
+        if self.name is not None:
+            body["name"] = self.name
+        if self.new_name is not None:
+            body["new_name"] = self.new_name
+        if self.options:
+            body["options"] = self.options
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.properties:
+            body["properties"] = self.properties
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalog:
         """Deserializes the UpdateCatalog from a dictionary."""
-        return cls(comment=d.get('comment', None), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode), name=d.get('name', None), new_name=d.get('new_name', None), options=d.get('options', None), owner=d.get('owner', None), properties=d.get('properties', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization),
+            isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode),
+            name=d.get("name", None),
+            new_name=d.get("new_name", None),
+            options=d.get("options", None),
+            owner=d.get("owner", None),
+            properties=d.get("properties", None),
+        )
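[Editor's note: a short illustration of an update payload built from this class; the catalog and owner names are hypothetical.]

from databricks.sdk.service.catalog import UpdateCatalog

# Rename a catalog and hand it to a new owner; unset fields stay out of the request body.
upd = UpdateCatalog(name="dev_sales", new_name="sales", owner="data-platform-team")
assert upd.as_dict() == {"name": "dev_sales", "new_name": "sales", "owner": "data-platform-team"}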
EnablePredictiveOptimization), + isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode), + name=d.get("name", None), + new_name=d.get("new_name", None), + options=d.get("options", None), + owner=d.get("owner", None), + properties=d.get("properties", None), + ) @dataclass class UpdateCatalogWorkspaceBindingsResponse: workspaces: Optional[List[int]] = None """A list of workspace IDs""" - + def as_dict(self) -> dict: """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspaces: body['workspaces'] = [v for v in self.workspaces] + if self.workspaces: + body["workspaces"] = [v for v in self.workspaces] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspaces: body['workspaces'] = self.workspaces + if self.workspaces: + body["workspaces"] = self.workspaces return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" - return cls(workspaces=d.get('workspaces', None)) - - + return cls(workspaces=d.get("workspaces", None)) @dataclass class UpdateConnection: - options: Dict[str,str] + options: Dict[str, str] """A map of key-value properties attached to the securable.""" - + name: Optional[str] = None """Name of the connection.""" - + new_name: Optional[str] = None """New name for the connection.""" - + owner: Optional[str] = None """Username of current owner of the connection.""" - + def as_dict(self) -> dict: """Serializes the UpdateConnection into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.options: body['options'] = self.options - if self.owner is not None: body['owner'] = self.owner + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.options: + body["options"] = self.options + if self.owner is not None: + body["owner"] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the UpdateConnection into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.options: body['options'] = self.options - if self.owner is not None: body['owner'] = self.owner + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.options: + body["options"] = self.options + if self.owner is not None: + body["owner"] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateConnection: """Deserializes the UpdateConnection from a dictionary.""" - return cls(name=d.get('name', None), new_name=d.get('new_name', None), options=d.get('options', None), owner=d.get('owner', None)) - - + return cls( + name=d.get("name", None), + new_name=d.get("new_name", None), + options=d.get("options", None), + owner=d.get("owner", None), + ) @dataclass class UpdateCredentialRequest: aws_iam_role: Optional[AwsIamRole] = None """The AWS IAM role configuration""" - + azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" - + azure_service_principal: 
Optional[AzureServicePrincipal] = None """The Azure service principal configuration. Only applicable when purpose is **STORAGE**.""" - + comment: Optional[str] = None """Comment associated with the credential.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" - + force: Optional[bool] = None """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**).""" - + isolation_mode: Optional[IsolationMode] = None """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - + name_arg: Optional[str] = None """Name of the credential.""" - + new_name: Optional[str] = None """New name of credential.""" - + owner: Optional[str] = None """Username of current owner of credential.""" - + read_only: Optional[bool] = None """Whether the credential is usable only for read operations. Only applicable when purpose is **STORAGE**.""" - + skip_validation: Optional[bool] = None """Supply true to this argument to skip validation of the updated credential.""" - + def as_dict(self) -> dict: """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() - if self.comment is not None: body['comment'] = self.comment - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: body['force'] = self.force - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value - if self.name_arg is not None: body['name_arg'] = self.name_arg - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.name_arg is not None: + body["name_arg"] = self.name_arg + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role - if 
self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity - if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal - if self.comment is not None: body['comment'] = self.comment - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account - if self.force is not None: body['force'] = self.force - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode - if self.name_arg is not None: body['name_arg'] = self.name_arg - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.name_arg is not None: + body["name_arg"] = self.name_arg + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialRequest: """Deserializes the UpdateCredentialRequest from a dictionary.""" - return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name_arg=d.get('name_arg', None), new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None)) - - + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), + force=d.get("force", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + name_arg=d.get("name_arg", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + skip_validation=d.get("skip_validation", None), + ) @dataclass class UpdateExternalLocation: comment: Optional[str] = None """User-provided free-form text description.""" - + credential_name: Optional[str] = None """Name of the storage credential used with this location.""" - + enable_file_events: Optional[bool] = None 
"""[Create:OPT Update:OPT] Whether to enable file events on this external location.""" - + encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" - + fallback: Optional[bool] = None """Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled, the access to the location falls back to cluster credentials if UC credentials are not sufficient.""" - + file_event_queue: Optional[FileEventQueue] = None """[Create:OPT Update:OPT] File event queue settings.""" - + force: Optional[bool] = None """Force update even if changing url invalidates dependent external tables or mounts.""" - + isolation_mode: Optional[IsolationMode] = None - + name: Optional[str] = None """Name of the external location.""" - + new_name: Optional[str] = None """New name for the external location.""" - + owner: Optional[str] = None """The owner of the external location.""" - + read_only: Optional[bool] = None """Indicates whether the external location is read-only.""" - + skip_validation: Optional[bool] = None """Skips validation of the storage credential associated with the external location.""" - + url: Optional[str] = None """Path URL of the external location.""" - + def as_dict(self) -> dict: """Serializes the UpdateExternalLocation into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.credential_name is not None: body['credential_name'] = self.credential_name - if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events - if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() - if self.fallback is not None: body['fallback'] = self.fallback - if self.file_event_queue: body['file_event_queue'] = self.file_event_queue.as_dict() - if self.force is not None: body['force'] = self.force - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation - if self.url is not None: body['url'] = self.url + if self.comment is not None: + body["comment"] = self.comment + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + body["enable_file_events"] = self.enable_file_events + if self.encryption_details: + body["encryption_details"] = self.encryption_details.as_dict() + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue.as_dict() + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExternalLocation into 
a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.credential_name is not None: body['credential_name'] = self.credential_name - if self.enable_file_events is not None: body['enable_file_events'] = self.enable_file_events - if self.encryption_details: body['encryption_details'] = self.encryption_details - if self.fallback is not None: body['fallback'] = self.fallback - if self.file_event_queue: body['file_event_queue'] = self.file_event_queue - if self.force is not None: body['force'] = self.force - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation - if self.url is not None: body['url'] = self.url + if self.comment is not None: + body["comment"] = self.comment + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + body["enable_file_events"] = self.enable_file_events + if self.encryption_details: + body["encryption_details"] = self.encryption_details + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExternalLocation: """Deserializes the UpdateExternalLocation from a dictionary.""" - return cls(comment=d.get('comment', None), credential_name=d.get('credential_name', None), enable_file_events=d.get('enable_file_events', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), fallback=d.get('fallback', None), file_event_queue=_from_dict(d, 'file_event_queue', FileEventQueue), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None), url=d.get('url', None)) - - + return cls( + comment=d.get("comment", None), + credential_name=d.get("credential_name", None), + enable_file_events=d.get("enable_file_events", None), + encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), + fallback=d.get("fallback", None), + file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), + force=d.get("force", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + skip_validation=d.get("skip_validation", None), + url=d.get("url", None), + ) @dataclass @@ -7445,30 +9059,32 @@ class UpdateFunction: name: Optional[str] = None 
"""The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__).""" - + owner: Optional[str] = None """Username of current owner of function.""" - + def as_dict(self) -> dict: """Serializes the UpdateFunction into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateFunction: """Deserializes the UpdateFunction from a dictionary.""" - return cls(name=d.get('name', None), owner=d.get('owner', None)) - - + return cls(name=d.get("name", None), owner=d.get("owner", None)) @dataclass @@ -7476,60 +9092,89 @@ class UpdateMetastore: delta_sharing_organization_name: Optional[str] = None """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.""" - + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None """The lifetime of delta sharing recipient token in seconds.""" - + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None """The scope of Delta Sharing enabled for the metastore.""" - + id: Optional[str] = None """Unique ID of the metastore.""" - + new_name: Optional[str] = None """New name for the metastore.""" - + owner: Optional[str] = None """The owner of the metastore.""" - + privilege_model_version: Optional[str] = None """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" - + storage_root_credential_id: Optional[str] = None """UUID of storage credential to access the metastore storage_root.""" - + def as_dict(self) -> dict: """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds - if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value - if self.id is not None: body['id'] = self.id - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version - if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = 
self.delta_sharing_scope.value + if self.id is not None: + body["id"] = self.id + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds - if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope - if self.id is not None: body['id'] = self.id - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.privilege_model_version is not None: body['privilege_model_version'] = self.privilege_model_version - if self.storage_root_credential_id is not None: body['storage_root_credential_id'] = self.storage_root_credential_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope + if self.id is not None: + body["id"] = self.id + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: """Deserializes the UpdateMetastore from a dictionary.""" - return cls(delta_sharing_organization_name=d.get('delta_sharing_organization_name', None), delta_sharing_recipient_token_lifetime_in_seconds=d.get('delta_sharing_recipient_token_lifetime_in_seconds', None), delta_sharing_scope=_enum(d, 'delta_sharing_scope', DeltaSharingScopeEnum), id=d.get('id', None), new_name=d.get('new_name', None), owner=d.get('owner', None), privilege_model_version=d.get('privilege_model_version', None), storage_root_credential_id=d.get('storage_root_credential_id', None)) - - + return cls( + delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), + delta_sharing_recipient_token_lifetime_in_seconds=d.get( + "delta_sharing_recipient_token_lifetime_in_seconds", None + ), + delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), + id=d.get("id", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + privilege_model_version=d.get("privilege_model_version", None), + storage_root_credential_id=d.get("storage_root_credential_id", None), + ) @dataclass @@ -7537,256 +9182,322 @@ class UpdateMetastoreAssignment: default_catalog_name: Optional[str] = None """The name of the 
default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace.""" - + metastore_id: Optional[str] = None """The unique ID of the metastore.""" - + workspace_id: Optional[int] = None """A workspace ID.""" - + def as_dict(self) -> dict: """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastoreAssignment: """Deserializes the UpdateMetastoreAssignment from a dictionary.""" - return cls(default_catalog_name=d.get('default_catalog_name', None), metastore_id=d.get('metastore_id', None), workspace_id=d.get('workspace_id', None)) - - + return cls( + default_catalog_name=d.get("default_catalog_name", None), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), + ) @dataclass class UpdateModelVersionRequest: comment: Optional[str] = None """The comment attached to the model version""" - + full_name: Optional[str] = None """The three-level (fully qualified) name of the model version""" - + version: Optional[int] = None """The integer version number of the model version""" - + def as_dict(self) -> dict: """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.full_name is not None: body['full_name'] = self.full_name - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.full_name is not None: + body["full_name"] = self.full_name + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.full_name is not None: body['full_name'] = self.full_name - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.full_name is not None: + body["full_name"] = self.full_name + if self.version is not None: + body["version"] = self.version return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: """Deserializes the UpdateModelVersionRequest from a dictionary.""" - return cls(comment=d.get('comment', None), full_name=d.get('full_name', None), version=d.get('version', None)) - - + return cls(comment=d.get("comment", None), full_name=d.get("full_name", None), version=d.get("version", None)) @dataclass class UpdateMonitor: output_schema_name: str """Schema where output metric tables are created.""" - + baseline_table_name: Optional[str] = None """Name of the baseline table from which drift metrics are computed from. Columns in the monitored table should also be present in the baseline table.""" - + custom_metrics: Optional[List[MonitorMetric]] = None """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).""" - + dashboard_id: Optional[str] = None """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in PENDING state.""" - + data_classification_config: Optional[MonitorDataClassificationConfig] = None """The data classification config for the monitor.""" - + inference_log: Optional[MonitorInferenceLog] = None """Configuration for monitoring inference logs.""" - + notifications: Optional[MonitorNotifications] = None """The notification settings for the monitor.""" - + schedule: Optional[MonitorCronSchedule] = None """The schedule for automatically updating and refreshing metric tables.""" - + slicing_exprs: Optional[List[str]] = None """List of column expressions to slice data with for targeted analysis. The data is grouped by each expression independently, resulting in a separate slice for each predicate and its complements. 
For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" - + snapshot: Optional[MonitorSnapshot] = None """Configuration for monitoring snapshot tables.""" - + table_name: Optional[str] = None """Full name of the table.""" - + time_series: Optional[MonitorTimeSeries] = None """Configuration for monitoring time series tables.""" - + def as_dict(self) -> dict: """Serializes the UpdateMonitor into a dictionary suitable for use as a JSON request body.""" body = {} - if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name - if self.custom_metrics: body['custom_metrics'] = [v.as_dict() for v in self.custom_metrics] - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.data_classification_config: body['data_classification_config'] = self.data_classification_config.as_dict() - if self.inference_log: body['inference_log'] = self.inference_log.as_dict() - if self.notifications: body['notifications'] = self.notifications.as_dict() - if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name - if self.schedule: body['schedule'] = self.schedule.as_dict() - if self.slicing_exprs: body['slicing_exprs'] = [v for v in self.slicing_exprs] - if self.snapshot: body['snapshot'] = self.snapshot.as_dict() - if self.table_name is not None: body['table_name'] = self.table_name - if self.time_series: body['time_series'] = self.time_series.as_dict() + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config.as_dict() + if self.inference_log: + body["inference_log"] = self.inference_log.as_dict() + if self.notifications: + body["notifications"] = self.notifications.as_dict() + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.slicing_exprs: + body["slicing_exprs"] = [v for v in self.slicing_exprs] + if self.snapshot: + body["snapshot"] = self.snapshot.as_dict() + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + body["time_series"] = self.time_series.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes.""" body = {} - if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name - if self.custom_metrics: body['custom_metrics'] = self.custom_metrics - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.data_classification_config: body['data_classification_config'] = self.data_classification_config - if self.inference_log: body['inference_log'] = self.inference_log - if self.notifications: body['notifications'] = self.notifications - if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name - if self.schedule: body['schedule'] = self.schedule - if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs - if self.snapshot: body['snapshot'] = self.snapshot - if self.table_name is not None: body['table_name'] = self.table_name - if self.time_series: body['time_series'] = self.time_series + if 
self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = self.custom_metrics + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config + if self.inference_log: + body["inference_log"] = self.inference_log + if self.notifications: + body["notifications"] = self.notifications + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.schedule: + body["schedule"] = self.schedule + if self.slicing_exprs: + body["slicing_exprs"] = self.slicing_exprs + if self.snapshot: + body["snapshot"] = self.snapshot + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + body["time_series"] = self.time_series return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateMonitor: """Deserializes the UpdateMonitor from a dictionary.""" - return cls(baseline_table_name=d.get('baseline_table_name', None), custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric), dashboard_id=d.get('dashboard_id', None), data_classification_config=_from_dict(d, 'data_classification_config', MonitorDataClassificationConfig), inference_log=_from_dict(d, 'inference_log', MonitorInferenceLog), notifications=_from_dict(d, 'notifications', MonitorNotifications), output_schema_name=d.get('output_schema_name', None), schedule=_from_dict(d, 'schedule', MonitorCronSchedule), slicing_exprs=d.get('slicing_exprs', None), snapshot=_from_dict(d, 'snapshot', MonitorSnapshot), table_name=d.get('table_name', None), time_series=_from_dict(d, 'time_series', MonitorTimeSeries)) - - + return cls( + baseline_table_name=d.get("baseline_table_name", None), + custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), + dashboard_id=d.get("dashboard_id", None), + data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), + inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), + notifications=_from_dict(d, "notifications", MonitorNotifications), + output_schema_name=d.get("output_schema_name", None), + schedule=_from_dict(d, "schedule", MonitorCronSchedule), + slicing_exprs=d.get("slicing_exprs", None), + snapshot=_from_dict(d, "snapshot", MonitorSnapshot), + table_name=d.get("table_name", None), + time_series=_from_dict(d, "time_series", MonitorTimeSeries), + ) @dataclass class UpdatePermissions: changes: Optional[List[PermissionsChange]] = None """Array of permissions change objects.""" - + full_name: Optional[str] = None """Full name of securable.""" - + securable_type: Optional[str] = None """Type of securable.""" - + def as_dict(self) -> dict: """Serializes the UpdatePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.changes: body['changes'] = [v.as_dict() for v in self.changes] - if self.full_name is not None: body['full_name'] = self.full_name - if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.changes: + body["changes"] = [v.as_dict() for v in self.changes] + if self.full_name is not None: + body["full_name"] = self.full_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.changes: 
body['changes'] = self.changes - if self.full_name is not None: body['full_name'] = self.full_name - if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.changes: + body["changes"] = self.changes + if self.full_name is not None: + body["full_name"] = self.full_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissions: """Deserializes the UpdatePermissions from a dictionary.""" - return cls(changes=_repeated_dict(d, 'changes', PermissionsChange), full_name=d.get('full_name', None), securable_type=d.get('securable_type', None)) - - + return cls( + changes=_repeated_dict(d, "changes", PermissionsChange), + full_name=d.get("full_name", None), + securable_type=d.get("securable_type", None), + ) @dataclass class UpdatePermissionsResponse: privilege_assignments: Optional[List[PrivilegeAssignment]] = None """The privileges assigned to each principal""" - + def as_dict(self) -> dict: """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissionsResponse: """Deserializes the UpdatePermissionsResponse from a dictionary.""" - return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment)) - - + return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment)) @dataclass class UpdateRegisteredModelRequest: comment: Optional[str] = None """The comment attached to the registered model""" - + full_name: Optional[str] = None """The three-level (fully qualified) name of the registered model""" - + new_name: Optional[str] = None """New name for the registered model.""" - + owner: Optional[str] = None """The identifier of the user who owns the registered model""" - + def as_dict(self) -> dict: """Serializes the UpdateRegisteredModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.full_name is not None: body['full_name'] = self.full_name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner + if self.comment is not None: + body["comment"] = self.comment + if self.full_name is not None: + body["full_name"] = self.full_name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.full_name is not None: body['full_name'] = self.full_name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: 
body['owner'] = self.owner + if self.comment is not None: + body["comment"] = self.comment + if self.full_name is not None: + body["full_name"] = self.full_name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRegisteredModelRequest: """Deserializes the UpdateRegisteredModelRequest from a dictionary.""" - return cls(comment=d.get('comment', None), full_name=d.get('full_name', None), new_name=d.get('new_name', None), owner=d.get('owner', None)) - - + return cls( + comment=d.get("comment", None), + full_name=d.get("full_name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + ) @dataclass @@ -7805,380 +9516,490 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() - - @dataclass class UpdateSchema: comment: Optional[str] = None """User-provided free-form text description.""" - + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None """Whether predictive optimization should be enabled for this object and objects under it.""" - + full_name: Optional[str] = None """Full name of the schema.""" - + new_name: Optional[str] = None """New name for the schema.""" - + owner: Optional[str] = None """Username of current owner of schema.""" - - properties: Optional[Dict[str,str]] = None + + properties: Optional[Dict[str, str]] = None """A map of key-value properties attached to the securable.""" - + def as_dict(self) -> dict: """Serializes the UpdateSchema into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization.value - if self.full_name is not None: body['full_name'] = self.full_name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.properties: body['properties'] = self.properties + if self.comment is not None: + body["comment"] = self.comment + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization.value + if self.full_name is not None: + body["full_name"] = self.full_name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties return body def as_shallow_dict(self) -> dict: """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.enable_predictive_optimization is not None: body['enable_predictive_optimization'] = self.enable_predictive_optimization - if self.full_name is not None: body['full_name'] = self.full_name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.properties: body['properties'] = self.properties + if self.comment is not None: + body["comment"] = self.comment + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization + if self.full_name is not None: + body["full_name"] = self.full_name + if self.new_name is not None: + body["new_name"] 
= self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateSchema: """Deserializes the UpdateSchema from a dictionary.""" - return cls(comment=d.get('comment', None), enable_predictive_optimization=_enum(d, 'enable_predictive_optimization', EnablePredictiveOptimization), full_name=d.get('full_name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), properties=d.get('properties', None)) - - + return cls( + comment=d.get("comment", None), + enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), + full_name=d.get("full_name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + properties=d.get("properties", None), + ) @dataclass class UpdateStorageCredential: aws_iam_role: Optional[AwsIamRoleRequest] = None """The AWS IAM role configuration.""" - + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None """The Azure managed identity configuration.""" - + azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" - + cloudflare_api_token: Optional[CloudflareApiToken] = None """The Cloudflare API token configuration.""" - + comment: Optional[str] = None """Comment associated with the credential.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None """The Databricks managed GCP service account configuration.""" - + force: Optional[bool] = None """Force update even if there are dependent external locations or external tables.""" - + isolation_mode: Optional[IsolationMode] = None - + name: Optional[str] = None """Name of the storage credential.""" - + new_name: Optional[str] = None """New name for the storage credential.""" - + owner: Optional[str] = None """Username of current owner of credential.""" - + read_only: Optional[bool] = None """Whether the storage credential is only usable for read operations.""" - + skip_validation: Optional[bool] = None """Supplying true to this argument skips validation of the updated credential.""" - + def as_dict(self) -> dict: """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() - if self.comment is not None: body['comment'] = self.comment - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: body['force'] = self.force - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = 
self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body def as_shallow_dict(self) -> dict: """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role - if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity - if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal - if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token - if self.comment is not None: body['comment'] = self.comment - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account - if self.force is not None: body['force'] = self.force - if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.read_only is not None: body['read_only'] = self.read_only - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: """Deserializes the UpdateStorageCredential from a dictionary.""" - return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityResponse), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 
'cloudflare_api_token', CloudflareApiToken), comment=d.get('comment', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountRequest), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None)) - - + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + force=d.get("force", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + skip_validation=d.get("skip_validation", None), + ) @dataclass class UpdateTableRequest: """Update a table owner.""" - + full_name: Optional[str] = None """Full name of the table.""" - + owner: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateTableRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.full_name is not None: body['full_name'] = self.full_name - if self.owner is not None: body['owner'] = self.owner + if self.full_name is not None: + body["full_name"] = self.full_name + if self.owner is not None: + body["owner"] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the UpdateTableRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.full_name is not None: body['full_name'] = self.full_name - if self.owner is not None: body['owner'] = self.owner + if self.full_name is not None: + body["full_name"] = self.full_name + if self.owner is not None: + body["owner"] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateTableRequest: """Deserializes the UpdateTableRequest from a dictionary.""" - return cls(full_name=d.get('full_name', None), owner=d.get('owner', None)) - - + return cls(full_name=d.get("full_name", None), owner=d.get("owner", None)) @dataclass class UpdateVolumeRequestContent: comment: Optional[str] = None """The comment attached to the volume""" - + name: Optional[str] = None """The three-level (fully qualified) name of the volume""" - + new_name: Optional[str] = None """New name for the volume.""" - + owner: Optional[str] = None """The identifier of the user who owns the volume""" - + def as_dict(self) -> dict: """Serializes the UpdateVolumeRequestContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner return body def as_shallow_dict(self) -> dict: """Serializes the 
UpdateVolumeRequestContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateVolumeRequestContent: """Deserializes the UpdateVolumeRequestContent from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None)) - - + return cls( + comment=d.get("comment", None), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + ) @dataclass class UpdateWorkspaceBindings: assign_workspaces: Optional[List[int]] = None """A list of workspace IDs.""" - + name: Optional[str] = None """The name of the catalog.""" - + unassign_workspaces: Optional[List[int]] = None """A list of workspace IDs.""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assign_workspaces: body['assign_workspaces'] = [v for v in self.assign_workspaces] - if self.name is not None: body['name'] = self.name - if self.unassign_workspaces: body['unassign_workspaces'] = [v for v in self.unassign_workspaces] + if self.assign_workspaces: + body["assign_workspaces"] = [v for v in self.assign_workspaces] + if self.name is not None: + body["name"] = self.name + if self.unassign_workspaces: + body["unassign_workspaces"] = [v for v in self.unassign_workspaces] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes.""" body = {} - if self.assign_workspaces: body['assign_workspaces'] = self.assign_workspaces - if self.name is not None: body['name'] = self.name - if self.unassign_workspaces: body['unassign_workspaces'] = self.unassign_workspaces + if self.assign_workspaces: + body["assign_workspaces"] = self.assign_workspaces + if self.name is not None: + body["name"] = self.name + if self.unassign_workspaces: + body["unassign_workspaces"] = self.unassign_workspaces return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindings: """Deserializes the UpdateWorkspaceBindings from a dictionary.""" - return cls(assign_workspaces=d.get('assign_workspaces', None), name=d.get('name', None), unassign_workspaces=d.get('unassign_workspaces', None)) - - + return cls( + assign_workspaces=d.get("assign_workspaces", None), + name=d.get("name", None), + unassign_workspaces=d.get("unassign_workspaces", None), + ) @dataclass class UpdateWorkspaceBindingsParameters: add: Optional[List[WorkspaceBinding]] = None """List of workspace bindings.""" - + remove: Optional[List[WorkspaceBinding]] = None """List of workspace bindings.""" - + securable_name: Optional[str] = None """The name of the securable.""" - + securable_type: Optional[str] = None """The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location).""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable 
for use as a JSON request body.""" body = {} - if self.add: body['add'] = [v.as_dict() for v in self.add] - if self.remove: body['remove'] = [v.as_dict() for v in self.remove] - if self.securable_name is not None: body['securable_name'] = self.securable_name - if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.add: + body["add"] = [v.as_dict() for v in self.add] + if self.remove: + body["remove"] = [v.as_dict() for v in self.remove] + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes.""" body = {} - if self.add: body['add'] = self.add - if self.remove: body['remove'] = self.remove - if self.securable_name is not None: body['securable_name'] = self.securable_name - if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.add: + body["add"] = self.add + if self.remove: + body["remove"] = self.remove + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsParameters: """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary.""" - return cls(add=_repeated_dict(d, 'add', WorkspaceBinding), remove=_repeated_dict(d, 'remove', WorkspaceBinding), securable_name=d.get('securable_name', None), securable_type=d.get('securable_type', None)) - - + return cls( + add=_repeated_dict(d, "add", WorkspaceBinding), + remove=_repeated_dict(d, "remove", WorkspaceBinding), + securable_name=d.get("securable_name", None), + securable_type=d.get("securable_type", None), + ) @dataclass class UpdateWorkspaceBindingsResponse: """A list of workspace IDs that are bound to the securable""" - + bindings: Optional[List[WorkspaceBinding]] = None """List of workspace bindings.""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings] + if self.bindings: + body["bindings"] = [v.as_dict() for v in self.bindings] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bindings: body['bindings'] = self.bindings + if self.bindings: + body["bindings"] = self.bindings return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsResponse: """Deserializes the UpdateWorkspaceBindingsResponse from a dictionary.""" - return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding)) - - + return cls(bindings=_repeated_dict(d, "bindings", WorkspaceBinding)) @dataclass class ValidateCredentialRequest: """Next ID: 17""" - + aws_iam_role: Optional[AwsIamRole] = None """The AWS IAM role configuration""" - + azure_managed_identity: Optional[AzureManagedIdentity] = None """The Azure managed identity configuration.""" - + credential_name: Optional[str] = None """Required. The name of an existing credential or long-lived cloud credential to validate.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" - + external_location_name: Optional[str] = None """The name of an existing external location to validate. Only applicable for storage credentials (purpose is **STORAGE**.)""" - + purpose: Optional[CredentialPurpose] = None """The purpose of the credential. This should only be used when the credential is specified.""" - + read_only: Optional[bool] = None """Whether the credential is only usable for read operations. Only applicable for storage credentials (purpose is **STORAGE**.)""" - + url: Optional[str] = None """The external location url to validate. Only applicable when purpose is **STORAGE**.""" - + def as_dict(self) -> dict: """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() - if self.credential_name is not None: body['credential_name'] = self.credential_name - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: body['external_location_name'] = self.external_location_name - if self.purpose is not None: body['purpose'] = self.purpose.value - if self.read_only is not None: body['read_only'] = self.read_only - if self.url is not None: body['url'] = self.url + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.external_location_name is not None: + body["external_location_name"] = self.external_location_name + if self.purpose is not None: + body["purpose"] = self.purpose.value + if self.read_only is not None: + body["read_only"] = self.read_only + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role - if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity - if self.credential_name is not None: body['credential_name'] = self.credential_name - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account - if self.external_location_name is not None: body['external_location_name'] = self.external_location_name - if self.purpose is not None: body['purpose'] = self.purpose - if self.read_only is not None: body['read_only'] = self.read_only - if self.url is not None: body['url'] = self.url + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.external_location_name is not None: + body["external_location_name"] = self.external_location_name + if self.purpose is not None: + body["purpose"] = 
self.purpose + if self.read_only is not None: + body["read_only"] = self.read_only + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateCredentialRequest: """Deserializes the ValidateCredentialRequest from a dictionary.""" - return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity), credential_name=d.get('credential_name', None), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccount), external_location_name=d.get('external_location_name', None), purpose=_enum(d, 'purpose', CredentialPurpose), read_only=d.get('read_only', None), url=d.get('url', None)) - - + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), + credential_name=d.get("credential_name", None), + databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), + external_location_name=d.get("external_location_name", None), + purpose=_enum(d, "purpose", CredentialPurpose), + read_only=d.get("read_only", None), + url=d.get("url", None), + ) @dataclass @@ -8186,855 +10007,885 @@ class ValidateCredentialResponse: is_dir: Optional[bool] = None """Whether the tested location is a directory in cloud storage. Only applicable when purpose is **STORAGE**.""" - + results: Optional[List[CredentialValidationResult]] = None """The results of the validation check.""" - + def as_dict(self) -> dict: """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_dir is not None: body['isDir'] = self.is_dir - if self.results: body['results'] = [v.as_dict() for v in self.results] + if self.is_dir is not None: + body["isDir"] = self.is_dir + if self.results: + body["results"] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ValidateCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_dir is not None: body['isDir'] = self.is_dir - if self.results: body['results'] = self.results + if self.is_dir is not None: + body["isDir"] = self.is_dir + if self.results: + body["results"] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateCredentialResponse: """Deserializes the ValidateCredentialResponse from a dictionary.""" - return cls(is_dir=d.get('isDir', None), results=_repeated_dict(d, 'results', CredentialValidationResult)) - - + return cls(is_dir=d.get("isDir", None), results=_repeated_dict(d, "results", CredentialValidationResult)) class ValidateCredentialResult(Enum): """An enum representing the result of the file operation""" - - FAIL = 'FAIL' - PASS = 'PASS' - SKIP = 'SKIP' + + FAIL = "FAIL" + PASS = "PASS" + SKIP = "SKIP" + @dataclass class ValidateStorageCredential: aws_iam_role: Optional[AwsIamRoleRequest] = None """The AWS IAM role configuration.""" - + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None """The Azure managed identity configuration.""" - + azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" - + cloudflare_api_token: Optional[CloudflareApiToken] = None """The Cloudflare API token configuration.""" - + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None """The
Databricks created GCP service account configuration.""" - + external_location_name: Optional[str] = None """The name of an existing external location to validate.""" - + read_only: Optional[bool] = None """Whether the storage credential is only usable for read operations.""" - + storage_credential_name: Optional[str] = None """The name of the storage credential to validate.""" - + url: Optional[str] = None """The external location url to validate.""" - + def as_dict(self) -> dict: """Serializes the ValidateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token.as_dict() - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: body['external_location_name'] = self.external_location_name - if self.read_only is not None: body['read_only'] = self.read_only - if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name - if self.url is not None: body['url'] = self.url + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.external_location_name is not None: + body["external_location_name"] = self.external_location_name + if self.read_only is not None: + body["read_only"] = self.read_only + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role - if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity - if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal - if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token - if self.databricks_gcp_service_account: body['databricks_gcp_service_account'] = self.databricks_gcp_service_account - if self.external_location_name is not None: body['external_location_name'] = self.external_location_name - if self.read_only is not None: body['read_only'] = self.read_only - if self.storage_credential_name is not None: body['storage_credential_name'] = self.storage_credential_name - if self.url is not None: body['url'] = self.url + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if 
self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.external_location_name is not None: + body["external_location_name"] = self.external_location_name + if self.read_only is not None: + body["read_only"] = self.read_only + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateStorageCredential: """Deserializes the ValidateStorageCredential from a dictionary.""" - return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRoleRequest), azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentityRequest), azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal), cloudflare_api_token=_from_dict(d, 'cloudflare_api_token', CloudflareApiToken), databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account', DatabricksGcpServiceAccountRequest), external_location_name=d.get('external_location_name', None), read_only=d.get('read_only', None), storage_credential_name=d.get('storage_credential_name', None), url=d.get('url', None)) - - + return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + external_location_name=d.get("external_location_name", None), + read_only=d.get("read_only", None), + storage_credential_name=d.get("storage_credential_name", None), + url=d.get("url", None), + ) @dataclass class ValidateStorageCredentialResponse: is_dir: Optional[bool] = None """Whether the tested location is a directory in cloud storage.""" - + results: Optional[List[ValidationResult]] = None """The results of the validation check.""" - + def as_dict(self) -> dict: """Serializes the ValidateStorageCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_dir is not None: body['isDir'] = self.is_dir - if self.results: body['results'] = [v.as_dict() for v in self.results] + if self.is_dir is not None: + body["isDir"] = self.is_dir + if self.results: + body["results"] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ValidateStorageCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_dir is not None: body['isDir'] = self.is_dir - if self.results: body['results'] = self.results + if self.is_dir is not None: + body["isDir"] = self.is_dir + if self.results: + body["results"] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidateStorageCredentialResponse: """Deserializes the ValidateStorageCredentialResponse from a dictionary.""" - return cls(is_dir=d.get('isDir', None), results=_repeated_dict(d, 'results', ValidationResult)) - - + return cls(is_dir=d.get("isDir", None), results=_repeated_dict(d, "results", ValidationResult)) @dataclass class ValidationResult: message: Optional[str] = None """Error message would exist when 
the result is not **PASS**.""" - + operation: Optional[ValidationResultOperation] = None """The operation tested.""" - + result: Optional[ValidationResultResult] = None """The results of the tested operation.""" - + def as_dict(self) -> dict: """Serializes the ValidationResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message - if self.operation is not None: body['operation'] = self.operation.value - if self.result is not None: body['result'] = self.result.value + if self.message is not None: + body["message"] = self.message + if self.operation is not None: + body["operation"] = self.operation.value + if self.result is not None: + body["result"] = self.result.value return body def as_shallow_dict(self) -> dict: """Serializes the ValidationResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message - if self.operation is not None: body['operation'] = self.operation - if self.result is not None: body['result'] = self.result + if self.message is not None: + body["message"] = self.message + if self.operation is not None: + body["operation"] = self.operation + if self.result is not None: + body["result"] = self.result return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ValidationResult: """Deserializes the ValidationResult from a dictionary.""" - return cls(message=d.get('message', None), operation=_enum(d, 'operation', ValidationResultOperation), result=_enum(d, 'result', ValidationResultResult)) - - + return cls( + message=d.get("message", None), + operation=_enum(d, "operation", ValidationResultOperation), + result=_enum(d, "result", ValidationResultResult), + ) class ValidationResultOperation(Enum): """The operation tested.""" - - DELETE = 'DELETE' - LIST = 'LIST' - PATH_EXISTS = 'PATH_EXISTS' - READ = 'READ' - WRITE = 'WRITE' + + DELETE = "DELETE" + LIST = "LIST" + PATH_EXISTS = "PATH_EXISTS" + READ = "READ" + WRITE = "WRITE" + class ValidationResultResult(Enum): """The results of the tested operation.""" - - FAIL = 'FAIL' - PASS = 'PASS' - SKIP = 'SKIP' + + FAIL = "FAIL" + PASS = "PASS" + SKIP = "SKIP" + @dataclass class VolumeInfo: access_point: Optional[str] = None """The AWS access point to use when accessing S3 for this external location.""" - + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" - + catalog_name: Optional[str] = None """The name of the catalog where the schema and the volume are""" - + comment: Optional[str] = None """The comment attached to the volume""" - + created_at: Optional[int] = None - + created_by: Optional[str] = None """The identifier of the user who created the volume""" - + encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" - + full_name: Optional[str] = None """The three-level (fully qualified) name of the volume""" - + metastore_id: Optional[str] = None """The unique identifier of the metastore""" - + name: Optional[str] = None """The name of the volume""" - + owner: Optional[str] = None """The identifier of the user who owns the volume""" - + schema_name: Optional[str] = None """The name of the schema where the volume is""" - + storage_location: Optional[str] = None """The storage location on the cloud""" - + updated_at: Optional[int] =
None - + updated_by: Optional[str] = None """The identifier of the user who updated the volume last time""" - + volume_id: Optional[str] = None """The unique identifier of the volume""" - + volume_type: Optional[VolumeType] = None """The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. [Learn more] [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" - + def as_dict(self) -> dict: """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_point is not None: body['access_point'] = self.access_point - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() - if self.full_name is not None: body['full_name'] = self.full_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by - if self.volume_id is not None: body['volume_id'] = self.volume_id - if self.volume_type is not None: body['volume_type'] = self.volume_type.value + if self.access_point is not None: + body["access_point"] = self.access_point + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.encryption_details: + body["encryption_details"] = self.encryption_details.as_dict() + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.volume_id is not None: + body["volume_id"] = self.volume_id + if self.volume_type is not None: + body["volume_type"] = self.volume_type.value return body def as_shallow_dict(self) -> dict: """Serializes the VolumeInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_point is not None: body['access_point'] = self.access_point - if self.browse_only is not None: body['browse_only'] = self.browse_only - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - 
if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.encryption_details: body['encryption_details'] = self.encryption_details - if self.full_name is not None: body['full_name'] = self.full_name - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by - if self.volume_id is not None: body['volume_id'] = self.volume_id - if self.volume_type is not None: body['volume_type'] = self.volume_type + if self.access_point is not None: + body["access_point"] = self.access_point + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.encryption_details: + body["encryption_details"] = self.encryption_details + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.volume_id is not None: + body["volume_id"] = self.volume_id + if self.volume_type is not None: + body["volume_type"] = self.volume_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VolumeInfo: """Deserializes the VolumeInfo from a dictionary.""" - return cls(access_point=d.get('access_point', None), browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), full_name=d.get('full_name', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), schema_name=d.get('schema_name', None), storage_location=d.get('storage_location', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), volume_id=d.get('volume_id', None), volume_type=_enum(d, 'volume_type', VolumeType)) - - + return cls( + access_point=d.get("access_point", None), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), + full_name=d.get("full_name", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + 
schema_name=d.get("schema_name", None), + storage_location=d.get("storage_location", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + volume_id=d.get("volume_id", None), + volume_type=_enum(d, "volume_type", VolumeType), + ) class VolumeType(Enum): """The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. [Learn more] - + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external""" - - EXTERNAL = 'EXTERNAL' - MANAGED = 'MANAGED' + + EXTERNAL = "EXTERNAL" + MANAGED = "MANAGED" + @dataclass class WorkspaceBinding: workspace_id: int """Required""" - + binding_type: Optional[WorkspaceBindingBindingType] = None """One of READ_WRITE/READ_ONLY. Default is READ_WRITE.""" - + def as_dict(self) -> dict: """Serializes the WorkspaceBinding into a dictionary suitable for use as a JSON request body.""" body = {} - if self.binding_type is not None: body['binding_type'] = self.binding_type.value - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.binding_type is not None: + body["binding_type"] = self.binding_type.value + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceBinding into a shallow dictionary of its immediate attributes.""" body = {} - if self.binding_type is not None: body['binding_type'] = self.binding_type - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.binding_type is not None: + body["binding_type"] = self.binding_type + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceBinding: """Deserializes the WorkspaceBinding from a dictionary.""" - return cls(binding_type=_enum(d, 'binding_type', WorkspaceBindingBindingType), workspace_id=d.get('workspace_id', None)) - - + return cls( + binding_type=_enum(d, "binding_type", WorkspaceBindingBindingType), workspace_id=d.get("workspace_id", None) + ) class WorkspaceBindingBindingType(Enum): """Using `BINDING_TYPE_` prefix here to avoid conflict with `TableOperation` enum in `credentials_common.proto`.""" - - BINDING_TYPE_READ_ONLY = 'BINDING_TYPE_READ_ONLY' - BINDING_TYPE_READ_WRITE = 'BINDING_TYPE_READ_WRITE' + BINDING_TYPE_READ_ONLY = "BINDING_TYPE_READ_ONLY" + BINDING_TYPE_READ_WRITE = "BINDING_TYPE_READ_WRITE" class AccountMetastoreAssignmentsAPI: """These APIs manage metastore assignments to a workspace.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , workspace_id: int, metastore_id: str - , * - , metastore_assignment: Optional[CreateMetastoreAssignment] = None): + def create( + self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[CreateMetastoreAssignment] = None + ): """Assigns a workspace to a metastore. - + Creates an assignment to a metastore for a workspace - + :param workspace_id: int Workspace ID. 
:param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - - - """ - body = {} - if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}', body=body - - , headers=headers - ) - - - - - def delete(self - , workspace_id: int, metastore_id: str - ): + """ + body = {} + if metastore_assignment is not None: + body["metastore_assignment"] = metastore_assignment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", + body=body, + headers=headers, + ) + + def delete(self, workspace_id: int, metastore_id: str): """Delete a metastore assignment. - + Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , workspace_id: int - ) -> AccountsMetastoreAssignment: + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", + headers=headers, + ) + + def get(self, workspace_id: int) -> AccountsMetastoreAssignment: """Gets the metastore assignment for a workspace. - + Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mapping will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a 404 is returned. - + :param workspace_id: int Workspace ID. - + :returns: :class:`AccountsMetastoreAssignment` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastore' - - , headers=headers - ) - return AccountsMetastoreAssignment.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - , metastore_id: str - ) -> Iterator[int]: + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastore", headers=headers + ) + return AccountsMetastoreAssignment.from_dict(res) + + def list(self, metastore_id: str) -> Iterator[int]: """Get all workspaces assigned to a metastore. - + Gets a list of all Databricks workspace IDs that have been assigned to the given metastore.
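As a usage sketch for the assignments service above (assuming an `AccountClient` configured with valid account credentials that exposes this class as `metastore_assignments`; the IDs are placeholders):

from databricks.sdk import AccountClient

a = AccountClient()  # picks up host, account_id, and credentials from the environment

# Bind a workspace to a metastore, then read the assignment back.
a.metastore_assignments.create(workspace_id=1234567890, metastore_id="11111111-2222-3333-4444-555555555555")
assignment = a.metastore_assignments.get(workspace_id=1234567890)  # backed by a 404 if no assignment exists

# Enumerate every workspace ID bound to the metastore.
for workspace_id in a.metastore_assignments.list(metastore_id="11111111-2222-3333-4444-555555555555"):
    print(workspace_id)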
- + :param metastore_id: str Unity Catalog metastore ID - + :returns: Iterator over int """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/workspaces' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/workspaces", headers=headers + ) parsed = ListAccountMetastoreAssignmentsResponse.from_dict(json).workspace_ids return parsed if parsed is not None else [] - - - - - - def update(self - , workspace_id: int, metastore_id: str - , * - , metastore_assignment: Optional[UpdateMetastoreAssignment] = None): + def update( + self, workspace_id: int, metastore_id: str, *, metastore_assignment: Optional[UpdateMetastoreAssignment] = None + ): """Updates a metastore assignment to a workspace. - + Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional) - - + + """ body = {} - if metastore_assignment is not None: body['metastore_assignment'] = metastore_assignment.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}', body=body - - , headers=headers - ) - + if metastore_assignment is not None: + body["metastore_assignment"] = metastore_assignment.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/metastores/{metastore_id}", + body=body, + headers=headers, + ) + - - class AccountMetastoresAPI: """These APIs manage Unity Catalog metastores for an account. A metastore contains catalogs that can be associated with workspaces""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo: + def create(self, *, metastore_info: Optional[CreateMetastore] = None) -> AccountsMetastoreInfo: """Create metastore. - + Creates a Unity Catalog metastore. - + :param metastore_info: :class:`CreateMetastore` (optional) - + :returns: :class:`AccountsMetastoreInfo` """ body = {} - if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/metastores', body=body - - , headers=headers - ) - return AccountsMetastoreInfo.from_dict(res) + if metastore_info is not None: + body["metastore_info"] = metastore_info.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/metastores", body=body, headers=headers) + return AccountsMetastoreInfo.from_dict(res) - def delete(self - , metastore_id: str - , * - , force: Optional[bool] = None): + def delete(self, metastore_id: str, *, force: Optional[bool] = None): """Delete a metastore. - + Deletes a Unity Catalog metastore for an account, both specified by ID.
- + :param metastore_id: str Unity Catalog metastore ID :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - - """ - - query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}', query=query - - , headers=headers - ) - - - - - def get(self - , metastore_id: str - ) -> AccountsMetastoreInfo: + """ + + query = {} + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", + query=query, + headers=headers, + ) + + def get(self, metastore_id: str) -> AccountsMetastoreInfo: """Get a metastore. - + Gets a Unity Catalog metastore from an account, both specified by ID. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: :class:`AccountsMetastoreInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}' - - , headers=headers - ) - return AccountsMetastoreInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", headers=headers + ) + return AccountsMetastoreInfo.from_dict(res) def list(self) -> Iterator[MetastoreInfo]: """Get all metastores associated with an account. - + Gets all Unity Catalog metastores associated with an account specified by ID. - + :returns: Iterator over :class:`MetastoreInfo` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/metastores", headers=headers) parsed = ListMetastoresResponse.from_dict(json).metastores return parsed if parsed is not None else [] - - - - - - def update(self - , metastore_id: str - , * - , metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo: + def update(self, metastore_id: str, *, metastore_info: Optional[UpdateMetastore] = None) -> AccountsMetastoreInfo: """Update a metastore. - + Updates an existing Unity Catalog metastore. 
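A minimal sketch of the surrounding metastore CRUD methods, reusing the assumed `AccountClient` `a` from the earlier example (field names on the returned objects are not shown in this hunk and are assumptions):

# List all metastores in the account, then fetch one by ID.
for m in a.metastores.list():
    print(m.metastore_id, m.name)  # MetastoreInfo attributes assumed

info = a.metastores.get(metastore_id="11111111-2222-3333-4444-555555555555")

# Delete by ID; force=True proceeds even if the metastore is not empty.
a.metastores.delete(metastore_id="11111111-2222-3333-4444-555555555555", force=True)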
- + :param metastore_id: str Unity Catalog metastore ID :param metastore_info: :class:`UpdateMetastore` (optional) - + :returns: :class:`AccountsMetastoreInfo` """ body = {} - if metastore_info is not None: body['metastore_info'] = metastore_info.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}', body=body - - , headers=headers - ) + if metastore_info is not None: + body["metastore_info"] = metastore_info.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}", body=body, headers=headers + ) return AccountsMetastoreInfo.from_dict(res) - - + class AccountStorageCredentialsAPI: """These APIs manage storage credentials for a particular metastore.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , metastore_id: str - , * - , credential_info: Optional[CreateStorageCredential] = None) -> AccountsStorageCredentialInfo: + def create( + self, metastore_id: str, *, credential_info: Optional[CreateStorageCredential] = None + ) -> AccountsStorageCredentialInfo: """Create a storage credential. - + Creates a new storage credential. The request object is specific to the cloud: - + * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * **GcpServiceAccountKey** for GCP credentials. - + The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the metastore. - + :param metastore_id: str Unity Catalog metastore ID :param credential_info: :class:`CreateStorageCredential` (optional) - + :returns: :class:`AccountsStorageCredentialInfo` """ body = {} - if credential_info is not None: body['credential_info'] = credential_info.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials', body=body - - , headers=headers - ) + if credential_info is not None: + body["credential_info"] = credential_info.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials", + body=body, + headers=headers, + ) return AccountsStorageCredentialInfo.from_dict(res) - - - - - def delete(self - , metastore_id: str, storage_credential_name: str - , * - , force: Optional[bool] = None): + def delete(self, metastore_id: str, storage_credential_name: str, *, force: Optional[bool] = None): """Delete a storage credential. - + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. :param force: bool (optional) Force deletion even if the Storage Credential is not empty. Default is false.
- - - """ - - query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}', query=query - - , headers=headers - ) - - - - - def get(self - , metastore_id: str, storage_credential_name: str - ) -> AccountsStorageCredentialInfo: + """ + + query = {} + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}", + query=query, + headers=headers, + ) + + def get(self, metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo: """Gets the named storage credential. - + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have a level of privilege on the storage credential. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. - + :returns: :class:`AccountsStorageCredentialInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}' - - , headers=headers - ) - return AccountsStorageCredentialInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}", + headers=headers, + ) + return AccountsStorageCredentialInfo.from_dict(res) - def list(self - , metastore_id: str - ) -> Iterator[StorageCredentialInfo]: + def list(self, metastore_id: str) -> Iterator[StorageCredentialInfo]: """Get all storage credentials assigned to a metastore. - + Gets a list of all storage credentials that have been assigned to the given metastore. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: Iterator over :class:`StorageCredentialInfo` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials", + headers=headers, + ) parsed = ListAccountStorageCredentialsResponse.from_dict(json).storage_credentials return parsed if parsed is not None else [] - - - - - - def update(self - , metastore_id: str, storage_credential_name: str - , * - , credential_info: Optional[UpdateStorageCredential] = None) -> AccountsStorageCredentialInfo: + def update( + self, + metastore_id: str, + storage_credential_name: str, + *, + credential_info: Optional[UpdateStorageCredential] = None, + ) -> AccountsStorageCredentialInfo: """Updates a storage credential. - + Updates a storage credential on the metastore. The caller must be the owner of the storage credential. If the caller is a metastore admin, only the __owner__ credential can be changed. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential.
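In the same vein, a sketch for the account-level storage-credential methods above (again assuming the accessor `a.storage_credentials` on the `AccountClient`; names and IDs are placeholders):

metastore_id = "11111111-2222-3333-4444-555555555555"

# Enumerate credentials assigned to the metastore, then fetch one by name.
for cred in a.storage_credentials.list(metastore_id=metastore_id):
    print(cred.name)  # StorageCredentialInfo.name assumed

cred = a.storage_credentials.get(metastore_id=metastore_id, storage_credential_name="my-credential")

# force=True deletes even if the credential is not empty.
a.storage_credentials.delete(metastore_id=metastore_id, storage_credential_name="my-credential", force=True)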
:param credential_info: :class:`UpdateStorageCredential` (optional) - + :returns: :class:`AccountsStorageCredentialInfo` """ body = {} - if credential_info is not None: body['credential_info'] = credential_info.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}', body=body - - , headers=headers - ) + if credential_info is not None: + body["credential_info"] = credential_info.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/metastores/{metastore_id}/storage-credentials/{storage_credential_name}", + body=body, + headers=headers, + ) return AccountsStorageCredentialInfo.from_dict(res) - - + class ArtifactAllowlistsAPI: """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - , artifact_type: ArtifactType - ) -> ArtifactAllowlistInfo: + def get(self, artifact_type: ArtifactType) -> ArtifactAllowlistInfo: """Get an artifact allowlist. - + Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. - + :param artifact_type: :class:`ArtifactType` The artifact type of the allowlist. - + :returns: :class:`ArtifactAllowlistInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}' - - , headers=headers - ) - return ArtifactAllowlistInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}", headers=headers) + return ArtifactAllowlistInfo.from_dict(res) - def update(self - , artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher] - , * - , created_at: Optional[int] = None, created_by: Optional[str] = None, metastore_id: Optional[str] = None) -> ArtifactAllowlistInfo: + def update( + self, + artifact_type: ArtifactType, + artifact_matchers: List[ArtifactMatcher], + *, + created_at: Optional[int] = None, + created_by: Optional[str] = None, + metastore_id: Optional[str] = None, + ) -> ArtifactAllowlistInfo: """Set an artifact allowlist. - + Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. - + :param artifact_type: :class:`ArtifactType` The artifact type of the allowlist. :param artifact_matchers: List[:class:`ArtifactMatcher`] @@ -9045,52 +10896,57 @@ def update(self Username of the user who set the artifact allowlist. :param metastore_id: str (optional) Unique identifier of parent metastore. 
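Because update() replaces the whole allowlist, a read-modify-write pattern is the safe way to append a matcher. A sketch using a workspace-level client; the `ArtifactMatcher` fields and enum members used here come from elsewhere in this module and are assumptions as far as this hunk is concerned:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ArtifactMatcher, ArtifactType, MatchType

w = WorkspaceClient()

current = w.artifact_allowlists.get(artifact_type=ArtifactType.INIT_SCRIPT)
matchers = list(current.artifact_matchers or [])
matchers.append(ArtifactMatcher(artifact="/Volumes/main/default/scripts/", match_type=MatchType.PREFIX_MATCH))

# Re-send the full list: anything omitted here would be dropped from the allowlist.
w.artifact_allowlists.update(artifact_type=ArtifactType.INIT_SCRIPT, artifact_matchers=matchers)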
- + :returns: :class:`ArtifactAllowlistInfo` """ body = {} - if artifact_matchers is not None: body['artifact_matchers'] = [v.as_dict() for v in artifact_matchers] - if created_at is not None: body['created_at'] = created_at - if created_by is not None: body['created_by'] = created_by - if metastore_id is not None: body['metastore_id'] = metastore_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}', body=body - - , headers=headers - ) + if artifact_matchers is not None: + body["artifact_matchers"] = [v.as_dict() for v in artifact_matchers] + if created_at is not None: + body["created_at"] = created_at + if created_by is not None: + body["created_by"] = created_by + if metastore_id is not None: + body["metastore_id"] = metastore_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", f"/api/2.1/unity-catalog/artifact-allowlists/{artifact_type.value}", body=body, headers=headers + ) return ArtifactAllowlistInfo.from_dict(res) - - + class CatalogsAPI: """A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission. - + In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of the workspaces in a Databricks account. Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str - , * - , comment: Optional[str] = None, connection_name: Optional[str] = None, options: Optional[Dict[str,str]] = None, properties: Optional[Dict[str,str]] = None, provider_name: Optional[str] = None, share_name: Optional[str] = None, storage_root: Optional[str] = None) -> CatalogInfo: + def create( + self, + name: str, + *, + comment: Optional[str] = None, + connection_name: Optional[str] = None, + options: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + provider_name: Optional[str] = None, + share_name: Optional[str] = None, + storage_root: Optional[str] = None, + ) -> CatalogInfo: """Create a catalog. - + Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. - + :param name: str Name of catalog. :param comment: str (optional) @@ -9103,110 +10959,102 @@ def create(self A map of key-value properties attached to the securable. :param provider_name: str (optional) The name of the Delta Sharing provider. - + A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server. :param share_name: str (optional) The name of the share under the share provider. :param storage_root: str (optional) Storage root URL for managed tables within catalog.
- + :returns: :class:`CatalogInfo` """ body = {} - if comment is not None: body['comment'] = comment - if connection_name is not None: body['connection_name'] = connection_name - if name is not None: body['name'] = name - if options is not None: body['options'] = options - if properties is not None: body['properties'] = properties - if provider_name is not None: body['provider_name'] = provider_name - if share_name is not None: body['share_name'] = share_name - if storage_root is not None: body['storage_root'] = storage_root - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/catalogs', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if connection_name is not None: + body["connection_name"] = connection_name + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options + if properties is not None: + body["properties"] = properties + if provider_name is not None: + body["provider_name"] = provider_name + if share_name is not None: + body["share_name"] = share_name + if storage_root is not None: + body["storage_root"] = storage_root + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/catalogs", body=body, headers=headers) return CatalogInfo.from_dict(res) - - - - - def delete(self - , name: str - , * - , force: Optional[bool] = None): + def delete(self, name: str, *, force: Optional[bool] = None): """Delete a catalog. - + Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner of the catalog. - + :param name: str The name of the catalog. :param force: bool (optional) Force deletion even if the catalog is not empty. - - + + """ - + query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/catalogs/{name}', query=query - - , headers=headers - ) - + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/catalogs/{name}", query=query, headers=headers) - def get(self - , name: str - , * - , include_browse: Optional[bool] = None) -> CatalogInfo: + def get(self, name: str, *, include_browse: Optional[bool] = None) -> CatalogInfo: """Get a catalog. - + Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** privilege set for their account. - + :param name: str The name of the catalog. 
:param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata - + :returns: :class:`CatalogInfo` """ - + query = {} - if include_browse is not None: query['include_browse'] = include_browse - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/catalogs/{name}', query=query - - , headers=headers - ) - return CatalogInfo.from_dict(res) + if include_browse is not None: + query["include_browse"] = include_browse + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", f"/api/2.1/unity-catalog/catalogs/{name}", query=query, headers=headers) + return CatalogInfo.from_dict(res) - def list(self - - , * - , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CatalogInfo]: + def list( + self, + *, + include_browse: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[CatalogInfo]: """List catalogs. - + Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata @@ -9220,44 +11068,47 @@ def list(self response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CatalogInfo` """ - - query = {} - if include_browse is not None: query['include_browse'] = include_browse - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/catalogs', query=query - - , headers=headers - ) - if 'catalogs' in json: - for v in json['catalogs']: - yield CatalogInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if include_browse is not None: + query["include_browse"] = include_browse + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , name: str - , * - , comment: Optional[str] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, isolation_mode: Optional[CatalogIsolationMode] = None, new_name: Optional[str] = None, options: Optional[Dict[str,str]] = None, owner: Optional[str] = None, properties: Optional[Dict[str,str]] = None) -> CatalogInfo: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/catalogs", query=query, headers=headers) + if "catalogs" in json: + for v in json["catalogs"]: + yield CatalogInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + name: str, + *, + comment: Optional[str] = None, + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, + isolation_mode: Optional[CatalogIsolationMode] = None, + new_name: Optional[str] =
None, + options: Optional[Dict[str, str]] = None, + owner: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + ) -> CatalogInfo: """Update a catalog. - + Updates the catalog that matches the supplied name. The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field of the catalog). - + :param name: str The name of the catalog. :param comment: str (optional) @@ -9274,59 +11125,63 @@ def update(self Username of current owner of catalog. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. - + :returns: :class:`CatalogInfo` """ body = {} - if comment is not None: body['comment'] = comment - if enable_predictive_optimization is not None: body['enable_predictive_optimization'] = enable_predictive_optimization.value - if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value - if new_name is not None: body['new_name'] = new_name - if options is not None: body['options'] = options - if owner is not None: body['owner'] = owner - if properties is not None: body['properties'] = properties - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/catalogs/{name}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = enable_predictive_optimization.value + if isolation_mode is not None: + body["isolation_mode"] = isolation_mode.value + if new_name is not None: + body["new_name"] = new_name + if options is not None: + body["options"] = options + if owner is not None: + body["owner"] = owner + if properties is not None: + body["properties"] = properties + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/catalogs/{name}", body=body, headers=headers) return CatalogInfo.from_dict(res) - - + class ConnectionsAPI: """Connections allow for creating a connection to an external data source. - + A connection is an abstraction of an external data source that can be connected from Databricks Compute. Creating a connection object is the first step to managing external data sources within Unity Catalog, with the second step being creating a data object (catalog, schema, or table) using the connection. Data objects derived from a connection can be written to or read from similar to other Unity Catalog data objects based on cloud storage. Users may create different types of connections with each connection having a unique set of configuration options to support credential management and other settings.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str, connection_type: ConnectionType, options: Dict[str,str] - , * - , comment: Optional[str] = None, properties: Optional[Dict[str,str]] = None, read_only: Optional[bool] = None) -> ConnectionInfo: + def create( + self, + name: str, + connection_type: ConnectionType, + options: Dict[str, str], + *, + comment: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + read_only: Optional[bool] = None, + ) -> ConnectionInfo: """Create a connection. - + Creates a new connection to an external data source. It allows users to specify connection details and configurations for interaction with the external server. - + :param name: str Name of the connection. 
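The CatalogsAPI `list`/`update` pair shown earlier can be exercised as follows; this is a sketch with placeholder names, and the generator transparently follows `next_page_token`:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CatalogIsolationMode

w = WorkspaceClient()
# Pagination is handled inside the generator; callers just iterate.
for c in w.catalogs.list(include_browse=True):
    print(c.name, c.owner)

w.catalogs.update(
    name="sdk_demo_catalog",  # placeholder
    comment="updated via the SDK",
    isolation_mode=CatalogIsolationMode.ISOLATED,
)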
:param connection_type: :class:`ConnectionType` @@ -9339,86 +11194,70 @@ def create(self A map of key-value properties attached to the securable. :param read_only: bool (optional) If the connection is read only. - + :returns: :class:`ConnectionInfo` """ body = {} - if comment is not None: body['comment'] = comment - if connection_type is not None: body['connection_type'] = connection_type.value - if name is not None: body['name'] = name - if options is not None: body['options'] = options - if properties is not None: body['properties'] = properties - if read_only is not None: body['read_only'] = read_only - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/connections', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if connection_type is not None: + body["connection_type"] = connection_type.value + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options + if properties is not None: + body["properties"] = properties + if read_only is not None: + body["read_only"] = read_only + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/connections", body=body, headers=headers) return ConnectionInfo.from_dict(res) - - - - - def delete(self - , name: str - ): + def delete(self, name: str): """Delete a connection. - + Deletes the connection that matches the supplied name. - + :param name: str The name of the connection to be deleted. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/connections/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , name: str - ) -> ConnectionInfo: + self._api.do("DELETE", f"/api/2.1/unity-catalog/connections/{name}", headers=headers) + + def get(self, name: str) -> ConnectionInfo: """Get a connection. - + Gets a connection from its name. - + :param name: str Name of the connection. - + :returns: :class:`ConnectionInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/connections/{name}' - - , headers=headers - ) - return ConnectionInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/connections/{name}", headers=headers) + return ConnectionInfo.from_dict(res) - def list(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ConnectionInfo]: + def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ConnectionInfo]: """List connections. - + List all connections. - + :param max_results: int (optional) Maximum number of connections to return. - If not set, all connections are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of this value and @@ -9426,42 +11265,35 @@ def list(self (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
- + :returns: Iterator over :class:`ConnectionInfo` """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/connections', query=query - - , headers=headers - ) - if 'connections' in json: - for v in json['connections']: - yield ConnectionInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , name: str, options: Dict[str,str] - , * - , new_name: Optional[str] = None, owner: Optional[str] = None) -> ConnectionInfo: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/connections", query=query, headers=headers) + if "connections" in json: + for v in json["connections"]: + yield ConnectionInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, name: str, options: Dict[str, str], *, new_name: Optional[str] = None, owner: Optional[str] = None + ) -> ConnectionInfo: """Update a connection. - + Updates the connection that matches the supplied name. - + :param name: str Name of the connection. :param options: Dict[str,str] @@ -9470,55 +11302,58 @@ def update(self New name for the connection. :param owner: str (optional) Username of current owner of the connection. - + :returns: :class:`ConnectionInfo` """ body = {} - if new_name is not None: body['new_name'] = new_name - if options is not None: body['options'] = options - if owner is not None: body['owner'] = owner - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/connections/{name}', body=body - - , headers=headers - ) + if new_name is not None: + body["new_name"] = new_name + if options is not None: + body["options"] = options + if owner is not None: + body["owner"] = owner + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/connections/{name}", body=body, headers=headers) return ConnectionInfo.from_dict(res) - - + class CredentialsAPI: """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant. Each credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. - + To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL` privilege. 
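A hedged sketch of the ConnectionsAPI lifecycle above; the MySQL option keys are illustrative only, not an authoritative schema:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ConnectionType

w = WorkspaceClient()
conn = w.connections.create(
    name="sdk_demo_mysql",  # placeholder
    connection_type=ConnectionType.MYSQL,
    options={"host": "db.example.com", "port": "3306"},  # illustrative keys
)
# update() takes the full options map again; here we also rename the connection.
w.connections.update(
    name=conn.name,
    options={"host": "db-replica.example.com", "port": "3306"},
    new_name="sdk_demo_mysql_v2",  # placeholder
)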
The user who creates the credential can delegate ownership to another user or group to manage permissions on it.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_credential(self - , name: str - , * - , aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, purpose: Optional[CredentialPurpose] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> CredentialInfo: + def create_credential( + self, + name: str, + *, + aws_iam_role: Optional[AwsIamRole] = None, + azure_managed_identity: Optional[AzureManagedIdentity] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, + comment: Optional[str] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, + purpose: Optional[CredentialPurpose] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None, + ) -> CredentialInfo: """Create a credential. - + Creates a new credential. The type of credential to be created is determined by the **purpose** field, which should be either **SERVICE** or **STORAGE**. - + The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials. - + :param name: str The credential name. The name must be unique among storage and service credentials within the metastore. @@ -9539,135 +11374,130 @@ def create_credential(self **STORAGE**. :param skip_validation: bool (optional) Optional. Supplying true to this argument skips validation of the created set of credentials. 
- + :returns: :class:`CredentialInfo` """ body = {} - if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() - if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() - if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() - if comment is not None: body['comment'] = comment - if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() - if name is not None: body['name'] = name - if purpose is not None: body['purpose'] = purpose.value - if read_only is not None: body['read_only'] = read_only - if skip_validation is not None: body['skip_validation'] = skip_validation - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/credentials', body=body - - , headers=headers - ) + if aws_iam_role is not None: + body["aws_iam_role"] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body["azure_managed_identity"] = azure_managed_identity.as_dict() + if azure_service_principal is not None: + body["azure_service_principal"] = azure_service_principal.as_dict() + if comment is not None: + body["comment"] = comment + if databricks_gcp_service_account is not None: + body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() + if name is not None: + body["name"] = name + if purpose is not None: + body["purpose"] = purpose.value + if read_only is not None: + body["read_only"] = read_only + if skip_validation is not None: + body["skip_validation"] = skip_validation + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/credentials", body=body, headers=headers) return CredentialInfo.from_dict(res) - - - - - def delete_credential(self - , name_arg: str - , * - , force: Optional[bool] = None): + def delete_credential(self, name_arg: str, *, force: Optional[bool] = None): """Delete a credential. - + Deletes a service or storage credential from the metastore. The caller must be an owner of the credential. - + :param name_arg: str Name of the credential. :param force: bool (optional) Force deletion even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**). - - - """ - - query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query - - , headers=headers - ) - - - - - def generate_temporary_service_credential(self - , credential_name: str - , * - , azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None, gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None) -> TemporaryCredentials: + """ + + query = {} + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.1/unity-catalog/credentials/{name_arg}", query=query, headers=headers) + + def generate_temporary_service_credential( + self, + credential_name: str, + *, + azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None, + gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None, + ) -> TemporaryCredentials: """Generate a temporary service credential. 
- + Returns a set of temporary credentials generated using the specified service credential. The caller must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. - + :param credential_name: str The name of the service credential used to generate a temporary credential :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) The Azure cloud options to customize the requested temporary credential :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) The GCP cloud options to customize the requested temporary credential - + :returns: :class:`TemporaryCredentials` """ body = {} - if azure_options is not None: body['azure_options'] = azure_options.as_dict() - if credential_name is not None: body['credential_name'] = credential_name - if gcp_options is not None: body['gcp_options'] = gcp_options.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/temporary-service-credentials', body=body - - , headers=headers - ) + if azure_options is not None: + body["azure_options"] = azure_options.as_dict() + if credential_name is not None: + body["credential_name"] = credential_name + if gcp_options is not None: + body["gcp_options"] = gcp_options.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/temporary-service-credentials", body=body, headers=headers) return TemporaryCredentials.from_dict(res) - - - - - def get_credential(self - , name_arg: str - ) -> CredentialInfo: + def get_credential(self, name_arg: str) -> CredentialInfo: """Get a credential. - + Gets a service or storage credential from the metastore. The caller must be a metastore admin, the owner of the credential, or have any permission on the credential. - + :param name_arg: str Name of the credential. - + :returns: :class:`CredentialInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/credentials/{name_arg}' - - , headers=headers - ) - return CredentialInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/credentials/{name_arg}", headers=headers) + return CredentialInfo.from_dict(res) - def list_credentials(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None, purpose: Optional[CredentialPurpose] = None) -> Iterator[CredentialInfo]: + def list_credentials( + self, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + purpose: Optional[CredentialPurpose] = None, + ) -> Iterator[CredentialInfo]: """List credentials. - + Gets an array of credentials (as __CredentialInfo__ objects). - + The array is limited to only the credentials that the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of credentials to return. - If not set, the default max page size is used. - When set to a value greater than 0, the page length is the minimum of this value and a server-configured @@ -9677,46 +11507,53 @@ def list_credentials(self Opaque token to retrieve the next page of results. :param purpose: :class:`CredentialPurpose` (optional) Return only credentials for the specified purpose. 
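To make the credential flow above concrete, a sketch that creates a service credential and then mints short-lived cloud tokens from it; the role ARN and names are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

w = WorkspaceClient()
cred = w.credentials.create_credential(
    name="sdk_demo_service_cred",  # placeholder
    purpose=CredentialPurpose.SERVICE,
    aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/demo"),  # placeholder ARN
)
tmp = w.credentials.generate_temporary_service_credential(credential_name=cred.name)
print(tmp.expiration_time)  # short-lived tokens for direct cloud-service access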
- + :returns: Iterator over :class:`CredentialInfo` """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if purpose is not None: query['purpose'] = purpose.value - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/credentials', query=query - - , headers=headers - ) - if 'credentials' in json: - for v in json['credentials']: - yield CredentialInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if purpose is not None: + query["purpose"] = purpose.value + headers = { + "Accept": "application/json", + } - def update_credential(self - , name_arg: str - , * - , aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> CredentialInfo: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/credentials", query=query, headers=headers) + if "credentials" in json: + for v in json["credentials"]: + yield CredentialInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_credential( + self, + name_arg: str, + *, + aws_iam_role: Optional[AwsIamRole] = None, + azure_managed_identity: Optional[AzureManagedIdentity] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, + comment: Optional[str] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, + force: Optional[bool] = None, + isolation_mode: Optional[IsolationMode] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None, + ) -> CredentialInfo: """Update a credential. - + Updates a service or storage credential on the metastore. - + The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. If the caller is a metastore admin, only the __owner__ field can be changed. - + :param name_arg: str Name of the credential. :param aws_iam_role: :class:`AwsIamRole` (optional) @@ -9743,53 +11580,68 @@ def update_credential(self **STORAGE**. :param skip_validation: bool (optional) Supply true to this argument to skip validation of the updated credential. 
- + :returns: :class:`CredentialInfo` """ body = {} - if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() - if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() - if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() - if comment is not None: body['comment'] = comment - if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() - if force is not None: body['force'] = force - if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if read_only is not None: body['read_only'] = read_only - if skip_validation is not None: body['skip_validation'] = skip_validation - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/credentials/{name_arg}', body=body - - , headers=headers - ) + if aws_iam_role is not None: + body["aws_iam_role"] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body["azure_managed_identity"] = azure_managed_identity.as_dict() + if azure_service_principal is not None: + body["azure_service_principal"] = azure_service_principal.as_dict() + if comment is not None: + body["comment"] = comment + if databricks_gcp_service_account is not None: + body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() + if force is not None: + body["force"] = force + if isolation_mode is not None: + body["isolation_mode"] = isolation_mode.value + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if read_only is not None: + body["read_only"] = read_only + if skip_validation is not None: + body["skip_validation"] = skip_validation + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/credentials/{name_arg}", body=body, headers=headers) return CredentialInfo.from_dict(res) - - - - - def validate_credential(self - - , * - , aws_iam_role: Optional[AwsIamRole] = None, azure_managed_identity: Optional[AzureManagedIdentity] = None, credential_name: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, external_location_name: Optional[str] = None, purpose: Optional[CredentialPurpose] = None, read_only: Optional[bool] = None, url: Optional[str] = None) -> ValidateCredentialResponse: + def validate_credential( + self, + *, + aws_iam_role: Optional[AwsIamRole] = None, + azure_managed_identity: Optional[AzureManagedIdentity] = None, + credential_name: Optional[str] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None, + external_location_name: Optional[str] = None, + purpose: Optional[CredentialPurpose] = None, + read_only: Optional[bool] = None, + url: Optional[str] = None, + ) -> ValidateCredentialResponse: """Validate a credential. - + Validates a credential. - + For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific credential must be provided. - + For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used for validation. 
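A sketch of `update_credential` as defined above, rotating the underlying IAM role and transferring ownership; all names are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole

w = WorkspaceClient()
w.credentials.update_credential(
    name_arg="sdk_demo_service_cred",  # placeholder
    aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/demo-v2"),  # placeholder
    owner="data-platform-admins",  # placeholder group
    skip_validation=True,
)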
And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the credential owner or have the required permission on the metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). - + :param aws_iam_role: :class:`AwsIamRole` (optional) The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) @@ -9808,61 +11660,70 @@ def validate_credential(self (purpose is **STORAGE**.) :param url: str (optional) The external location url to validate. Only applicable when purpose is **STORAGE**. - + :returns: :class:`ValidateCredentialResponse` """ body = {} - if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() - if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() - if credential_name is not None: body['credential_name'] = credential_name - if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() - if external_location_name is not None: body['external_location_name'] = external_location_name - if purpose is not None: body['purpose'] = purpose.value - if read_only is not None: body['read_only'] = read_only - if url is not None: body['url'] = url - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/validate-credentials', body=body - - , headers=headers - ) + if aws_iam_role is not None: + body["aws_iam_role"] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body["azure_managed_identity"] = azure_managed_identity.as_dict() + if credential_name is not None: + body["credential_name"] = credential_name + if databricks_gcp_service_account is not None: + body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() + if external_location_name is not None: + body["external_location_name"] = external_location_name + if purpose is not None: + body["purpose"] = purpose.value + if read_only is not None: + body["read_only"] = read_only + if url is not None: + body["url"] = url + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/validate-credentials", body=body, headers=headers) return ValidateCredentialResponse.from_dict(res) - - + class ExternalLocationsAPI: """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path. Each external location is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. 
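`validate_credential` above can serve as a pre-flight check before wiring a storage credential to a location; the bucket path and names are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CredentialPurpose

w = WorkspaceClient()
result = w.credentials.validate_credential(
    credential_name="sdk_demo_storage_cred",  # placeholder
    purpose=CredentialPurpose.STORAGE,
    url="s3://demo-bucket/landing",  # placeholder path to validate against
    read_only=True,
)
print(result.is_dir, result.results)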
- + To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str, url: str, credential_name: str - , * - , comment: Optional[str] = None, enable_file_events: Optional[bool] = None, encryption_details: Optional[EncryptionDetails] = None, fallback: Optional[bool] = None, file_event_queue: Optional[FileEventQueue] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> ExternalLocationInfo: + def create( + self, + name: str, + url: str, + credential_name: str, + *, + comment: Optional[str] = None, + enable_file_events: Optional[bool] = None, + encryption_details: Optional[EncryptionDetails] = None, + fallback: Optional[bool] = None, + file_event_queue: Optional[FileEventQueue] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None, + ) -> ExternalLocationInfo: """Create an external location. - + Creates a new external location entry in the metastore. The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage credential. - + :param name: str Name of the external location. :param url: str @@ -9885,105 +11746,99 @@ def create(self Indicates whether the external location is read-only. :param skip_validation: bool (optional) Skips validation of the storage credential associated with the external location. - + :returns: :class:`ExternalLocationInfo` """ body = {} - if comment is not None: body['comment'] = comment - if credential_name is not None: body['credential_name'] = credential_name - if enable_file_events is not None: body['enable_file_events'] = enable_file_events - if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict() - if fallback is not None: body['fallback'] = fallback - if file_event_queue is not None: body['file_event_queue'] = file_event_queue.as_dict() - if name is not None: body['name'] = name - if read_only is not None: body['read_only'] = read_only - if skip_validation is not None: body['skip_validation'] = skip_validation - if url is not None: body['url'] = url - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/external-locations', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if credential_name is not None: + body["credential_name"] = credential_name + if enable_file_events is not None: + body["enable_file_events"] = enable_file_events + if encryption_details is not None: + body["encryption_details"] = encryption_details.as_dict() + if fallback is not None: + body["fallback"] = fallback + if file_event_queue is not None: + body["file_event_queue"] = file_event_queue.as_dict() + if name is not None: + body["name"] = name + if read_only is not None: + body["read_only"] = read_only + if skip_validation is not None: + body["skip_validation"] = skip_validation + if url is not None: + body["url"] = url + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/external-locations", body=body, headers=headers) return ExternalLocationInfo.from_dict(res) - - - - - def delete(self - , name: str - , * - , force: Optional[bool] = None): + def delete(self, name: str, *, force: Optional[bool] = None): """Delete an external location. 
- + Deletes the specified external location from the metastore. The caller must be the owner of the external location. - + :param name: str Name of the external location. :param force: bool (optional) Force deletion even if there are dependent external tables or mounts. - - + + """ - + query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/external-locations/{name}', query=query - - , headers=headers - ) - + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/external-locations/{name}", query=query, headers=headers) - def get(self - , name: str - , * - , include_browse: Optional[bool] = None) -> ExternalLocationInfo: + def get(self, name: str, *, include_browse: Optional[bool] = None) -> ExternalLocationInfo: """Get an external location. - + Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. - + :param name: str Name of the external location. :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for - + :returns: :class:`ExternalLocationInfo` """ - + query = {} - if include_browse is not None: query['include_browse'] = include_browse - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/external-locations/{name}', query=query - - , headers=headers - ) - return ExternalLocationInfo.from_dict(res) + if include_browse is not None: + query["include_browse"] = include_browse + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", f"/api/2.1/unity-catalog/external-locations/{name}", query=query, headers=headers) + return ExternalLocationInfo.from_dict(res) - def list(self - - , * - , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExternalLocationInfo]: + def list( + self, + *, + include_browse: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[ExternalLocationInfo]: """List external locations. - + Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for @@ -9994,45 +11849,54 @@ def list(self value (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
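A sketch pairing the external-location `create` and `get` calls above; it assumes a storage credential named `sdk_demo_storage_cred` already exists:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
loc = w.external_locations.create(
    name="sdk_demo_location",  # placeholder
    url="s3://demo-bucket/landing",  # placeholder path
    credential_name="sdk_demo_storage_cred",  # placeholder, must exist
    read_only=True,
)
fetched = w.external_locations.get(name=loc.name)
print(fetched.url, fetched.credential_name)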
- + :returns: Iterator over :class:`ExternalLocationInfo` """ - - query = {} - if include_browse is not None: query['include_browse'] = include_browse - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/external-locations', query=query - - , headers=headers - ) - if 'external_locations' in json: - for v in json['external_locations']: - yield ExternalLocationInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if include_browse is not None: + query["include_browse"] = include_browse + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , name: str - , * - , comment: Optional[str] = None, credential_name: Optional[str] = None, enable_file_events: Optional[bool] = None, encryption_details: Optional[EncryptionDetails] = None, fallback: Optional[bool] = None, file_event_queue: Optional[FileEventQueue] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None, url: Optional[str] = None) -> ExternalLocationInfo: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/external-locations", query=query, headers=headers) + if "external_locations" in json: + for v in json["external_locations"]: + yield ExternalLocationInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + name: str, + *, + comment: Optional[str] = None, + credential_name: Optional[str] = None, + enable_file_events: Optional[bool] = None, + encryption_details: Optional[EncryptionDetails] = None, + fallback: Optional[bool] = None, + file_event_queue: Optional[FileEventQueue] = None, + force: Optional[bool] = None, + isolation_mode: Optional[IsolationMode] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None, + url: Optional[str] = None, + ) -> ExternalLocationInfo: """Update an external location. - + Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. In the second case, the admin can only update the name of the external location. - + :param name: str Name of the external location. :param comment: str (optional) @@ -10062,167 +11926,156 @@ def update(self Skips validation of the storage credential associated with the external location. :param url: str (optional) Path URL of the external location. 
- + :returns: :class:`ExternalLocationInfo` """ body = {} - if comment is not None: body['comment'] = comment - if credential_name is not None: body['credential_name'] = credential_name - if enable_file_events is not None: body['enable_file_events'] = enable_file_events - if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict() - if fallback is not None: body['fallback'] = fallback - if file_event_queue is not None: body['file_event_queue'] = file_event_queue.as_dict() - if force is not None: body['force'] = force - if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if read_only is not None: body['read_only'] = read_only - if skip_validation is not None: body['skip_validation'] = skip_validation - if url is not None: body['url'] = url - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/external-locations/{name}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if credential_name is not None: + body["credential_name"] = credential_name + if enable_file_events is not None: + body["enable_file_events"] = enable_file_events + if encryption_details is not None: + body["encryption_details"] = encryption_details.as_dict() + if fallback is not None: + body["fallback"] = fallback + if file_event_queue is not None: + body["file_event_queue"] = file_event_queue.as_dict() + if force is not None: + body["force"] = force + if isolation_mode is not None: + body["isolation_mode"] = isolation_mode.value + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if read_only is not None: + body["read_only"] = read_only + if skip_validation is not None: + body["skip_validation"] = skip_validation + if url is not None: + body["url"] = url + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/external-locations/{name}", body=body, headers=headers) return ExternalLocationInfo.from_dict(res) - - + class FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog. - + The function implementation can be any SQL expression or Query, and it can be invoked wherever a table reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it can be referenced with the form __catalog_name__.__schema_name__.__function_name__.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , function_info: CreateFunction - ) -> FunctionInfo: + def create(self, function_info: CreateFunction) -> FunctionInfo: """Create a function. - + **WARNING: This API is experimental and will change in future versions** - + Creates a new function - + The user must have the following permissions in order for the function to be created: - **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the function's parent schema - + :param function_info: :class:`CreateFunction` Partial __FunctionInfo__ specifying the function to be created. 
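For the external-location `update` defined just above, a sketch that repoints an existing location; `force=True` and `skip_validation=True` are shown only to illustrate the knobs:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.external_locations.update(
    name="sdk_demo_location",  # placeholder
    url="s3://demo-bucket/landing-v2",  # placeholder new path
    credential_name="sdk_demo_storage_cred",  # placeholder
    skip_validation=True,
    force=True,  # proceed even if dependent external tables exist
)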
- + :returns: :class:`FunctionInfo` """ body = {} - if function_info is not None: body['function_info'] = function_info.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/functions', body=body - - , headers=headers - ) - return FunctionInfo.from_dict(res) + if function_info is not None: + body["function_info"] = function_info.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.1/unity-catalog/functions", body=body, headers=headers) + return FunctionInfo.from_dict(res) - def delete(self - , name: str - , * - , force: Optional[bool] = None): + def delete(self, name: str, *, force: Optional[bool] = None): """Delete a function. - + Deletes the function that matches the supplied name. For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog and the **USE_SCHEMA** privilege on its parent schema - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param force: bool (optional) Force deletion even if the function is not empty. - - + + """ - + query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/functions/{name}', query=query - - , headers=headers - ) - + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/functions/{name}", query=query, headers=headers) - def get(self - , name: str - , * - , include_browse: Optional[bool] = None) -> FunctionInfo: + def get(self, name: str, *, include_browse: Optional[bool] = None) -> FunctionInfo: """Get a function. - + Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the **USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the function itself - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). 
:param include_browse: bool (optional) Whether to include functions in the response for which the principal can only access selective metadata for - + :returns: :class:`FunctionInfo` """ - + query = {} - if include_browse is not None: query['include_browse'] = include_browse - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/functions/{name}', query=query - - , headers=headers - ) - return FunctionInfo.from_dict(res) + if include_browse is not None: + query["include_browse"] = include_browse + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", f"/api/2.1/unity-catalog/functions/{name}", query=query, headers=headers) + return FunctionInfo.from_dict(res) - def list(self - , catalog_name: str, schema_name: str - , * - , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FunctionInfo]: + def list( + self, + catalog_name: str, + schema_name: str, + *, + include_browse: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[FunctionInfo]: """List functions. - + List functions within the specified parent catalog and schema. If the user is a metastore admin, all functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for functions of interest. :param schema_name: str @@ -10237,100 +12090,91 @@ def list(self (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
- + :returns: Iterator over :class:`FunctionInfo` """ - - query = {} - if catalog_name is not None: query['catalog_name'] = catalog_name - if include_browse is not None: query['include_browse'] = include_browse - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if schema_name is not None: query['schema_name'] = schema_name - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/functions', query=query - - , headers=headers - ) - if 'functions' in json: - for v in json['functions']: - yield FunctionInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if catalog_name is not None: + query["catalog_name"] = catalog_name + if include_browse is not None: + query["include_browse"] = include_browse + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if schema_name is not None: + query["schema_name"] = schema_name + headers = { + "Accept": "application/json", + } - def update(self - , name: str - , * - , owner: Optional[str] = None) -> FunctionInfo: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/functions", query=query, headers=headers) + if "functions" in json: + for v in json["functions"]: + yield FunctionInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, name: str, *, owner: Optional[str] = None) -> FunctionInfo: """Update a function. - + Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must be a member of the group that is the new function owner. For the update to succeed, the user must satisfy one of the following conditions: - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the **USE_SCHEMA** privilege on the function's parent schema. - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param owner: str (optional) Username of current owner of function. - + :returns: :class:`FunctionInfo` """ body = {} - if owner is not None: body['owner'] = owner - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/functions/{name}', body=body - - , headers=headers - ) + if owner is not None: + body["owner"] = owner + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/functions/{name}", body=body, headers=headers) return FunctionInfo.from_dict(res) - - + class GrantsAPI: """In Unity Catalog, data is secure by default. Initially, users have no access to data in a metastore. Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or schema that contains the object. - + Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. 
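Before moving on to grants, a sketch of the FunctionsAPI `list`/`update` pair above, enumerating a schema's functions and transferring ownership; catalog, schema, and group names are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for fn in w.functions.list(catalog_name="sdk_demo_catalog", schema_name="default"):  # placeholders
    print(fn.full_name)

w.functions.update(
    name="sdk_demo_catalog.default.my_udf",  # placeholder fully-qualified name
    owner="data-engineers",  # placeholder group
)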
This means that granting a privilege on the catalog automatically grants the privilege to all current and future objects within the catalog. Similarly, privileges granted on a schema are inherited by all current and future objects within that schema.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - , securable_type: str, full_name: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None, principal: Optional[str] = None) -> GetPermissionsResponse: + def get( + self, + securable_type: str, + full_name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + principal: Optional[str] = None, + ) -> GetPermissionsResponse: """Get permissions. - + Gets the permissions for a securable. Does not include inherited permissions. - + :param securable_type: str Type of securable. :param full_name: str @@ -10339,7 +12183,7 @@ def get(self Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment present in a single page response is guaranteed to contain all the privileges granted on the requested Securable for the respective principal. - + If not set, all the permissions are returned. If set to - lesser than 0: invalid parameter error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: invalid parameter error (this is to ensure that server is able to return at least one complete @@ -10349,35 +12193,40 @@ def get(self Opaque pagination token to go to next page based on previous query. :param principal: str (optional) If provided, only the permissions for the specified principal (user or group) are returned. - + :returns: :class:`GetPermissionsResponse` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if principal is not None: query['principal'] = principal - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/permissions/{securable_type}/{full_name}', query=query - - , headers=headers - ) + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if principal is not None: + query["principal"] = principal + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/unity-catalog/permissions/{securable_type}/{full_name}", query=query, headers=headers + ) return GetPermissionsResponse.from_dict(res) - - - - - def get_effective(self - , securable_type: str, full_name: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None, principal: Optional[str] = None) -> EffectivePermissionsList: + def get_effective( + self, + securable_type: str, + full_name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + principal: Optional[str] = None, + ) -> EffectivePermissionsList: """Get effective permissions. - + Gets the effective permissions for a securable. Includes inherited permissions from any parent securables. - + :param securable_type: str Type of securable. :param full_name: str @@ -10387,7 +12236,7 @@ def get_effective(self EffectivePrivilegeAssignment present in a single page response is guaranteed to contain all the effective privileges granted on (or inherited by) the requested Securable for the respective principal. - + If not set, all the effective permissions are returned. 
If set to - lesser than 0: invalid parameter error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: invalid parameter error (this is to ensure that server is able to return at least one complete @@ -10398,88 +12247,82 @@ def get_effective(self :param principal: str (optional) If provided, only the effective permissions for the specified principal (user or group) are returned. - + :returns: :class:`EffectivePermissionsList` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if principal is not None: query['principal'] = principal - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/effective-permissions/{securable_type}/{full_name}', query=query - - , headers=headers - ) + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if principal is not None: + query["principal"] = principal + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.1/unity-catalog/effective-permissions/{securable_type}/{full_name}", + query=query, + headers=headers, + ) return EffectivePermissionsList.from_dict(res) - - - - - def update(self - , securable_type: str, full_name: str - , * - , changes: Optional[List[PermissionsChange]] = None) -> UpdatePermissionsResponse: + def update( + self, securable_type: str, full_name: str, *, changes: Optional[List[PermissionsChange]] = None + ) -> UpdatePermissionsResponse: """Update permissions. - + Updates the permissions for a securable. - + :param securable_type: str Type of securable. :param full_name: str Full name of securable. :param changes: List[:class:`PermissionsChange`] (optional) Array of permissions change objects. - + :returns: :class:`UpdatePermissionsResponse` """ body = {} - if changes is not None: body['changes'] = [v.as_dict() for v in changes] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/permissions/{securable_type}/{full_name}', body=body - - , headers=headers - ) + if changes is not None: + body["changes"] = [v.as_dict() for v in changes] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.1/unity-catalog/permissions/{securable_type}/{full_name}", body=body, headers=headers + ) return UpdatePermissionsResponse.from_dict(res) - - + class MetastoresAPI: """A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use Unity Catalog, it must have a Unity Catalog metastore attached. - + Each metastore is configured with a root storage location in a cloud storage account. This storage location is used for metadata and managed tables data. - + NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity Catalog was released. 
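A sketch of the GrantsAPI methods above: grant two privileges on a catalog, then read back the effective permissions; principal and catalog names are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import PermissionsChange, Privilege

w = WorkspaceClient()
w.grants.update(
    securable_type="catalog",
    full_name="sdk_demo_catalog",  # placeholder
    changes=[PermissionsChange(principal="data-engineers", add=[Privilege.USE_CATALOG, Privilege.SELECT])],
)
effective = w.grants.get_effective(
    securable_type="catalog", full_name="sdk_demo_catalog", principal="data-engineers"
)
print(effective.privilege_assignments)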
If your workspace includes a legacy Hive metastore, the data in that metastore is available in a catalog named hive_metastore.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def assign(self - , workspace_id: int, metastore_id: str, default_catalog_name: str - ): + def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str): """Create an assignment. - + Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin. - + :param workspace_id: int A workspace ID. :param metastore_id: str @@ -10487,144 +12330,115 @@ def assign(self :param default_catalog_name: str The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace. - - + + """ body = {} - if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name - if metastore_id is not None: body['metastore_id'] = metastore_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore', body=body - - , headers=headers - ) - + if default_catalog_name is not None: + body["default_catalog_name"] = default_catalog_name + if metastore_id is not None: + body["metastore_id"] = metastore_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("PUT", f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore", body=body, headers=headers) - def create(self - , name: str - , * - , region: Optional[str] = None, storage_root: Optional[str] = None) -> MetastoreInfo: + def create(self, name: str, *, region: Optional[str] = None, storage_root: Optional[str] = None) -> MetastoreInfo: """Create a metastore. - + Creates a new metastore based on a provided name and optional storage root path. By default (if the __owner__ field is not set), the owner of the new metastore is the user calling the __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is assigned to the System User instead. - + :param name: str The user-specified name of the metastore. :param region: str (optional) Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). :param storage_root: str (optional) The storage root URL for metastore - + :returns: :class:`MetastoreInfo` """ body = {} - if name is not None: body['name'] = name - if region is not None: body['region'] = region - if storage_root is not None: body['storage_root'] = storage_root - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/metastores', body=body - - , headers=headers - ) + if name is not None: + body["name"] = name + if region is not None: + body["region"] = region + if storage_root is not None: + body["storage_root"] = storage_root + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/metastores", body=body, headers=headers) return MetastoreInfo.from_dict(res) - - - - def current(self) -> MetastoreAssignment: """Get metastore assignment for workspace. - + Gets the metastore assignment for the workspace being accessed. 
- + :returns: :class:`MetastoreAssignment` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/unity-catalog/current-metastore-assignment' - , headers=headers - ) - return MetastoreAssignment.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.1/unity-catalog/current-metastore-assignment", headers=headers) + return MetastoreAssignment.from_dict(res) - def delete(self - , id: str - , * - , force: Optional[bool] = None): + def delete(self, id: str, *, force: Optional[bool] = None): """Delete a metastore. - + Deletes a metastore. The caller must be a metastore admin. - + :param id: str Unique ID of the metastore. :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - + + """ - + query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/metastores/{id}', query=query - - , headers=headers - ) - + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/metastores/{id}", query=query, headers=headers) - def get(self - , id: str - ) -> MetastoreInfo: + def get(self, id: str) -> MetastoreInfo: """Get a metastore. - + Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info. - + :param id: str Unique ID of the metastore. - + :returns: :class:`MetastoreInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/metastores/{id}' - - , headers=headers - ) - return MetastoreInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/metastores/{id}", headers=headers) + return MetastoreInfo.from_dict(res) - def list(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[MetastoreInfo]: + def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[MetastoreInfo]: """List metastores. - + Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of metastores to return. - when set to a value greater than 0, the page length is the minimum of this value and a server configured value; - when set to 0, the page length is set to a @@ -10635,92 +12449,85 @@ def list(self from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
- + :returns: Iterator over :class:`MetastoreInfo` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/metastores', query=query - - , headers=headers - ) - if 'metastores' in json: - for v in json['metastores']: - yield MetastoreInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - - - + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/metastores", query=query, headers=headers) + if "metastores" in json: + for v in json["metastores"]: + yield MetastoreInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] def summary(self) -> GetMetastoreSummaryResponse: """Get a metastore summary. - + Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID. - + :returns: :class:`GetMetastoreSummaryResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/unity-catalog/metastore_summary' - , headers=headers - ) - return GetMetastoreSummaryResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.1/unity-catalog/metastore_summary", headers=headers) + return GetMetastoreSummaryResponse.from_dict(res) - def unassign(self - , workspace_id: int, metastore_id: str - ): + def unassign(self, workspace_id: int, metastore_id: str): """Delete an assignment. - + Deletes a metastore assignment. The caller must be an account administrator. - + :param workspace_id: int A workspace ID. - :param metastore_id: str - Query for the ID of the metastore to delete. - - - """ - - query = {} - if metastore_id is not None: query['metastore_id'] = metastore_id - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore', query=query - - , headers=headers - ) - + :param metastore_id: str + Query for the ID of the metastore to delete. 
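Note that `list()` above absorbs pagination: it re-issues the GET with `page_token` until a response carries no `next_page_token`, so callers simply iterate:

# w is a configured WorkspaceClient, as in the sketches above.
for ms in w.metastores.list(max_results=100):
    print(ms.name, ms.metastore_id)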
- - - - def update(self - , id: str - , * - , delta_sharing_organization_name: Optional[str] = None, delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None, delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None, new_name: Optional[str] = None, owner: Optional[str] = None, privilege_model_version: Optional[str] = None, storage_root_credential_id: Optional[str] = None) -> MetastoreInfo: + """ + + query = {} + if metastore_id is not None: + query["metastore_id"] = metastore_id + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore", query=query, headers=headers + ) + + def update( + self, + id: str, + *, + delta_sharing_organization_name: Optional[str] = None, + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None, + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + privilege_model_version: Optional[str] = None, + storage_root_credential_id: Optional[str] = None, + ) -> MetastoreInfo: """Update a metastore. - + Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ field is set to the empty string (**""**), the ownership is updated to the System User. - + :param id: str Unique ID of the metastore. :param delta_sharing_organization_name: str (optional) @@ -10738,40 +12545,44 @@ def update(self Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). :param storage_root_credential_id: str (optional) UUID of storage credential to access the metastore storage_root. - + :returns: :class:`MetastoreInfo` """ body = {} - if delta_sharing_organization_name is not None: body['delta_sharing_organization_name'] = delta_sharing_organization_name - if delta_sharing_recipient_token_lifetime_in_seconds is not None: body['delta_sharing_recipient_token_lifetime_in_seconds'] = delta_sharing_recipient_token_lifetime_in_seconds - if delta_sharing_scope is not None: body['delta_sharing_scope'] = delta_sharing_scope.value - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if privilege_model_version is not None: body['privilege_model_version'] = privilege_model_version - if storage_root_credential_id is not None: body['storage_root_credential_id'] = storage_root_credential_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/metastores/{id}', body=body - - , headers=headers - ) + if delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = delta_sharing_organization_name + if delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + delta_sharing_recipient_token_lifetime_in_seconds + ) + if delta_sharing_scope is not None: + body["delta_sharing_scope"] = delta_sharing_scope.value + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if privilege_model_version is not None: + body["privilege_model_version"] = privilege_model_version + if storage_root_credential_id is not None: + body["storage_root_credential_id"] = storage_root_credential_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/metastores/{id}", body=body, headers=headers) return 
MetastoreInfo.from_dict(res) - - - - - def update_assignment(self - , workspace_id: int - , * - , default_catalog_name: Optional[str] = None, metastore_id: Optional[str] = None): + def update_assignment( + self, workspace_id: int, *, default_catalog_name: Optional[str] = None, metastore_id: Optional[str] = None + ): """Update an assignment. - + Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a Workspace admin. - + :param workspace_id: int A workspace ID. :param default_catalog_name: str (optional) @@ -10779,85 +12590,71 @@ def update_assignment(self Namespace API" to configure the default catalog for a Databricks workspace. :param metastore_id: str (optional) The unique ID of the metastore. - - + + """ body = {} - if default_catalog_name is not None: body['default_catalog_name'] = default_catalog_name - if metastore_id is not None: body['metastore_id'] = metastore_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore', body=body - - , headers=headers - ) - + if default_catalog_name is not None: + body["default_catalog_name"] = default_catalog_name + if metastore_id is not None: + body["metastore_id"] = metastore_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.1/unity-catalog/workspaces/{workspace_id}/metastore", body=body, headers=headers) + - - class ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more details, see the [registered models API docs](/api/workspace/registeredmodels).""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - , full_name: str, version: int - ): + def delete(self, full_name: str, version: int): """Delete a Model Version. - + Deletes a model version from the specified registered model. Any aliases assigned to the model version will also be deleted. - + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}", headers=headers) - def get(self - , full_name: str, version: int - , * - , include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None) -> ModelVersionInfo: + def get( + self, + full_name: str, + version: int, + *, + include_aliases: Optional[bool] = None, + include_browse: Optional[bool] = None, + ) -> ModelVersionInfo: """Get a Model Version. - + Get a model version. 
- + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int @@ -10867,79 +12664,77 @@ def get(self :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - + :returns: :class:`ModelVersionInfo` """ - + query = {} - if include_aliases is not None: query['include_aliases'] = include_aliases - if include_browse is not None: query['include_browse'] = include_browse - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}', query=query - - , headers=headers - ) + if include_aliases is not None: + query["include_aliases"] = include_aliases + if include_browse is not None: + query["include_browse"] = include_browse + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}", query=query, headers=headers + ) return ModelVersionInfo.from_dict(res) - - - - - def get_by_alias(self - , full_name: str, alias: str - , * - , include_aliases: Optional[bool] = None) -> ModelVersionInfo: + def get_by_alias(self, full_name: str, alias: str, *, include_aliases: Optional[bool] = None) -> ModelVersionInfo: """Get Model Version By Alias. - + Get a model version by alias. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias :param include_aliases: bool (optional) Whether to include aliases associated with the model version in the response - + :returns: :class:`ModelVersionInfo` """ - + query = {} - if include_aliases is not None: query['include_aliases'] = include_aliases - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', query=query - - , headers=headers - ) + if include_aliases is not None: + query["include_aliases"] = include_aliases + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}", query=query, headers=headers + ) return ModelVersionInfo.from_dict(res) - - - - - def list(self - , full_name: str - , * - , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ModelVersionInfo]: + def list( + self, + full_name: str, + *, + include_browse: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[ModelVersionInfo]: """List Model Versions. - + List model versions. You can list model versions under a particular schema, or list all model versions in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the model versions. 
A regular user needs to be the owner or have the **EXECUTE** privilege on the parent registered model to receive the model versions in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. The elements in the response will not contain any aliases or tags. - + :param full_name: str The full three-level name of the registered model under which to list model versions :param include_browse: bool (optional) @@ -10953,267 +12748,232 @@ def list(self value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ModelVersionInfo` """ - - query = {} - if include_browse is not None: query['include_browse'] = include_browse - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}/versions', query=query - - , headers=headers - ) - if 'model_versions' in json: - for v in json['model_versions']: - yield ModelVersionInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if include_browse is not None: + query["include_browse"] = include_browse + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , full_name: str, version: int - , * - , comment: Optional[str] = None) -> ModelVersionInfo: + while True: + json = self._api.do( + "GET", f"/api/2.1/unity-catalog/models/{full_name}/versions", query=query, headers=headers + ) + if "model_versions" in json: + for v in json["model_versions"]: + yield ModelVersionInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, full_name: str, version: int, *, comment: Optional[str] = None) -> ModelVersionInfo: """Update a Model Version. - + Updates the specified model version. - + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the comment of the model version can be updated.
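For the model-version methods above, a short sketch (the model name is a placeholder; `w` is a configured `WorkspaceClient`):

# List versions under a registered model, then update the comment on version 1.
for mv in w.model_versions.list(full_name="main.default.churn_model"):
    print(mv.version, mv.comment)

w.model_versions.update(full_name="main.default.churn_model", version=1, comment="validated on holdout set")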
- + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version :param comment: str (optional) The comment attached to the model version - + :returns: :class:`ModelVersionInfo` """ body = {} - if comment is not None: body['comment'] = comment - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.1/unity-catalog/models/{full_name}/versions/{version}", body=body, headers=headers + ) return ModelVersionInfo.from_dict(res) - - + class OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_online_table_active(self, name: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[OnlineTable], None]] = None) -> OnlineTable: - deadline = time.time() + timeout.total_seconds() - target_states = (ProvisioningInfoState.ACTIVE, ) - failure_states = (ProvisioningInfoState.FAILED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.unity_catalog_provisioning_state - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach ACTIVE, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - def create(self - , table: OnlineTable - ) -> Wait[OnlineTable]: + def wait_get_online_table_active( + self, name: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[OnlineTable], None]] = None + ) -> OnlineTable: + deadline = time.time() + timeout.total_seconds() + target_states = (ProvisioningInfoState.ACTIVE,) + failure_states = (ProvisioningInfoState.FAILED,) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.unity_catalog_provisioning_state + status_message = f"current status: {status}" + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach ACTIVE, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create(self, table: OnlineTable) -> Wait[OnlineTable]: """Create an Online Table. - + Create a new Online Table. - + :param table: :class:`OnlineTable` Online Table information. - + :returns: Long-running operation waiter for :class:`OnlineTable`. See :method:wait_get_online_table_active for more details. 
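The waiter above polls `get()` with a linearly growing sleep capped at 10 seconds (plus jitter), returns once the table reaches ACTIVE, raises `OperationFailed` on FAILED, and raises `TimeoutError` past the deadline. Callers normally reach it through `create_and_wait()`, defined just below; a sketch, where the spec fields are illustrative assumptions rather than part of this patch:

from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()
table = catalog.OnlineTable(
    name="main.default.sales_online",
    spec=catalog.OnlineTableSpec(
        source_table_full_name="main.default.sales",  # illustrative field names
        primary_key_columns=["order_id"],
        run_triggered=catalog.OnlineTableSpecTriggeredSchedulingPolicy(),
    ),
)
# Blocks until the online table is ACTIVE or the timeout elapses.
online = w.online_tables.create_and_wait(table=table, timeout=timedelta(minutes=30))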
""" body = table.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.0/online-tables', body=body - - , headers=headers - ) - return Wait(self.wait_get_online_table_active - , response = OnlineTable.from_dict(op_response) - , name=op_response['name']) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - def create_and_wait(self - , table: OnlineTable - , - timeout=timedelta(minutes=20)) -> OnlineTable: + op_response = self._api.do("POST", "/api/2.0/online-tables", body=body, headers=headers) + return Wait( + self.wait_get_online_table_active, response=OnlineTable.from_dict(op_response), name=op_response["name"] + ) + + def create_and_wait(self, table: OnlineTable, timeout=timedelta(minutes=20)) -> OnlineTable: return self.create(table=table).result(timeout=timeout) - - - - def delete(self - , name: str - ): + def delete(self, name: str): """Delete an Online Table. - + Delete an online table. Warning: This will delete all the data in the online table. If the source Delta table was deleted or modified since this Online Table was created, this will lose the data forever! - + :param name: str Full three-part (catalog, schema, table) name of the table. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/online-tables/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/online-tables/{name}", headers=headers) - def get(self - , name: str - ) -> OnlineTable: + def get(self, name: str) -> OnlineTable: """Get an Online Table. - + Get information about an existing online table and its status. - + :param name: str Full three-part (catalog, schema, table) name of the table. - + :returns: :class:`OnlineTable` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/online-tables/{name}' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/online-tables/{name}", headers=headers) return OnlineTable.from_dict(res) - - + class QualityMonitorsAPI: """A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics tables and a dashboard that you can use to monitor table health and set alerts. - + Most write operations require the user to be the owner of the table (or its parent schema or parent catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**).""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def cancel_refresh(self - , table_name: str, refresh_id: str - ): + def cancel_refresh(self, table_name: str, refresh_id: str): """Cancel refresh. - + Cancel an active monitor refresh for the given refresh ID. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. 
- - + + """ - - headers = {} - - self._api.do('POST',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}/cancel' - - , headers=headers - ) - - - - + headers = {} - def create(self - , table_name: str, assets_dir: str, output_schema_name: str - , * - , baseline_table_name: Optional[str] = None, custom_metrics: Optional[List[MonitorMetric]] = None, data_classification_config: Optional[MonitorDataClassificationConfig] = None, inference_log: Optional[MonitorInferenceLog] = None, notifications: Optional[MonitorNotifications] = None, schedule: Optional[MonitorCronSchedule] = None, skip_builtin_dashboard: Optional[bool] = None, slicing_exprs: Optional[List[str]] = None, snapshot: Optional[MonitorSnapshot] = None, time_series: Optional[MonitorTimeSeries] = None, warehouse_id: Optional[str] = None) -> MonitorInfo: + self._api.do( + "POST", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}/cancel", headers=headers + ) + + def create( + self, + table_name: str, + assets_dir: str, + output_schema_name: str, + *, + baseline_table_name: Optional[str] = None, + custom_metrics: Optional[List[MonitorMetric]] = None, + data_classification_config: Optional[MonitorDataClassificationConfig] = None, + inference_log: Optional[MonitorInferenceLog] = None, + notifications: Optional[MonitorNotifications] = None, + schedule: Optional[MonitorCronSchedule] = None, + skip_builtin_dashboard: Optional[bool] = None, + slicing_exprs: Optional[List[str]] = None, + snapshot: Optional[MonitorSnapshot] = None, + time_series: Optional[MonitorTimeSeries] = None, + warehouse_id: Optional[str] = None, + ) -> MonitorInfo: """Create a table monitor. - + Creates a new monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Workspace assets, such as the dashboard, will be created in the workspace where this call was made. - + :param table_name: str Full name of the table. :param assets_dir: str @@ -11247,259 +13007,242 @@ def create(self :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard creation. If not specified, the first running warehouse will be used. 
- + :returns: :class:`MonitorInfo` """ body = {} - if assets_dir is not None: body['assets_dir'] = assets_dir - if baseline_table_name is not None: body['baseline_table_name'] = baseline_table_name - if custom_metrics is not None: body['custom_metrics'] = [v.as_dict() for v in custom_metrics] - if data_classification_config is not None: body['data_classification_config'] = data_classification_config.as_dict() - if inference_log is not None: body['inference_log'] = inference_log.as_dict() - if notifications is not None: body['notifications'] = notifications.as_dict() - if output_schema_name is not None: body['output_schema_name'] = output_schema_name - if schedule is not None: body['schedule'] = schedule.as_dict() - if skip_builtin_dashboard is not None: body['skip_builtin_dashboard'] = skip_builtin_dashboard - if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs] - if snapshot is not None: body['snapshot'] = snapshot.as_dict() - if time_series is not None: body['time_series'] = time_series.as_dict() - if warehouse_id is not None: body['warehouse_id'] = warehouse_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/unity-catalog/tables/{table_name}/monitor', body=body - - , headers=headers - ) + if assets_dir is not None: + body["assets_dir"] = assets_dir + if baseline_table_name is not None: + body["baseline_table_name"] = baseline_table_name + if custom_metrics is not None: + body["custom_metrics"] = [v.as_dict() for v in custom_metrics] + if data_classification_config is not None: + body["data_classification_config"] = data_classification_config.as_dict() + if inference_log is not None: + body["inference_log"] = inference_log.as_dict() + if notifications is not None: + body["notifications"] = notifications.as_dict() + if output_schema_name is not None: + body["output_schema_name"] = output_schema_name + if schedule is not None: + body["schedule"] = schedule.as_dict() + if skip_builtin_dashboard is not None: + body["skip_builtin_dashboard"] = skip_builtin_dashboard + if slicing_exprs is not None: + body["slicing_exprs"] = [v for v in slicing_exprs] + if snapshot is not None: + body["snapshot"] = snapshot.as_dict() + if time_series is not None: + body["time_series"] = time_series.as_dict() + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", body=body, headers=headers) return MonitorInfo.from_dict(res) - - - - - def delete(self - , table_name: str - ): + def delete(self, table_name: str): """Delete a table monitor. - + Deletes a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + Note that the metric tables and dashboard will not be deleted as part of this call; those assets must be manually cleaned up (if desired). - + :param table_name: str Full name of the table. 
- - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/tables/{table_name}/monitor' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", headers=headers) - def get(self - , table_name: str - ) -> MonitorInfo: + def get(self, table_name: str) -> MonitorInfo: """Get a table monitor. - + Gets a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + The returned information includes configuration values, as well as information on assets created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different workspace than where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{table_name}/monitor' - - , headers=headers - ) - return MonitorInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", headers=headers) + return MonitorInfo.from_dict(res) - def get_refresh(self - , table_name: str, refresh_id: str - ) -> MonitorRefreshInfo: + def get_refresh(self, table_name: str, refresh_id: str) -> MonitorRefreshInfo: """Get refresh. - + Gets info about a specific monitor refresh using the given refresh ID. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. - + :returns: :class:`MonitorRefreshInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}' - - , headers=headers - ) - return MonitorRefreshInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes/{refresh_id}", headers=headers + ) + return MonitorRefreshInfo.from_dict(res) - def list_refreshes(self - , table_name: str - ) -> MonitorRefreshListResponse: + def list_refreshes(self, table_name: str) -> MonitorRefreshListResponse: """List refreshes. - + Gets an array containing the history of the most recent refreshes (up to 25) for this table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. 
- + :returns: :class:`MonitorRefreshListResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes' - - , headers=headers - ) - return MonitorRefreshListResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes", headers=headers) + return MonitorRefreshListResponse.from_dict(res) - def regenerate_dashboard(self - , table_name: str - , * - , warehouse_id: Optional[str] = None) -> RegenerateDashboardResponse: + def regenerate_dashboard( + self, table_name: str, *, warehouse_id: Optional[str] = None + ) -> RegenerateDashboardResponse: """Regenerate a monitoring dashboard. - + Regenerates the monitoring dashboard for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + The call must be made from the workspace where the monitor was created. The dashboard will be regenerated in the assets directory that was specified when the monitor was created. - + :param table_name: str Full name of the table. :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first running warehouse will be used. - + :returns: :class:`RegenerateDashboardResponse` """ body = {} - if warehouse_id is not None: body['warehouse_id'] = warehouse_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard', body=body - - , headers=headers - ) - return RegenerateDashboardResponse.from_dict(res) + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "POST", f"/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard", body=body, headers=headers + ) + return RegenerateDashboardResponse.from_dict(res) - def run_refresh(self - , table_name: str - ) -> MonitorRefreshInfo: + def run_refresh(self, table_name: str) -> MonitorRefreshInfo: """Queue a metric refresh for a monitor. - + Queues a metric refresh on the monitor for the specified table. The refresh will execute in the background. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. 
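The refresh endpoints above compose into a simple lifecycle; a sketch, with `w` a configured `WorkspaceClient` and a placeholder table name:

# Queue a refresh, poll its status by ID, then review recent history (up to 25 entries).
refresh = w.quality_monitors.run_refresh(table_name="main.default.sales")
info = w.quality_monitors.get_refresh(table_name="main.default.sales", refresh_id=refresh.refresh_id)
history = w.quality_monitors.list_refreshes(table_name="main.default.sales")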
- + :returns: :class:`MonitorRefreshInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes' - - , headers=headers - ) - return MonitorRefreshInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("POST", f"/api/2.1/unity-catalog/tables/{table_name}/monitor/refreshes", headers=headers) + return MonitorRefreshInfo.from_dict(res) - def update(self - , table_name: str, output_schema_name: str - , * - , baseline_table_name: Optional[str] = None, custom_metrics: Optional[List[MonitorMetric]] = None, dashboard_id: Optional[str] = None, data_classification_config: Optional[MonitorDataClassificationConfig] = None, inference_log: Optional[MonitorInferenceLog] = None, notifications: Optional[MonitorNotifications] = None, schedule: Optional[MonitorCronSchedule] = None, slicing_exprs: Optional[List[str]] = None, snapshot: Optional[MonitorSnapshot] = None, time_series: Optional[MonitorTimeSeries] = None) -> MonitorInfo: + def update( + self, + table_name: str, + output_schema_name: str, + *, + baseline_table_name: Optional[str] = None, + custom_metrics: Optional[List[MonitorMetric]] = None, + dashboard_id: Optional[str] = None, + data_classification_config: Optional[MonitorDataClassificationConfig] = None, + inference_log: Optional[MonitorInferenceLog] = None, + notifications: Optional[MonitorNotifications] = None, + schedule: Optional[MonitorCronSchedule] = None, + slicing_exprs: Optional[List[str]] = None, + snapshot: Optional[MonitorSnapshot] = None, + time_series: Optional[MonitorTimeSeries] = None, + ) -> MonitorInfo: """Update a table monitor. - + Updates a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created, and the caller must be the original creator of the monitor. - + Certain configuration fields, such as output asset identifiers, cannot be updated. - + :param table_name: str Full name of the table. :param output_schema_name: str @@ -11529,46 +13272,56 @@ def update(self Configuration for monitoring snapshot tables. :param time_series: :class:`MonitorTimeSeries` (optional) Configuration for monitoring time series tables. 
- + :returns: :class:`MonitorInfo` """ body = {} - if baseline_table_name is not None: body['baseline_table_name'] = baseline_table_name - if custom_metrics is not None: body['custom_metrics'] = [v.as_dict() for v in custom_metrics] - if dashboard_id is not None: body['dashboard_id'] = dashboard_id - if data_classification_config is not None: body['data_classification_config'] = data_classification_config.as_dict() - if inference_log is not None: body['inference_log'] = inference_log.as_dict() - if notifications is not None: body['notifications'] = notifications.as_dict() - if output_schema_name is not None: body['output_schema_name'] = output_schema_name - if schedule is not None: body['schedule'] = schedule.as_dict() - if slicing_exprs is not None: body['slicing_exprs'] = [v for v in slicing_exprs] - if snapshot is not None: body['snapshot'] = snapshot.as_dict() - if time_series is not None: body['time_series'] = time_series.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.1/unity-catalog/tables/{table_name}/monitor', body=body - - , headers=headers - ) + if baseline_table_name is not None: + body["baseline_table_name"] = baseline_table_name + if custom_metrics is not None: + body["custom_metrics"] = [v.as_dict() for v in custom_metrics] + if dashboard_id is not None: + body["dashboard_id"] = dashboard_id + if data_classification_config is not None: + body["data_classification_config"] = data_classification_config.as_dict() + if inference_log is not None: + body["inference_log"] = inference_log.as_dict() + if notifications is not None: + body["notifications"] = notifications.as_dict() + if output_schema_name is not None: + body["output_schema_name"] = output_schema_name + if schedule is not None: + body["schedule"] = schedule.as_dict() + if slicing_exprs is not None: + body["slicing_exprs"] = [v for v in slicing_exprs] + if snapshot is not None: + body["snapshot"] = snapshot.as_dict() + if time_series is not None: + body["time_series"] = time_series.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.1/unity-catalog/tables/{table_name}/monitor", body=body, headers=headers) return MonitorInfo.from_dict(res) - - + class RegisteredModelsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace. Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating new model versions currently requires use of the MLflow Python client. Once model versions are created, you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time serving using Databricks Model Serving. - + All operations on registered models and model versions require USE_CATALOG permissions on the enclosing catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional privileges are required for various operations: - + * To create a registered model, users must additionally have the CREATE_MODEL permission on the target schema. 
* To view registered model or model version metadata, model version data files, or invoke a model version, users must additionally have the EXECUTE permission on the registered model * To update @@ -11576,37 +13329,34 @@ class RegisteredModelsAPI: registered model * To update other registered model or model version metadata (comments, aliases), create a new model version, or update permissions on the registered model, users must be owners of the registered model. - - Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that - specify a securable type, use "FUNCTION" as the securable type.""" - - def __init__(self, api_client): - self._api = api_client - - - - + Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that + specify a securable type, use "FUNCTION" as the securable type.""" - - + def __init__(self, api_client): + self._api = api_client - def create(self - , catalog_name: str, schema_name: str, name: str - , * - , comment: Optional[str] = None, storage_location: Optional[str] = None) -> RegisteredModelInfo: + def create( + self, + catalog_name: str, + schema_name: str, + name: str, + *, + comment: Optional[str] = None, + storage_location: Optional[str] = None, + ) -> RegisteredModelInfo: """Create a Registered Model. - + Creates a new registered model in Unity Catalog. - + File storage for model versions in the registered model will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For registered model creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema. - + :param catalog_name: str The name of the catalog where the schema and the registered model reside :param schema_name: str @@ -11617,99 +13367,79 @@ def create(self The comment attached to the registered model :param storage_location: str (optional) The storage location on the cloud under which model version data files are stored
- + Deletes a registered model and all its model versions from the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/models/{full_name}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/models/{full_name}", headers=headers) - def delete_alias(self - , full_name: str, alias: str - ): + def delete_alias(self, full_name: str, alias: str): """Delete a Registered Model Alias. - + Deletes a registered model alias. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}", headers=headers) - def get(self - , full_name: str - , * - , include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None) -> RegisteredModelInfo: + def get( + self, full_name: str, *, include_aliases: Optional[bool] = None, include_browse: Optional[bool] = None + ) -> RegisteredModelInfo: """Get a Registered Model. - + Get a registered model. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
- + :param full_name: str The three-level (fully qualified) name of the registered model :param include_aliases: bool (optional) @@ -11717,42 +13447,44 @@ def get(self :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for - + :returns: :class:`RegisteredModelInfo` """ - + query = {} - if include_aliases is not None: query['include_aliases'] = include_aliases - if include_browse is not None: query['include_browse'] = include_browse - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/models/{full_name}', query=query - - , headers=headers - ) + if include_aliases is not None: + query["include_aliases"] = include_aliases + if include_browse is not None: + query["include_browse"] = include_browse + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/models/{full_name}", query=query, headers=headers) return RegisteredModelInfo.from_dict(res) - - - - - def list(self - - , * - , catalog_name: Optional[str] = None, include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None, schema_name: Optional[str] = None) -> Iterator[RegisteredModelInfo]: + def list( + self, + *, + catalog_name: Optional[str] = None, + include_browse: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + schema_name: Optional[str] = None, + ) -> Iterator[RegisteredModelInfo]: """List Registered Models. - + List registered models. You can list registered models under a particular schema, or list all registered models in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the registered models. A regular user needs to be the owner or have the **EXECUTE** privilege on the registered model to receive the registered models in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. - + :param catalog_name: str (optional) The identifier of the catalog under which to list registered models. If specified, schema_name must be specified. @@ -11761,13 +13493,13 @@ def list(self selective metadata for :param max_results: int (optional) Max number of registered models to return. - + If both catalog and schema are specified: - when max_results is not specified, the page length is set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); - when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when set to a value less than 0, an invalid parameter error is returned; - + If neither schema nor catalog is specified: - when max_results is not specified, the page length is set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); - @@ -11778,85 +13510,83 @@ def list(self :param schema_name: str (optional) The identifier of the schema under which to list registered models. If specified, catalog_name must be specified.
- + :returns: Iterator over :class:`RegisteredModelInfo` """ - - query = {} - if catalog_name is not None: query['catalog_name'] = catalog_name - if include_browse is not None: query['include_browse'] = include_browse - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if schema_name is not None: query['schema_name'] = schema_name - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/models', query=query - - , headers=headers - ) - if 'registered_models' in json: - for v in json['registered_models']: - yield RegisteredModelInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if catalog_name is not None: + query["catalog_name"] = catalog_name + if include_browse is not None: + query["include_browse"] = include_browse + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if schema_name is not None: + query["schema_name"] = schema_name + headers = { + "Accept": "application/json", + } - def set_alias(self - , full_name: str, alias: str, version_num: int - ) -> RegisteredModelAlias: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/models", query=query, headers=headers) + if "registered_models" in json: + for v in json["registered_models"]: + yield RegisteredModelInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def set_alias(self, full_name: str, alias: str, version_num: int) -> RegisteredModelAlias: """Set a Registered Model Alias. - + Set an alias on the specified registered model. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the registered model :param alias: str The name of the alias :param version_num: int The version number of the model version to which the alias points - + :returns: :class:`RegisteredModelAlias` """ body = {} - if version_num is not None: body['version_num'] = version_num - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', body=body - - , headers=headers - ) - return RegisteredModelAlias.from_dict(res) + if version_num is not None: + body["version_num"] = version_num + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PUT", f"/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}", body=body, headers=headers + ) + return RegisteredModelAlias.from_dict(res) - def update(self - , full_name: str - , * - , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None) -> RegisteredModelInfo: + def update( + self, + full_name: str, + *, + comment: Optional[str] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + ) -> RegisteredModelInfo: """Update a Registered Model. - + Updates the specified registered model. - + The caller must be a metastore admin or an owner of the registered model. 
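Tying the registered-model methods above together, a sketch with placeholder names (`w` is a configured `WorkspaceClient`):

# Register a model, then point the "champion" alias at version 1.
model = w.registered_models.create(catalog_name="main", schema_name="default", name="churn_model")
w.registered_models.set_alias(full_name=model.full_name, alias="champion", version_num=1)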
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the name, the owner or the comment of the registered model can be updated. - + :param full_name: str The three-level (fully qualified) name of the registered model :param comment: str (optional) @@ -11865,138 +13595,125 @@ def update(self New name for the registered model. :param owner: str (optional) The identifier of the user who owns the registered model - + :returns: :class:`RegisteredModelInfo` """ body = {} - if comment is not None: body['comment'] = comment - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/models/{full_name}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/models/{full_name}", body=body, headers=headers) return RegisteredModelInfo.from_dict(res) - - + class ResourceQuotasAPI: """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and limits. For more information on resource quotas see the [Unity Catalog documentation]. - - [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas""" - - def __init__(self, api_client): - self._api = api_client - - - - + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas + """ - - + def __init__(self, api_client): + self._api = api_client - def get_quota(self - , parent_securable_type: str, parent_full_name: str, quota_name: str - ) -> GetQuotaResponse: + def get_quota(self, parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse: """Get information for a single resource quota. - + The GetQuota API returns usage information for a single resource quota, defined as a child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered asynchronously. The updated count might not be returned in the first call. - + :param parent_securable_type: str Securable type of the quota parent. :param parent_full_name: str Full name of the parent resource. Provide the metastore ID if the parent is a metastore. :param quota_name: str Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. 
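For `get_quota` above (and `list_quotas`, just below), a sketch; the metastore ID, the quota name, and the `QuotaInfo` fields printed here are assumptions for illustration:

# Check a single quota (note the "-quota" suffix), then scan all quotas under the metastore.
q = w.resource_quotas.get_quota(
    parent_securable_type="metastore",
    parent_full_name="<metastore-id>",
    quota_name="schema-quota",
)
for quota in w.resource_quotas.list_quotas():
    print(quota.quota_name, quota.quota_count, quota.quota_limit)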
- + :returns: :class:`GetQuotaResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}' - - , headers=headers - ) - return GetQuotaResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}", + headers=headers, + ) + return GetQuotaResponse.from_dict(res) - def list_quotas(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[QuotaInfo]: + def list_quotas( + self, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[QuotaInfo]: """List all resource quotas under a metastore. - + ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the counts returned. This API does not trigger a refresh of quota counts. - + :param max_results: int (optional) The number of quotas to return. :param page_token: str (optional) Opaque token for the next page of results. - + :returns: Iterator over :class:`QuotaInfo` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.1/unity-catalog/resource-quotas/all-resource-quotas', query=query - - , headers=headers - ) - if 'quotas' in json: - for v in json['quotas']: - yield QuotaInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - + json = self._api.do( + "GET", "/api/2.1/unity-catalog/resource-quotas/all-resource-quotas", query=query, headers=headers + ) + if "quotas" in json: + for v in json["quotas"]: + yield QuotaInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + - - class SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema organizes tables, views and functions. To access (or list) a table or view in a schema, users must have the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT permission on the table or view.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str, catalog_name: str - , * - , comment: Optional[str] = None, properties: Optional[Dict[str,str]] = None, storage_root: Optional[str] = None) -> SchemaInfo: + def create( + self, + name: str, + catalog_name: str, + *, + comment: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + storage_root: Optional[str] = None, + ) -> SchemaInfo: """Create a schema. - + Creates a new schema for a catalog in the metastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. - + :param name: str Name of schema, relative to parent catalog. :param catalog_name: str @@ -12007,101 +13724,91 @@ def create(self A map of key-value properties attached to the securable. :param storage_root: str (optional) Storage root URL for managed tables within schema.
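The two quota endpoints compose naturally: list_quotas pages through every quota under the metastore, while get_quota refreshes and returns a single child-parent pair. A hedged sketch, reusing the w = WorkspaceClient() handle from the earlier sketch; the quota name follows the documented "-quota" suffix pattern and is illustrative:

    # The iterator follows next_page_token transparently.
    for q in w.resource_quotas.list_quotas():
        print(q.quota_name, q.quota_count, q.quota_limit)

    # Fetch one quota; the refreshed count may only appear on a later call.
    quota = w.resource_quotas.get_quota(
        parent_securable_type="metastore",
        parent_full_name=w.metastores.current().metastore_id,
        quota_name="table-quota",
    )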
- + :returns: :class:`SchemaInfo` """ body = {} - if catalog_name is not None: body['catalog_name'] = catalog_name - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if properties is not None: body['properties'] = properties - if storage_root is not None: body['storage_root'] = storage_root - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/schemas', body=body - - , headers=headers - ) + if catalog_name is not None: + body["catalog_name"] = catalog_name + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if properties is not None: + body["properties"] = properties + if storage_root is not None: + body["storage_root"] = storage_root + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/schemas", body=body, headers=headers) return SchemaInfo.from_dict(res) - - - - - def delete(self - , full_name: str - , * - , force: Optional[bool] = None): + def delete(self, full_name: str, *, force: Optional[bool] = None): """Delete a schema. - + Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an owner of the parent catalog. - + :param full_name: str Full name of the schema. :param force: bool (optional) Force deletion even if the schema is not empty. - - + + """ - + query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/schemas/{full_name}', query=query - - , headers=headers - ) - + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/schemas/{full_name}", query=query, headers=headers) - def get(self - , full_name: str - , * - , include_browse: Optional[bool] = None) -> SchemaInfo: + def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> SchemaInfo: """Get a schema. - + Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema. - + :param full_name: str Full name of the schema. :param include_browse: bool (optional) Whether to include schemas in the response for which the principal can only access selective metadata for - + :returns: :class:`SchemaInfo` """ - + query = {} - if include_browse is not None: query['include_browse'] = include_browse - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/schemas/{full_name}', query=query - - , headers=headers - ) - return SchemaInfo.from_dict(res) + if include_browse is not None: + query["include_browse"] = include_browse + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", f"/api/2.1/unity-catalog/schemas/{full_name}", query=query, headers=headers) + return SchemaInfo.from_dict(res) - def list(self - , catalog_name: str - , * - , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[SchemaInfo]: + def list( + self, + catalog_name: str, + *, + include_browse: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[SchemaInfo]: """List schemas. - + Gets an array of schemas for a catalog in the metastore. 
If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Parent catalog for schemas of interest. :param include_browse: bool (optional) @@ -12114,47 +13821,49 @@ def list(self (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`SchemaInfo` """ - - query = {} - if catalog_name is not None: query['catalog_name'] = catalog_name - if include_browse is not None: query['include_browse'] = include_browse - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/schemas', query=query - - , headers=headers - ) - if 'schemas' in json: - for v in json['schemas']: - yield SchemaInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if catalog_name is not None: + query["catalog_name"] = catalog_name + if include_browse is not None: + query["include_browse"] = include_browse + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , full_name: str - , * - , comment: Optional[str] = None, enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, new_name: Optional[str] = None, owner: Optional[str] = None, properties: Optional[Dict[str,str]] = None) -> SchemaInfo: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/schemas", query=query, headers=headers) + if "schemas" in json: + for v in json["schemas"]: + yield SchemaInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + full_name: str, + *, + comment: Optional[str] = None, + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + ) -> SchemaInfo: """Update a schema. - + Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** privilege on the parent catalog. - + :param full_name: str Full name of the schema. :param comment: str (optional) @@ -12167,56 +13876,61 @@ def update(self Username of current owner of schema. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. 
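Taken together, the schema methods form the usual CRUD surface; a brief sketch with w as in the earlier sketches (catalog and schema names illustrative):

    schema = w.schemas.create(name="staging", catalog_name="main", comment="ETL staging area")
    fetched = w.schemas.get(full_name=schema.full_name)
    for s in w.schemas.list(catalog_name="main"):
        print(s.full_name)
    w.schemas.update(full_name=schema.full_name, comment="ETL staging area (v2)")
    w.schemas.delete(full_name=schema.full_name, force=False)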
- + :returns: :class:`SchemaInfo` """ body = {} - if comment is not None: body['comment'] = comment - if enable_predictive_optimization is not None: body['enable_predictive_optimization'] = enable_predictive_optimization.value - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if properties is not None: body['properties'] = properties - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/schemas/{full_name}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = enable_predictive_optimization.value + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if properties is not None: + body["properties"] = properties + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/schemas/{full_name}", body=body, headers=headers) return SchemaInfo.from_dict(res) - - + class StorageCredentialsAPI: """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not have access to a storage credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. - + To create storage credentials, you must be a Databricks account admin. The account admin who creates the storage credential can delegate ownership to another user or group to manage permissions on it.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str - , * - , aws_iam_role: Optional[AwsIamRoleRequest] = None, azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, cloudflare_api_token: Optional[CloudflareApiToken] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> StorageCredentialInfo: + def create( + self, + name: str, + *, + aws_iam_role: Optional[AwsIamRoleRequest] = None, + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, + cloudflare_api_token: Optional[CloudflareApiToken] = None, + comment: Optional[str] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None, + ) -> StorageCredentialInfo: """Create a storage credential. - + Creates a new storage credential. - + :param name: str The credential name. The name must be unique within the metastore. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -12235,99 +13949,88 @@ def create(self Whether the storage credential is only usable for read operations. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the created credential. 
- + :returns: :class:`StorageCredentialInfo` """ body = {} - if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() - if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() - if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() - if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() - if comment is not None: body['comment'] = comment - if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() - if name is not None: body['name'] = name - if read_only is not None: body['read_only'] = read_only - if skip_validation is not None: body['skip_validation'] = skip_validation - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/storage-credentials', body=body - - , headers=headers - ) + if aws_iam_role is not None: + body["aws_iam_role"] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body["azure_managed_identity"] = azure_managed_identity.as_dict() + if azure_service_principal is not None: + body["azure_service_principal"] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: + body["cloudflare_api_token"] = cloudflare_api_token.as_dict() + if comment is not None: + body["comment"] = comment + if databricks_gcp_service_account is not None: + body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() + if name is not None: + body["name"] = name + if read_only is not None: + body["read_only"] = read_only + if skip_validation is not None: + body["skip_validation"] = skip_validation + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/storage-credentials", body=body, headers=headers) return StorageCredentialInfo.from_dict(res) - - - - - def delete(self - , name: str - , * - , force: Optional[bool] = None): + def delete(self, name: str, *, force: Optional[bool] = None): """Delete a credential. - + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. - + :param name: str Name of the storage credential. :param force: bool (optional) Force deletion even if there are dependent external locations or external tables. - - + + """ - + query = {} - if force is not None: query['force'] = force - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/storage-credentials/{name}', query=query - - , headers=headers - ) - + if force is not None: + query["force"] = force + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/storage-credentials/{name}", query=query, headers=headers) - def get(self - , name: str - ) -> StorageCredentialInfo: + def get(self, name: str) -> StorageCredentialInfo: """Get a credential. - + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. - + :param name: str Name of the storage credential. 
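As an illustration of the create/get pair above, the following sketch registers an AWS IAM role as a storage credential; the role ARN and names are placeholders:

    from databricks.sdk.service.catalog import AwsIamRoleRequest

    cred = w.storage_credentials.create(
        name="etl-credential",
        aws_iam_role=AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/etl-role"),
        comment="credential for ETL buckets",
    )
    same = w.storage_credentials.get(name=cred.name)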
- + :returns: :class:`StorageCredentialInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/storage-credentials/{name}' - - , headers=headers - ) - return StorageCredentialInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/storage-credentials/{name}", headers=headers) + return StorageCredentialInfo.from_dict(res) - def list(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[StorageCredentialInfo]: + def list( + self, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[StorageCredentialInfo]: """List credentials. - + Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of storage credentials to return. If not set, all the storage credentials are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of @@ -12336,42 +14039,51 @@ def list(self returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`StorageCredentialInfo` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - if "max_results" not in query: query['max_results'] = 0 + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + if "max_results" not in query: + query["max_results"] = 0 while True: - json = self._api.do('GET','/api/2.1/unity-catalog/storage-credentials', query=query - - , headers=headers - ) - if 'storage_credentials' in json: - for v in json['storage_credentials']: - yield StorageCredentialInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , name: str - , * - , aws_iam_role: Optional[AwsIamRoleRequest] = None, azure_managed_identity: Optional[AzureManagedIdentityResponse] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, cloudflare_api_token: Optional[CloudflareApiToken] = None, comment: Optional[str] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, owner: Optional[str] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> StorageCredentialInfo: + json = self._api.do("GET", "/api/2.1/unity-catalog/storage-credentials", query=query, headers=headers) + if "storage_credentials" in json: + for v in json["storage_credentials"]: + yield StorageCredentialInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + name: str, + *, + aws_iam_role: Optional[AwsIamRoleRequest] = None, + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None, + 
azure_service_principal: Optional[AzureServicePrincipal] = None, + cloudflare_api_token: Optional[CloudflareApiToken] = None, + comment: Optional[str] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, + force: Optional[bool] = None, + isolation_mode: Optional[IsolationMode] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + read_only: Optional[bool] = None, + skip_validation: Optional[bool] = None, + ) -> StorageCredentialInfo: """Update a credential. - + Updates a storage credential on the metastore. - + :param name: str Name of the storage credential. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -12397,50 +14109,67 @@ def update(self Whether the storage credential is only usable for read operations. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the updated credential. - + :returns: :class:`StorageCredentialInfo` """ body = {} - if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() - if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() - if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() - if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() - if comment is not None: body['comment'] = comment - if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() - if force is not None: body['force'] = force - if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if read_only is not None: body['read_only'] = read_only - if skip_validation is not None: body['skip_validation'] = skip_validation - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/storage-credentials/{name}', body=body - - , headers=headers - ) + if aws_iam_role is not None: + body["aws_iam_role"] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body["azure_managed_identity"] = azure_managed_identity.as_dict() + if azure_service_principal is not None: + body["azure_service_principal"] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: + body["cloudflare_api_token"] = cloudflare_api_token.as_dict() + if comment is not None: + body["comment"] = comment + if databricks_gcp_service_account is not None: + body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() + if force is not None: + body["force"] = force + if isolation_mode is not None: + body["isolation_mode"] = isolation_mode.value + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if read_only is not None: + body["read_only"] = read_only + if skip_validation is not None: + body["skip_validation"] = skip_validation + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/storage-credentials/{name}", body=body, headers=headers) return StorageCredentialInfo.from_dict(res) - - - - - def validate(self - - , * - , aws_iam_role: Optional[AwsIamRoleRequest] = None, azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, azure_service_principal: Optional[AzureServicePrincipal] = None, 
cloudflare_api_token: Optional[CloudflareApiToken] = None, databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, external_location_name: Optional[str] = None, read_only: Optional[bool] = None, storage_credential_name: Optional[str] = None, url: Optional[str] = None) -> ValidateStorageCredentialResponse: + def validate( + self, + *, + aws_iam_role: Optional[AwsIamRoleRequest] = None, + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None, + azure_service_principal: Optional[AzureServicePrincipal] = None, + cloudflare_api_token: Optional[CloudflareApiToken] = None, + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None, + external_location_name: Optional[str] = None, + read_only: Optional[bool] = None, + storage_credential_name: Optional[str] = None, + url: Optional[str] = None, + ) -> ValidateStorageCredentialResponse: """Validate a storage credential. - + Validates a storage credential. At least one of __external_location_name__ and __url__ needs to be provided. If only one of them is provided, it will be used for validation. If both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. - + Either the __storage_credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the storage credential owner or have the **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential. - + :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) The AWS IAM role configuration. :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional) @@ -12459,113 +14188,104 @@ def validate(self The name of the storage credential to validate. :param url: str (optional) The external location url to validate.
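Because validation accepts either an existing credential name or an inline cloud credential, a typical pre-flight check looks like this sketch (bucket URL is a placeholder):

    result = w.storage_credentials.validate(
        storage_credential_name="etl-credential",
        url="s3://my-bucket/prefix",
    )
    for r in result.results:
        print(r.operation, r.result, r.message)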
- + :returns: :class:`ValidateStorageCredentialResponse` """ body = {} - if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict() - if azure_managed_identity is not None: body['azure_managed_identity'] = azure_managed_identity.as_dict() - if azure_service_principal is not None: body['azure_service_principal'] = azure_service_principal.as_dict() - if cloudflare_api_token is not None: body['cloudflare_api_token'] = cloudflare_api_token.as_dict() - if databricks_gcp_service_account is not None: body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict() - if external_location_name is not None: body['external_location_name'] = external_location_name - if read_only is not None: body['read_only'] = read_only - if storage_credential_name is not None: body['storage_credential_name'] = storage_credential_name - if url is not None: body['url'] = url - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/validate-storage-credentials', body=body - - , headers=headers - ) + if aws_iam_role is not None: + body["aws_iam_role"] = aws_iam_role.as_dict() + if azure_managed_identity is not None: + body["azure_managed_identity"] = azure_managed_identity.as_dict() + if azure_service_principal is not None: + body["azure_service_principal"] = azure_service_principal.as_dict() + if cloudflare_api_token is not None: + body["cloudflare_api_token"] = cloudflare_api_token.as_dict() + if databricks_gcp_service_account is not None: + body["databricks_gcp_service_account"] = databricks_gcp_service_account.as_dict() + if external_location_name is not None: + body["external_location_name"] = external_location_name + if read_only is not None: + body["read_only"] = read_only + if storage_credential_name is not None: + body["storage_credential_name"] = storage_credential_name + if url is not None: + body["url"] = url + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/validate-storage-credentials", body=body, headers=headers) return ValidateStorageCredentialResponse.from_dict(res) - - + class SystemSchemasAPI: """A system schema is a schema that lives within the system catalog. A system schema may contain information about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def disable(self - , metastore_id: str, schema_name: str - ): + def disable(self, metastore_id: str, schema_name: str): """Disable a system schema. - + Disables the system schema and removes it from the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def enable(self - , metastore_id: str, schema_name: str - , * - , catalog_name: Optional[str] = None): + self._api.do( + "DELETE", f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}", headers=headers + ) + + def enable(self, metastore_id: str, schema_name: str, *, catalog_name: Optional[str] = None): """Enable a system schema. 
- + Enables the system schema and adds it to the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. - :param catalog_name: str (optional) - the catalog for which the system schema is to enabled in - - - """ - body = {} - if catalog_name is not None: body['catalog_name'] = catalog_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}', body=body - - , headers=headers - ) - + :param catalog_name: str (optional) + the catalog in which the system schema is to be enabled - - - - def list(self - , metastore_id: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[SystemSchemaInfo]: + """ + body = {} + if catalog_name is not None: + body["catalog_name"] = catalog_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PUT", + f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}", + body=body, + headers=headers, + ) + + def list( + self, metastore_id: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[SystemSchemaInfo]: """List system schemas. - + Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The ID for the metastore in which the system schema resides. :param max_results: int (optional) @@ -12575,106 +14295,91 @@ def list(self is returned; - If not set, all the schemas are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`SystemSchemaInfo` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas', query=query - - , headers=headers - ) - if 'schemas' in json: - for v in json['schemas']: - yield SystemSchemaInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - + json = self._api.do( + "GET", f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas", query=query, headers=headers + ) + if "schemas" in json: + for v in json["schemas"]: + yield SystemSchemaInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + - - class TableConstraintsAPI: """Primary key and foreign key constraints encode relationships between fields in tables. - + Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a primary key in another table. This primary key is the parent constraint of the foreign key and the table this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child constraint of its referenced primary key; the table of the foreign key is the child table of the primary key.
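A short sketch of the system-schema toggles above, with the metastore ID taken from the current assignment ("billing" is one of the documented system schemas; w as before):

    metastore_id = w.metastores.current().metastore_id
    w.system_schemas.enable(metastore_id=metastore_id, schema_name="billing")
    for s in w.system_schemas.list(metastore_id=metastore_id):
        print(s.schema, s.state)
    w.system_schemas.disable(metastore_id=metastore_id, schema_name="billing")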
- + You can declare primary keys and foreign keys as part of the table specification during table creation. You can also add or drop constraints on existing tables.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , full_name_arg: str, constraint: TableConstraint - ) -> TableConstraint: + def create(self, full_name_arg: str, constraint: TableConstraint) -> TableConstraint: """Create a table constraint. - + Creates a new table constraint. - + For the table constraint creation to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the owner of the referenced parent table. - + :param full_name_arg: str The full name of the table referenced by the constraint. :param constraint: :class:`TableConstraint` A table constraint, as defined by *one* of the following fields being set: __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. - + :returns: :class:`TableConstraint` """ body = {} - if constraint is not None: body['constraint'] = constraint.as_dict() - if full_name_arg is not None: body['full_name_arg'] = full_name_arg - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/constraints', body=body - - , headers=headers - ) - return TableConstraint.from_dict(res) + if constraint is not None: + body["constraint"] = constraint.as_dict() + if full_name_arg is not None: + body["full_name_arg"] = full_name_arg + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.1/unity-catalog/constraints", body=body, headers=headers) + return TableConstraint.from_dict(res) - def delete(self - , full_name: str, constraint_name: str, cascade: bool - ): + def delete(self, full_name: str, constraint_name: str, cascade: bool): """Delete a table constraint. - + Deletes a table constraint. - + For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table. - if __cascade__ argument is **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG** privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner of the table. - + :param full_name: str Full name of the table referenced by the constraint. :param constraint_name: str @@ -12682,114 +14387,94 @@ def delete(self :param cascade: bool If true, try deleting all child constraints of the current constraint. If false, reject this operation if the current constraint has any child constraints. 
- - + + """ - + query = {} - if cascade is not None: query['cascade'] = cascade - if constraint_name is not None: query['constraint_name'] = constraint_name - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/constraints/{full_name}', query=query - - , headers=headers - ) - + if cascade is not None: + query["cascade"] = cascade + if constraint_name is not None: + query["constraint_name"] = constraint_name + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.1/unity-catalog/constraints/{full_name}", query=query, headers=headers) + - - class TablesAPI: """A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows of data. To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the USE_SCHEMA permission on its parent schema. - + A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table (rather than a managed or external table).""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - , full_name: str - ): + def delete(self, full_name: str): """Delete a table. - + Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/tables/{full_name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.1/unity-catalog/tables/{full_name}", headers=headers) - def exists(self - , full_name: str - ) -> TableExistsResponse: + def exists(self, full_name: str) -> TableExistsResponse: """Get boolean reflecting if table exists. - + Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent schema. - + :param full_name: str Full name of the table. 
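To make the constraint payload concrete, a hedged sketch that adds and then drops a primary-key constraint, using the request dataclasses from this module (table and column names illustrative):

    from databricks.sdk.service.catalog import PrimaryKeyConstraint, TableConstraint

    w.table_constraints.create(
        full_name_arg="main.staging.orders",
        constraint=TableConstraint(
            primary_key_constraint=PrimaryKeyConstraint(name="orders_pk", child_columns=["order_id"])
        ),
    )
    w.table_constraints.delete(full_name="main.staging.orders", constraint_name="orders_pk", cascade=False)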
- + :returns: :class:`TableExistsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{full_name}/exists' - - , headers=headers - ) - return TableExistsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{full_name}/exists", headers=headers) + return TableExistsResponse.from_dict(res) - def get(self - , full_name: str - , * - , include_browse: Optional[bool] = None, include_delta_metadata: Optional[bool] = None, include_manifest_capabilities: Optional[bool] = None) -> TableInfo: + def get( + self, + full_name: str, + *, + include_browse: Optional[bool] = None, + include_delta_metadata: Optional[bool] = None, + include_manifest_capabilities: Optional[bool] = None, + ) -> TableInfo: """Get a table. - + Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. - + :param full_name: str Full name of the table. :param include_browse: bool (optional) @@ -12799,38 +14484,46 @@ def get(self Whether delta metadata should be included in the response. :param include_manifest_capabilities: bool (optional) Whether to include a manifest containing capabilities the table has. - + :returns: :class:`TableInfo` """ - + query = {} - if include_browse is not None: query['include_browse'] = include_browse - if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata - if include_manifest_capabilities is not None: query['include_manifest_capabilities'] = include_manifest_capabilities - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/tables/{full_name}', query=query - - , headers=headers - ) + if include_browse is not None: + query["include_browse"] = include_browse + if include_delta_metadata is not None: + query["include_delta_metadata"] = include_delta_metadata + if include_manifest_capabilities is not None: + query["include_manifest_capabilities"] = include_manifest_capabilities + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/tables/{full_name}", query=query, headers=headers) return TableInfo.from_dict(res) - - - - - def list(self - , catalog_name: str, schema_name: str - , * - , include_browse: Optional[bool] = None, include_delta_metadata: Optional[bool] = None, include_manifest_capabilities: Optional[bool] = None, max_results: Optional[int] = None, omit_columns: Optional[bool] = None, omit_properties: Optional[bool] = None, omit_username: Optional[bool] = None, page_token: Optional[str] = None) -> Iterator[TableInfo]: + def list( + self, + catalog_name: str, + schema_name: str, + *, + include_browse: Optional[bool] = None, + include_delta_metadata: Optional[bool] = None, + include_manifest_capabilities: Optional[bool] = None, + max_results: Optional[int] = None, + omit_columns: Optional[bool] = None, + omit_properties: Optional[bool] = None, + omit_username: Optional[bool] = None, + page_token: Optional[str] = None, + ) -> Iterator[TableInfo]: """List tables. 
- + Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. :param schema_name: str @@ -12856,59 +14549,69 @@ def list(self not. :param page_token: str (optional) Opaque token to send for the next page of results (pagination). - + :returns: Iterator over :class:`TableInfo` """ - + query = {} - if catalog_name is not None: query['catalog_name'] = catalog_name - if include_browse is not None: query['include_browse'] = include_browse - if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata - if include_manifest_capabilities is not None: query['include_manifest_capabilities'] = include_manifest_capabilities - if max_results is not None: query['max_results'] = max_results - if omit_columns is not None: query['omit_columns'] = omit_columns - if omit_properties is not None: query['omit_properties'] = omit_properties - if omit_username is not None: query['omit_username'] = omit_username - if page_token is not None: query['page_token'] = page_token - if schema_name is not None: query['schema_name'] = schema_name - headers = {'Accept': 'application/json',} - - - if "max_results" not in query: query['max_results'] = 0 + if catalog_name is not None: + query["catalog_name"] = catalog_name + if include_browse is not None: + query["include_browse"] = include_browse + if include_delta_metadata is not None: + query["include_delta_metadata"] = include_delta_metadata + if include_manifest_capabilities is not None: + query["include_manifest_capabilities"] = include_manifest_capabilities + if max_results is not None: + query["max_results"] = max_results + if omit_columns is not None: + query["omit_columns"] = omit_columns + if omit_properties is not None: + query["omit_properties"] = omit_properties + if omit_username is not None: + query["omit_username"] = omit_username + if page_token is not None: + query["page_token"] = page_token + if schema_name is not None: + query["schema_name"] = schema_name + headers = { + "Accept": "application/json", + } + + if "max_results" not in query: + query["max_results"] = 0 while True: - json = self._api.do('GET','/api/2.1/unity-catalog/tables', query=query - - , headers=headers - ) - if 'tables' in json: - for v in json['tables']: - yield TableInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def list_summaries(self - , catalog_name: str - , * - , include_manifest_capabilities: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None, schema_name_pattern: Optional[str] = None, table_name_pattern: Optional[str] = None) -> Iterator[TableSummary]: + json = self._api.do("GET", "/api/2.1/unity-catalog/tables", query=query, headers=headers) + if "tables" in json: + for v in json["tables"]: + yield TableInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_summaries( + self, + catalog_name: str, + *, + include_manifest_capabilities: Optional[bool] = None, 
+ max_results: Optional[int] = None, + page_token: Optional[str] = None, + schema_name_pattern: Optional[str] = None, + table_name_pattern: Optional[str] = None, + ) -> Iterator[TableSummary]: """List table summaries. - + Gets an array of summaries for tables for a schema and catalog within the metastore. The table summaries returned are either: - + * summaries for tables (within the current metastore and parent catalog and schema), when the user is a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the **USE_CATALOG** privilege on the parent catalog. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. :param include_manifest_capabilities: bool (optional) @@ -12925,67 +14628,63 @@ def list_summaries(self A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty. :param table_name_pattern: str (optional) A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty. - + :returns: Iterator over :class:`TableSummary` """ - + query = {} - if catalog_name is not None: query['catalog_name'] = catalog_name - if include_manifest_capabilities is not None: query['include_manifest_capabilities'] = include_manifest_capabilities - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if schema_name_pattern is not None: query['schema_name_pattern'] = schema_name_pattern - if table_name_pattern is not None: query['table_name_pattern'] = table_name_pattern - headers = {'Accept': 'application/json',} - - - if "max_results" not in query: query['max_results'] = 0 + if catalog_name is not None: + query["catalog_name"] = catalog_name + if include_manifest_capabilities is not None: + query["include_manifest_capabilities"] = include_manifest_capabilities + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if schema_name_pattern is not None: + query["schema_name_pattern"] = schema_name_pattern + if table_name_pattern is not None: + query["table_name_pattern"] = table_name_pattern + headers = { + "Accept": "application/json", + } + + if "max_results" not in query: + query["max_results"] = 0 while True: - json = self._api.do('GET','/api/2.1/unity-catalog/table-summaries', query=query - - , headers=headers - ) - if 'tables' in json: - for v in json['tables']: - yield TableSummary.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , full_name: str - , * - , owner: Optional[str] = None): + json = self._api.do("GET", "/api/2.1/unity-catalog/table-summaries", query=query, headers=headers) + if "tables" in json: + for v in json["tables"]: + yield TableSummary.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, full_name: str, *, owner: Optional[str] = None): """Update a table owner. - + Change the owner of the table. 
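Both table listing calls paginate identically; a brief sketch (catalog, schema, and pattern values illustrative):

    for t in w.tables.list(catalog_name="main", schema_name="staging"):
        print(t.full_name, t.table_type)

    for ts in w.tables.list_summaries(catalog_name="main", schema_name_pattern="stag%"):
        print(ts.full_name, ts.table_type)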
The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. :param owner: str (optional) - - + + """ body = {} - if owner is not None: body['owner'] = owner - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.1/unity-catalog/tables/{full_name}', body=body - - , headers=headers - ) - + if owner is not None: + body["owner"] = owner + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.1/unity-catalog/tables/{full_name}", body=body, headers=headers) + - - class TemporaryTableCredentialsAPI: """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locations where table data is stored in Databricks. These credentials are employed to provide secure and @@ -12999,50 +14698,42 @@ class TemporaryTableCredentialsAPI: by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for security reasons.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def generate_temporary_table_credentials(self - - , * - , operation: Optional[TableOperation] = None, table_id: Optional[str] = None) -> GenerateTemporaryTableCredentialResponse: + def generate_temporary_table_credentials( + self, *, operation: Optional[TableOperation] = None, table_id: Optional[str] = None + ) -> GenerateTemporaryTableCredentialResponse: """Generate a temporary table credential. - + Get a short-lived credential for directly accessing the table data on cloud storage. The metastore must have external_access_enabled flag set to true (default false). The caller must have EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog owners. - + :param operation: :class:`TableOperation` (optional) The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is specified, the credentials returned will have write permissions, otherwise, it will be read only. :param table_id: str (optional) UUID of the table to read or write. - + :returns: :class:`GenerateTemporaryTableCredentialResponse` """ body = {} - if operation is not None: body['operation'] = operation.value - if table_id is not None: body['table_id'] = table_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/unity-catalog/temporary-table-credentials', body=body - - , headers=headers - ) + if operation is not None: + body["operation"] = operation.value + if table_id is not None: + body["table_id"] = table_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/unity-catalog/temporary-table-credentials", body=body, headers=headers) return GenerateTemporaryTableCredentialResponse.from_dict(res) - - + class VolumesAPI: """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.
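A sketch of the temporary-credential exchange above; the table UUID would normally come from w.tables.get(...), and the READ operation is shown (all identifiers illustrative):

    from databricks.sdk.service.catalog import TableOperation

    table = w.tables.get(full_name="main.staging.orders")
    creds = w.temporary_table_credentials.generate_temporary_table_credentials(
        operation=TableOperation.READ, table_id=table.table_id
    )
    print(creds.expiration_time)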
Use cases include running machine learning on unstructured data such as image, audio, video, or PDF @@ -13050,40 +14741,38 @@ class VolumesAPI: that require access to the local file system on cluster machines, storing library and config files of arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or transforming and querying non-tabular data files in ETL.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , catalog_name: str, schema_name: str, name: str, volume_type: VolumeType - , * - , comment: Optional[str] = None, storage_location: Optional[str] = None) -> VolumeInfo: + def create( + self, + catalog_name: str, + schema_name: str, + name: str, + volume_type: VolumeType, + *, + comment: Optional[str] = None, + storage_location: Optional[str] = None, + ) -> VolumeInfo: """Create a Volume. - + Creates a new volume. - + The user can create either an external volume or a managed volume. An external volume will be created in the specified external location, while a managed volume will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For the volume creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have **CREATE VOLUME** privilege on the parent schema. - + For an external volume, the following conditions must also be satisfied: - The caller must have **CREATE EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes existing in the specified storage location. - The specified storage location is not under the location of other tables, nor volumes, or catalogs or schemas. - + :param catalog_name: str The name of the catalog where the schema and the volume are :param schema_name: str @@ -13094,79 +14783,76 @@ def create(self The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore.
[Learn more] - + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) The storage location on the cloud - + :returns: :class:`VolumeInfo` """ body = {} - if catalog_name is not None: body['catalog_name'] = catalog_name - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if schema_name is not None: body['schema_name'] = schema_name - if storage_location is not None: body['storage_location'] = storage_location - if volume_type is not None: body['volume_type'] = volume_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/volumes', body=body - - , headers=headers - ) + if catalog_name is not None: + body["catalog_name"] = catalog_name + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if schema_name is not None: + body["schema_name"] = schema_name + if storage_location is not None: + body["storage_location"] = storage_location + if volume_type is not None: + body["volume_type"] = volume_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/volumes", body=body, headers=headers) return VolumeInfo.from_dict(res) - - - - - def delete(self - , name: str - ): + def delete(self, name: str): """Delete a Volume. - + Deletes a volume from the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param name: str The three-level (fully qualified) name of the volume - - + + """ - - headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/volumes/{name}' - - , headers=headers - ) - - - - + headers = {} - def list(self - , catalog_name: str, schema_name: str - , * - , include_browse: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[VolumeInfo]: + self._api.do("DELETE", f"/api/2.1/unity-catalog/volumes/{name}", headers=headers) + + def list( + self, + catalog_name: str, + schema_name: str, + *, + include_browse: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[VolumeInfo]: """List Volumes. - + Gets an array of volumes for the current metastore under the parent catalog and schema. - + The returned volumes are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the volumes. A regular user needs to be the owner or have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str The identifier of the catalog :param schema_name: str @@ -13176,99 +14862,88 @@ def list(self metadata for :param max_results: int (optional) Maximum number of volumes to return (page length). - + If not set, the page length is set to a server configured value (10000, as of 1/29/2024).
- when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter error is returned; - + Note: this parameter controls only the maximum number of volumes to return. The actual number of volumes returned in a page may be smaller than this value, including 0, even if there are more pages. :param page_token: str (optional) Opaque token returned by a previous request. It must be included in the request to retrieve the next page of results (pagination). - + :returns: Iterator over :class:`VolumeInfo` """ - - query = {} - if catalog_name is not None: query['catalog_name'] = catalog_name - if include_browse is not None: query['include_browse'] = include_browse - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if schema_name is not None: query['schema_name'] = schema_name - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/unity-catalog/volumes', query=query - - , headers=headers - ) - if 'volumes' in json: - for v in json['volumes']: - yield VolumeInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if catalog_name is not None: + query["catalog_name"] = catalog_name + if include_browse is not None: + query["include_browse"] = include_browse + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if schema_name is not None: + query["schema_name"] = schema_name + headers = { + "Accept": "application/json", + } - def read(self - , name: str - , * - , include_browse: Optional[bool] = None) -> VolumeInfo: + while True: + json = self._api.do("GET", "/api/2.1/unity-catalog/volumes", query=query, headers=headers) + if "volumes" in json: + for v in json["volumes"]: + yield VolumeInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def read(self, name: str, *, include_browse: Optional[bool] = None) -> VolumeInfo: """Get a Volume. - + Gets a volume from the metastore for a specific catalog and schema. - + The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
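        A short usage sketch (the three-level name and the ``WorkspaceClient`` ``w`` are illustrative
        assumptions):

        .. code-block:: python

            info = w.volumes.read("main.default.raw_files")
            print(info.volume_type, info.storage_location)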
-        
+
        :param name: str
          The three-level (fully qualified) name of the volume
        :param include_browse: bool (optional)
          Whether to include volumes in the response for which the principal can only access selective
          metadata for
-        
+
        :returns: :class:`VolumeInfo`
        """
-        
+
        query = {}
-        if include_browse is not None: query['include_browse'] = include_browse
-        headers = {'Accept': 'application/json',}
-        
-        res = self._api.do('GET',f'/api/2.1/unity-catalog/volumes/{name}', query=query
-        
-        , headers=headers
-        )
-        return VolumeInfo.from_dict(res)
-        
-    
-    
+        if include_browse is not None:
+            query["include_browse"] = include_browse
+        headers = {
+            "Accept": "application/json",
+        }

+        res = self._api.do("GET", f"/api/2.1/unity-catalog/volumes/{name}", query=query, headers=headers)
+        return VolumeInfo.from_dict(res)

-    def update(self
-               , name: str
-               , *
-               , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None) -> VolumeInfo:
+    def update(
+        self, name: str, *, comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None
+    ) -> VolumeInfo:
        """Update a Volume.
-        
+
        Updates the specified volume under the specified parent catalog and schema.
-        
+
        The caller must be a metastore admin or an owner of the volume. For the latter case, the caller
        must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
        **USE_SCHEMA** privilege on the parent schema.
-        
+
        Currently, only the name, the owner, or the comment of the volume can be updated.
-        
+
        :param name: str
          The three-level (fully qualified) name of the volume
        :param comment: str (optional)
@@ -13277,85 +14952,76 @@ def update(self
          New name for the volume.
        :param owner: str (optional)
          The identifier of the user who owns the volume
-        
+
        :returns: :class:`VolumeInfo`
        """
        body = {}
-        if comment is not None: body['comment'] = comment
-        if new_name is not None: body['new_name'] = new_name
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-        
-        res = self._api.do('PATCH',f'/api/2.1/unity-catalog/volumes/{name}', body=body
-        
-        , headers=headers
-        )
+        if comment is not None:
+            body["comment"] = comment
+        if new_name is not None:
+            body["new_name"] = new_name
+        if owner is not None:
+            body["owner"] = owner
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.1/unity-catalog/volumes/{name}", body=body, headers=headers)
        return VolumeInfo.from_dict(res)

-    
-    
+
class WorkspaceBindingsAPI:
    """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable can
    be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a
    configured list of workspaces. This API allows you to configure (bind) securables to workspaces.
-    
+
    NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the
    workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__.
-    
+
    A securable's workspace bindings can be configured by a metastore admin or the owner of the
    securable.
-    
+
    The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please
    use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces
    the ability to bind a securable in READ_ONLY mode (catalogs only).
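    For instance, a hedged sketch of binding a catalog to a single workspace in READ_ONLY mode via the
    new path (the workspace ID and the ``WorkspaceClient`` ``w`` are illustrative assumptions):

    .. code-block:: python

        from databricks.sdk.service.catalog import (WorkspaceBinding,
                                                    WorkspaceBindingBindingType)

        w.workspace_bindings.update_bindings(
            securable_type="catalog",
            securable_name="main",
            add=[
                WorkspaceBinding(
                    workspace_id=1234567890123,
                    binding_type=WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY,
                )
            ],
        )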
- + Securable types that support binding: - catalog - storage_credential - credential - external_location""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - , name: str - ) -> GetCatalogWorkspaceBindingsResponse: + def get(self, name: str) -> GetCatalogWorkspaceBindingsResponse: """Get catalog workspace bindings. - + Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. - + :returns: :class:`GetCatalogWorkspaceBindingsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}' - - , headers=headers - ) - return GetCatalogWorkspaceBindingsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}", headers=headers) + return GetCatalogWorkspaceBindingsResponse.from_dict(res) - def get_bindings(self - , securable_type: str, securable_name: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[WorkspaceBinding]: + def get_bindings( + self, + securable_type: str, + securable_name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[WorkspaceBinding]: """Get securable workspace bindings. - + Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -13368,76 +15034,82 @@ def get_bindings(self error is returned; - If not set, all the workspace bindings are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`WorkspaceBinding` """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', query=query - - , headers=headers - ) - if 'bindings' in json: - for v in json['bindings']: - yield WorkspaceBinding.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , name: str - , * - , assign_workspaces: Optional[List[int]] = None, unassign_workspaces: Optional[List[int]] = None) -> UpdateCatalogWorkspaceBindingsResponse: + while True: + json = self._api.do( + "GET", + f"/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}", + query=query, + headers=headers, + ) + if "bindings" in json: + for v in json["bindings"]: + yield WorkspaceBinding.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + name: str, + *, + assign_workspaces: Optional[List[int]] = None, + unassign_workspaces: Optional[List[int]] = None, + ) -> UpdateCatalogWorkspaceBindingsResponse: """Update catalog workspace bindings. 
- + Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. :param assign_workspaces: List[int] (optional) A list of workspace IDs. :param unassign_workspaces: List[int] (optional) A list of workspace IDs. - + :returns: :class:`UpdateCatalogWorkspaceBindingsResponse` """ body = {} - if assign_workspaces is not None: body['assign_workspaces'] = [v for v in assign_workspaces] - if unassign_workspaces is not None: body['unassign_workspaces'] = [v for v in unassign_workspaces] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}', body=body - - , headers=headers - ) + if assign_workspaces is not None: + body["assign_workspaces"] = [v for v in assign_workspaces] + if unassign_workspaces is not None: + body["unassign_workspaces"] = [v for v in unassign_workspaces] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}", body=body, headers=headers + ) return UpdateCatalogWorkspaceBindingsResponse.from_dict(res) - - - - - def update_bindings(self - , securable_type: str, securable_name: str - , * - , add: Optional[List[WorkspaceBinding]] = None, remove: Optional[List[WorkspaceBinding]] = None) -> UpdateWorkspaceBindingsResponse: + def update_bindings( + self, + securable_type: str, + securable_name: str, + *, + add: Optional[List[WorkspaceBinding]] = None, + remove: Optional[List[WorkspaceBinding]] = None, + ) -> UpdateWorkspaceBindingsResponse: """Update securable workspace bindings. - + Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -13447,19 +15119,20 @@ def update_bindings(self List of workspace bindings. :param remove: List[:class:`WorkspaceBinding`] (optional) List of workspace bindings. - + :returns: :class:`UpdateWorkspaceBindingsResponse` """ body = {} - if add is not None: body['add'] = [v.as_dict() for v in add] - if remove is not None: body['remove'] = [v.as_dict() for v in remove] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', body=body - - , headers=headers - ) + if add is not None: + body["add"] = [v.as_dict() for v in add] + if remove is not None: + body["remove"] = [v.as_dict() for v in remove] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}", body=body, headers=headers + ) return UpdateWorkspaceBindingsResponse.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 7545e253b..490f7711e 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -1,135 +1,155 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from ._internal import _enum, _from_dict, _repeated_dict -_LOG = logging.getLogger('databricks.sdk') +_LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import catalog -from databricks.sdk.service import jobs -from databricks.sdk.service import settings -from databricks.sdk.service import settings -from databricks.sdk.service import sharing +from databricks.sdk.service import catalog, jobs, settings, sharing # all definitions in this file are in alphabetical order + @dataclass class CleanRoom: access_restricted: Optional[CleanRoomAccessRestricted] = None """Whether clean room access is restricted due to [CSP] [CSP]: https://docs.databricks.com/en/security/privacy/security-profile.html""" - + comment: Optional[str] = None - + created_at: Optional[int] = None """When the clean room was created, in epoch milliseconds.""" - + local_collaborator_alias: Optional[str] = None """The alias of the collaborator tied to the local clean room.""" - + name: Optional[str] = None """The name of the clean room. It should follow [UC securable naming requirements]. [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" - + output_catalog: Optional[CleanRoomOutputCatalog] = None """Output catalog of the clean room. It is an output only field. Output catalog is manipulated using the separate CreateCleanRoomOutputCatalog API.""" - + owner: Optional[str] = None """This is Databricks username of the owner of the local clean room securable for permission management.""" - + remote_detailed_info: Optional[CleanRoomRemoteDetail] = None """Central clean room details. During creation, users need to specify cloud_vendor, region, and collaborators.global_metastore_id. 
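    For example, a hedged construction sketch (all values are placeholders):

    .. code-block:: python

        detail = CleanRoomRemoteDetail(
            cloud_vendor="aws",
            region="us-west-2",
            collaborators=[
                CleanRoomCollaborator(
                    collaborator_alias="creator",
                    global_metastore_id="aws:us-west-2:11111111-2222-3333-4444-555555555555",
                ),
            ],
        )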
This field will not be filled in the ListCleanRooms call.""" - + status: Optional[CleanRoomStatusEnum] = None """Clean room status.""" - + updated_at: Optional[int] = None """When the clean room was last updated, in epoch milliseconds.""" - + def as_dict(self) -> dict: """Serializes the CleanRoom into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_restricted is not None: body['access_restricted'] = self.access_restricted.value - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.local_collaborator_alias is not None: body['local_collaborator_alias'] = self.local_collaborator_alias - if self.name is not None: body['name'] = self.name - if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict() - if self.owner is not None: body['owner'] = self.owner - if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict() - if self.status is not None: body['status'] = self.status.value - if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.access_restricted is not None: + body["access_restricted"] = self.access_restricted.value + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.local_collaborator_alias is not None: + body["local_collaborator_alias"] = self.local_collaborator_alias + if self.name is not None: + body["name"] = self.name + if self.output_catalog: + body["output_catalog"] = self.output_catalog.as_dict() + if self.owner is not None: + body["owner"] = self.owner + if self.remote_detailed_info: + body["remote_detailed_info"] = self.remote_detailed_info.as_dict() + if self.status is not None: + body["status"] = self.status.value + if self.updated_at is not None: + body["updated_at"] = self.updated_at return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoom into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_restricted is not None: body['access_restricted'] = self.access_restricted - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.local_collaborator_alias is not None: body['local_collaborator_alias'] = self.local_collaborator_alias - if self.name is not None: body['name'] = self.name - if self.output_catalog: body['output_catalog'] = self.output_catalog - if self.owner is not None: body['owner'] = self.owner - if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info - if self.status is not None: body['status'] = self.status - if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.access_restricted is not None: + body["access_restricted"] = self.access_restricted + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.local_collaborator_alias is not None: + body["local_collaborator_alias"] = self.local_collaborator_alias + if self.name is not None: + body["name"] = self.name + if self.output_catalog: + body["output_catalog"] = self.output_catalog + if self.owner is not None: + body["owner"] = self.owner + if self.remote_detailed_info: + body["remote_detailed_info"] = self.remote_detailed_info + if self.status is not None: + body["status"] = self.status + if self.updated_at is not None: + body["updated_at"] = 
self.updated_at
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CleanRoom:
        """Deserializes the CleanRoom from a dictionary."""
-        return cls(access_restricted=_enum(d, 'access_restricted', CleanRoomAccessRestricted), comment=d.get('comment', None), created_at=d.get('created_at', None), local_collaborator_alias=d.get('local_collaborator_alias', None), name=d.get('name', None), output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog), owner=d.get('owner', None), remote_detailed_info=_from_dict(d, 'remote_detailed_info', CleanRoomRemoteDetail), status=_enum(d, 'status', CleanRoomStatusEnum), updated_at=d.get('updated_at', None))
-    
-    
+        return cls(
+            access_restricted=_enum(d, "access_restricted", CleanRoomAccessRestricted),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            local_collaborator_alias=d.get("local_collaborator_alias", None),
+            name=d.get("name", None),
+            output_catalog=_from_dict(d, "output_catalog", CleanRoomOutputCatalog),
+            owner=d.get("owner", None),
+            remote_detailed_info=_from_dict(d, "remote_detailed_info", CleanRoomRemoteDetail),
+            status=_enum(d, "status", CleanRoomStatusEnum),
+            updated_at=d.get("updated_at", None),
+        )


class CleanRoomAccessRestricted(Enum):
-    
-    
-    CSP_MISMATCH = 'CSP_MISMATCH'
-    NO_RESTRICTION = 'NO_RESTRICTION'
+
+    CSP_MISMATCH = "CSP_MISMATCH"
+    NO_RESTRICTION = "NO_RESTRICTION"
+

@dataclass
class CleanRoomAsset:
    """Metadata of the clean room asset"""
-    
+
    added_at: Optional[int] = None
    """When the asset is added to the clean room, in epoch milliseconds."""
-    
+
    asset_type: Optional[CleanRoomAssetAssetType] = None
    """The type of the asset."""
-    
+
    clean_room_name: Optional[str] = None
    """The name of the clean room this asset belongs to. This is an output-only field to ensure proper
    resource identification."""
-    
+
    foreign_table: Optional[CleanRoomAssetForeignTable] = None
    """Foreign table details available to all collaborators of the clean room. Present if and only if
    **asset_type** is **FOREIGN_TABLE**"""
-    
+
    foreign_table_local_details: Optional[CleanRoomAssetForeignTableLocalDetails] = None
    """Local details for a foreign table that are only available to its owner. Present if and only if
    **asset_type** is **FOREIGN_TABLE**"""
-    
+
    name: Optional[str] = None
    """A fully qualified name that uniquely identifies the asset within the clean room. This is also
    the name displayed in the clean room UI.
@@ -138,115 +158,158 @@ class CleanRoomAsset:
    *shared_catalog*.*shared_schema*.*asset_name*

    For notebooks, the name is the notebook file name."""
-    
+
    notebook: Optional[CleanRoomAssetNotebook] = None
    """Notebook details available to all collaborators of the clean room. Present if and only if
    **asset_type** is **NOTEBOOK_FILE**"""
-    
+
    owner_collaborator_alias: Optional[str] = None
    """The alias of the collaborator who owns this asset"""
-    
+
    status: Optional[CleanRoomAssetStatusEnum] = None
    """Status of the asset"""
-    
+
    table: Optional[CleanRoomAssetTable] = None
    """Table details available to all collaborators of the clean room. Present if and only if
    **asset_type** is **TABLE**"""
-    
+
    table_local_details: Optional[CleanRoomAssetTableLocalDetails] = None
    """Local details for a table that are only available to its owner. Present if and only if
    **asset_type** is **TABLE**"""
-    
+
    view: Optional[CleanRoomAssetView] = None
    """View details available to all collaborators of the clean room.
Present if and only if **asset_type** is **VIEW**""" - + view_local_details: Optional[CleanRoomAssetViewLocalDetails] = None """Local details for a view that are only available to its owner. Present if and only if **asset_type** is **VIEW**""" - + volume_local_details: Optional[CleanRoomAssetVolumeLocalDetails] = None """Local details for a volume that are only available to its owner. Present if and only if **asset_type** is **VOLUME**""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAsset into a dictionary suitable for use as a JSON request body.""" body = {} - if self.added_at is not None: body['added_at'] = self.added_at - if self.asset_type is not None: body['asset_type'] = self.asset_type.value - if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name - if self.foreign_table: body['foreign_table'] = self.foreign_table.as_dict() - if self.foreign_table_local_details: body['foreign_table_local_details'] = self.foreign_table_local_details.as_dict() - if self.name is not None: body['name'] = self.name - if self.notebook: body['notebook'] = self.notebook.as_dict() - if self.owner_collaborator_alias is not None: body['owner_collaborator_alias'] = self.owner_collaborator_alias - if self.status is not None: body['status'] = self.status.value - if self.table: body['table'] = self.table.as_dict() - if self.table_local_details: body['table_local_details'] = self.table_local_details.as_dict() - if self.view: body['view'] = self.view.as_dict() - if self.view_local_details: body['view_local_details'] = self.view_local_details.as_dict() - if self.volume_local_details: body['volume_local_details'] = self.volume_local_details.as_dict() + if self.added_at is not None: + body["added_at"] = self.added_at + if self.asset_type is not None: + body["asset_type"] = self.asset_type.value + if self.clean_room_name is not None: + body["clean_room_name"] = self.clean_room_name + if self.foreign_table: + body["foreign_table"] = self.foreign_table.as_dict() + if self.foreign_table_local_details: + body["foreign_table_local_details"] = self.foreign_table_local_details.as_dict() + if self.name is not None: + body["name"] = self.name + if self.notebook: + body["notebook"] = self.notebook.as_dict() + if self.owner_collaborator_alias is not None: + body["owner_collaborator_alias"] = self.owner_collaborator_alias + if self.status is not None: + body["status"] = self.status.value + if self.table: + body["table"] = self.table.as_dict() + if self.table_local_details: + body["table_local_details"] = self.table_local_details.as_dict() + if self.view: + body["view"] = self.view.as_dict() + if self.view_local_details: + body["view_local_details"] = self.view_local_details.as_dict() + if self.volume_local_details: + body["volume_local_details"] = self.volume_local_details.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAsset into a shallow dictionary of its immediate attributes.""" body = {} - if self.added_at is not None: body['added_at'] = self.added_at - if self.asset_type is not None: body['asset_type'] = self.asset_type - if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name - if self.foreign_table: body['foreign_table'] = self.foreign_table - if self.foreign_table_local_details: body['foreign_table_local_details'] = self.foreign_table_local_details - if self.name is not None: body['name'] = self.name - if self.notebook: body['notebook'] = self.notebook - if self.owner_collaborator_alias is not None: 
body['owner_collaborator_alias'] = self.owner_collaborator_alias - if self.status is not None: body['status'] = self.status - if self.table: body['table'] = self.table - if self.table_local_details: body['table_local_details'] = self.table_local_details - if self.view: body['view'] = self.view - if self.view_local_details: body['view_local_details'] = self.view_local_details - if self.volume_local_details: body['volume_local_details'] = self.volume_local_details + if self.added_at is not None: + body["added_at"] = self.added_at + if self.asset_type is not None: + body["asset_type"] = self.asset_type + if self.clean_room_name is not None: + body["clean_room_name"] = self.clean_room_name + if self.foreign_table: + body["foreign_table"] = self.foreign_table + if self.foreign_table_local_details: + body["foreign_table_local_details"] = self.foreign_table_local_details + if self.name is not None: + body["name"] = self.name + if self.notebook: + body["notebook"] = self.notebook + if self.owner_collaborator_alias is not None: + body["owner_collaborator_alias"] = self.owner_collaborator_alias + if self.status is not None: + body["status"] = self.status + if self.table: + body["table"] = self.table + if self.table_local_details: + body["table_local_details"] = self.table_local_details + if self.view: + body["view"] = self.view + if self.view_local_details: + body["view_local_details"] = self.view_local_details + if self.volume_local_details: + body["volume_local_details"] = self.volume_local_details return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAsset: """Deserializes the CleanRoomAsset from a dictionary.""" - return cls(added_at=d.get('added_at', None), asset_type=_enum(d, 'asset_type', CleanRoomAssetAssetType), clean_room_name=d.get('clean_room_name', None), foreign_table=_from_dict(d, 'foreign_table', CleanRoomAssetForeignTable), foreign_table_local_details=_from_dict(d, 'foreign_table_local_details', CleanRoomAssetForeignTableLocalDetails), name=d.get('name', None), notebook=_from_dict(d, 'notebook', CleanRoomAssetNotebook), owner_collaborator_alias=d.get('owner_collaborator_alias', None), status=_enum(d, 'status', CleanRoomAssetStatusEnum), table=_from_dict(d, 'table', CleanRoomAssetTable), table_local_details=_from_dict(d, 'table_local_details', CleanRoomAssetTableLocalDetails), view=_from_dict(d, 'view', CleanRoomAssetView), view_local_details=_from_dict(d, 'view_local_details', CleanRoomAssetViewLocalDetails), volume_local_details=_from_dict(d, 'volume_local_details', CleanRoomAssetVolumeLocalDetails)) - - + return cls( + added_at=d.get("added_at", None), + asset_type=_enum(d, "asset_type", CleanRoomAssetAssetType), + clean_room_name=d.get("clean_room_name", None), + foreign_table=_from_dict(d, "foreign_table", CleanRoomAssetForeignTable), + foreign_table_local_details=_from_dict( + d, "foreign_table_local_details", CleanRoomAssetForeignTableLocalDetails + ), + name=d.get("name", None), + notebook=_from_dict(d, "notebook", CleanRoomAssetNotebook), + owner_collaborator_alias=d.get("owner_collaborator_alias", None), + status=_enum(d, "status", CleanRoomAssetStatusEnum), + table=_from_dict(d, "table", CleanRoomAssetTable), + table_local_details=_from_dict(d, "table_local_details", CleanRoomAssetTableLocalDetails), + view=_from_dict(d, "view", CleanRoomAssetView), + view_local_details=_from_dict(d, "view_local_details", CleanRoomAssetViewLocalDetails), + volume_local_details=_from_dict(d, "volume_local_details", CleanRoomAssetVolumeLocalDetails), + ) class 
CleanRoomAssetAssetType(Enum): - - - FOREIGN_TABLE = 'FOREIGN_TABLE' - NOTEBOOK_FILE = 'NOTEBOOK_FILE' - TABLE = 'TABLE' - VIEW = 'VIEW' - VOLUME = 'VOLUME' + + FOREIGN_TABLE = "FOREIGN_TABLE" + NOTEBOOK_FILE = "NOTEBOOK_FILE" + TABLE = "TABLE" + VIEW = "VIEW" + VOLUME = "VOLUME" + @dataclass class CleanRoomAssetForeignTable: columns: Optional[List[catalog.ColumnInfo]] = None """The metadata information of the columns in the foreign table""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: body['columns'] = self.columns + if self.columns: + body["columns"] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetForeignTable: """Deserializes the CleanRoomAssetForeignTable from a dictionary.""" - return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) - - + return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo)) @dataclass @@ -254,103 +317,117 @@ class CleanRoomAssetForeignTableLocalDetails: local_name: Optional[str] = None """The fully qualified name of the foreign table in its owner's local metastore, in the format of *catalog*.*schema*.*foreign_table_name*""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTableLocalDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.local_name is not None: body['local_name'] = self.local_name + if self.local_name is not None: + body["local_name"] = self.local_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetForeignTableLocalDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.local_name is not None: body['local_name'] = self.local_name + if self.local_name is not None: + body["local_name"] = self.local_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetForeignTableLocalDetails: """Deserializes the CleanRoomAssetForeignTableLocalDetails from a dictionary.""" - return cls(local_name=d.get('local_name', None)) - - + return cls(local_name=d.get("local_name", None)) @dataclass class CleanRoomAssetNotebook: etag: Optional[str] = None """Server generated etag that represents the notebook version.""" - + notebook_content: Optional[str] = None """Base 64 representation of the notebook contents. 
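    For instance, the raw notebook source can be recovered with the standard library (a sketch; ``nb``
    is assumed to be a fetched :class:`CleanRoomAssetNotebook`):

    .. code-block:: python

        import base64

        html = base64.b64decode(nb.notebook_content).decode("utf-8")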
This is the same format as returned by :method:workspace/export with the format of **HTML**.""" - + review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None """top-level status derived from all reviews""" - + reviews: Optional[List[CleanRoomNotebookReview]] = None """All existing approvals or rejections""" - + runner_collaborator_aliases: Optional[List[str]] = None """collaborators that can run the notebook""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.notebook_content is not None: body['notebook_content'] = self.notebook_content - if self.review_state is not None: body['review_state'] = self.review_state.value - if self.reviews: body['reviews'] = [v.as_dict() for v in self.reviews] - if self.runner_collaborator_aliases: body['runner_collaborator_aliases'] = [v for v in self.runner_collaborator_aliases] + if self.etag is not None: + body["etag"] = self.etag + if self.notebook_content is not None: + body["notebook_content"] = self.notebook_content + if self.review_state is not None: + body["review_state"] = self.review_state.value + if self.reviews: + body["reviews"] = [v.as_dict() for v in self.reviews] + if self.runner_collaborator_aliases: + body["runner_collaborator_aliases"] = [v for v in self.runner_collaborator_aliases] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetNotebook into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.notebook_content is not None: body['notebook_content'] = self.notebook_content - if self.review_state is not None: body['review_state'] = self.review_state - if self.reviews: body['reviews'] = self.reviews - if self.runner_collaborator_aliases: body['runner_collaborator_aliases'] = self.runner_collaborator_aliases + if self.etag is not None: + body["etag"] = self.etag + if self.notebook_content is not None: + body["notebook_content"] = self.notebook_content + if self.review_state is not None: + body["review_state"] = self.review_state + if self.reviews: + body["reviews"] = self.reviews + if self.runner_collaborator_aliases: + body["runner_collaborator_aliases"] = self.runner_collaborator_aliases return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetNotebook: """Deserializes the CleanRoomAssetNotebook from a dictionary.""" - return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None), review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState), reviews=_repeated_dict(d, 'reviews', CleanRoomNotebookReview), runner_collaborator_aliases=d.get('runner_collaborator_aliases', None)) - - + return cls( + etag=d.get("etag", None), + notebook_content=d.get("notebook_content", None), + review_state=_enum(d, "review_state", CleanRoomNotebookReviewNotebookReviewState), + reviews=_repeated_dict(d, "reviews", CleanRoomNotebookReview), + runner_collaborator_aliases=d.get("runner_collaborator_aliases", None), + ) class CleanRoomAssetStatusEnum(Enum): - - - ACTIVE = 'ACTIVE' - PENDING = 'PENDING' - PERMISSION_DENIED = 'PERMISSION_DENIED' + + ACTIVE = "ACTIVE" + PENDING = "PENDING" + PERMISSION_DENIED = "PERMISSION_DENIED" + @dataclass class CleanRoomAssetTable: columns: Optional[List[catalog.ColumnInfo]] = None """The metadata information of the columns in the table""" - + def as_dict(self) -> dict: """Serializes the 
CleanRoomAssetTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: body['columns'] = self.columns + if self.columns: + body["columns"] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetTable: """Deserializes the CleanRoomAssetTable from a dictionary.""" - return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) - - + return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo)) @dataclass @@ -358,55 +435,57 @@ class CleanRoomAssetTableLocalDetails: local_name: Optional[str] = None """The fully qualified name of the table in its owner's local metastore, in the format of *catalog*.*schema*.*table_name*""" - + partitions: Optional[List[sharing.Partition]] = None """Partition filtering specification for a shared table.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetTableLocalDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.local_name is not None: body['local_name'] = self.local_name - if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions] + if self.local_name is not None: + body["local_name"] = self.local_name + if self.partitions: + body["partitions"] = [v.as_dict() for v in self.partitions] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetTableLocalDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.local_name is not None: body['local_name'] = self.local_name - if self.partitions: body['partitions'] = self.partitions + if self.local_name is not None: + body["local_name"] = self.local_name + if self.partitions: + body["partitions"] = self.partitions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetTableLocalDetails: """Deserializes the CleanRoomAssetTableLocalDetails from a dictionary.""" - return cls(local_name=d.get('local_name', None), partitions=_repeated_dict(d, 'partitions', sharing.Partition)) - - + return cls(local_name=d.get("local_name", None), partitions=_repeated_dict(d, "partitions", sharing.Partition)) @dataclass class CleanRoomAssetView: columns: Optional[List[catalog.ColumnInfo]] = None """The metadata information of the columns in the view""" - + def as_dict(self) -> dict: """Serializes the CleanRoomAssetView into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomAssetView into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: body['columns'] = self.columns + if self.columns: + body["columns"] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetView: """Deserializes the CleanRoomAssetView from a dictionary.""" - return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo)) - - + return cls(columns=_repeated_dict(d, "columns", catalog.ColumnInfo)) @dataclass @@ -414,25 +493,25 @@ class CleanRoomAssetViewLocalDetails: local_name: Optional[str] = None """The fully qualified name of the 
view in its owner's local metastore, in the format of
    *catalog*.*schema*.*view_name*"""
-    
+
    def as_dict(self) -> dict:
        """Serializes the CleanRoomAssetViewLocalDetails into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CleanRoomAssetViewLocalDetails into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetViewLocalDetails:
        """Deserializes the CleanRoomAssetViewLocalDetails from a dictionary."""
-        return cls(local_name=d.get('local_name', None))
-    
-    
+        return cls(local_name=d.get("local_name", None))


@dataclass
@@ -440,216 +519,273 @@ class CleanRoomAssetVolumeLocalDetails:
    local_name: Optional[str] = None
    """The fully qualified name of the volume in its owner's local metastore, in the format of
    *catalog*.*schema*.*volume_name*"""
-    
+
    def as_dict(self) -> dict:
        """Serializes the CleanRoomAssetVolumeLocalDetails into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CleanRoomAssetVolumeLocalDetails into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.local_name is not None:
+            body["local_name"] = self.local_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CleanRoomAssetVolumeLocalDetails:
        """Deserializes the CleanRoomAssetVolumeLocalDetails from a dictionary."""
-        return cls(local_name=d.get('local_name', None))
-    
-    
+        return cls(local_name=d.get("local_name", None))


@dataclass
class CleanRoomCollaborator:
    """Publicly visible clean room collaborator."""
-    
+
    collaborator_alias: str
    """Collaborator alias specified by the clean room creator. It is unique across all collaborators of
    this clean room, and used to derive multiple values internally such as catalog alias and clean
    room name for single metastore clean rooms. It should follow [UC securable naming requirements].

    [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements"""
-    
+
    display_name: Optional[str] = None
    """Generated display name for the collaborator. In the case of a single metastore clean room, it is
    the clean room name. For x-metastore clean rooms, it is the organization name of the metastore.
    It is not restricted to these values and could change in the future"""
-    
+
    global_metastore_id: Optional[str] = None
    """The global Unity Catalog metastore id of the collaborator. The identifier is of format
    cloud:region:metastore-uuid."""
-    
+
    invite_recipient_email: Optional[str] = None
    """Email of the user who is receiving the clean room "invitation". It should be empty for the
    creator of the clean room, and non-empty for the invitees of the clean room. It is only returned
    in the output when the clean room creator calls GET"""
-    
+
    invite_recipient_workspace_id: Optional[int] = None
    """Workspace ID of the user who is receiving the clean room "invitation".
Must be specified if invite_recipient_email is specified. It should be empty when the collaborator is the creator of the clean room.""" - + organization_name: Optional[str] = None """[Organization name](:method:metastores/list#metastores-delta_sharing_organization_name) configured in the metastore""" - + def as_dict(self) -> dict: """Serializes the CleanRoomCollaborator into a dictionary suitable for use as a JSON request body.""" body = {} - if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias - if self.display_name is not None: body['display_name'] = self.display_name - if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id - if self.invite_recipient_email is not None: body['invite_recipient_email'] = self.invite_recipient_email - if self.invite_recipient_workspace_id is not None: body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id - if self.organization_name is not None: body['organization_name'] = self.organization_name + if self.collaborator_alias is not None: + body["collaborator_alias"] = self.collaborator_alias + if self.display_name is not None: + body["display_name"] = self.display_name + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.invite_recipient_email is not None: + body["invite_recipient_email"] = self.invite_recipient_email + if self.invite_recipient_workspace_id is not None: + body["invite_recipient_workspace_id"] = self.invite_recipient_workspace_id + if self.organization_name is not None: + body["organization_name"] = self.organization_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomCollaborator into a shallow dictionary of its immediate attributes.""" body = {} - if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias - if self.display_name is not None: body['display_name'] = self.display_name - if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id - if self.invite_recipient_email is not None: body['invite_recipient_email'] = self.invite_recipient_email - if self.invite_recipient_workspace_id is not None: body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id - if self.organization_name is not None: body['organization_name'] = self.organization_name + if self.collaborator_alias is not None: + body["collaborator_alias"] = self.collaborator_alias + if self.display_name is not None: + body["display_name"] = self.display_name + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id + if self.invite_recipient_email is not None: + body["invite_recipient_email"] = self.invite_recipient_email + if self.invite_recipient_workspace_id is not None: + body["invite_recipient_workspace_id"] = self.invite_recipient_workspace_id + if self.organization_name is not None: + body["organization_name"] = self.organization_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomCollaborator: """Deserializes the CleanRoomCollaborator from a dictionary.""" - return cls(collaborator_alias=d.get('collaborator_alias', None), display_name=d.get('display_name', None), global_metastore_id=d.get('global_metastore_id', None), invite_recipient_email=d.get('invite_recipient_email', None), invite_recipient_workspace_id=d.get('invite_recipient_workspace_id', None), organization_name=d.get('organization_name', None)) - - + return cls( + 
collaborator_alias=d.get("collaborator_alias", None), + display_name=d.get("display_name", None), + global_metastore_id=d.get("global_metastore_id", None), + invite_recipient_email=d.get("invite_recipient_email", None), + invite_recipient_workspace_id=d.get("invite_recipient_workspace_id", None), + organization_name=d.get("organization_name", None), + ) @dataclass class CleanRoomNotebookReview: comment: Optional[str] = None """review comment""" - + created_at_millis: Optional[int] = None """timestamp of when the review was submitted""" - + review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None """review outcome""" - + review_sub_reason: Optional[CleanRoomNotebookReviewNotebookReviewSubReason] = None """specified when the review was not explicitly made by a user""" - + reviewer_collaborator_alias: Optional[str] = None """collaborator alias of the reviewer""" - + def as_dict(self) -> dict: """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis - if self.review_state is not None: body['review_state'] = self.review_state.value - if self.review_sub_reason is not None: body['review_sub_reason'] = self.review_sub_reason.value - if self.reviewer_collaborator_alias is not None: body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias + if self.comment is not None: + body["comment"] = self.comment + if self.created_at_millis is not None: + body["created_at_millis"] = self.created_at_millis + if self.review_state is not None: + body["review_state"] = self.review_state.value + if self.review_sub_reason is not None: + body["review_sub_reason"] = self.review_sub_reason.value + if self.reviewer_collaborator_alias is not None: + body["reviewer_collaborator_alias"] = self.reviewer_collaborator_alias return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomNotebookReview into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis - if self.review_state is not None: body['review_state'] = self.review_state - if self.review_sub_reason is not None: body['review_sub_reason'] = self.review_sub_reason - if self.reviewer_collaborator_alias is not None: body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias + if self.comment is not None: + body["comment"] = self.comment + if self.created_at_millis is not None: + body["created_at_millis"] = self.created_at_millis + if self.review_state is not None: + body["review_state"] = self.review_state + if self.review_sub_reason is not None: + body["review_sub_reason"] = self.review_sub_reason + if self.reviewer_collaborator_alias is not None: + body["reviewer_collaborator_alias"] = self.reviewer_collaborator_alias return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomNotebookReview: """Deserializes the CleanRoomNotebookReview from a dictionary.""" - return cls(comment=d.get('comment', None), created_at_millis=d.get('created_at_millis', None), review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState), review_sub_reason=_enum(d, 'review_sub_reason', CleanRoomNotebookReviewNotebookReviewSubReason), reviewer_collaborator_alias=d.get('reviewer_collaborator_alias', None)) - - + return cls( + 
comment=d.get("comment", None),
+            created_at_millis=d.get("created_at_millis", None),
+            review_state=_enum(d, "review_state", CleanRoomNotebookReviewNotebookReviewState),
+            review_sub_reason=_enum(d, "review_sub_reason", CleanRoomNotebookReviewNotebookReviewSubReason),
+            reviewer_collaborator_alias=d.get("reviewer_collaborator_alias", None),
+        )


class CleanRoomNotebookReviewNotebookReviewState(Enum):
-    
-    
-    APPROVED = 'APPROVED'
-    PENDING = 'PENDING'
-    REJECTED = 'REJECTED'
+
+    APPROVED = "APPROVED"
+    PENDING = "PENDING"
+    REJECTED = "REJECTED"
+

class CleanRoomNotebookReviewNotebookReviewSubReason(Enum):
-    
-    
-    AUTO_APPROVED = 'AUTO_APPROVED'
-    BACKFILLED = 'BACKFILLED'
+
+    AUTO_APPROVED = "AUTO_APPROVED"
+    BACKFILLED = "BACKFILLED"
+

@dataclass
class CleanRoomNotebookTaskRun:
    """Stores information about a single task run."""
-    
+
    collaborator_job_run_info: Optional[CollaboratorJobRunInfo] = None
    """Job run info of the task in the runner's local workspace. This field is only included in the
    LIST API response if the task was run within the same workspace from which the API is called. If
    the task run was in a different workspace under the same metastore, only the workspace_id is
    included."""
-    
+
    notebook_etag: Optional[str] = None
    """Etag of the notebook executed in this task run, used to identify the notebook version."""
-    
+
    notebook_job_run_state: Optional[jobs.CleanRoomTaskRunState] = None
    """State of the task run."""
-    
+
    notebook_name: Optional[str] = None
    """Asset name of the notebook executed in this task run."""
-    
+
    notebook_updated_at: Optional[int] = None
    """The timestamp of when the notebook was last updated."""
-    
+
    output_schema_expiration_time: Optional[int] = None
    """Expiration time of the output schema of the task run (if any), in epoch milliseconds."""
-    
+
    output_schema_name: Optional[str] = None
    """Name of the output schema associated with the clean rooms notebook task run."""
-    
+
    run_duration: Optional[int] = None
    """Duration of the task run, in milliseconds."""
-    
+
    start_time: Optional[int] = None
    """When the task run started, in epoch milliseconds."""
-    
+
    def as_dict(self) -> dict:
        """Serializes the CleanRoomNotebookTaskRun into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.collaborator_job_run_info: body['collaborator_job_run_info'] = self.collaborator_job_run_info.as_dict()
-        if self.notebook_etag is not None: body['notebook_etag'] = self.notebook_etag
-        if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state.as_dict()
-        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
-        if self.notebook_updated_at is not None: body['notebook_updated_at'] = self.notebook_updated_at
-        if self.output_schema_expiration_time is not None: body['output_schema_expiration_time'] = self.output_schema_expiration_time
-        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
-        if self.run_duration is not None: body['run_duration'] = self.run_duration
-        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.collaborator_job_run_info:
+            body["collaborator_job_run_info"] = self.collaborator_job_run_info.as_dict()
+        if self.notebook_etag is not None:
+            body["notebook_etag"] = self.notebook_etag
+        if self.notebook_job_run_state:
+            body["notebook_job_run_state"] = self.notebook_job_run_state.as_dict()
+        if self.notebook_name is not None:
+            body["notebook_name"] = self.notebook_name
+        if self.notebook_updated_at is not None:
+            body["notebook_updated_at"] =
self.notebook_updated_at + if self.output_schema_expiration_time is not None: + body["output_schema_expiration_time"] = self.output_schema_expiration_time + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.start_time is not None: + body["start_time"] = self.start_time return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomNotebookTaskRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.collaborator_job_run_info: body['collaborator_job_run_info'] = self.collaborator_job_run_info - if self.notebook_etag is not None: body['notebook_etag'] = self.notebook_etag - if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state - if self.notebook_name is not None: body['notebook_name'] = self.notebook_name - if self.notebook_updated_at is not None: body['notebook_updated_at'] = self.notebook_updated_at - if self.output_schema_expiration_time is not None: body['output_schema_expiration_time'] = self.output_schema_expiration_time - if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name - if self.run_duration is not None: body['run_duration'] = self.run_duration - if self.start_time is not None: body['start_time'] = self.start_time + if self.collaborator_job_run_info: + body["collaborator_job_run_info"] = self.collaborator_job_run_info + if self.notebook_etag is not None: + body["notebook_etag"] = self.notebook_etag + if self.notebook_job_run_state: + body["notebook_job_run_state"] = self.notebook_job_run_state + if self.notebook_name is not None: + body["notebook_name"] = self.notebook_name + if self.notebook_updated_at is not None: + body["notebook_updated_at"] = self.notebook_updated_at + if self.output_schema_expiration_time is not None: + body["output_schema_expiration_time"] = self.output_schema_expiration_time + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.start_time is not None: + body["start_time"] = self.start_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomNotebookTaskRun: """Deserializes the CleanRoomNotebookTaskRun from a dictionary.""" - return cls(collaborator_job_run_info=_from_dict(d, 'collaborator_job_run_info', CollaboratorJobRunInfo), notebook_etag=d.get('notebook_etag', None), notebook_job_run_state=_from_dict(d, 'notebook_job_run_state', jobs.CleanRoomTaskRunState), notebook_name=d.get('notebook_name', None), notebook_updated_at=d.get('notebook_updated_at', None), output_schema_expiration_time=d.get('output_schema_expiration_time', None), output_schema_name=d.get('output_schema_name', None), run_duration=d.get('run_duration', None), start_time=d.get('start_time', None)) - - + return cls( + collaborator_job_run_info=_from_dict(d, "collaborator_job_run_info", CollaboratorJobRunInfo), + notebook_etag=d.get("notebook_etag", None), + notebook_job_run_state=_from_dict(d, "notebook_job_run_state", jobs.CleanRoomTaskRunState), + notebook_name=d.get("notebook_name", None), + notebook_updated_at=d.get("notebook_updated_at", None), + output_schema_expiration_time=d.get("output_schema_expiration_time", None), + output_schema_name=d.get("output_schema_name", None), + run_duration=d.get("run_duration", None), + start_time=d.get("start_time", None), + ) @dataclass @@ -659,48 
+795,53 @@ class CleanRoomOutputCatalog: field will always exist if status is CREATED. [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements""" - + status: Optional[CleanRoomOutputCatalogOutputCatalogStatus] = None - + def as_dict(self) -> dict: """Serializes the CleanRoomOutputCatalog into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.status is not None: body['status'] = self.status.value + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomOutputCatalog into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.status is not None: body['status'] = self.status + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomOutputCatalog: """Deserializes the CleanRoomOutputCatalog from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), status=_enum(d, 'status', CleanRoomOutputCatalogOutputCatalogStatus)) - - + return cls( + catalog_name=d.get("catalog_name", None), + status=_enum(d, "status", CleanRoomOutputCatalogOutputCatalogStatus), + ) class CleanRoomOutputCatalogOutputCatalogStatus(Enum): - - - CREATED = 'CREATED' - NOT_CREATED = 'NOT_CREATED' - NOT_ELIGIBLE = 'NOT_ELIGIBLE' + + CREATED = "CREATED" + NOT_CREATED = "NOT_CREATED" + NOT_ELIGIBLE = "NOT_ELIGIBLE" + @dataclass class CleanRoomRemoteDetail: """Publicly visible central clean room details.""" - + central_clean_room_id: Optional[str] = None """Central clean room ID.""" - + cloud_vendor: Optional[str] = None """Cloud vendor (aws,azure,gcp) of the central clean room.""" - + collaborators: Optional[List[CleanRoomCollaborator]] = None """Collaborators in the central clean room. There should be one and only one collaborator in the list that satisfies the owner condition: @@ -708,177 +849,204 @@ class CleanRoomRemoteDetail: 1. It has the creator's global_metastore_id (determined by caller of CreateCleanRoom). 2. 
Its invite_recipient_email is empty.""" - + compliance_security_profile: Optional[ComplianceSecurityProfile] = None """The compliance security profile used to process regulated data following compliance standards.""" - + creator: Optional[CleanRoomCollaborator] = None """Collaborator who creates the clean room.""" - + egress_network_policy: Optional[settings.EgressNetworkPolicy] = None """Egress network policy to apply to the central clean room workspace.""" - + region: Optional[str] = None """Region of the central clean room.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomRemoteDetail into a dictionary suitable for use as a JSON request body.""" body = {} - if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id - if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor - if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators] - if self.compliance_security_profile: body['compliance_security_profile'] = self.compliance_security_profile.as_dict() - if self.creator: body['creator'] = self.creator.as_dict() - if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy.as_dict() - if self.region is not None: body['region'] = self.region + if self.central_clean_room_id is not None: + body["central_clean_room_id"] = self.central_clean_room_id + if self.cloud_vendor is not None: + body["cloud_vendor"] = self.cloud_vendor + if self.collaborators: + body["collaborators"] = [v.as_dict() for v in self.collaborators] + if self.compliance_security_profile: + body["compliance_security_profile"] = self.compliance_security_profile.as_dict() + if self.creator: + body["creator"] = self.creator.as_dict() + if self.egress_network_policy: + body["egress_network_policy"] = self.egress_network_policy.as_dict() + if self.region is not None: + body["region"] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomRemoteDetail into a shallow dictionary of its immediate attributes.""" body = {} - if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id - if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor - if self.collaborators: body['collaborators'] = self.collaborators - if self.compliance_security_profile: body['compliance_security_profile'] = self.compliance_security_profile - if self.creator: body['creator'] = self.creator - if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy - if self.region is not None: body['region'] = self.region + if self.central_clean_room_id is not None: + body["central_clean_room_id"] = self.central_clean_room_id + if self.cloud_vendor is not None: + body["cloud_vendor"] = self.cloud_vendor + if self.collaborators: + body["collaborators"] = self.collaborators + if self.compliance_security_profile: + body["compliance_security_profile"] = self.compliance_security_profile + if self.creator: + body["creator"] = self.creator + if self.egress_network_policy: + body["egress_network_policy"] = self.egress_network_policy + if self.region is not None: + body["region"] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomRemoteDetail: """Deserializes the CleanRoomRemoteDetail from a dictionary.""" - return cls(central_clean_room_id=d.get('central_clean_room_id', None), cloud_vendor=d.get('cloud_vendor', None), collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaborator), 
compliance_security_profile=_from_dict(d, 'compliance_security_profile', ComplianceSecurityProfile), creator=_from_dict(d, 'creator', CleanRoomCollaborator), egress_network_policy=_from_dict(d, 'egress_network_policy', settings.EgressNetworkPolicy), region=d.get('region', None)) - - + return cls( + central_clean_room_id=d.get("central_clean_room_id", None), + cloud_vendor=d.get("cloud_vendor", None), + collaborators=_repeated_dict(d, "collaborators", CleanRoomCollaborator), + compliance_security_profile=_from_dict(d, "compliance_security_profile", ComplianceSecurityProfile), + creator=_from_dict(d, "creator", CleanRoomCollaborator), + egress_network_policy=_from_dict(d, "egress_network_policy", settings.EgressNetworkPolicy), + region=d.get("region", None), + ) class CleanRoomStatusEnum(Enum): - - - ACTIVE = 'ACTIVE' - DELETED = 'DELETED' - FAILED = 'FAILED' - PROVISIONING = 'PROVISIONING' + + ACTIVE = "ACTIVE" + DELETED = "DELETED" + FAILED = "FAILED" + PROVISIONING = "PROVISIONING" + @dataclass class CollaboratorJobRunInfo: collaborator_alias: Optional[str] = None """Alias of the collaborator that triggered the task run.""" - + collaborator_job_id: Optional[int] = None """Job ID of the task run in the collaborator's workspace.""" - + collaborator_job_run_id: Optional[int] = None """Job run ID of the task run in the collaborator's workspace.""" - + collaborator_task_run_id: Optional[int] = None """Task run ID of the task run in the collaborator's workspace.""" - + collaborator_workspace_id: Optional[int] = None """ID of the collaborator's workspace that triggered the task run.""" - + def as_dict(self) -> dict: """Serializes the CollaboratorJobRunInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias - if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id - if self.collaborator_job_run_id is not None: body['collaborator_job_run_id'] = self.collaborator_job_run_id - if self.collaborator_task_run_id is not None: body['collaborator_task_run_id'] = self.collaborator_task_run_id - if self.collaborator_workspace_id is not None: body['collaborator_workspace_id'] = self.collaborator_workspace_id + if self.collaborator_alias is not None: + body["collaborator_alias"] = self.collaborator_alias + if self.collaborator_job_id is not None: + body["collaborator_job_id"] = self.collaborator_job_id + if self.collaborator_job_run_id is not None: + body["collaborator_job_run_id"] = self.collaborator_job_run_id + if self.collaborator_task_run_id is not None: + body["collaborator_task_run_id"] = self.collaborator_task_run_id + if self.collaborator_workspace_id is not None: + body["collaborator_workspace_id"] = self.collaborator_workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the CollaboratorJobRunInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias - if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id - if self.collaborator_job_run_id is not None: body['collaborator_job_run_id'] = self.collaborator_job_run_id - if self.collaborator_task_run_id is not None: body['collaborator_task_run_id'] = self.collaborator_task_run_id - if self.collaborator_workspace_id is not None: body['collaborator_workspace_id'] = self.collaborator_workspace_id + if self.collaborator_alias is not None: 
+ body["collaborator_alias"] = self.collaborator_alias + if self.collaborator_job_id is not None: + body["collaborator_job_id"] = self.collaborator_job_id + if self.collaborator_job_run_id is not None: + body["collaborator_job_run_id"] = self.collaborator_job_run_id + if self.collaborator_task_run_id is not None: + body["collaborator_task_run_id"] = self.collaborator_task_run_id + if self.collaborator_workspace_id is not None: + body["collaborator_workspace_id"] = self.collaborator_workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CollaboratorJobRunInfo: """Deserializes the CollaboratorJobRunInfo from a dictionary.""" - return cls(collaborator_alias=d.get('collaborator_alias', None), collaborator_job_id=d.get('collaborator_job_id', None), collaborator_job_run_id=d.get('collaborator_job_run_id', None), collaborator_task_run_id=d.get('collaborator_task_run_id', None), collaborator_workspace_id=d.get('collaborator_workspace_id', None)) - - + return cls( + collaborator_alias=d.get("collaborator_alias", None), + collaborator_job_id=d.get("collaborator_job_id", None), + collaborator_job_run_id=d.get("collaborator_job_run_id", None), + collaborator_task_run_id=d.get("collaborator_task_run_id", None), + collaborator_workspace_id=d.get("collaborator_workspace_id", None), + ) @dataclass class ComplianceSecurityProfile: """The compliance security profile used to process regulated data following compliance standards.""" - + compliance_standards: Optional[List[settings.ComplianceStandard]] = None """The list of compliance standards that the compliance security profile is configured to enforce.""" - + is_enabled: Optional[bool] = None """Whether the compliance security profile is enabled.""" - + def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.compliance_standards: body['compliance_standards'] = [v.as_dict() for v in self.compliance_standards] - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + if self.compliance_standards: + body["compliance_standards"] = [v.as_dict() for v in self.compliance_standards] + if self.is_enabled is not None: + body["is_enabled"] = self.is_enabled return body def as_shallow_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.compliance_standards: body['compliance_standards'] = self.compliance_standards - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + if self.compliance_standards: + body["compliance_standards"] = self.compliance_standards + if self.is_enabled is not None: + body["is_enabled"] = self.is_enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" - return cls(compliance_standards=_repeated_dict(d, 'compliance_standards', settings.ComplianceStandard), is_enabled=d.get('is_enabled', None)) - - - - - - - - + return cls( + compliance_standards=_repeated_dict(d, "compliance_standards", settings.ComplianceStandard), + is_enabled=d.get("is_enabled", None), + ) @dataclass class CreateCleanRoomOutputCatalogResponse: output_catalog: Optional[CleanRoomOutputCatalog] = None - + def as_dict(self) -> dict: """Serializes the CreateCleanRoomOutputCatalogResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.output_catalog: body['output_catalog'] = 
self.output_catalog.as_dict() + if self.output_catalog: + body["output_catalog"] = self.output_catalog.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateCleanRoomOutputCatalogResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.output_catalog: body['output_catalog'] = self.output_catalog + if self.output_catalog: + body["output_catalog"] = self.output_catalog return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCleanRoomOutputCatalogResponse: """Deserializes the CreateCleanRoomOutputCatalogResponse from a dictionary.""" - return cls(output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog)) - - - - - - - - + return cls(output_catalog=_from_dict(d, "output_catalog", CleanRoomOutputCatalog)) @dataclass class DeleteCleanRoomAssetResponse: """Response for delete clean room request. Using an empty message since the generic Empty proto does not extend UnshadedMessageMarker.""" - + def as_dict(self) -> dict: """Serializes the DeleteCleanRoomAssetResponse into a dictionary suitable for use as a JSON request body.""" body = {} @@ -893,11 +1061,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCleanRoomAssetResponse: """Deserializes the DeleteCleanRoomAssetResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -916,51 +1079,39 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - - - - - - - @dataclass class ListCleanRoomAssetsResponse: assets: Optional[List[CleanRoomAsset]] = None """Assets in the clean room.""" - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListCleanRoomAssetsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assets: body['assets'] = [v.as_dict() for v in self.assets] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.assets: + body["assets"] = [v.as_dict() for v in self.assets] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListCleanRoomAssetsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.assets: body['assets'] = self.assets - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.assets: + body["assets"] = self.assets + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomAssetsResponse: """Deserializes the ListCleanRoomAssetsResponse from a dictionary.""" - return cls(assets=_repeated_dict(d, 'assets', CleanRoomAsset), next_page_token=d.get('next_page_token', None)) - - - - - + return cls(assets=_repeated_dict(d, "assets", CleanRoomAsset), next_page_token=d.get("next_page_token", None)) @dataclass @@ -968,248 +1119,214 @@ class ListCleanRoomNotebookTaskRunsResponse: next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
page_token should be set to this value for the next request (for the next page of results).""" - + runs: Optional[List[CleanRoomNotebookTaskRun]] = None """Task runs in the clean room.""" - + def as_dict(self) -> dict: """Serializes the ListCleanRoomNotebookTaskRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.runs: body['runs'] = [v.as_dict() for v in self.runs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.runs: + body["runs"] = [v.as_dict() for v in self.runs] return body def as_shallow_dict(self) -> dict: """Serializes the ListCleanRoomNotebookTaskRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.runs: body['runs'] = self.runs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.runs: + body["runs"] = self.runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomNotebookTaskRunsResponse: """Deserializes the ListCleanRoomNotebookTaskRunsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), runs=_repeated_dict(d, 'runs', CleanRoomNotebookTaskRun)) - - - - - + return cls( + next_page_token=d.get("next_page_token", None), runs=_repeated_dict(d, "runs", CleanRoomNotebookTaskRun) + ) @dataclass class ListCleanRoomsResponse: clean_rooms: Optional[List[CleanRoom]] = None - + next_page_token: Optional[str] = None """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token should be set to this value for the next request (for the next page of results).""" - + def as_dict(self) -> dict: """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.clean_rooms: + body["clean_rooms"] = [v.as_dict() for v in self.clean_rooms] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListCleanRoomsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms: body['clean_rooms'] = self.clean_rooms - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.clean_rooms: + body["clean_rooms"] = self.clean_rooms + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomsResponse: """Deserializes the ListCleanRoomsResponse from a dictionary.""" - return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoom), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + clean_rooms=_repeated_dict(d, "clean_rooms", CleanRoom), next_page_token=d.get("next_page_token", None) + ) @dataclass class UpdateCleanRoomRequest: clean_room: Optional[CleanRoom] = None - + name: Optional[str] = None """Name of the clean room.""" - + def as_dict(self) -> dict: """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_room: body['clean_room'] = self.clean_room.as_dict() - if self.name is 
not None: body['name'] = self.name + if self.clean_room: + body["clean_room"] = self.clean_room.as_dict() + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_room: body['clean_room'] = self.clean_room - if self.name is not None: body['name'] = self.name + if self.clean_room: + body["clean_room"] = self.clean_room + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCleanRoomRequest: """Deserializes the UpdateCleanRoomRequest from a dictionary.""" - return cls(clean_room=_from_dict(d, 'clean_room', CleanRoom), name=d.get('name', None)) - - - - + return cls(clean_room=_from_dict(d, "clean_room", CleanRoom), name=d.get("name", None)) class CleanRoomAssetsAPI: """Clean room assets are data and code objects: tables, volumes, and notebooks that are shared with the clean room.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , clean_room_name: str, asset: CleanRoomAsset - ) -> CleanRoomAsset: + def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset: """Create an asset. - + Create a clean room asset: share an asset like a notebook or table into the clean room. For each UC asset that is added through this method, the clean room owner must also have enough privilege on the asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to access the asset. Typically, you should use a group as the clean room owner. - + :param clean_room_name: str Name of the clean room. :param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` """ body = asset.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/clean-rooms/{clean_room_name}/assets', body=body - - , headers=headers - ) - return CleanRoomAsset.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.0/clean-rooms/{clean_room_name}/assets", body=body, headers=headers) + return CleanRoomAsset.from_dict(res) - def delete(self - , clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str - ): + def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str): """Delete an asset. - + Delete a clean room asset - unshare/remove the asset from the clean room - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. :param name: str The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}", headers=headers + ) - def get(self - , clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str - ) -> CleanRoomAsset: + def get(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) -> CleanRoomAsset: """Get an asset. - + Get the details of a clean room asset by its type and full name. 
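(A minimal usage sketch for the asset CRUD surface being reformatted here, assuming a configured WorkspaceClient that exposes it as `w.clean_room_assets` and an existing clean room; the clean room name, asset name, and asset type below are illustrative, not taken from this diff.)

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.cleanrooms import CleanRoomAsset, CleanRoomAssetAssetType

    w = WorkspaceClient()  # credentials resolved from the environment
    # Share a table into the clean room, then read it back by type and full name.
    asset = w.clean_room_assets.create(
        clean_room_name="demo-room",
        asset=CleanRoomAsset(
            name="shared_catalog.shared_schema.orders",
            asset_type=CleanRoomAssetAssetType.TABLE,
        ),
    )
    fetched = w.clean_room_assets.get(
        clean_room_name="demo-room",
        asset_type=CleanRoomAssetAssetType.TABLE,
        name=asset.name,
    )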
- + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. :param name: str The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset. - + :returns: :class:`CleanRoomAsset` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}' - - , headers=headers - ) - return CleanRoomAsset.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}", headers=headers + ) + return CleanRoomAsset.from_dict(res) - def list(self - , clean_room_name: str - , * - , page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]: + def list(self, clean_room_name: str, *, page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]: """List assets. - + :param clean_room_name: str Name of the clean room. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomAsset` """ - - query = {} - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/clean-rooms/{clean_room_name}/assets', query=query - - , headers=headers - ) - if 'assets' in json: - for v in json['assets']: - yield CleanRoomAsset.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset - ) -> CleanRoomAsset: + while True: + json = self._api.do("GET", f"/api/2.0/clean-rooms/{clean_room_name}/assets", query=query, headers=headers) + if "assets" in json: + for v in json["assets"]: + yield CleanRoomAsset.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset + ) -> CleanRoomAsset: """Update an asset. - + Update a clean room asset. For example, updating the content of a notebook; changing the shared partitions of a table; etc. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` @@ -1217,50 +1334,49 @@ def update(self :param name: str A fully qualified name that uniquely identifies the asset within the clean room. This is also the name displayed in the clean room UI. - + For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - + For notebooks, the name is the notebook file name. 
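(Continuing the sketch: an update call following the naming convention just described; same assumed `w` client, same illustrative names.)

    # Refresh the shared asset in place; for UC securables the name follows the
    # shared_catalog.shared_schema.asset_name convention described above.
    w.clean_room_assets.update(
        clean_room_name="demo-room",
        asset_type=CleanRoomAssetAssetType.TABLE,
        name="shared_catalog.shared_schema.orders",
        asset=CleanRoomAsset(
            name="shared_catalog.shared_schema.orders",
            asset_type=CleanRoomAssetAssetType.TABLE,
        ),
    )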
:param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` """ body = asset.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}", + body=body, + headers=headers, + ) return CleanRoomAsset.from_dict(res) - - + class CleanRoomTaskRunsAPI: """Clean room task runs are the executions of notebooks in a clean room.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def list(self - , clean_room_name: str - , * - , notebook_name: Optional[str] = None, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CleanRoomNotebookTaskRun]: + def list( + self, + clean_room_name: str, + *, + notebook_name: Optional[str] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[CleanRoomNotebookTaskRun]: """List notebook task runs. - + List all the historical notebook task runs in a clean room. - + :param clean_room_name: str Name of the clean room. :param notebook_name: str (optional) @@ -1269,225 +1385,176 @@ def list(self The maximum number of task runs to return. Currently ignored - all runs will be returned. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomNotebookTaskRun` """ - + query = {} - if notebook_name is not None: query['notebook_name'] = notebook_name - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if notebook_name is not None: + query["notebook_name"] = notebook_name + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/clean-rooms/{clean_room_name}/runs', query=query - - , headers=headers - ) - if 'runs' in json: - for v in json['runs']: - yield CleanRoomNotebookTaskRun.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - + json = self._api.do("GET", f"/api/2.0/clean-rooms/{clean_room_name}/runs", query=query, headers=headers) + if "runs" in json: + for v in json["runs"]: + yield CleanRoomNotebookTaskRun.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + - - class CleanRoomsAPI: """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , clean_room: CleanRoom - ) -> CleanRoom: + def create(self, clean_room: CleanRoom) -> CleanRoom: """Create a clean room. - + Create a new clean room with the specified collaborators. 
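(A sketch of consuming the paginated task-run listing shown above; the returned iterator follows next_page_token internally, so callers never page by hand. The client handle `w` and the names are illustrative assumptions.)

    # Scan the historical runs of one notebook in the clean room.
    for run in w.clean_room_task_runs.list("demo-room", notebook_name="analysis"):
        print(run.notebook_name, run.run_duration, run.start_time)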
This method is asynchronous; the returned name field inside the clean_room field can be used to poll the clean room status, using the :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean room will be usable once it enters an ACTIVE state. - + The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - + :param clean_room: :class:`CleanRoom` - + :returns: :class:`CleanRoom` """ body = clean_room.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/clean-rooms', body=body - - , headers=headers - ) - return CleanRoom.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/clean-rooms", body=body, headers=headers) + return CleanRoom.from_dict(res) - def create_output_catalog(self - , clean_room_name: str, output_catalog: CleanRoomOutputCatalog - ) -> CreateCleanRoomOutputCatalogResponse: + def create_output_catalog( + self, clean_room_name: str, output_catalog: CleanRoomOutputCatalog + ) -> CreateCleanRoomOutputCatalogResponse: """Create an output catalog. - + Create the output catalog of the clean room. - + :param clean_room_name: str Name of the clean room. :param output_catalog: :class:`CleanRoomOutputCatalog` - + :returns: :class:`CreateCleanRoomOutputCatalogResponse` """ body = output_catalog.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/clean-rooms/{clean_room_name}/output-catalogs', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/clean-rooms/{clean_room_name}/output-catalogs", body=body, headers=headers + ) return CreateCleanRoomOutputCatalogResponse.from_dict(res) - - - - - def delete(self - , name: str - ): + def delete(self, name: str): """Delete a clean room. - + Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other collaborators have not deleted the clean room, they will still have the clean room in their metastore, but it will be in a DELETED state and no operations other than deletion can be performed on it. - + :param name: str Name of the clean room. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/clean-rooms/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/clean-rooms/{name}", headers=headers) - def get(self - , name: str - ) -> CleanRoom: + def get(self, name: str) -> CleanRoom: """Get a clean room. - + Get the details of a clean room given its name. 
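(Given the asynchronous create semantics described above, a plausible polling loop; it assumes the CleanRoom dataclass carries `name` and `status` fields as elsewhere in this module, and elides collaborator configuration for brevity.)

    import time

    from databricks.sdk.service.cleanrooms import CleanRoom, CleanRoomStatusEnum

    created = w.clean_rooms.create(clean_room=CleanRoom(name="demo-room"))
    # create() returns while the room is still PROVISIONING; poll get() until it
    # leaves that state (fixed-interval polling keeps the sketch simple).
    room = w.clean_rooms.get(created.name)
    while room.status == CleanRoomStatusEnum.PROVISIONING:
        time.sleep(30)
        room = w.clean_rooms.get(created.name)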
- + :param name: str - + :returns: :class:`CleanRoom` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/clean-rooms/{name}' - - , headers=headers - ) - return CleanRoom.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CleanRoom]: + res = self._api.do("GET", f"/api/2.0/clean-rooms/{name}", headers=headers) + return CleanRoom.from_dict(res) + + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[CleanRoom]: """List clean rooms. - + Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are returned. - + :param page_size: int (optional) Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoom` """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/clean-rooms', query=query - - , headers=headers - ) - if 'clean_rooms' in json: - for v in json['clean_rooms']: - yield CleanRoom.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , name: str - , * - , clean_room: Optional[CleanRoom] = None) -> CleanRoom: + while True: + json = self._api.do("GET", "/api/2.0/clean-rooms", query=query, headers=headers) + if "clean_rooms" in json: + for v in json["clean_rooms"]: + yield CleanRoom.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom: """Update a clean room. - + Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM** privilege, or be a metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + :param name: str Name of the clean room. :param clean_room: :class:`CleanRoom` (optional) - + :returns: :class:`CleanRoom` """ body = {} - if clean_room is not None: body['clean_room'] = clean_room.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/clean-rooms/{name}', body=body - - , headers=headers - ) + if clean_room is not None: + body["clean_room"] = clean_room.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/clean-rooms/{name}", body=body, headers=headers) return CleanRoom.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 550174964..46d940a04 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -1,31 +1,29 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
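(Every dataclass in these generated modules obeys the same as_dict/from_dict contract that the hunks below keep reformatting; a small round-trip sketch using AutoScale, which appears later in this file, with illustrative values.)

    from databricks.sdk.service.compute import AutoScale

    scale = AutoScale(min_workers=2, max_workers=8)
    body = scale.as_dict()  # {'max_workers': 8, 'min_workers': 2}, suitable as a JSON request body
    assert AutoScale.from_dict(body) == scale  # deserialization is the exact inverse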
from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Callable, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AddInstanceProfile: instance_profile_arn: str """The AWS ARN of the instance profile to register with Databricks. This field is required.""" - + iam_role_arn: Optional[str] = None """The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile @@ -34,44 +32,55 @@ class AddInstanceProfile: Otherwise, this field is optional. [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html""" - + is_meta_instance_profile: Optional[bool] = None """Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains a meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. This field is optional; the default value is `false`.""" - + skip_validation: Optional[bool] = None """By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation fails with an error message that does not indicate an IAM related permission issue (e.g. 
“Your requested instance type is not supported in your requested availability zone”), you can pass this flag to skip the validation and forcibly add the instance profile.""" - + def as_dict(self) -> dict: """Serializes the AddInstanceProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.iam_role_arn is not None: + body["iam_role_arn"] = self.iam_role_arn + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: + body["is_meta_instance_profile"] = self.is_meta_instance_profile + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body def as_shallow_dict(self) -> dict: """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile - if self.skip_validation is not None: body['skip_validation'] = self.skip_validation + if self.iam_role_arn is not None: + body["iam_role_arn"] = self.iam_role_arn + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: + body["is_meta_instance_profile"] = self.is_meta_instance_profile + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AddInstanceProfile: """Deserializes the AddInstanceProfile from a dictionary.""" - return cls(iam_role_arn=d.get('iam_role_arn', None), instance_profile_arn=d.get('instance_profile_arn', None), is_meta_instance_profile=d.get('is_meta_instance_profile', None), skip_validation=d.get('skip_validation', None)) - - + return cls( + iam_role_arn=d.get("iam_role_arn", None), + instance_profile_arn=d.get("instance_profile_arn", None), + is_meta_instance_profile=d.get("is_meta_instance_profile", None), + skip_validation=d.get("skip_validation", None), + ) @dataclass @@ -90,36 +99,34 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AddResponse: """Deserializes the AddResponse from a dictionary.""" return cls() - - @dataclass class Adlsgen2Info: """A storage location in Adls Gen2""" - + destination: str """abfss destination, e.g. 
`abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>`.""" - + def as_dict(self) -> dict: """Serializes the Adlsgen2Info into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the Adlsgen2Info into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Adlsgen2Info: """Deserializes the Adlsgen2Info from a dictionary.""" - return cls(destination=d.get('destination', None)) - - + return cls(destination=d.get("destination", None)) @dataclass @@ -127,42 +134,44 @@ class AutoScale: max_workers: Optional[int] = None """The maximum number of workers to which the cluster can scale up when overloaded. Note that `max_workers` must be strictly greater than `min_workers`.""" - + min_workers: Optional[int] = None """The minimum number of workers to which the cluster can scale down when underutilized. It is also the initial number of workers the cluster will have after creation.""" - + def as_dict(self) -> dict: """Serializes the AutoScale into a dictionary suitable for use as a JSON request body.""" body = {} - if self.max_workers is not None: body['max_workers'] = self.max_workers - if self.min_workers is not None: body['min_workers'] = self.min_workers + if self.max_workers is not None: + body["max_workers"] = self.max_workers + if self.min_workers is not None: + body["min_workers"] = self.min_workers return body def as_shallow_dict(self) -> dict: """Serializes the AutoScale into a shallow dictionary of its immediate attributes.""" body = {} - if self.max_workers is not None: body['max_workers'] = self.max_workers - if self.min_workers is not None: body['min_workers'] = self.min_workers + if self.max_workers is not None: + body["max_workers"] = self.max_workers + if self.min_workers is not None: + body["min_workers"] = self.min_workers return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoScale: """Deserializes the AutoScale from a dictionary.""" - return cls(max_workers=d.get('max_workers', None), min_workers=d.get('min_workers', None)) - - + return cls(max_workers=d.get("max_workers", None), min_workers=d.get("min_workers", None)) @dataclass class AwsAttributes: """Attributes set during cluster creation which are related to Amazon Web Services.""" - + availability: Optional[AwsAvailability] = None """Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - + ebs_volume_count: Optional[int] = None """The number of volumes launched for each instance. Users can choose up to 10 volumes. This feature is only enabled for supported node types. Legacy node types cannot specify custom EBS @@ -179,23 +188,23 @@ class AwsAttributes: Please note that if EBS volumes are specified, then the Spark configuration `spark.local.dir` will be overridden.""" - + ebs_volume_iops: Optional[int] = None """If using gp3 volumes, what IOPS to use for the disk. 
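(A hedged sketch combining the AWS attribute fields documented here: an on-demand driver with spot workers and explicit gp-series EBS volumes. The EbsVolumeType member names come from this module; every value is illustrative.)

    from databricks.sdk.service.compute import AwsAttributes, AwsAvailability, EbsVolumeType

    aws = AwsAttributes(
        first_on_demand=1,  # keep the driver on on-demand capacity
        availability=AwsAvailability.SPOT_WITH_FALLBACK,
        spot_bid_price_percent=100,  # bid at 100% of the on-demand price (the default)
        ebs_volume_type=EbsVolumeType.GENERAL_PURPOSE_SSD,
        ebs_volume_count=1,
        ebs_volume_size=100,  # GiB; must fall in the 100 - 4096 range for general purpose SSD
    )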
If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.""" - + ebs_volume_size: Optional[int] = None """The size of each EBS volume (in GiB) launched for each instance. For general purpose SSD, this value must be within the range 100 - 4096. For throughput optimized HDD, this value must be within the range 500 - 4096.""" - + ebs_volume_throughput: Optional[int] = None """If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.""" - + ebs_volume_type: Optional[EbsVolumeType] = None """The type of EBS volumes that will be launched with this cluster.""" - + first_on_demand: Optional[int] = None """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. If this value is greater than 0, the cluster driver node in particular will be placed on an on-demand @@ -204,7 +213,7 @@ class AwsAttributes: `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed on `availability` instances. Note that this value does not affect cluster size and cannot currently be mutated over the lifetime of a cluster.""" - + instance_profile_arn: Optional[str] = None """Nodes for this cluster will only be placed on AWS instances with this instance profile. If omitted, nodes will be placed on instances without an IAM instance profile. The instance @@ -212,7 +221,7 @@ class AwsAttributes: administrator. This feature may only be available to certain customer plans.""" - + spot_bid_price_percent: Optional[int] = None """The bid price for AWS spot instances, as a percentage of the corresponding instance type's on-demand price. For example, if this field is set to 50, and the cluster needs a new @@ -221,7 +230,7 @@ class AwsAttributes: `r3.xlarge` instances. If not specified, the default value is 100. When spot instances are requested for this cluster, only spot instances whose bid price percentage matches this field will be considered. Note that, for safety, we enforce this field to be no more than 10000.""" - + zone_id: Optional[str] = None """Identifier for the availability zone/datacenter in which the cluster resides. This string will be of a form like "us-west-2a". 
The provided availability zone must be in the same region as the @@ -233,62 +242,92 @@ class AwsAttributes: The list of available zones as well as the default value can be found by using the `List Zones` method.""" - + def as_dict(self) -> dict: """Serializes the AwsAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: body['availability'] = self.availability.value - if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count - if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops - if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size - if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput - if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value - if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.availability is not None: + body["availability"] = self.availability.value + if self.ebs_volume_count is not None: + body["ebs_volume_count"] = self.ebs_volume_count + if self.ebs_volume_iops is not None: + body["ebs_volume_iops"] = self.ebs_volume_iops + if self.ebs_volume_size is not None: + body["ebs_volume_size"] = self.ebs_volume_size + if self.ebs_volume_throughput is not None: + body["ebs_volume_throughput"] = self.ebs_volume_throughput + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type.value + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.spot_bid_price_percent is not None: + body["spot_bid_price_percent"] = self.spot_bid_price_percent + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the AwsAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: body['availability'] = self.availability - if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count - if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops - if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size - if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput - if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type - if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.availability is not None: + body["availability"] = self.availability + if self.ebs_volume_count is not None: + body["ebs_volume_count"] = self.ebs_volume_count + if self.ebs_volume_iops is not None: + body["ebs_volume_iops"] = self.ebs_volume_iops + if self.ebs_volume_size is not None: + body["ebs_volume_size"] = self.ebs_volume_size + if 
self.ebs_volume_throughput is not None: + body["ebs_volume_throughput"] = self.ebs_volume_throughput + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.spot_bid_price_percent is not None: + body["spot_bid_price_percent"] = self.spot_bid_price_percent + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsAttributes: """Deserializes the AwsAttributes from a dictionary.""" - return cls(availability=_enum(d, 'availability', AwsAvailability), ebs_volume_count=d.get('ebs_volume_count', None), ebs_volume_iops=d.get('ebs_volume_iops', None), ebs_volume_size=d.get('ebs_volume_size', None), ebs_volume_throughput=d.get('ebs_volume_throughput', None), ebs_volume_type=_enum(d, 'ebs_volume_type', EbsVolumeType), first_on_demand=d.get('first_on_demand', None), instance_profile_arn=d.get('instance_profile_arn', None), spot_bid_price_percent=d.get('spot_bid_price_percent', None), zone_id=d.get('zone_id', None)) - - + return cls( + availability=_enum(d, "availability", AwsAvailability), + ebs_volume_count=d.get("ebs_volume_count", None), + ebs_volume_iops=d.get("ebs_volume_iops", None), + ebs_volume_size=d.get("ebs_volume_size", None), + ebs_volume_throughput=d.get("ebs_volume_throughput", None), + ebs_volume_type=_enum(d, "ebs_volume_type", EbsVolumeType), + first_on_demand=d.get("first_on_demand", None), + instance_profile_arn=d.get("instance_profile_arn", None), + spot_bid_price_percent=d.get("spot_bid_price_percent", None), + zone_id=d.get("zone_id", None), + ) class AwsAvailability(Enum): """Availability type used for all subsequent nodes past the `first_on_demand` ones. - + Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - - ON_DEMAND = 'ON_DEMAND' - SPOT = 'SPOT' - SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK' + + ON_DEMAND = "ON_DEMAND" + SPOT = "SPOT" + SPOT_WITH_FALLBACK = "SPOT_WITH_FALLBACK" + @dataclass class AzureAttributes: """Attributes set during cluster creation which are related to Microsoft Azure.""" - + availability: Optional[AzureAvailability] = None """Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - + first_on_demand: Optional[int] = None """The first `first_on_demand` nodes of the cluster will be placed on on-demand instances. This value should be greater than 0, to make sure the cluster driver node is placed on an on-demand @@ -297,80 +336,100 @@ class AzureAttributes: `first_on_demand` nodes will be placed on on-demand instances and the remainder will be placed on `availability` instances. Note that this value does not affect cluster size and cannot currently be mutated over the lifetime of a cluster.""" - + log_analytics_info: Optional[LogAnalyticsInfo] = None """Defines values necessary to configure and run Azure Log Analytics agent""" - + spot_bid_max_price: Optional[float] = None """The max bid price to be used for Azure spot instances. The Max price for the bid cannot be higher than the on-demand price of the instance. If not specified, the default value is -1, which specifies that the instance cannot be evicted on the basis of price, and only on the basis of availability. 
Further, the value should be > 0 or -1.""" - + def as_dict(self) -> dict: """Serializes the AzureAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: body['availability'] = self.availability.value - if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand - if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info.as_dict() - if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price + if self.availability is not None: + body["availability"] = self.availability.value + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand + if self.log_analytics_info: + body["log_analytics_info"] = self.log_analytics_info.as_dict() + if self.spot_bid_max_price is not None: + body["spot_bid_max_price"] = self.spot_bid_max_price return body def as_shallow_dict(self) -> dict: """Serializes the AzureAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: body['availability'] = self.availability - if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand - if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info - if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price + if self.availability is not None: + body["availability"] = self.availability + if self.first_on_demand is not None: + body["first_on_demand"] = self.first_on_demand + if self.log_analytics_info: + body["log_analytics_info"] = self.log_analytics_info + if self.spot_bid_max_price is not None: + body["spot_bid_max_price"] = self.spot_bid_max_price return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureAttributes: """Deserializes the AzureAttributes from a dictionary.""" - return cls(availability=_enum(d, 'availability', AzureAvailability), first_on_demand=d.get('first_on_demand', None), log_analytics_info=_from_dict(d, 'log_analytics_info', LogAnalyticsInfo), spot_bid_max_price=d.get('spot_bid_max_price', None)) - - + return cls( + availability=_enum(d, "availability", AzureAvailability), + first_on_demand=d.get("first_on_demand", None), + log_analytics_info=_from_dict(d, "log_analytics_info", LogAnalyticsInfo), + spot_bid_max_price=d.get("spot_bid_max_price", None), + ) class AzureAvailability(Enum): """Availability type used for all subsequent nodes past the `first_on_demand` ones. 
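(The Azure counterpart, per the spot_bid_max_price semantics spelled out above, where -1 disables price-based eviction; values are illustrative.)

    from databricks.sdk.service.compute import AzureAttributes, AzureAvailability

    azure = AzureAttributes(
        first_on_demand=1,
        availability=AzureAvailability.SPOT_WITH_FALLBACK_AZURE,
        spot_bid_max_price=-1,  # evict only on availability, never on price
    )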
Note: If `first_on_demand` is zero, this availability type will be used for the entire cluster.""" - - ON_DEMAND_AZURE = 'ON_DEMAND_AZURE' - SPOT_AZURE = 'SPOT_AZURE' - SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE' + + ON_DEMAND_AZURE = "ON_DEMAND_AZURE" + SPOT_AZURE = "SPOT_AZURE" + SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE" + @dataclass class CancelCommand: cluster_id: Optional[str] = None - + command_id: Optional[str] = None - + context_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CancelCommand into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.command_id is not None: body['commandId'] = self.command_id - if self.context_id is not None: body['contextId'] = self.context_id + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.command_id is not None: + body["commandId"] = self.command_id + if self.context_id is not None: + body["contextId"] = self.context_id return body def as_shallow_dict(self) -> dict: """Serializes the CancelCommand into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.command_id is not None: body['commandId'] = self.command_id - if self.context_id is not None: body['contextId'] = self.context_id + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.command_id is not None: + body["commandId"] = self.command_id + if self.context_id is not None: + body["contextId"] = self.context_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CancelCommand: """Deserializes the CancelCommand from a dictionary.""" - return cls(cluster_id=d.get('clusterId', None), command_id=d.get('commandId', None), context_id=d.get('contextId', None)) - - + return cls( + cluster_id=d.get("clusterId", None), + command_id=d.get("commandId", None), + context_id=d.get("contextId", None), + ) @dataclass @@ -389,37 +448,37 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelResponse: """Deserializes the CancelResponse from a dictionary.""" return cls() - - @dataclass class ChangeClusterOwner: cluster_id: str - + owner_username: str """New owner of the cluster_id after this RPC.""" - + def as_dict(self) -> dict: """Serializes the ChangeClusterOwner into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.owner_username is not None: body['owner_username'] = self.owner_username + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.owner_username is not None: + body["owner_username"] = self.owner_username return body def as_shallow_dict(self) -> dict: """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.owner_username is not None: body['owner_username'] = self.owner_username + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.owner_username is not None: + body["owner_username"] = self.owner_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ChangeClusterOwner: """Deserializes the ChangeClusterOwner from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), owner_username=d.get('owner_username', None)) - - + return 
@dataclass
@@ -389,37 +448,37 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> CancelResponse:
        """Deserializes the CancelResponse from a dictionary."""
        return cls()
-
-


@dataclass
class ChangeClusterOwner:
    cluster_id: str
-
+
    owner_username: str
    """New owner of the cluster_id after this RPC."""
-
+
    def as_dict(self) -> dict:
        """Serializes the ChangeClusterOwner into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.owner_username is not None: body['owner_username'] = self.owner_username
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.owner_username is not None:
+            body["owner_username"] = self.owner_username
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.owner_username is not None: body['owner_username'] = self.owner_username
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.owner_username is not None:
+            body["owner_username"] = self.owner_username
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ChangeClusterOwner:
        """Deserializes the ChangeClusterOwner from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), owner_username=d.get('owner_username', None))
-
-
+        return cls(cluster_id=d.get("cluster_id", None), owner_username=d.get("owner_username", None))


@dataclass
@@ -438,217 +497,242 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> ChangeClusterOwnerResponse:
        """Deserializes the ChangeClusterOwnerResponse from a dictionary."""
        return cls()
-
-


@dataclass
class ClientsTypes:
    jobs: Optional[bool] = None
    """With jobs set, the cluster can be used for jobs"""
-
+
    notebooks: Optional[bool] = None
    """With notebooks set, this cluster can be used for notebooks"""
-
+
    def as_dict(self) -> dict:
        """Serializes the ClientsTypes into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.jobs is not None: body['jobs'] = self.jobs
-        if self.notebooks is not None: body['notebooks'] = self.notebooks
+        if self.jobs is not None:
+            body["jobs"] = self.jobs
+        if self.notebooks is not None:
+            body["notebooks"] = self.notebooks
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ClientsTypes into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.jobs is not None: body['jobs'] = self.jobs
-        if self.notebooks is not None: body['notebooks'] = self.notebooks
+        if self.jobs is not None:
+            body["jobs"] = self.jobs
+        if self.notebooks is not None:
+            body["notebooks"] = self.notebooks
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ClientsTypes:
        """Deserializes the ClientsTypes from a dictionary."""
-        return cls(jobs=d.get('jobs', None), notebooks=d.get('notebooks', None))
-
-
+        return cls(jobs=d.get("jobs", None), notebooks=d.get("notebooks", None))


@dataclass
class CloneCluster:
    source_cluster_id: str
    """The cluster that is being cloned."""
-
+
    def as_dict(self) -> dict:
        """Serializes the CloneCluster into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
+        if self.source_cluster_id is not None:
+            body["source_cluster_id"] = self.source_cluster_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CloneCluster into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
+        if self.source_cluster_id is not None:
+            body["source_cluster_id"] = self.source_cluster_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CloneCluster:
        """Deserializes the CloneCluster from a dictionary."""
-        return cls(source_cluster_id=d.get('source_cluster_id', None))
-
-
+        return cls(source_cluster_id=d.get("source_cluster_id", None))


@dataclass
class CloudProviderNodeInfo:
    status: Optional[List[CloudProviderNodeStatus]] = None
    """Status as reported by the cloud provider"""
-
+
    def as_dict(self) -> dict:
        """Serializes the CloudProviderNodeInfo into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.status: body['status'] = [v.value for v in self.status]
+        if self.status:
+            body["status"] = [v.value for v in self.status]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the CloudProviderNodeInfo into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.status: body['status'] = self.status
+        if self.status:
+            body["status"] = self.status
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> CloudProviderNodeInfo:
        """Deserializes the CloudProviderNodeInfo from a dictionary."""
-        return cls(status=_repeated_enum(d, 'status', CloudProviderNodeStatus))
-
-
+        return cls(status=_repeated_enum(d, "status", CloudProviderNodeStatus))
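The `_repeated_enum` helper used here maps each wire string onto a member of the enum defined just
below, and `as_dict` maps it back via `.value`. A minimal sketch:

    from databricks.sdk.service.compute import CloudProviderNodeInfo, CloudProviderNodeStatus

    info = CloudProviderNodeInfo.from_dict({"status": ["NotAvailableInRegion"]})
    assert info.status == [CloudProviderNodeStatus.NOT_AVAILABLE_IN_REGION]
    assert info.as_dict() == {"status": ["NotAvailableInRegion"]}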
class CloudProviderNodeStatus(Enum):
-
-    NOT_AVAILABLE_IN_REGION = 'NotAvailableInRegion'
-    NOT_ENABLED_ON_SUBSCRIPTION = 'NotEnabledOnSubscription'
+
+    NOT_AVAILABLE_IN_REGION = "NotAvailableInRegion"
+    NOT_ENABLED_ON_SUBSCRIPTION = "NotEnabledOnSubscription"
+


@dataclass
class ClusterAccessControlRequest:
    group_name: Optional[str] = None
    """name of the group"""
-
+
    permission_level: Optional[ClusterPermissionLevel] = None
    """Permission level"""
-
+
    service_principal_name: Optional[str] = None
    """application ID of a service principal"""
-
+
    user_name: Optional[str] = None
    """name of the user"""
-
+
    def as_dict(self) -> dict:
        """Serializes the ClusterAccessControlRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ClusterAccessControlRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ClusterAccessControlRequest:
        """Deserializes the ClusterAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", ClusterPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
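For reviewers skimming the ACL types: enum-valued fields serialize to their `.value`, and unset
optionals are omitted from the body entirely. A sketch with an illustrative group name:

    from databricks.sdk.service.compute import ClusterAccessControlRequest, ClusterPermissionLevel

    acl = ClusterAccessControlRequest(
        group_name="data-engineers",
        permission_level=ClusterPermissionLevel.CAN_RESTART,
    )
    assert acl.as_dict() == {"group_name": "data-engineers", "permission_level": "CAN_RESTART"}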
@dataclass
class ClusterAccessControlResponse:
    all_permissions: Optional[List[ClusterPermission]] = None
    """All permissions."""
-
+
    display_name: Optional[str] = None
    """Display name of the user or service principal."""
-
+
    group_name: Optional[str] = None
    """name of the group"""
-
+
    service_principal_name: Optional[str] = None
    """Name of the service principal."""
-
+
    user_name: Optional[str] = None
    """name of the user"""
-
+
    def as_dict(self) -> dict:
        """Serializes the ClusterAccessControlResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ClusterAccessControlResponse into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ClusterAccessControlResponse:
        """Deserializes the ClusterAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', ClusterPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", ClusterPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )


@dataclass
class ClusterAttributes:
    """Common set of attributes set during cluster creation. These attributes cannot be changed over
    the lifetime of a cluster."""
-
+
    spark_version: str
    """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can
    be retrieved by using the :method:clusters/sparkVersions API call."""
-
+
    autotermination_minutes: Optional[int] = None
    """Automatically terminates the cluster after it is inactive for this time in minutes. If not set,
    this cluster will not be automatically terminated. If specified, the threshold must be between
    10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic
    termination."""
-
+
    aws_attributes: Optional[AwsAttributes] = None
    """Attributes related to clusters running on Amazon Web Services. If not specified at cluster
    creation, a set of default values will be used."""
-
+
    azure_attributes: Optional[AzureAttributes] = None
    """Attributes related to clusters running on Microsoft Azure.
    If not specified at cluster creation, a set of default values will be used."""
-
+
    cluster_log_conf: Optional[ClusterLogConf] = None
    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
    specified for one cluster. If the conf is given, the logs will be delivered to the destination
    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
    destination of executor logs is `$destination/$clusterId/executor`."""
-
+
    cluster_name: Optional[str] = None
    """Cluster name requested by the user. This doesn't have to be unique. If not specified at
    creation, the cluster name will be an empty string. For job clusters, the cluster name is
    automatically set based on the job and job run IDs."""
-
-    custom_tags: Optional[Dict[str,str]] = None
+
+    custom_tags: Optional[Dict[str, str]] = None
    """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
    instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
@@ -656,7 +740,7 @@ class ClusterAttributes:
    - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster
    tags"""
-
+
    data_security_mode: Optional[DataSecurityMode] = None
    """Data security mode decides what data governance model to use when accessing data from a cluster.
@@ -682,14 +766,14 @@ class ClusterAttributes:
    concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
    that doesn’t have UC nor passthrough enabled."""
-
+
    docker_image: Optional[DockerImage] = None
    """Custom docker image BYOC"""
-
+
    driver_instance_pool_id: Optional[str] = None
    """The optional ID of the instance pool to which the driver of the cluster belongs. The pool
    cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned."""
-
+
    driver_node_type_id: Optional[str] = None
    """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
    type will be set as the same value as `node_type_id` defined above.
@@ -697,33 +781,33 @@ class ClusterAttributes:
    This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both
    driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id
    and node_type_id take precedence."""
-
+
    enable_elastic_disk: Optional[bool] = None
    """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
    space when its Spark workers are running low on disk space. This feature requires specific AWS
    permissions to function correctly - refer to the User Guide for more details."""
-
+
    enable_local_disk_encryption: Optional[bool] = None
    """Whether to enable LUKS on cluster VMs' local disks"""
-
+
    gcp_attributes: Optional[GcpAttributes] = None
    """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
    creation, a set of default values will be used."""
-
+
    init_scripts: Optional[List[InitScriptInfo]] = None
    """The configuration for storing init scripts. Any number of destinations can be specified. The
    scripts are executed sequentially in the order provided.
    If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`."""
-
+
    instance_pool_id: Optional[str] = None
    """The optional ID of the instance pool to which the cluster belongs."""
-
+
    is_single_node: Optional[bool] = None
    """This field can only be used when `kind = CLASSIC_PREVIEW`.

    When set to true, Databricks will automatically set single node related `custom_tags`,
    `spark_conf`, and `num_workers`"""
-
+
    kind: Optional[Kind] = None
    """The kind of compute described by this compute specification.
@@ -738,16 +822,16 @@ class ClusterAttributes:
    By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.

    [simple form]: https://docs.databricks.com/compute/simple-form.html"""
-
+
    node_type_id: Optional[str] = None
    """This field encodes, through a single value, the resources available to each of the Spark nodes
    in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
    compute intensive workloads. A list of available node types can be retrieved by using the
    :method:clusters/listNodeTypes API call."""
-
+
    policy_id: Optional[str] = None
    """The ID of the cluster policy used to create the cluster if applicable."""
-
+
    runtime_engine: Optional[RuntimeEngine] = None
    """Determines the cluster's runtime engine, either standard or Photon.
@@ -756,16 +840,16 @@ class ClusterAttributes:
    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
    -photon-, in which case Photon will be used."""
-
+
    single_user_name: Optional[str] = None
    """Single user name if data_security_mode is `SINGLE_USER`"""
-
-    spark_conf: Optional[Dict[str,str]] = None
+
+    spark_conf: Optional[Dict[str, str]] = None
    """An object containing a set of optional, user-specified Spark configuration key-value pairs.
    Users can also pass in a string of extra JVM options to the driver and the executors via
    `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
-
-    spark_env_vars: Optional[Dict[str,str]] = None
+
+    spark_env_vars: Optional[Dict[str, str]] = None
    """An object containing a set of optional, user-specified environment variable key-value pairs.
    Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`)
    while launching the driver and workers.
@@ -777,188 +861,276 @@ class ClusterAttributes:
    Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
    "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
    -Dspark.shuffle.service.enabled=true"}`"""
-
+
    ssh_public_keys: Optional[List[str]] = None
    """SSH public key contents that will be added to each Spark node in this cluster. The
    corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up
    to 10 keys can be specified."""
-
+
    use_ml_runtime: Optional[bool] = None
    """This field can only be used when `kind = CLASSIC_PREVIEW`.
    `effective_spark_version` is determined by `spark_version` (DBR release), this field
    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
-
+
    workload_type: Optional[WorkloadType] = None
    """Cluster Attributes showing for clusters workload types."""
-
+
    def as_dict(self) -> dict:
        """Serializes the ClusterAttributes into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+        if self.autotermination_minutes is not None:
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
+        if self.driver_instance_pool_id is not None:
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.init_scripts: + body["init_scripts"] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind.value + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine.value + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf - if self.cluster_name is not None: body['cluster_name'] = self.cluster_name - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode - if self.docker_image: body['docker_image'] = self.docker_image - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.init_scripts: body['init_scripts'] = self.init_scripts - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.is_single_node is not None: body['is_single_node'] = self.is_single_node - if self.kind is not None: body['kind'] = self.kind - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine - if self.single_user_name is not None: body['single_user_name'] = self.single_user_name - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.spark_version is not None: 
body['spark_version'] = self.spark_version - if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys - if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime - if self.workload_type: body['workload_type'] = self.workload_type + if self.autotermination_minutes is not None: + body["autotermination_minutes"] = self.autotermination_minutes + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf + if self.cluster_name is not None: + body["cluster_name"] = self.cluster_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.data_security_mode is not None: + body["data_security_mode"] = self.data_security_mode + if self.docker_image: + body["docker_image"] = self.docker_image + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.init_scripts: + body["init_scripts"] = self.init_scripts + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = self.ssh_public_keys + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAttributes: """Deserializes the ClusterAttributes from a dictionary.""" - return cls(autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', 
None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) - - + return cls( + autotermination_minutes=d.get("autotermination_minutes", None), + aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), + cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), + cluster_name=d.get("cluster_name", None), + custom_tags=d.get("custom_tags", None), + data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), + docker_image=_from_dict(d, "docker_image", DockerImage), + driver_instance_pool_id=d.get("driver_instance_pool_id", None), + driver_node_type_id=d.get("driver_node_type_id", None), + enable_elastic_disk=d.get("enable_elastic_disk", None), + enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), + gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), + init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), + instance_pool_id=d.get("instance_pool_id", None), + is_single_node=d.get("is_single_node", None), + kind=_enum(d, "kind", Kind), + node_type_id=d.get("node_type_id", None), + policy_id=d.get("policy_id", None), + runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), + single_user_name=d.get("single_user_name", None), + spark_conf=d.get("spark_conf", None), + spark_env_vars=d.get("spark_env_vars", None), + spark_version=d.get("spark_version", None), + ssh_public_keys=d.get("ssh_public_keys", None), + use_ml_runtime=d.get("use_ml_runtime", None), + workload_type=_from_dict(d, "workload_type", WorkloadType), + ) @dataclass class ClusterCompliance: cluster_id: str """Canonical unique identifier for a cluster.""" - + is_compliant: Optional[bool] = None """Whether this cluster is in compliance with the latest version of its policy.""" - - violations: Optional[Dict[str,str]] = None + + violations: Optional[Dict[str, str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. 
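With a class this wide, the serialization contract is easiest to see on a small instance: only
`spark_version` is required, and unset optionals never appear in the request body. A sketch with
illustrative values:

    from databricks.sdk.service.compute import ClusterAttributes

    attrs = ClusterAttributes(
        spark_version="15.4.x-scala2.12",
        cluster_name="nightly-etl",
        autotermination_minutes=60,
        spark_conf={"spark.speculation": "true"},
    )
    body = attrs.as_dict()
    assert body["spark_version"] == "15.4.x-scala2.12"
    assert "docker_image" not in body  # unset optionals are omitted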
@dataclass
class ClusterCompliance:
    cluster_id: str
    """Canonical unique identifier for a cluster."""
-
+
    is_compliant: Optional[bool] = None
    """Whether this cluster is in compliance with the latest version of its policy."""
-
-    violations: Optional[Dict[str,str]] = None
+
+    violations: Optional[Dict[str, str]] = None
    """An object containing key-value mappings representing the first 200 policy validation errors.
    The keys indicate the path where the policy validation error is occurring. The values indicate
    an error message describing the policy validation error."""
-
+
    def as_dict(self) -> dict:
        """Serializes the ClusterCompliance into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ClusterCompliance into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
-        if self.violations: body['violations'] = self.violations
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.is_compliant is not None:
+            body["is_compliant"] = self.is_compliant
+        if self.violations:
+            body["violations"] = self.violations
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ClusterCompliance:
        """Deserializes the ClusterCompliance from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), is_compliant=d.get('is_compliant', None), violations=d.get('violations', None))
-
-
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            is_compliant=d.get("is_compliant", None),
+            violations=d.get("violations", None),
+        )
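The violations map is keyed by policy path, which makes compliance results straightforward to
report. A usage sketch over an illustrative payload:

    from databricks.sdk.service.compute import ClusterCompliance

    compliance = ClusterCompliance.from_dict(
        {"cluster_id": "0123-456789-abcdef", "is_compliant": False, "violations": {"spark_version": "value not allowed by policy"}}
    )
    if not compliance.is_compliant:
        for path, message in (compliance.violations or {}).items():
            print(f"{path}: {message}")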
@dataclass
class ClusterDetails:
    """Describes all of the metadata about a single Spark cluster in Databricks."""
-
+
    autoscale: Optional[AutoScale] = None
    """Parameters needed in order to automatically scale clusters up and down based on load. Note:
    autoscaling works best with DB runtime versions 3.0 or later."""
-
+
    autotermination_minutes: Optional[int] = None
    """Automatically terminates the cluster after it is inactive for this time in minutes. If not set,
    this cluster will not be automatically terminated. If specified, the threshold must be between
    10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic
    termination."""
-
+
    aws_attributes: Optional[AwsAttributes] = None
    """Attributes related to clusters running on Amazon Web Services. If not specified at cluster
    creation, a set of default values will be used."""
-
+
    azure_attributes: Optional[AzureAttributes] = None
    """Attributes related to clusters running on Microsoft Azure. If not specified at cluster
    creation, a set of default values will be used."""
-
+
    cluster_cores: Optional[float] = None
    """Number of CPU cores available for this cluster. Note that this can be fractional, e.g. 7.5
    cores, since certain node types are configured to share cores between Spark nodes on the same
    instance."""
-
+
    cluster_id: Optional[str] = None
    """Canonical identifier for the cluster. This id is retained during cluster restarts and resizes,
    while each new cluster has a globally unique id."""
-
+
    cluster_log_conf: Optional[ClusterLogConf] = None
    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
    specified for one cluster. If the conf is given, the logs will be delivered to the destination
    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
    destination of executor logs is `$destination/$clusterId/executor`."""
-
+
    cluster_log_status: Optional[LogSyncStatus] = None
    """Cluster log delivery status."""
-
+
    cluster_memory_mb: Optional[int] = None
    """Total amount of cluster memory, in megabytes"""
-
+
    cluster_name: Optional[str] = None
    """Cluster name requested by the user. This doesn't have to be unique. If not specified at
    creation, the cluster name will be an empty string. For job clusters, the cluster name is
    automatically set based on the job and job run IDs."""
-
+
    cluster_source: Optional[ClusterSource] = None
    """Determines whether the cluster was created by a user through the UI, created by the Databricks
    Jobs Scheduler, or through an API request."""
-
+
    creator_user_name: Optional[str] = None
    """Creator user name. The field won't be included in the response if the user has already been
    deleted."""
-
-    custom_tags: Optional[Dict[str,str]] = None
+
+    custom_tags: Optional[Dict[str, str]] = None
    """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
    instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
@@ -966,7 +1138,7 @@ class ClusterDetails:
    - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster
    tags"""
-
+
    data_security_mode: Optional[DataSecurityMode] = None
    """Data security mode decides what data governance model to use when accessing data from a cluster.
@@ -992,8 +1164,8 @@ class ClusterDetails:
    concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
    Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
    that doesn’t have UC nor passthrough enabled."""
-
-    default_tags: Optional[Dict[str,str]] = None
+
+    default_tags: Optional[Dict[str, str]] = None
    """Tags that are added by Databricks regardless of any `custom_tags`, including:

    - Vendor: Databricks
@@ -1005,18 +1177,18 @@ class ClusterDetails:
    - ClusterId:
    - Name: """
-
+
    docker_image: Optional[DockerImage] = None
    """Custom docker image BYOC"""
-
+
    driver: Optional[SparkNode] = None
    """Node on which the Spark driver resides. The driver node contains the Spark master and the
    Databricks application that manages the per-notebook Spark REPLs."""
-
+
    driver_instance_pool_id: Optional[str] = None
    """The optional ID of the instance pool to which the driver of the cluster belongs. The pool
    cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned."""
-
+
    driver_node_type_id: Optional[str] = None
    """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
    type will be set as the same value as `node_type_id` defined above.
@@ -1024,40 +1196,40 @@ class ClusterDetails:
    This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both
    driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id
    and node_type_id take precedence."""
-
+
    enable_elastic_disk: Optional[bool] = None
    """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
    space when its Spark workers are running low on disk space. This feature requires specific AWS
    permissions to function correctly - refer to the User Guide for more details."""
-
+
    enable_local_disk_encryption: Optional[bool] = None
    """Whether to enable LUKS on cluster VMs' local disks"""
-
+
    executors: Optional[List[SparkNode]] = None
    """Nodes on which the Spark executors reside."""
-
+
    gcp_attributes: Optional[GcpAttributes] = None
    """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
    creation, a set of default values will be used."""
-
+
    init_scripts: Optional[List[InitScriptInfo]] = None
    """The configuration for storing init scripts. Any number of destinations can be specified. The
    scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified,
    init script logs are sent to `//init_scripts`."""
-
+
    instance_pool_id: Optional[str] = None
    """The optional ID of the instance pool to which the cluster belongs."""
-
+
    is_single_node: Optional[bool] = None
    """This field can only be used when `kind = CLASSIC_PREVIEW`.

    When set to true, Databricks will automatically set single node related `custom_tags`,
    `spark_conf`, and `num_workers`"""
-
+
    jdbc_port: Optional[int] = None
    """Port on which Spark JDBC server is listening, in the driver node. No service will be listening
    on this port in executor nodes."""
-
+
    kind: Optional[Kind] = None
    """The kind of compute described by this compute specification.
@@ -1072,19 +1244,19 @@ class ClusterDetails:
    By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.

    [simple form]: https://docs.databricks.com/compute/simple-form.html"""
-
+
    last_restarted_time: Optional[int] = None
    """the timestamp that the cluster was started/restarted"""
-
+
    last_state_loss_time: Optional[int] = None
    """Time when the cluster driver last lost its state (due to a restart or driver failure)."""
-
+
    node_type_id: Optional[str] = None
    """This field encodes, through a single value, the resources available to each of the Spark nodes
    in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
    compute intensive workloads. A list of available node types can be retrieved by using the
    :method:clusters/listNodeTypes API call."""
-
+
    num_workers: Optional[int] = None
    """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
    `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -1094,10 +1266,10 @@ class ClusterDetails:
    from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
    workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the
    new nodes are provisioned."""
-
+
    policy_id: Optional[str] = None
    """The ID of the cluster policy used to create the cluster if applicable."""
-
+
    runtime_engine: Optional[RuntimeEngine] = None
    """Determines the cluster's runtime engine, either standard or Photon.
@@ -1106,21 +1278,21 @@ class ClusterDetails:
    If left unspecified, the runtime engine defaults to standard unless the spark_version contains
    -photon-, in which case Photon will be used."""
-
+
    single_user_name: Optional[str] = None
    """Single user name if data_security_mode is `SINGLE_USER`"""
-
-    spark_conf: Optional[Dict[str,str]] = None
+
+    spark_conf: Optional[Dict[str, str]] = None
    """An object containing a set of optional, user-specified Spark configuration key-value pairs.
    Users can also pass in a string of extra JVM options to the driver and the executors via
    `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
-
+
    spark_context_id: Optional[int] = None
    """A canonical SparkContext identifier. This value *does* change when the Spark driver restarts.
    The pair `(cluster_id, spark_context_id)` is a globally unique identifier over all Spark
    contexts."""
-
-    spark_env_vars: Optional[Dict[str,str]] = None
+
+    spark_env_vars: Optional[Dict[str, str]] = None
    """An object containing a set of optional, user-specified environment variable key-value pairs.
    Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`)
    while launching the driver and workers.
@@ -1132,658 +1304,913 @@ class ClusterDetails:
    Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
    "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
    -Dspark.shuffle.service.enabled=true"}`"""
-
+
    spark_version: Optional[str] = None
    """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can
    be retrieved by using the :method:clusters/sparkVersions API call."""
-
+
    spec: Optional[ClusterSpec] = None
    """The spec contains a snapshot of the latest user specified settings that were used to create/edit
    the cluster. Note: not included in the response of the ListClusters API."""
-
+
    ssh_public_keys: Optional[List[str]] = None
    """SSH public key contents that will be added to each Spark node in this cluster. The
    corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up
    to 10 keys can be specified."""
-
+
    start_time: Optional[int] = None
    """Time (in epoch milliseconds) when the cluster creation request was received (when the cluster
    entered a `PENDING` state)."""
-
+
    state: Optional[State] = None
    """Current state of the cluster."""
-
+
    state_message: Optional[str] = None
    """A message associated with the most recent state transition (e.g., the reason why the cluster
    entered a `TERMINATED` state)."""
-
+
    terminated_time: Optional[int] = None
    """Time (in epoch milliseconds) when the cluster was terminated, if applicable."""
-
+
    termination_reason: Optional[TerminationReason] = None
    """Information about why the cluster was terminated. This field only appears when the cluster is
    in a `TERMINATING` or `TERMINATED` state."""
-
+
    use_ml_runtime: Optional[bool] = None
    """This field can only be used when `kind = CLASSIC_PREVIEW`.
    `effective_spark_version` is determined by `spark_version` (DBR release), this field
    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
-
+
    workload_type: Optional[WorkloadType] = None
    """Cluster Attributes showing for clusters workload types."""
-
+
    def as_dict(self) -> dict:
        """Serializes the ClusterDetails into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status.as_dict()
-        if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.cluster_source is not None: body['cluster_source'] = self.cluster_source.value
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver: body['driver'] = self.driver.as_dict()
-        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.executors: body['executors'] = [v.as_dict() for v in self.executors]
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
-        if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.spec: body['spec'] = self.spec.as_dict()
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state is not None: body['state'] = self.state.value
-        if self.state_message is not None: body['state_message'] = self.state_message
-        if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
-        if self.termination_reason: body['termination_reason'] = self.termination_reason.as_dict()
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
+        if self.autotermination_minutes is not None:
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_cores is not None:
+            body["cluster_cores"] = self.cluster_cores
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_log_status:
+            body["cluster_log_status"] = self.cluster_log_status.as_dict()
+        if self.cluster_memory_mb is not None:
+            body["cluster_memory_mb"] = self.cluster_memory_mb
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.cluster_source is not None:
+            body["cluster_source"] = self.cluster_source.value
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
+        if self.driver:
+            body["driver"] = self.driver.as_dict()
+        if self.driver_instance_pool_id is not None:
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.executors:
+            body["executors"] = [v.as_dict() for v in self.executors]
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.jdbc_port is not None:
+            body["jdbc_port"] = self.jdbc_port
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.last_restarted_time is not None:
+            body["last_restarted_time"] = self.last_restarted_time
+        if self.last_state_loss_time is not None:
+            body["last_state_loss_time"] = self.last_state_loss_time
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_context_id is not None:
+            body["spark_context_id"] = self.spark_context_id
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.spec:
+            body["spec"] = self.spec.as_dict()
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.state_message is not None:
+            body["state_message"] = self.state_message
+        if self.terminated_time is not None:
+            body["terminated_time"] = self.terminated_time
+        if self.termination_reason:
+            body["termination_reason"] = self.termination_reason.as_dict()
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ClusterDetails into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale
-        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status
-        if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.cluster_source is not None: body['cluster_source'] = self.cluster_source
-        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.default_tags: body['default_tags'] = self.default_tags
-        if self.docker_image: body['docker_image'] = self.docker_image
-        if self.driver: body['driver'] = self.driver
-        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.executors: body['executors'] = self.executors
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
-        if self.kind is not None: body['kind'] = self.kind
-        if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
-        if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.spec: body['spec'] = self.spec
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.start_time is not None: body['start_time'] = self.start_time
-        if self.state is not None: body['state'] = self.state
-        if self.state_message is not None: body['state_message'] = self.state_message
-        if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
-        if self.termination_reason: body['termination_reason'] = self.termination_reason
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_cores is not None:
+            body["cluster_cores"] = self.cluster_cores
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_log_status:
+            body["cluster_log_status"] = self.cluster_log_status
+        if self.cluster_memory_mb is not None:
+            body["cluster_memory_mb"] = self.cluster_memory_mb
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.cluster_source is not None:
+            body["cluster_source"] = self.cluster_source
+        if self.creator_user_name is not None:
+            body["creator_user_name"] = self.creator_user_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.default_tags:
+            body["default_tags"] = self.default_tags
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
+        if self.driver:
+            body["driver"] = self.driver
+        if self.driver_instance_pool_id is not None:
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.executors:
+            body["executors"] = self.executors
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.jdbc_port is not None:
+            body["jdbc_port"] = self.jdbc_port
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.last_restarted_time is not None:
+            body["last_restarted_time"] = self.last_restarted_time
+        if self.last_state_loss_time is not None:
+            body["last_state_loss_time"] = self.last_state_loss_time
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_context_id is not None:
+            body["spark_context_id"] = self.spark_context_id
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.spec:
+            body["spec"] = self.spec
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.start_time is not None:
+            body["start_time"] = self.start_time
+        if self.state is not None:
+            body["state"] = self.state
+        if self.state_message is not None:
+            body["state_message"] = self.state_message
+        if self.terminated_time is not None:
+            body["terminated_time"] = self.terminated_time
+        if self.termination_reason:
+            body["termination_reason"] = self.termination_reason
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ClusterDetails:
        """Deserializes the ClusterDetails from a dictionary."""
-        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_cores=d.get('cluster_cores', None), cluster_id=d.get('cluster_id', None), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_log_status=_from_dict(d, 'cluster_log_status', LogSyncStatus), cluster_memory_mb=d.get('cluster_memory_mb', None), cluster_name=d.get('cluster_name', None), cluster_source=_enum(d, 'cluster_source', ClusterSource), creator_user_name=d.get('creator_user_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), default_tags=d.get('default_tags', None), docker_image=_from_dict(d, 'docker_image', DockerImage), driver=_from_dict(d, 'driver', SparkNode), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), executors=_repeated_dict(d, 'executors', SparkNode), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), jdbc_port=d.get('jdbc_port', None), kind=_enum(d, 'kind', Kind), last_restarted_time=d.get('last_restarted_time', None), last_state_loss_time=d.get('last_state_loss_time', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_context_id=d.get('spark_context_id', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), spec=_from_dict(d, 'spec', ClusterSpec), ssh_public_keys=d.get('ssh_public_keys', None), start_time=d.get('start_time', None), state=_enum(d, 'state', State), state_message=d.get('state_message', None), terminated_time=d.get('terminated_time', None), termination_reason=_from_dict(d, 'termination_reason', TerminationReason), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType))
-
-
+        return cls(
+            autoscale=_from_dict(d, "autoscale", AutoScale),
+            autotermination_minutes=d.get("autotermination_minutes", None),
+            aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes),
+            azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes),
+            cluster_cores=d.get("cluster_cores", None),
+            cluster_id=d.get("cluster_id", None),
+            cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf),
+            cluster_log_status=_from_dict(d, "cluster_log_status", LogSyncStatus),
+            cluster_memory_mb=d.get("cluster_memory_mb", None),
+            cluster_name=d.get("cluster_name", None),
+            cluster_source=_enum(d, "cluster_source", ClusterSource),
+            creator_user_name=d.get("creator_user_name", None),
+            custom_tags=d.get("custom_tags", None),
+            data_security_mode=_enum(d, "data_security_mode", DataSecurityMode),
+            default_tags=d.get("default_tags", None),
+            docker_image=_from_dict(d, "docker_image", DockerImage),
+            driver=_from_dict(d, "driver", SparkNode),
+            driver_instance_pool_id=d.get("driver_instance_pool_id", None),
+            driver_node_type_id=d.get("driver_node_type_id", None),
+            enable_elastic_disk=d.get("enable_elastic_disk", None),
+            enable_local_disk_encryption=d.get("enable_local_disk_encryption", None),
+            executors=_repeated_dict(d, "executors", SparkNode),
+            gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes),
+            init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo),
+            instance_pool_id=d.get("instance_pool_id", None),
+            is_single_node=d.get("is_single_node", None),
+            jdbc_port=d.get("jdbc_port", None),
+            kind=_enum(d, "kind", Kind),
+            last_restarted_time=d.get("last_restarted_time", None),
+            last_state_loss_time=d.get("last_state_loss_time", None),
+            node_type_id=d.get("node_type_id", None),
+            num_workers=d.get("num_workers", None),
+            policy_id=d.get("policy_id", None),
+            runtime_engine=_enum(d, "runtime_engine", RuntimeEngine),
+            single_user_name=d.get("single_user_name", None),
+            spark_conf=d.get("spark_conf", None),
+            spark_context_id=d.get("spark_context_id", None),
+            spark_env_vars=d.get("spark_env_vars", None),
+            spark_version=d.get("spark_version", None),
+            spec=_from_dict(d, "spec", ClusterSpec),
+            ssh_public_keys=d.get("ssh_public_keys", None),
+            start_time=d.get("start_time", None),
+            state=_enum(d, "state", State),
+            state_message=d.get("state_message", None),
+            terminated_time=d.get("terminated_time", None),
+            termination_reason=_from_dict(d, "termination_reason", TerminationReason),
+            use_ml_runtime=d.get("use_ml_runtime", None),
+
workload_type=_from_dict(d, "workload_type", WorkloadType), + ) @dataclass class ClusterEvent: cluster_id: str - + data_plane_event_details: Optional[DataPlaneEventDetails] = None - + details: Optional[EventDetails] = None - + timestamp: Optional[int] = None """The timestamp when the event occurred, stored as the number of milliseconds since the Unix epoch. If not provided, this will be assigned by the Timeline service.""" - + type: Optional[EventType] = None - + def as_dict(self) -> dict: """Serializes the ClusterEvent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details.as_dict() - if self.details: body['details'] = self.details.as_dict() - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.type is not None: body['type'] = self.type.value + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.data_plane_event_details: + body["data_plane_event_details"] = self.data_plane_event_details.as_dict() + if self.details: + body["details"] = self.details.as_dict() + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the ClusterEvent into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details - if self.details: body['details'] = self.details - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.type is not None: body['type'] = self.type + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.data_plane_event_details: + body["data_plane_event_details"] = self.data_plane_event_details + if self.details: + body["details"] = self.details + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterEvent: """Deserializes the ClusterEvent from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), data_plane_event_details=_from_dict(d, 'data_plane_event_details', DataPlaneEventDetails), details=_from_dict(d, 'details', EventDetails), timestamp=d.get('timestamp', None), type=_enum(d, 'type', EventType)) - - + return cls( + cluster_id=d.get("cluster_id", None), + data_plane_event_details=_from_dict(d, "data_plane_event_details", DataPlaneEventDetails), + details=_from_dict(d, "details", EventDetails), + timestamp=d.get("timestamp", None), + type=_enum(d, "type", EventType), + ) @dataclass class ClusterLibraryStatuses: cluster_id: Optional[str] = None """Unique identifier for the cluster.""" - + library_statuses: Optional[List[LibraryFullStatus]] = None """Status of all libraries on the cluster.""" - + def as_dict(self) -> dict: """Serializes the ClusterLibraryStatuses into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses] + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.library_statuses: + body["library_statuses"] = [v.as_dict() for v in 
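Note for reviewers: the reformat above does not change the serialization contract. A minimal round-trip sketch for `ClusterEvent` follows; the `EventType.RUNNING` member is an assumption, since the enum's values are not part of this hunk:

    from databricks.sdk.service.compute import ClusterEvent, EventType

    # as_dict() renders enums via .value and nested objects via as_dict(),
    # producing a JSON-ready body; from_dict() reverses both mappings.
    event = ClusterEvent(
        cluster_id="0123-456789-abcdef",  # hypothetical cluster ID
        timestamp=1717580000000,
        type=EventType.RUNNING,  # assumed member; not shown in this hunk
    )
    payload = event.as_dict()
    assert payload["type"] == "RUNNING"
    assert ClusterEvent.from_dict(payload).type is EventType.RUNNING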
@dataclass
 class ClusterLibraryStatuses:
     cluster_id: Optional[str] = None
     """Unique identifier for the cluster."""
-
+
     library_statuses: Optional[List[LibraryFullStatus]] = None
     """Status of all libraries on the cluster."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterLibraryStatuses into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses]
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.library_statuses:
+            body["library_statuses"] = [v.as_dict() for v in self.library_statuses]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterLibraryStatuses into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.library_statuses: body['library_statuses'] = self.library_statuses
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.library_statuses:
+            body["library_statuses"] = self.library_statuses
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterLibraryStatuses:
         """Deserializes the ClusterLibraryStatuses from a dictionary."""
-        return cls(cluster_id=d.get('cluster_id', None), library_statuses=_repeated_dict(d, 'library_statuses', LibraryFullStatus))
-
-
+        return cls(
+            cluster_id=d.get("cluster_id", None),
+            library_statuses=_repeated_dict(d, "library_statuses", LibraryFullStatus),
+        )


 @dataclass
 class ClusterLogConf:
     """Cluster log delivery config"""
-
+
     dbfs: Optional[DbfsStorageInfo] = None
     """destination needs to be provided. e.g. `{ "dbfs" : { "destination" : "dbfs:/home/cluster_log"
     } }`"""
-
+
     s3: Optional[S3StorageInfo] = None
     """destination and either the region or endpoint need to be provided. e.g. `{ "s3": {
     "destination" : "s3://cluster_log_bucket/prefix", "region" : "us-west-2" } }` The cluster's IAM
     role is used to access S3; please make sure the cluster IAM role in `instance_profile_arn` has
     permission to write data to the S3 destination."""
-
+
     volumes: Optional[VolumesStorageInfo] = None
     """destination needs to be provided, e.g. `{ "volumes": { "destination":
     "/Volumes/catalog/schema/volume/cluster_log" } }`"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterLogConf into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
-        if self.s3: body['s3'] = self.s3.as_dict()
-        if self.volumes: body['volumes'] = self.volumes.as_dict()
+        if self.dbfs:
+            body["dbfs"] = self.dbfs.as_dict()
+        if self.s3:
+            body["s3"] = self.s3.as_dict()
+        if self.volumes:
+            body["volumes"] = self.volumes.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterLogConf into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dbfs: body['dbfs'] = self.dbfs
-        if self.s3: body['s3'] = self.s3
-        if self.volumes: body['volumes'] = self.volumes
+        if self.dbfs:
+            body["dbfs"] = self.dbfs
+        if self.s3:
+            body["s3"] = self.s3
+        if self.volumes:
+            body["volumes"] = self.volumes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterLogConf:
         """Deserializes the ClusterLogConf from a dictionary."""
-        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo), volumes=_from_dict(d, 'volumes', VolumesStorageInfo))
-
-
+        return cls(
+            dbfs=_from_dict(d, "dbfs", DbfsStorageInfo),
+            s3=_from_dict(d, "s3", S3StorageInfo),
+            volumes=_from_dict(d, "volumes", VolumesStorageInfo),
+        )
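A sketch of how a log-delivery config is built; the `destination` field name on `VolumesStorageInfo` is inferred from the docstring example above, not from this hunk:

    from databricks.sdk.service.compute import ClusterLogConf, VolumesStorageInfo

    # Only one destination should be set per cluster.
    conf = ClusterLogConf(
        volumes=VolumesStorageInfo(destination="/Volumes/catalog/schema/volume/cluster_log")
    )
    print(conf.as_dict())  # {'volumes': {'destination': '/Volumes/catalog/schema/volume/cluster_log'}}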
@dataclass
 class ClusterPermission:
     inherited: Optional[bool] = None
-
+
     inherited_from_object: Optional[List[str]] = None
-
+
     permission_level: Optional[ClusterPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPermission:
         """Deserializes the ClusterPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
-
-
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", ClusterPermissionLevel),
+        )


 class ClusterPermissionLevel(Enum):
     """Permission level"""
-
-    CAN_ATTACH_TO = 'CAN_ATTACH_TO'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_RESTART = 'CAN_RESTART'
+
+    CAN_ATTACH_TO = "CAN_ATTACH_TO"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_RESTART = "CAN_RESTART"
+
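The enum round-trip works the same way here as everywhere else in this file; everything in this sketch is grounded in the code above:

    from databricks.sdk.service.compute import ClusterPermission, ClusterPermissionLevel

    perm = ClusterPermission(permission_level=ClusterPermissionLevel.CAN_RESTART)
    body = perm.as_dict()  # enum serialized via .value -> {'permission_level': 'CAN_RESTART'}
    assert ClusterPermission.from_dict(body).permission_level is ClusterPermissionLevel.CAN_RESTART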
@dataclass
 class ClusterPermissions:
     access_control_list: Optional[List[ClusterAccessControlResponse]] = None
-
+
     object_id: Optional[str] = None
-
+
     object_type: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissions:
         """Deserializes the ClusterPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )


 @dataclass
 class ClusterPermissionsDescription:
     description: Optional[str] = None
-
+
     permission_level: Optional[ClusterPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsDescription:
         """Deserializes the ClusterPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ClusterPermissionLevel))
-
-
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", ClusterPermissionLevel),
+        )


 @dataclass
 class ClusterPermissionsRequest:
     access_control_list: Optional[List[ClusterAccessControlRequest]] = None
-
+
     cluster_id: Optional[str] = None
     """The cluster for which to get or manage permissions."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsRequest:
         """Deserializes the ClusterPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterAccessControlRequest), cluster_id=d.get('cluster_id', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlRequest),
+            cluster_id=d.get("cluster_id", None),
+        )
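A sketch of assembling a permissions request; `ClusterAccessControlRequest`'s fields are assumed to mirror the `ClusterPolicyAccessControlRequest` defined just below (user_name, group_name, service_principal_name, permission_level), and the cluster ID is hypothetical:

    from databricks.sdk.service.compute import (
        ClusterAccessControlRequest,
        ClusterPermissionLevel,
        ClusterPermissionsRequest,
    )

    request = ClusterPermissionsRequest(
        cluster_id="0123-456789-abcdef",  # hypothetical cluster ID
        access_control_list=[
            ClusterAccessControlRequest(
                user_name="someone@example.com",
                permission_level=ClusterPermissionLevel.CAN_ATTACH_TO,
            )
        ],
    )
    # Nested dataclasses are expanded with as_dict(), yielding one JSON-ready body.
    payload = request.as_dict()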
@dataclass
 class ClusterPolicyAccessControlRequest:
     group_name: Optional[str] = None
     """name of the group"""
-
+
     permission_level: Optional[ClusterPolicyPermissionLevel] = None
     """Permission level"""
-
+
     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyAccessControlRequest:
         """Deserializes the ClusterPolicyAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
@dataclass
 class ClusterPolicyAccessControlResponse:
     all_permissions: Optional[List[ClusterPolicyPermission]] = None
     """All permissions."""
-
+
     display_name: Optional[str] = None
     """Display name of the user or service principal."""
-
+
     group_name: Optional[str] = None
     """name of the group"""
-
+
     service_principal_name: Optional[str] = None
     """Name of the service principal."""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyAccessControlResponse:
         """Deserializes the ClusterPolicyAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', ClusterPolicyPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", ClusterPolicyPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )
@dataclass
 class ClusterPolicyPermission:
     inherited: Optional[bool] = None
-
+
     inherited_from_object: Optional[List[str]] = None
-
+
     permission_level: Optional[ClusterPolicyPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermission:
         """Deserializes the ClusterPolicyPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
-
-
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
+        )


 class ClusterPolicyPermissionLevel(Enum):
     """Permission level"""
-
-    CAN_USE = 'CAN_USE'
+
+    CAN_USE = "CAN_USE"
+


 @dataclass
 class ClusterPolicyPermissions:
     access_control_list: Optional[List[ClusterPolicyAccessControlResponse]] = None
-
+
     object_id: Optional[str] = None
-
+
     object_type: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissions:
         """Deserializes the ClusterPolicyPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterPolicyAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )
@dataclass
 class ClusterPolicyPermissionsDescription:
     description: Optional[str] = None
-
+
     permission_level: Optional[ClusterPolicyPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsDescription:
         """Deserializes the ClusterPolicyPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ClusterPolicyPermissionLevel))
-
-
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", ClusterPolicyPermissionLevel),
+        )


 @dataclass
 class ClusterPolicyPermissionsRequest:
     access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None
-
+
     cluster_policy_id: Optional[str] = None
     """The cluster policy for which to get or manage permissions."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.cluster_policy_id is not None:
+            body["cluster_policy_id"] = self.cluster_policy_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.cluster_policy_id is not None:
+            body["cluster_policy_id"] = self.cluster_policy_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsRequest:
         """Deserializes the ClusterPolicyPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', ClusterPolicyAccessControlRequest), cluster_policy_id=d.get('cluster_policy_id', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlRequest),
+            cluster_policy_id=d.get("cluster_policy_id", None),
+        )
@dataclass
 class ClusterSettingsChange:
     """Represents a change to the cluster settings required for the cluster to become compliant with
     its policy."""
-
+
     field: Optional[str] = None
     """The field where this change would be made."""
-
+
     new_value: Optional[str] = None
     """The new value of this field after enforcing policy compliance (either a number, a boolean, or
     a string) converted to a string. This is intended to be read by a human. The typed new value of
     this field can be retrieved by reading the settings field in the API response."""
-
+
     previous_value: Optional[str] = None
     """The previous value of this field before enforcing policy compliance (either a number, a
     boolean, or a string) converted to a string. This is intended to be read by a human. The type
     of the field can be retrieved by reading the settings field in the API response."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSettingsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterSettingsChange:
         """Deserializes the ClusterSettingsChange from a dictionary."""
-        return cls(field=d.get('field', None), new_value=d.get('new_value', None), previous_value=d.get('previous_value', None))
-
-
+        return cls(
+            field=d.get("field", None), new_value=d.get("new_value", None), previous_value=d.get("previous_value", None)
+        )
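A sketch of how a compliance result reads; the field name and values below are hypothetical examples, not taken from any API response:

    from databricks.sdk.service.compute import ClusterSettingsChange

    # All three values arrive as display strings; the typed values live in the
    # `settings` field of the enforcement response, not in this object.
    change = ClusterSettingsChange.from_dict(
        {"field": "autotermination_minutes", "previous_value": "0", "new_value": "60"}
    )
    print(f"{change.field}: {change.previous_value} -> {change.new_value}")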
@dataclass
@@ -1791,7 +2218,7 @@ class ClusterSize:
     autoscale: Optional[AutoScale] = None
     """Parameters needed in order to automatically scale clusters up and down based on load. Note:
     autoscaling works best with DB runtime versions 3.0 or later."""
-
+
     num_workers: Optional[int] = None
     """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
     `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -1801,81 +2228,84 @@ class ClusterSize:
     from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
     workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the
     new nodes are provisioned."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterSize into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSize into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.autoscale: body['autoscale'] = self.autoscale
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ClusterSize:
         """Deserializes the ClusterSize from a dictionary."""
-        return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), num_workers=d.get('num_workers', None))
-
-
+        return cls(autoscale=_from_dict(d, "autoscale", AutoScale), num_workers=d.get("num_workers", None))


 class ClusterSource(Enum):
     """Determines whether the cluster was created by a user through the UI, created by the Databricks
     Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only."""
-
-    API = 'API'
-    JOB = 'JOB'
-    MODELS = 'MODELS'
-    PIPELINE = 'PIPELINE'
-    PIPELINE_MAINTENANCE = 'PIPELINE_MAINTENANCE'
-    SQL = 'SQL'
-    UI = 'UI'
+
+    API = "API"
+    JOB = "JOB"
+    MODELS = "MODELS"
+    PIPELINE = "PIPELINE"
+    PIPELINE_MAINTENANCE = "PIPELINE_MAINTENANCE"
+    SQL = "SQL"
+    UI = "UI"
+
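The two sizing modes are mutually exclusive in practice, as a quick sketch shows; note that `AutoScale`'s field names (`min_workers`, `max_workers`) are an assumption here, since that class is not part of this hunk:

    from databricks.sdk.service.compute import AutoScale, ClusterSize

    # Fixed-size cluster: num_workers only; unset autoscale is omitted from the body.
    fixed = ClusterSize(num_workers=5)
    assert fixed.as_dict() == {"num_workers": 5}

    # Autoscaling cluster: bounds instead of a fixed count (field names assumed).
    elastic = ClusterSize(autoscale=AutoScale(min_workers=2, max_workers=10))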
If the conf is given, the logs will be delivered to the destination every `5 mins`.
     The destination of driver logs is `$destination/$clusterId/driver`, while the destination of
     executor logs is `$destination/$clusterId/executor`."""
-
+
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
     creation, the cluster name will be an empty string. For job clusters, the cluster name is
     automatically set based on the job and job run IDs."""
-
-    custom_tags: Optional[Dict[str,str]] = None
+
+    custom_tags: Optional[Dict[str, str]] = None
     """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
     instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
@@ -1883,7 +2313,7 @@ class ClusterSpec:

     - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster
     tags"""
-
+
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a
     cluster.
@@ -1909,14 +2339,14 @@ class ClusterSpec:
     concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy
     Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way
     that doesn’t have UC nor passthrough enabled."""
-
+
     docker_image: Optional[DockerImage] = None
     """Custom docker image BYOC"""
-
+
     driver_instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the driver of the cluster belongs. The cluster
     uses the instance pool with id (instance_pool_id) if the driver pool is not assigned."""
-
+
     driver_node_type_id: Optional[str] = None
     """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
     type will be set as the same value as `node_type_id` defined above.
@@ -1924,33 +2354,33 @@ class ClusterSpec:
     This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both
     driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id
     and node_type_id take precedence."""
-
+
     enable_elastic_disk: Optional[bool] = None
     """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
     space when its Spark workers are running low on disk space. This feature requires specific AWS
     permissions to function correctly - refer to the User Guide for more details."""
-
+
     enable_local_disk_encryption: Optional[bool] = None
     """Whether to enable LUKS on cluster VMs' local disks"""
-
+
     gcp_attributes: Optional[GcpAttributes] = None
     """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
     creation, a set of default values will be used."""
-
+
     init_scripts: Optional[List[InitScriptInfo]] = None
     """The configuration for storing init scripts. Any number of destinations can be specified. The
     scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified,
     init script logs are sent to `//init_scripts`."""
-
+
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
-
+
     is_single_node: Optional[bool] = None
     """This field can only be used when `kind = CLASSIC_PREVIEW`.

     When set to true, Databricks will automatically set single node related `custom_tags`,
     `spark_conf`, and `num_workers`"""
-
+
     kind: Optional[Kind] = None
     """The kind of compute described by this compute specification.
@@ -1965,13 +2395,13 @@ class ClusterSpec:
     By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`.

     [simple form]: https://docs.databricks.com/compute/simple-form.html"""
-
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
     compute intensive workloads. A list of available node types can be retrieved by using the
     :method:clusters/listNodeTypes API call."""
-
+
     num_workers: Optional[int] = None
     """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
     `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
@@ -1981,10 +2411,10 @@ class ClusterSpec:
     from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
     workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the
     new nodes are provisioned."""
-
+
     policy_id: Optional[str] = None
     """The ID of the cluster policy used to create the cluster if applicable."""
-
+
     runtime_engine: Optional[RuntimeEngine] = None
     """Determines the cluster's runtime engine, either standard or Photon.
@@ -1993,16 +2423,16 @@ class ClusterSpec:

     If left unspecified, the runtime engine defaults to standard unless the spark_version contains
     -photon-, in which case Photon will be used."""
-
+
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
-
-    spark_conf: Optional[Dict[str,str]] = None
+
+    spark_conf: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified Spark configuration key-value pairs.
     Users can also pass in a string of extra JVM options to the driver and the executors via
     `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
-
-    spark_env_vars: Optional[Dict[str,str]] = None
+
+    spark_env_vars: Optional[Dict[str, str]] = None
    """An object containing a set of optional, user-specified environment variable key-value pairs.
     Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`)
     while launching the driver and workers.
@@ -2014,226 +2444,325 @@ class ClusterSpec:
     Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
     "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
     -Dspark.shuffle.service.enabled=true"}`"""
-
+
     spark_version: Optional[str] = None
     """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions
     can be retrieved by using the :method:clusters/sparkVersions API call."""
-
+
     ssh_public_keys: Optional[List[str]] = None
     """SSH public key contents that will be added to each Spark node in this cluster. The
     corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up
     to 10 keys can be specified."""
-
+
     use_ml_runtime: Optional[bool] = None
     """This field can only be used when `kind = CLASSIC_PREVIEW`.

     `effective_spark_version` is determined by `spark_version` (DBR release), this field
     `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
-
+
     workload_type: Optional[WorkloadType] = None
     """Cluster Attributes showing for clusters workload types."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale.as_dict()
-        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict()
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict()
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict()
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value
-        if self.docker_image: body['docker_image'] = self.docker_image.as_dict()
-        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
-        if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind.value
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
+        if self.apply_policy_default_values is not None:
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale.as_dict()
+        if self.autotermination_minutes is not None:
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes.as_dict()
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes.as_dict()
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf.as_dict()
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode.value
+        if self.docker_image:
+            body["docker_image"] = self.docker_image.as_dict()
+        if self.driver_instance_pool_id is not None:
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes.as_dict()
+        if self.init_scripts:
+            body["init_scripts"] = [v.as_dict() for v in self.init_scripts]
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind.value
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine.value
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values
-        if self.autoscale: body['autoscale'] = self.autoscale
-        if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes
-        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
-        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
-        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
-        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
-        if self.docker_image: body['docker_image'] = self.docker_image
-        if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id
-        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
-        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
-        if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
-        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
-        if self.init_scripts: body['init_scripts'] = self.init_scripts
-        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
-        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
-        if self.kind is not None: body['kind'] = self.kind
-        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
-        if self.num_workers is not None: body['num_workers'] = self.num_workers
-        if self.policy_id is not None: body['policy_id'] = self.policy_id
-        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
-        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
-        if self.spark_conf: body['spark_conf'] = self.spark_conf
-        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
-        if self.spark_version is not None: body['spark_version'] = self.spark_version
-        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
-        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
-        if self.workload_type: body['workload_type'] = self.workload_type
+        if self.apply_policy_default_values is not None:
+            body["apply_policy_default_values"] = self.apply_policy_default_values
+        if self.autoscale:
+            body["autoscale"] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body["autotermination_minutes"] = self.autotermination_minutes
+        if self.aws_attributes:
+            body["aws_attributes"] = self.aws_attributes
+        if self.azure_attributes:
+            body["azure_attributes"] = self.azure_attributes
+        if self.cluster_log_conf:
+            body["cluster_log_conf"] = self.cluster_log_conf
+        if self.cluster_name is not None:
+            body["cluster_name"] = self.cluster_name
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.data_security_mode is not None:
+            body["data_security_mode"] = self.data_security_mode
+        if self.docker_image:
+            body["docker_image"] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body["driver_instance_pool_id"] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None:
+            body["driver_node_type_id"] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None:
+            body["enable_elastic_disk"] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body["enable_local_disk_encryption"] = self.enable_local_disk_encryption
+        if self.gcp_attributes:
+            body["gcp_attributes"] = self.gcp_attributes
+        if self.init_scripts:
+            body["init_scripts"] = self.init_scripts
+        if self.instance_pool_id is not None:
+            body["instance_pool_id"] = self.instance_pool_id
+        if self.is_single_node is not None:
+            body["is_single_node"] = self.is_single_node
+        if self.kind is not None:
+            body["kind"] = self.kind
+        if self.node_type_id is not None:
+            body["node_type_id"] = self.node_type_id
+        if self.num_workers is not None:
+            body["num_workers"] = self.num_workers
+        if self.policy_id is not None:
+            body["policy_id"] = self.policy_id
+        if self.runtime_engine is not None:
+            body["runtime_engine"] = self.runtime_engine
+        if self.single_user_name is not None:
+            body["single_user_name"] = self.single_user_name
+        if self.spark_conf:
+            body["spark_conf"] = self.spark_conf
+        if self.spark_env_vars:
+            body["spark_env_vars"] = self.spark_env_vars
+        if self.spark_version is not None:
+            body["spark_version"] = self.spark_version
+        if self.ssh_public_keys:
+            body["ssh_public_keys"] = self.ssh_public_keys
+        if self.use_ml_runtime is not None:
+            body["use_ml_runtime"] = self.use_ml_runtime
+        if self.workload_type:
+            body["workload_type"] = self.workload_type
         return body
@classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterSpec: """Deserializes the ClusterSpec from a dictionary.""" - return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) - - - - - + return cls( + apply_policy_default_values=d.get("apply_policy_default_values", None), + autoscale=_from_dict(d, "autoscale", AutoScale), + autotermination_minutes=d.get("autotermination_minutes", None), + aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), + cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), + cluster_name=d.get("cluster_name", None), + custom_tags=d.get("custom_tags", None), + data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), + docker_image=_from_dict(d, "docker_image", DockerImage), + driver_instance_pool_id=d.get("driver_instance_pool_id", None), + driver_node_type_id=d.get("driver_node_type_id", None), + enable_elastic_disk=d.get("enable_elastic_disk", None), + enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), + gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), + init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), + instance_pool_id=d.get("instance_pool_id", None), + is_single_node=d.get("is_single_node", None), + kind=_enum(d, "kind", Kind), + node_type_id=d.get("node_type_id", None), + num_workers=d.get("num_workers", None), + policy_id=d.get("policy_id", None), + runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), + single_user_name=d.get("single_user_name", None), + spark_conf=d.get("spark_conf", None), + spark_env_vars=d.get("spark_env_vars", None), + spark_version=d.get("spark_version", None), + ssh_public_keys=d.get("ssh_public_keys", None), + use_ml_runtime=d.get("use_ml_runtime", None), + workload_type=_from_dict(d, "workload_type", WorkloadType), + ) @dataclass class Command: cluster_id: Optional[str] = None """Running cluster id""" - + command: Optional[str] = None 
"""Executable code""" - + context_id: Optional[str] = None """Running context id""" - + language: Optional[Language] = None - + def as_dict(self) -> dict: """Serializes the Command into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.command is not None: body['command'] = self.command - if self.context_id is not None: body['contextId'] = self.context_id - if self.language is not None: body['language'] = self.language.value + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.command is not None: + body["command"] = self.command + if self.context_id is not None: + body["contextId"] = self.context_id + if self.language is not None: + body["language"] = self.language.value return body def as_shallow_dict(self) -> dict: """Serializes the Command into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.command is not None: body['command'] = self.command - if self.context_id is not None: body['contextId'] = self.context_id - if self.language is not None: body['language'] = self.language + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.command is not None: + body["command"] = self.command + if self.context_id is not None: + body["contextId"] = self.context_id + if self.language is not None: + body["language"] = self.language return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Command: """Deserializes the Command from a dictionary.""" - return cls(cluster_id=d.get('clusterId', None), command=d.get('command', None), context_id=d.get('contextId', None), language=_enum(d, 'language', Language)) - - + return cls( + cluster_id=d.get("clusterId", None), + command=d.get("command", None), + context_id=d.get("contextId", None), + language=_enum(d, "language", Language), + ) class CommandStatus(Enum): - - - CANCELLED = 'Cancelled' - CANCELLING = 'Cancelling' - ERROR = 'Error' - FINISHED = 'Finished' - QUEUED = 'Queued' - RUNNING = 'Running' - + CANCELLED = "Cancelled" + CANCELLING = "Cancelling" + ERROR = "Error" + FINISHED = "Finished" + QUEUED = "Queued" + RUNNING = "Running" @dataclass class CommandStatusResponse: id: Optional[str] = None - + results: Optional[Results] = None - + status: Optional[CommandStatus] = None - + def as_dict(self) -> dict: """Serializes the CommandStatusResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.results: body['results'] = self.results.as_dict() - if self.status is not None: body['status'] = self.status.value + if self.id is not None: + body["id"] = self.id + if self.results: + body["results"] = self.results.as_dict() + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the CommandStatusResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.results: body['results'] = self.results - if self.status is not None: body['status'] = self.status + if self.id is not None: + body["id"] = self.id + if self.results: + body["results"] = self.results + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CommandStatusResponse: """Deserializes the CommandStatusResponse from a dictionary.""" - return cls(id=d.get('id', 
None), results=_from_dict(d, 'results', Results), status=_enum(d, 'status', CommandStatus)) - - + return cls( + id=d.get("id", None), results=_from_dict(d, "results", Results), status=_enum(d, "status", CommandStatus) + ) class ContextStatus(Enum): - - - ERROR = 'Error' - PENDING = 'Pending' - RUNNING = 'Running' - + ERROR = "Error" + PENDING = "Pending" + RUNNING = "Running" @dataclass class ContextStatusResponse: id: Optional[str] = None - + status: Optional[ContextStatus] = None - + def as_dict(self) -> dict: """Serializes the ContextStatusResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.status is not None: body['status'] = self.status.value + if self.id is not None: + body["id"] = self.id + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the ContextStatusResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.status is not None: body['status'] = self.status + if self.id is not None: + body["id"] = self.id + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ContextStatusResponse: """Deserializes the ContextStatusResponse from a dictionary.""" - return cls(id=d.get('id', None), status=_enum(d, 'status', ContextStatus)) - - + return cls(id=d.get("id", None), status=_enum(d, "status", ContextStatus)) @dataclass @@ -2241,46 +2770,46 @@ class CreateCluster: spark_version: str """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + apply_policy_default_values: Optional[bool] = None """When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.""" - + autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + clone_from: Optional[CloneCluster] = None """When specified, this clones libraries from a source cluster during the creation of a new cluster.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. 
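The `Command`, `ContextStatusResponse`, and `CommandStatusResponse` types above are the payloads exchanged by the command-execution API. A sketch of the usual flow, assuming "0123-456789-abcdef12" stands in for a real running cluster id:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()
cluster_id = "0123-456789-abcdef12"  # hypothetical running cluster

# Create an execution context on the cluster and wait until it is Running.
ctx = w.command_execution.create(cluster_id=cluster_id, language=compute.Language.PYTHON).result()

# Run a command in that context and wait for a terminal CommandStatus.
resp = w.command_execution.execute(
    cluster_id=cluster_id,
    context_id=ctx.id,
    language=compute.Language.PYTHON,
    command="print(1 + 1)",
).result()
print(resp.status, resp.results.data)

# Contexts hold cluster resources, so destroy them when done.
w.command_execution.destroy(cluster_id=cluster_id, context_id=ctx.id)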
The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - + cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. For job clusters, the cluster name is automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -2288,7 +2817,7 @@ class CreateCluster: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. @@ -2314,14 +2843,14 @@ class CreateCluster: concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - + docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the driver of the cluster belongs. The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. @@ -2329,33 +2858,33 @@ class CreateCluster: This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space. This feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable LUKS on cluster VMs' local disks""" - + gcp_attributes: Optional[GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + is_single_node: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`""" - + kind: Optional[Kind] = None """The kind of compute described by this compute specification.
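Most of the fields documented here map one-to-one onto keyword arguments of `w.clusters.create`. A sketch of a dedicated (single-user) cluster request, with hypothetical name, DBR version, node type, and user:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()
cluster = w.clusters.create(
    cluster_name="single-user-example",        # hypothetical name
    spark_version="15.4.x-scala2.12",          # hypothetical DBR version
    node_type_id="i3.xlarge",                  # hypothetical node type
    num_workers=1,
    autotermination_minutes=20,
    data_security_mode=compute.DataSecurityMode.SINGLE_USER,
    single_user_name="someone@example.com",    # hypothetical user
).result()  # create() returns a Wait; result() blocks until the cluster is RUNNING
print(cluster.cluster_id)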
@@ -2370,13 +2899,13 @@ class CreateCluster: By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. [simple form]: https://docs.databricks.com/compute/simple-form.html""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. @@ -2386,10 +2915,10 @@ class CreateCluster: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + runtime_engine: Optional[RuntimeEngine] = None """Determines the cluster's runtime engine, either standard or Photon. @@ -2398,16 +2927,16 @@ class CreateCluster: If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used.""" - + single_user_name: Optional[str] = None """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str,str]] = None + + spark_conf: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. Users can also pass in a string of extra JVM options to the driver and the executors via `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - - spark_env_vars: Optional[Dict[str,str]] = None + + spark_env_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -2419,152 +2948,246 @@ class CreateCluster: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + use_ml_runtime: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. 
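`spark_conf` and `spark_env_vars` are plain string-to-string maps and are emitted verbatim by the serializers. A small sketch, reusing the example values from the docstrings above:

from databricks.sdk.service.compute import CreateCluster

req = CreateCluster(
    spark_version="15.4.x-scala2.12",  # hypothetical DBR version
    spark_conf={
        "spark.speculation": "true",
        "spark.executor.memory": "4g",  # values are strings, even for sizes
    },
    spark_env_vars={
        "SPARK_WORKER_MEMORY": "28000m",
        "SPARK_LOCAL_DIRS": "/local_disk0",
    },
)
# Maps pass through as_dict() unchanged; no per-key processing happens.
assert req.as_dict()["spark_conf"]["spark.speculation"] == "true"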
`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" - + workload_type: Optional[WorkloadType] = None """Cluster Attributes showing for clusters workload types.""" - + def as_dict(self) -> dict: """Serializes the CreateCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values - if self.autoscale: body['autoscale'] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.clone_from: body['clone_from'] = self.clone_from.as_dict() - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: body['cluster_name'] = self.cluster_name - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value - if self.docker_image: body['docker_image'] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.is_single_node is not None: body['is_single_node'] = self.is_single_node - if self.kind is not None: body['kind'] = self.kind.value - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value - if self.single_user_name is not None: body['single_user_name'] = self.single_user_name - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.spark_version is not None: body['spark_version'] = self.spark_version - if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime - if self.workload_type: body['workload_type'] = self.workload_type.as_dict() + if self.apply_policy_default_values is not None: + body["apply_policy_default_values"] = self.apply_policy_default_values + if self.autoscale: + body["autoscale"] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: + body["autotermination_minutes"] = self.autotermination_minutes + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes.as_dict() + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes.as_dict() + if self.clone_from: + body["clone_from"] = 
self.clone_from.as_dict() + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: + body["cluster_name"] = self.cluster_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.data_security_mode is not None: + body["data_security_mode"] = self.data_security_mode.value + if self.docker_image: + body["docker_image"] = self.docker_image.as_dict() + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.init_scripts: + body["init_scripts"] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind.value + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine.value + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values - if self.autoscale: body['autoscale'] = self.autoscale - if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.clone_from: body['clone_from'] = self.clone_from - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf - if self.cluster_name is not None: body['cluster_name'] = self.cluster_name - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode - if self.docker_image: body['docker_image'] = self.docker_image - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: 
body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.init_scripts: body['init_scripts'] = self.init_scripts - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.is_single_node is not None: body['is_single_node'] = self.is_single_node - if self.kind is not None: body['kind'] = self.kind - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine - if self.single_user_name is not None: body['single_user_name'] = self.single_user_name - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.spark_version is not None: body['spark_version'] = self.spark_version - if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys - if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime - if self.workload_type: body['workload_type'] = self.workload_type + if self.apply_policy_default_values is not None: + body["apply_policy_default_values"] = self.apply_policy_default_values + if self.autoscale: + body["autoscale"] = self.autoscale + if self.autotermination_minutes is not None: + body["autotermination_minutes"] = self.autotermination_minutes + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.clone_from: + body["clone_from"] = self.clone_from + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf + if self.cluster_name is not None: + body["cluster_name"] = self.cluster_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.data_security_mode is not None: + body["data_security_mode"] = self.data_security_mode + if self.docker_image: + body["docker_image"] = self.docker_image + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.init_scripts: + body["init_scripts"] = self.init_scripts + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + 
body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = self.ssh_public_keys + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCluster: """Deserializes the CreateCluster from a dictionary.""" - return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), clone_from=_from_dict(d, 'clone_from', CloneCluster), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) - - + return cls( + apply_policy_default_values=d.get("apply_policy_default_values", None), + autoscale=_from_dict(d, "autoscale", AutoScale), + autotermination_minutes=d.get("autotermination_minutes", None), + aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), + clone_from=_from_dict(d, "clone_from", CloneCluster), + cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), + cluster_name=d.get("cluster_name", None), + custom_tags=d.get("custom_tags", None), + data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), + docker_image=_from_dict(d, "docker_image", DockerImage), + driver_instance_pool_id=d.get("driver_instance_pool_id", None), + driver_node_type_id=d.get("driver_node_type_id", None), + enable_elastic_disk=d.get("enable_elastic_disk", None), + enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), + gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), + init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), + instance_pool_id=d.get("instance_pool_id", None), + is_single_node=d.get("is_single_node", None), + kind=_enum(d, "kind", Kind), + node_type_id=d.get("node_type_id", None), + num_workers=d.get("num_workers", None), + policy_id=d.get("policy_id", None), + runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), + single_user_name=d.get("single_user_name", None), + 
spark_conf=d.get("spark_conf", None), + spark_env_vars=d.get("spark_env_vars", None), + spark_version=d.get("spark_version", None), + ssh_public_keys=d.get("ssh_public_keys", None), + use_ml_runtime=d.get("use_ml_runtime", None), + workload_type=_from_dict(d, "workload_type", WorkloadType), + ) @dataclass class CreateClusterResponse: cluster_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateClusterResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateClusterResponse: """Deserializes the CreateClusterResponse from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None)) - - + return cls(cluster_id=d.get("cluster_id", None)) @dataclass class CreateContext: cluster_id: Optional[str] = None """Running cluster id""" - + language: Optional[Language] = None - + def as_dict(self) -> dict: """Serializes the CreateContext into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.language is not None: body['language'] = self.language.value + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.language is not None: + body["language"] = self.language.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateContext into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.language is not None: body['language'] = self.language + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.language is not None: + body["language"] = self.language return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateContext: """Deserializes the CreateContext from a dictionary.""" - return cls(cluster_id=d.get('clusterId', None), language=_enum(d, 'language', Language)) - - + return cls(cluster_id=d.get("clusterId", None), language=_enum(d, "language", Language)) @dataclass @@ -2572,130 +3195,168 @@ class CreateInstancePool: instance_pool_name: str """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters.""" - + node_type_id: str """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + aws_attributes: Optional[InstancePoolAwsAttributes] = None """Attributes related to instance pools running on Amazon Web Services. If not specified at pool creation, a set of default values will be used.""" - + azure_attributes: Optional[InstancePoolAzureAttributes] = None """Attributes related to instance pools running on Azure. 
If not specified at pool creation, a set of default values will be used.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags""" - + disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire additional disk space when their Spark workers are running low on disk space. In AWS, this feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + gcp_attributes: Optional[InstancePoolGcpAttributes] = None """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool creation, a set of default values will be used.""" - + idle_instance_autotermination_minutes: Optional[int] = None """Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances will be automatically terminated after a default timeout. If specified, the threshold must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances from the cache if min cache size could still hold.""" - + max_capacity: Optional[int] = None """Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances. Clusters that require further instance provisioning will fail during upsize requests.""" - + min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" - + preloaded_docker_images: Optional[List[DockerImage]] = None """Custom Docker Image BYOC""" - + preloaded_spark_versions: Optional[List[str]] = None """A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster.
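These fields correspond directly to the keyword arguments of `w.instance_pools.create`. A sketch with a hypothetical pool name and node type:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
pool = w.instance_pools.create(
    instance_pool_name="shared-i3-pool",  # hypothetical; must be unique
    node_type_id="i3.xlarge",             # hypothetical node type
    min_idle_instances=1,
    max_capacity=10,
    idle_instance_autotermination_minutes=15,
)
print(pool.instance_pool_id)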
A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + def as_dict(self) -> dict: """Serializes the CreateInstancePool into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes.as_dict() + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes.as_dict() + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.disk_spec: + body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.preloaded_docker_images: + body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: + body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions] return body def as_shallow_dict(self) -> dict: """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not 
None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images - if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.disk_spec: + body["disk_spec"] = self.disk_spec + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.preloaded_docker_images: + body["preloaded_docker_images"] = self.preloaded_docker_images + if self.preloaded_spark_versions: + body["preloaded_spark_versions"] = self.preloaded_spark_versions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePool: """Deserializes the CreateInstancePool from a dictionary.""" - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None)) - - + return cls( + aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes), + custom_tags=d.get("custom_tags", None), + disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_elastic_disk=d.get("enable_elastic_disk", None), + gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), + idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), + instance_pool_name=d.get("instance_pool_name", None), + max_capacity=d.get("max_capacity", None), + min_idle_instances=d.get("min_idle_instances", None), + node_type_id=d.get("node_type_id", None), + preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), + preloaded_spark_versions=d.get("preloaded_spark_versions", None), + ) @dataclass class CreateInstancePoolResponse: instance_pool_id: Optional[str] = None """The ID of the created instance pool.""" - + def as_dict(self) -> 
dict: """Serializes the CreateInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePoolResponse: """Deserializes the CreateInstancePoolResponse from a dictionary.""" - return cls(instance_pool_id=d.get('instance_pool_id', None)) - - + return cls(instance_pool_id=d.get("instance_pool_id", None)) @dataclass @@ -2704,22 +3365,22 @@ class CreatePolicy: """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + description: Optional[str] = None """Additional human-readable description of the cluster policy.""" - + libraries: Optional[List[Library]] = None """A list of libraries to be installed on the next cluster restart that uses this policy. The maximum number of libraries is 500.""" - + max_clusters_per_user: Optional[int] = None """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" - + name: Optional[str] = None """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 characters.""" - + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -2728,118 +3389,138 @@ class CreatePolicy: rules specified here are merged into the inherited policy definition. [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + policy_family_id: Optional[str] = None """ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. Cannot be used with `definition`. 
Use `policy_family_definition_overrides` instead to customize the policy definition.""" - + def as_dict(self) -> dict: """Serializes the CreatePolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name is not None: body['name'] = self.name - if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.max_clusters_per_user is not None: + body["max_clusters_per_user"] = self.max_clusters_per_user + if self.name is not None: + body["name"] = self.name + if self.policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = self.policy_family_definition_overrides + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id return body def as_shallow_dict(self) -> dict: """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.libraries: body['libraries'] = self.libraries - if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name is not None: body['name'] = self.name - if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.libraries: + body["libraries"] = self.libraries + if self.max_clusters_per_user is not None: + body["max_clusters_per_user"] = self.max_clusters_per_user + if self.name is not None: + body["name"] = self.name + if self.policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = self.policy_family_definition_overrides + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePolicy: """Deserializes the CreatePolicy from a dictionary.""" - return cls(definition=d.get('definition', None), description=d.get('description', None), libraries=_repeated_dict(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None)) - - + return cls( + definition=d.get("definition", None), + description=d.get("description", None), + libraries=_repeated_dict(d, "libraries", Library), + max_clusters_per_user=d.get("max_clusters_per_user", None), + name=d.get("name", None), + 
policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), + policy_family_id=d.get("policy_family_id", None), + ) @dataclass class CreatePolicyResponse: policy_id: Optional[str] = None """Canonical unique identifier for the cluster policy.""" - + def as_dict(self) -> dict: """Serializes the CreatePolicyResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePolicyResponse: """Deserializes the CreatePolicyResponse from a dictionary.""" - return cls(policy_id=d.get('policy_id', None)) - - + return cls(policy_id=d.get("policy_id", None)) @dataclass class CreateResponse: script_id: Optional[str] = None """The global init script ID.""" - + def as_dict(self) -> dict: """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.script_id is not None: body['script_id'] = self.script_id + if self.script_id is not None: + body["script_id"] = self.script_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.script_id is not None: body['script_id'] = self.script_id + if self.script_id is not None: + body["script_id"] = self.script_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" - return cls(script_id=d.get('script_id', None)) - - + return cls(script_id=d.get("script_id", None)) @dataclass class Created: id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the Created into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the Created into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Created: """Deserializes the Created from a dictionary.""" - return cls(id=d.get('id', None)) - - + return cls(id=d.get("id", None)) @dataclass @@ -2848,82 +3529,95 @@ class CustomPolicyTag: """The key of the tag. 
- Must be unique among all custom tags of the same policy - Cannot be “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these tags are preserved.""" - + value: Optional[str] = None """The value of the tag.""" - + def as_dict(self) -> dict: """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomPolicyTag: """Deserializes the CustomPolicyTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class DataPlaneEventDetails: event_type: Optional[DataPlaneEventDetailsEventType] = None - + executor_failures: Optional[int] = None - + host_id: Optional[str] = None - + timestamp: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the DataPlaneEventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.event_type is not None: body['event_type'] = self.event_type.value - if self.executor_failures is not None: body['executor_failures'] = self.executor_failures - if self.host_id is not None: body['host_id'] = self.host_id - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.event_type is not None: + body["event_type"] = self.event_type.value + if self.executor_failures is not None: + body["executor_failures"] = self.executor_failures + if self.host_id is not None: + body["host_id"] = self.host_id + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.event_type is not None: body['event_type'] = self.event_type - if self.executor_failures is not None: body['executor_failures'] = self.executor_failures - if self.host_id is not None: body['host_id'] = self.host_id - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.event_type is not None: + body["event_type"] = self.event_type + if self.executor_failures is not None: + body["executor_failures"] = self.executor_failures + if self.host_id is not None: + body["host_id"] = self.host_id + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataPlaneEventDetails: """Deserializes the DataPlaneEventDetails from a dictionary.""" - return cls(event_type=_enum(d, 'event_type', DataPlaneEventDetailsEventType), executor_failures=d.get('executor_failures', None), host_id=d.get('host_id', None), timestamp=d.get('timestamp', None)) - - + return cls( + event_type=_enum(d, "event_type", DataPlaneEventDetailsEventType), + executor_failures=d.get("executor_failures", None), + host_id=d.get("host_id", None), + timestamp=d.get("timestamp", None), + ) class 
DataPlaneEventDetailsEventType(Enum): - - - NODE_BLACKLISTED = 'NODE_BLACKLISTED' - NODE_EXCLUDED_DECOMMISSIONED = 'NODE_EXCLUDED_DECOMMISSIONED' + + NODE_BLACKLISTED = "NODE_BLACKLISTED" + NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" + class DataSecurityMode(Enum): """Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in @@ -2932,77 +3626,78 @@ class DataSecurityMode(Enum): users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - - DATA_SECURITY_MODE_AUTO = 'DATA_SECURITY_MODE_AUTO' - DATA_SECURITY_MODE_DEDICATED = 'DATA_SECURITY_MODE_DEDICATED' - DATA_SECURITY_MODE_STANDARD = 'DATA_SECURITY_MODE_STANDARD' - LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH' - LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER' - LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD' - LEGACY_TABLE_ACL = 'LEGACY_TABLE_ACL' - NONE = 'NONE' - SINGLE_USER = 'SINGLE_USER' - USER_ISOLATION = 'USER_ISOLATION' + + DATA_SECURITY_MODE_AUTO = "DATA_SECURITY_MODE_AUTO" + DATA_SECURITY_MODE_DEDICATED = "DATA_SECURITY_MODE_DEDICATED" + DATA_SECURITY_MODE_STANDARD = "DATA_SECURITY_MODE_STANDARD" + LEGACY_PASSTHROUGH = "LEGACY_PASSTHROUGH" + LEGACY_SINGLE_USER = "LEGACY_SINGLE_USER" + LEGACY_SINGLE_USER_STANDARD = "LEGACY_SINGLE_USER_STANDARD" + LEGACY_TABLE_ACL = "LEGACY_TABLE_ACL" + NONE = "NONE" + SINGLE_USER = "SINGLE_USER" + USER_ISOLATION = "USER_ISOLATION" + @dataclass class DbfsStorageInfo: """A storage location in DBFS""" - + destination: str """dbfs destination, e.g. 
`dbfs:/my/path`""" - + def as_dict(self) -> dict: """Serializes the DbfsStorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: """Deserializes the DbfsStorageInfo from a dictionary.""" - return cls(destination=d.get('destination', None)) - - + return cls(destination=d.get("destination", None)) @dataclass class DeleteCluster: cluster_id: str """The cluster to be terminated.""" - + def as_dict(self) -> dict: """Serializes the DeleteCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteCluster: """Deserializes the DeleteCluster from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None)) - - + return cls(cluster_id=d.get("cluster_id", None)) @dataclass @@ -3021,36 +3716,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteClusterResponse: """Deserializes the DeleteClusterResponse from a dictionary.""" return cls() - - - - - @dataclass class DeleteInstancePool: instance_pool_id: str """The instance pool to be terminated.""" - + def as_dict(self) -> dict: """Serializes the DeleteInstancePool into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteInstancePool into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePool: """Deserializes the DeleteInstancePool from a dictionary.""" - return cls(instance_pool_id=d.get('instance_pool_id', None)) - - + return cls(instance_pool_id=d.get("instance_pool_id", None)) @dataclass @@ -3069,33 +3759,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePoolResponse: """Deserializes the DeleteInstancePoolResponse from a dictionary.""" return cls() - - @dataclass class DeletePolicy: policy_id: str """The ID of the policy to delete.""" - + def as_dict(self) -> dict: """Serializes the DeletePolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_id is not None: + 
body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeletePolicy: """Deserializes the DeletePolicy from a dictionary.""" - return cls(policy_id=d.get('policy_id', None)) - - + return cls(policy_id=d.get("policy_id", None)) @dataclass @@ -3114,8 +3802,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse: """Deserializes the DeletePolicyResponse from a dictionary.""" return cls() - - @dataclass @@ -3134,36 +3820,36 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - @dataclass class DestroyContext: cluster_id: str - + context_id: str - + def as_dict(self) -> dict: """Serializes the DestroyContext into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.context_id is not None: body['contextId'] = self.context_id + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.context_id is not None: + body["contextId"] = self.context_id return body def as_shallow_dict(self) -> dict: """Serializes the DestroyContext into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['clusterId'] = self.cluster_id - if self.context_id is not None: body['contextId'] = self.context_id + if self.cluster_id is not None: + body["clusterId"] = self.cluster_id + if self.context_id is not None: + body["contextId"] = self.context_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DestroyContext: """Deserializes the DestroyContext from a dictionary.""" - return cls(cluster_id=d.get('clusterId', None), context_id=d.get('contextId', None)) - - + return cls(cluster_id=d.get("clusterId", None), context_id=d.get("contextId", None)) @dataclass @@ -3182,8 +3868,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DestroyResponse: """Deserializes the DestroyResponse from a dictionary.""" return cls() - - @dataclass @@ -3191,7 +3875,7 @@ class DiskSpec: """Describes the disks that are launched for each instance in the spark cluster. For example, if the cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then Databricks will launch a total of 6 disks, 100 GiB each, for this cluster.""" - + disk_count: Optional[int] = None """The number of disks launched for each instance: - This feature is only enabled for supported node types. - Users can choose up to the limit of the disks supported by the node type. - For @@ -3206,9 +3890,9 @@ class DiskSpec: Disks will be mounted at: - For AWS: `/ebs0`, `/ebs1`, and etc. - For Azure: `/remote_volume0`, `/remote_volume1`, and etc.""" - + disk_iops: Optional[int] = None - + disk_size: Optional[int] = None """The size of each disk (in GiB) launched for each instance. Values must fall into the supported range for a particular instance type. 
@@ -3216,199 +3900,225 @@ class DiskSpec: For AWS: - General Purpose SSD: 100 - 4096 GiB - Throughput Optimized HDD: 500 - 4096 GiB For Azure: - Premium LRS (SSD): 1 - 1023 GiB - Standard LRS (HDD): 1- 1023 GiB""" - + disk_throughput: Optional[int] = None - + disk_type: Optional[DiskType] = None """The type of disks that will be launched with this cluster.""" - + def as_dict(self) -> dict: """Serializes the DiskSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.disk_count is not None: body['disk_count'] = self.disk_count - if self.disk_iops is not None: body['disk_iops'] = self.disk_iops - if self.disk_size is not None: body['disk_size'] = self.disk_size - if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput - if self.disk_type: body['disk_type'] = self.disk_type.as_dict() + if self.disk_count is not None: + body["disk_count"] = self.disk_count + if self.disk_iops is not None: + body["disk_iops"] = self.disk_iops + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.disk_throughput is not None: + body["disk_throughput"] = self.disk_throughput + if self.disk_type: + body["disk_type"] = self.disk_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the DiskSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.disk_count is not None: body['disk_count'] = self.disk_count - if self.disk_iops is not None: body['disk_iops'] = self.disk_iops - if self.disk_size is not None: body['disk_size'] = self.disk_size - if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput - if self.disk_type: body['disk_type'] = self.disk_type + if self.disk_count is not None: + body["disk_count"] = self.disk_count + if self.disk_iops is not None: + body["disk_iops"] = self.disk_iops + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.disk_throughput is not None: + body["disk_throughput"] = self.disk_throughput + if self.disk_type: + body["disk_type"] = self.disk_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DiskSpec: """Deserializes the DiskSpec from a dictionary.""" - return cls(disk_count=d.get('disk_count', None), disk_iops=d.get('disk_iops', None), disk_size=d.get('disk_size', None), disk_throughput=d.get('disk_throughput', None), disk_type=_from_dict(d, 'disk_type', DiskType)) - - + return cls( + disk_count=d.get("disk_count", None), + disk_iops=d.get("disk_iops", None), + disk_size=d.get("disk_size", None), + disk_throughput=d.get("disk_throughput", None), + disk_type=_from_dict(d, "disk_type", DiskType), + ) @dataclass class DiskType: """Describes the disk type.""" - + azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None """All Azure Disk types that Databricks supports. See https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" - + ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None """All EBS volume types that Databricks supports. 
See https://aws.amazon.com/ebs/details/ for details.""" - + def as_dict(self) -> dict: """Serializes the DiskType into a dictionary suitable for use as a JSON request body.""" body = {} - if self.azure_disk_volume_type is not None: body['azure_disk_volume_type'] = self.azure_disk_volume_type.value - if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value + if self.azure_disk_volume_type is not None: + body["azure_disk_volume_type"] = self.azure_disk_volume_type.value + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type.value return body def as_shallow_dict(self) -> dict: """Serializes the DiskType into a shallow dictionary of its immediate attributes.""" body = {} - if self.azure_disk_volume_type is not None: body['azure_disk_volume_type'] = self.azure_disk_volume_type - if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type + if self.azure_disk_volume_type is not None: + body["azure_disk_volume_type"] = self.azure_disk_volume_type + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DiskType: """Deserializes the DiskType from a dictionary.""" - return cls(azure_disk_volume_type=_enum(d, 'azure_disk_volume_type', DiskTypeAzureDiskVolumeType), ebs_volume_type=_enum(d, 'ebs_volume_type', DiskTypeEbsVolumeType)) - - + return cls( + azure_disk_volume_type=_enum(d, "azure_disk_volume_type", DiskTypeAzureDiskVolumeType), + ebs_volume_type=_enum(d, "ebs_volume_type", DiskTypeEbsVolumeType), + ) class DiskTypeAzureDiskVolumeType(Enum): """All Azure Disk types that Databricks supports. See https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" - - PREMIUM_LRS = 'PREMIUM_LRS' - STANDARD_LRS = 'STANDARD_LRS' + + PREMIUM_LRS = "PREMIUM_LRS" + STANDARD_LRS = "STANDARD_LRS" + class DiskTypeEbsVolumeType(Enum): """All EBS volume types that Databricks supports. 
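A quick hedged sketch of how these enum-typed fields come back from the wire (the payload is made up): `from_dict` maps the raw string onto the corresponding enum member via the module's `_enum` helper.

    dt = DiskType.from_dict({"ebs_volume_type": "GENERAL_PURPOSE_SSD"})
    assert dt.ebs_volume_type is DiskTypeEbsVolumeType.GENERAL_PURPOSE_SSD
    assert dt.azure_disk_volume_type is None  # absent keys deserialize to None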
See https://aws.amazon.com/ebs/details/ for details.""" - - GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD' - THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD' + + GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" + THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" + @dataclass class DockerBasicAuth: password: Optional[str] = None """Password of the user""" - + username: Optional[str] = None """Name of the user""" - + def as_dict(self) -> dict: """Serializes the DockerBasicAuth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.password is not None: body['password'] = self.password - if self.username is not None: body['username'] = self.username + if self.password is not None: + body["password"] = self.password + if self.username is not None: + body["username"] = self.username return body def as_shallow_dict(self) -> dict: """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes.""" body = {} - if self.password is not None: body['password'] = self.password - if self.username is not None: body['username'] = self.username + if self.password is not None: + body["password"] = self.password + if self.username is not None: + body["username"] = self.username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DockerBasicAuth: """Deserializes the DockerBasicAuth from a dictionary.""" - return cls(password=d.get('password', None), username=d.get('username', None)) - - + return cls(password=d.get("password", None), username=d.get("username", None)) @dataclass class DockerImage: basic_auth: Optional[DockerBasicAuth] = None """Basic auth with username and password""" - + url: Optional[str] = None """URL of the docker image.""" - + def as_dict(self) -> dict: """Serializes the DockerImage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.basic_auth: body['basic_auth'] = self.basic_auth.as_dict() - if self.url is not None: body['url'] = self.url + if self.basic_auth: + body["basic_auth"] = self.basic_auth.as_dict() + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the DockerImage into a shallow dictionary of its immediate attributes.""" body = {} - if self.basic_auth: body['basic_auth'] = self.basic_auth - if self.url is not None: body['url'] = self.url + if self.basic_auth: + body["basic_auth"] = self.basic_auth + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DockerImage: """Deserializes the DockerImage from a dictionary.""" - return cls(basic_auth=_from_dict(d, 'basic_auth', DockerBasicAuth), url=d.get('url', None)) - - + return cls(basic_auth=_from_dict(d, "basic_auth", DockerBasicAuth), url=d.get("url", None)) class EbsVolumeType(Enum): """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for details.""" - - GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD' - THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD' + + GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" + THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" + @dataclass class EditCluster: cluster_id: str """ID of the cluster""" - + spark_version: str """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + apply_policy_default_values: Optional[bool] = None """When set to true, fixed and default values from the policy will be used for fields that are omitted. 
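One behavioral note on the serializers generated for the Docker types above, shown as a hedged sketch with placeholder registry values: `as_dict` recurses into nested messages, while `as_shallow_dict` leaves them as dataclass instances.

    img = DockerImage(
        url="registry.example.com/my-runtime:latest",  # placeholder URL
        basic_auth=DockerBasicAuth(username="deploy", password="REDACTED"),
    )
    assert isinstance(img.as_dict()["basic_auth"], dict)
    assert isinstance(img.as_shallow_dict()["basic_auth"], DockerBasicAuth)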
When set to false, only fixed values from the policy will be applied.""" - + autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - + cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. For job clusters, the cluster name is automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -3416,7 +4126,7 @@ class EditCluster: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. @@ -3442,14 +4152,14 @@ class EditCluster: concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - + docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. @@ -3457,33 +4167,33 @@ class EditCluster: This field, along with node_type_id, should not be set if virtual_cluster_size is set. 
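To make the access-mode fields concrete, a hedged sketch of a dedicated-cluster edit (the cluster ID and user name are placeholders; the Spark version reuses the docstring's own example):

    edit_req = EditCluster(
        cluster_id="1234-567890-abcde123",       # placeholder cluster ID
        spark_version="3.3.x-scala2.11",
        data_security_mode=DataSecurityMode.SINGLE_USER,
        single_user_name="someone@example.com",  # used when data_security_mode is SINGLE_USER
    )
    assert edit_req.as_dict()["data_security_mode"] == "SINGLE_USER"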
If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space. This feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable LUKS on cluster VMs' local disks""" - + gcp_attributes: Optional[GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + is_single_node: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`""" - + kind: Optional[Kind] = None """The kind of compute described by this compute specification. @@ -3498,13 +4208,13 @@ class EditCluster: By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. [simple form]: https://docs.databricks.com/compute/simple-form.html""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. @@ -3514,10 +4224,10 @@ class EditCluster: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + runtime_engine: Optional[RuntimeEngine] = None """Determines the cluster's runtime engine, either standard or Photon. @@ -3526,16 +4236,16 @@ class EditCluster: If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used.""" - + single_user_name: Optional[str] = None """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str,str]] = None + + spark_conf: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. 
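For the two string maps, a hedged sketch reusing the docstrings' own sample values (the single Spark key is illustrative, not mandated by this API):

    conf_req = EditCluster(
        cluster_id="1234-567890-abcde123",  # placeholder cluster ID
        spark_version="3.3.x-scala2.11",
        spark_conf={"spark.speculation": "true"},  # illustrative Spark setting
        spark_env_vars={"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"},
    )
    assert conf_req.as_dict()["spark_env_vars"]["SPARK_WORKER_MEMORY"] == "28000m"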
Users can also pass in a string of extra JVM options to the driver and the executors via `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - - spark_env_vars: Optional[Dict[str,str]] = None + + spark_env_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -3547,99 +4257,191 @@ class EditCluster: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + use_ml_runtime: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" - + workload_type: Optional[WorkloadType] = None """Cluster Attributes showing for clusters workload types.""" - + def as_dict(self) -> dict: """Serializes the EditCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values - if self.autoscale: body['autoscale'] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: body['cluster_name'] = self.cluster_name - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value - if self.docker_image: body['docker_image'] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.is_single_node is not None: body['is_single_node'] = self.is_single_node - if self.kind is not None: body['kind'] = self.kind.value - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.runtime_engine is not None: 
body['runtime_engine'] = self.runtime_engine.value - if self.single_user_name is not None: body['single_user_name'] = self.single_user_name - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.spark_version is not None: body['spark_version'] = self.spark_version - if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime - if self.workload_type: body['workload_type'] = self.workload_type.as_dict() + if self.apply_policy_default_values is not None: + body["apply_policy_default_values"] = self.apply_policy_default_values + if self.autoscale: + body["autoscale"] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: + body["autotermination_minutes"] = self.autotermination_minutes + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes.as_dict() + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes.as_dict() + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: + body["cluster_name"] = self.cluster_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.data_security_mode is not None: + body["data_security_mode"] = self.data_security_mode.value + if self.docker_image: + body["docker_image"] = self.docker_image.as_dict() + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.init_scripts: + body["init_scripts"] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind.value + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine.value + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EditCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values - 
if self.autoscale: body['autoscale'] = self.autoscale - if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf - if self.cluster_name is not None: body['cluster_name'] = self.cluster_name - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode - if self.docker_image: body['docker_image'] = self.docker_image - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.init_scripts: body['init_scripts'] = self.init_scripts - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.is_single_node is not None: body['is_single_node'] = self.is_single_node - if self.kind is not None: body['kind'] = self.kind - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine - if self.single_user_name is not None: body['single_user_name'] = self.single_user_name - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.spark_version is not None: body['spark_version'] = self.spark_version - if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys - if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime - if self.workload_type: body['workload_type'] = self.workload_type + if self.apply_policy_default_values is not None: + body["apply_policy_default_values"] = self.apply_policy_default_values + if self.autoscale: + body["autoscale"] = self.autoscale + if self.autotermination_minutes is not None: + body["autotermination_minutes"] = self.autotermination_minutes + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf + if self.cluster_name is not None: + body["cluster_name"] = self.cluster_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.data_security_mode is not None: + body["data_security_mode"] = self.data_security_mode + if self.docker_image: + body["docker_image"] = self.docker_image + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + 
body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.init_scripts: + body["init_scripts"] = self.init_scripts + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = self.ssh_public_keys + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditCluster: """Deserializes the EditCluster from a dictionary.""" - return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_id=d.get('cluster_id', None), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) - - + return cls( + apply_policy_default_values=d.get("apply_policy_default_values", None), + autoscale=_from_dict(d, "autoscale", AutoScale), + autotermination_minutes=d.get("autotermination_minutes", None), + aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), + 
cluster_id=d.get("cluster_id", None), + cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), + cluster_name=d.get("cluster_name", None), + custom_tags=d.get("custom_tags", None), + data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), + docker_image=_from_dict(d, "docker_image", DockerImage), + driver_instance_pool_id=d.get("driver_instance_pool_id", None), + driver_node_type_id=d.get("driver_node_type_id", None), + enable_elastic_disk=d.get("enable_elastic_disk", None), + enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), + gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), + init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), + instance_pool_id=d.get("instance_pool_id", None), + is_single_node=d.get("is_single_node", None), + kind=_enum(d, "kind", Kind), + node_type_id=d.get("node_type_id", None), + num_workers=d.get("num_workers", None), + policy_id=d.get("policy_id", None), + runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), + single_user_name=d.get("single_user_name", None), + spark_conf=d.get("spark_conf", None), + spark_env_vars=d.get("spark_env_vars", None), + spark_version=d.get("spark_version", None), + ssh_public_keys=d.get("ssh_public_keys", None), + use_ml_runtime=d.get("use_ml_runtime", None), + workload_type=_from_dict(d, "workload_type", WorkloadType), + ) @dataclass @@ -3658,76 +4460,94 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditClusterResponse: """Deserializes the EditClusterResponse from a dictionary.""" return cls() - - @dataclass class EditInstancePool: instance_pool_id: str """Instance pool ID""" - + instance_pool_name: str """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters.""" - + node_type_id: str """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags""" - + idle_instance_autotermination_minutes: Optional[int] = None """Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances will be automatically terminated after a default timeout. If specified, the threshold must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances from the cache if min cache size could still hold.""" - + max_capacity: Optional[int] = None """Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances. 
Clusters that require further instance provisioning will fail during upsize requests.""" - + min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" - + def as_dict(self) -> dict: """Serializes the EditInstancePool into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id return body def as_shallow_dict(self) -> dict: """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditInstancePool: """Deserializes the EditInstancePool from a dictionary.""" - return cls(custom_tags=d.get('custom_tags', None), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), 
min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None)) - - + return cls( + custom_tags=d.get("custom_tags", None), + idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), + instance_pool_id=d.get("instance_pool_id", None), + instance_pool_name=d.get("instance_pool_name", None), + max_capacity=d.get("max_capacity", None), + min_idle_instances=d.get("min_idle_instances", None), + node_type_id=d.get("node_type_id", None), + ) @dataclass @@ -3746,35 +4566,33 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditInstancePoolResponse: """Deserializes the EditInstancePoolResponse from a dictionary.""" return cls() - - @dataclass class EditPolicy: policy_id: str """The ID of the policy to update.""" - + definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + description: Optional[str] = None """Additional human-readable description of the cluster policy.""" - + libraries: Optional[List[Library]] = None """A list of libraries to be installed on the next cluster restart that uses this policy. The maximum number of libraries is 500.""" - + max_clusters_per_user: Optional[int] = None """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" - + name: Optional[str] = None """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 characters.""" - + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -3783,46 +4601,69 @@ class EditPolicy: rules specified here are merged into the inherited policy definition. [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + policy_family_id: Optional[str] = None """ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. Cannot be used with `definition`. 
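Putting that rule into a hedged sketch (placeholder IDs and an illustrative override document): a family-based edit supplies `policy_family_id` plus overrides and leaves `definition` unset.

    policy_edit = EditPolicy(
        policy_id="ABC123DEF456",        # placeholder policy ID
        name="shared-job-policy",
        policy_family_id="job-cluster",  # placeholder family ID
        policy_family_definition_overrides='{"node_type_id": {"type": "fixed", "value": "i3.xlarge"}}',
    )
    assert "definition" not in policy_edit.as_dict()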
Use `policy_family_definition_overrides` instead to customize the policy definition.""" - + def as_dict(self) -> dict: """Serializes the EditPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name is not None: body['name'] = self.name - if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.max_clusters_per_user is not None: + body["max_clusters_per_user"] = self.max_clusters_per_user + if self.name is not None: + body["name"] = self.name + if self.policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = self.policy_family_definition_overrides + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the EditPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.libraries: body['libraries'] = self.libraries - if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name is not None: body['name'] = self.name - if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.libraries: + body["libraries"] = self.libraries + if self.max_clusters_per_user is not None: + body["max_clusters_per_user"] = self.max_clusters_per_user + if self.name is not None: + body["name"] = self.name + if self.policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = self.policy_family_definition_overrides + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditPolicy: """Deserializes the EditPolicy from a dictionary.""" - return cls(definition=d.get('definition', None), description=d.get('description', None), libraries=_repeated_dict(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), 
policy_family_id=d.get('policy_family_id', None), policy_id=d.get('policy_id', None)) - - + return cls( + definition=d.get("definition", None), + description=d.get("description", None), + libraries=_repeated_dict(d, "libraries", Library), + max_clusters_per_user=d.get("max_clusters_per_user", None), + name=d.get("name", None), + policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), + policy_family_id=d.get("policy_family_id", None), + policy_id=d.get("policy_id", None), + ) @dataclass @@ -3841,8 +4682,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditPolicyResponse: """Deserializes the EditPolicyResponse from a dictionary.""" return cls() - - @dataclass @@ -3861,39 +4700,39 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditResponse: """Deserializes the EditResponse from a dictionary.""" return cls() - - @dataclass class EnforceClusterComplianceRequest: cluster_id: str """The ID of the cluster you want to enforce policy compliance on.""" - + validate_only: Optional[bool] = None """If set, previews the changes that would be made to a cluster to enforce compliance but does not update the cluster.""" - + def as_dict(self) -> dict: """Serializes the EnforceClusterComplianceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.validate_only is not None: body['validate_only'] = self.validate_only + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.validate_only is not None: + body["validate_only"] = self.validate_only return body def as_shallow_dict(self) -> dict: """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.validate_only is not None: body['validate_only'] = self.validate_only + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.validate_only is not None: + body["validate_only"] = self.validate_only return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforceClusterComplianceRequest: """Deserializes the EnforceClusterComplianceRequest from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), validate_only=d.get('validate_only', None)) - - + return cls(cluster_id=d.get("cluster_id", None), validate_only=d.get("validate_only", None)) @dataclass @@ -3901,31 +4740,33 @@ class EnforceClusterComplianceResponse: changes: Optional[List[ClusterSettingsChange]] = None """A list of changes that have been made to the cluster settings for the cluster to become compliant with its policy.""" - + has_changes: Optional[bool] = None """Whether any changes have been made to the cluster settings for the cluster to become compliant with its policy.""" - + def as_dict(self) -> dict: """Serializes the EnforceClusterComplianceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.changes: body['changes'] = [v.as_dict() for v in self.changes] - if self.has_changes is not None: body['has_changes'] = self.has_changes + if self.changes: + body["changes"] = [v.as_dict() for v in self.changes] + if self.has_changes is not None: + body["has_changes"] = self.has_changes return body def as_shallow_dict(self) -> dict: """Serializes the EnforceClusterComplianceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.changes: 
body['changes'] = self.changes - if self.has_changes is not None: body['has_changes'] = self.has_changes + if self.changes: + body["changes"] = self.changes + if self.has_changes is not None: + body["has_changes"] = self.has_changes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnforceClusterComplianceResponse: """Deserializes the EnforceClusterComplianceResponse from a dictionary.""" - return cls(changes=_repeated_dict(d, 'changes', ClusterSettingsChange), has_changes=d.get('has_changes', None)) - - + return cls(changes=_repeated_dict(d, "changes", ClusterSettingsChange), has_changes=d.get("has_changes", None)) @dataclass @@ -3933,49 +4774,60 @@ class Environment: """The environment entity used to preserve serverless environment side panel, jobs' environment for non-notebook tasks, and DLT's environment for classic and serverless pipelines. In this minimal environment spec, only pip dependencies are supported.""" - + client: Optional[str] = None """Use `environment_version` instead.""" - + dependencies: Optional[List[str]] = None """List of pip dependencies, as supported by the version of pip in this environment. Each dependency is a valid pip requirements file line per https://pip.pypa.io/en/stable/reference/requirements-file-format/. Allowed dependencies include a requirement specifier, an archive URL, a local project path (such as WSFS or UC Volumes in Databricks), or a VCS project URL.""" - + environment_version: Optional[str] = None """Required. Environment version used by the environment. Each version comes with a specific Python version and a set of Python packages. The version is a string, consisting of an integer.""" - + jar_dependencies: Optional[List[str]] = None """List of jar dependencies, should be strings representing volume paths. 
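A hedged end-to-end sketch of this minimal spec (the version string, requirement lines, and volume path are illustrative placeholders):

    env = Environment(
        environment_version="2",  # placeholder; "a string consisting of an integer"
        dependencies=[
            "requests==2.31.0",                       # requirement specifier
            "/Volumes/main/default/libs/my_pkg.whl",  # placeholder UC Volumes path
        ],
    )
    assert Environment.from_dict(env.as_dict()) == env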
For example: `/Volumes/path/to/test.jar`.""" - + def as_dict(self) -> dict: """Serializes the Environment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.client is not None: body['client'] = self.client - if self.dependencies: body['dependencies'] = [v for v in self.dependencies] - if self.environment_version is not None: body['environment_version'] = self.environment_version - if self.jar_dependencies: body['jar_dependencies'] = [v for v in self.jar_dependencies] + if self.client is not None: + body["client"] = self.client + if self.dependencies: + body["dependencies"] = [v for v in self.dependencies] + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.jar_dependencies: + body["jar_dependencies"] = [v for v in self.jar_dependencies] return body def as_shallow_dict(self) -> dict: """Serializes the Environment into a shallow dictionary of its immediate attributes.""" body = {} - if self.client is not None: body['client'] = self.client - if self.dependencies: body['dependencies'] = self.dependencies - if self.environment_version is not None: body['environment_version'] = self.environment_version - if self.jar_dependencies: body['jar_dependencies'] = self.jar_dependencies + if self.client is not None: + body["client"] = self.client + if self.dependencies: + body["dependencies"] = self.dependencies + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.jar_dependencies: + body["jar_dependencies"] = self.jar_dependencies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Environment: """Deserializes the Environment from a dictionary.""" - return cls(client=d.get('client', None), dependencies=d.get('dependencies', None), environment_version=d.get('environment_version', None), jar_dependencies=d.get('jar_dependencies', None)) - - + return cls( + client=d.get("client", None), + dependencies=d.get("dependencies", None), + environment_version=d.get("environment_version", None), + jar_dependencies=d.get("jar_dependencies", None), + ) @dataclass @@ -3983,265 +4835,340 @@ class EventDetails: attributes: Optional[ClusterAttributes] = None """* For created clusters, the attributes of the cluster. * For edited clusters, the new attributes of the cluster.""" - + cause: Optional[EventDetailsCause] = None """The cause of a change in target size.""" - + cluster_size: Optional[ClusterSize] = None """The actual cluster size that was set in the cluster creation or edit.""" - + current_num_vcpus: Optional[int] = None """The current number of vCPUs in the cluster.""" - + current_num_workers: Optional[int] = None """The current number of nodes in the cluster.""" - + did_not_expand_reason: Optional[str] = None - + disk_size: Optional[int] = None """Current disk size in bytes""" - + driver_state_message: Optional[str] = None """More details about the change in driver's state""" - + enable_termination_for_node_blocklisted: Optional[bool] = None """Whether or not a blocklisted node should be terminated. 
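To make the event shape concrete, a small sketch with a made-up payload (only plain-typed fields, so no nested messages are involved):

    ev = EventDetails.from_dict({"current_num_workers": 5, "target_num_workers": 10})
    assert ev.target_num_workers == 10
    assert ev.reason is None  # absent nested fields deserialize to None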
For ClusterEventType NODE_BLACKLISTED.""" - + free_space: Optional[int] = None - + init_scripts: Optional[InitScriptEventDetails] = None """List of global and cluster init scripts associated with this cluster event.""" - + instance_id: Optional[str] = None """Instance Id where the event originated from""" - + job_run_name: Optional[str] = None """Unique identifier of the specific job run associated with this cluster event * For clusters created for jobs, this will be the same as the cluster name""" - + previous_attributes: Optional[ClusterAttributes] = None """The cluster attributes before a cluster was edited.""" - + previous_cluster_size: Optional[ClusterSize] = None """The size of the cluster before an edit or resize.""" - + previous_disk_size: Optional[int] = None """Previous disk size in bytes""" - + reason: Optional[TerminationReason] = None """A termination reason: * On a TERMINATED event, this is the reason of the termination. * On a RESIZE_COMPLETE event, this indicates the reason that we failed to acquire some nodes.""" - + target_num_vcpus: Optional[int] = None """The targeted number of vCPUs in the cluster.""" - + target_num_workers: Optional[int] = None """The targeted number of nodes in the cluster.""" - + user: Optional[str] = None """The user that caused the event to occur. (Empty if it was done by the control plane.)""" - + def as_dict(self) -> dict: """Serializes the EventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attributes: body['attributes'] = self.attributes.as_dict() - if self.cause is not None: body['cause'] = self.cause.value - if self.cluster_size: body['cluster_size'] = self.cluster_size.as_dict() - if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus - if self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers - if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason - if self.disk_size is not None: body['disk_size'] = self.disk_size - if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message - if self.enable_termination_for_node_blocklisted is not None: body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted - if self.free_space is not None: body['free_space'] = self.free_space - if self.init_scripts: body['init_scripts'] = self.init_scripts.as_dict() - if self.instance_id is not None: body['instance_id'] = self.instance_id - if self.job_run_name is not None: body['job_run_name'] = self.job_run_name - if self.previous_attributes: body['previous_attributes'] = self.previous_attributes.as_dict() - if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size.as_dict() - if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size - if self.reason: body['reason'] = self.reason.as_dict() - if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus - if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers - if self.user is not None: body['user'] = self.user + if self.attributes: + body["attributes"] = self.attributes.as_dict() + if self.cause is not None: + body["cause"] = self.cause.value + if self.cluster_size: + body["cluster_size"] = self.cluster_size.as_dict() + if self.current_num_vcpus is not None: + body["current_num_vcpus"] = self.current_num_vcpus + if self.current_num_workers is not None: + 
body["current_num_workers"] = self.current_num_workers + if self.did_not_expand_reason is not None: + body["did_not_expand_reason"] = self.did_not_expand_reason + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.driver_state_message is not None: + body["driver_state_message"] = self.driver_state_message + if self.enable_termination_for_node_blocklisted is not None: + body["enable_termination_for_node_blocklisted"] = self.enable_termination_for_node_blocklisted + if self.free_space is not None: + body["free_space"] = self.free_space + if self.init_scripts: + body["init_scripts"] = self.init_scripts.as_dict() + if self.instance_id is not None: + body["instance_id"] = self.instance_id + if self.job_run_name is not None: + body["job_run_name"] = self.job_run_name + if self.previous_attributes: + body["previous_attributes"] = self.previous_attributes.as_dict() + if self.previous_cluster_size: + body["previous_cluster_size"] = self.previous_cluster_size.as_dict() + if self.previous_disk_size is not None: + body["previous_disk_size"] = self.previous_disk_size + if self.reason: + body["reason"] = self.reason.as_dict() + if self.target_num_vcpus is not None: + body["target_num_vcpus"] = self.target_num_vcpus + if self.target_num_workers is not None: + body["target_num_workers"] = self.target_num_workers + if self.user is not None: + body["user"] = self.user return body def as_shallow_dict(self) -> dict: """Serializes the EventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.attributes: body['attributes'] = self.attributes - if self.cause is not None: body['cause'] = self.cause - if self.cluster_size: body['cluster_size'] = self.cluster_size - if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus - if self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers - if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason - if self.disk_size is not None: body['disk_size'] = self.disk_size - if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message - if self.enable_termination_for_node_blocklisted is not None: body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted - if self.free_space is not None: body['free_space'] = self.free_space - if self.init_scripts: body['init_scripts'] = self.init_scripts - if self.instance_id is not None: body['instance_id'] = self.instance_id - if self.job_run_name is not None: body['job_run_name'] = self.job_run_name - if self.previous_attributes: body['previous_attributes'] = self.previous_attributes - if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size - if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size - if self.reason: body['reason'] = self.reason - if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus - if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers - if self.user is not None: body['user'] = self.user + if self.attributes: + body["attributes"] = self.attributes + if self.cause is not None: + body["cause"] = self.cause + if self.cluster_size: + body["cluster_size"] = self.cluster_size + if self.current_num_vcpus is not None: + body["current_num_vcpus"] = self.current_num_vcpus + if self.current_num_workers is not None: + body["current_num_workers"] = 
self.current_num_workers + if self.did_not_expand_reason is not None: + body["did_not_expand_reason"] = self.did_not_expand_reason + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.driver_state_message is not None: + body["driver_state_message"] = self.driver_state_message + if self.enable_termination_for_node_blocklisted is not None: + body["enable_termination_for_node_blocklisted"] = self.enable_termination_for_node_blocklisted + if self.free_space is not None: + body["free_space"] = self.free_space + if self.init_scripts: + body["init_scripts"] = self.init_scripts + if self.instance_id is not None: + body["instance_id"] = self.instance_id + if self.job_run_name is not None: + body["job_run_name"] = self.job_run_name + if self.previous_attributes: + body["previous_attributes"] = self.previous_attributes + if self.previous_cluster_size: + body["previous_cluster_size"] = self.previous_cluster_size + if self.previous_disk_size is not None: + body["previous_disk_size"] = self.previous_disk_size + if self.reason: + body["reason"] = self.reason + if self.target_num_vcpus is not None: + body["target_num_vcpus"] = self.target_num_vcpus + if self.target_num_workers is not None: + body["target_num_workers"] = self.target_num_workers + if self.user is not None: + body["user"] = self.user return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EventDetails: """Deserializes the EventDetails from a dictionary.""" - return cls(attributes=_from_dict(d, 'attributes', ClusterAttributes), cause=_enum(d, 'cause', EventDetailsCause), cluster_size=_from_dict(d, 'cluster_size', ClusterSize), current_num_vcpus=d.get('current_num_vcpus', None), current_num_workers=d.get('current_num_workers', None), did_not_expand_reason=d.get('did_not_expand_reason', None), disk_size=d.get('disk_size', None), driver_state_message=d.get('driver_state_message', None), enable_termination_for_node_blocklisted=d.get('enable_termination_for_node_blocklisted', None), free_space=d.get('free_space', None), init_scripts=_from_dict(d, 'init_scripts', InitScriptEventDetails), instance_id=d.get('instance_id', None), job_run_name=d.get('job_run_name', None), previous_attributes=_from_dict(d, 'previous_attributes', ClusterAttributes), previous_cluster_size=_from_dict(d, 'previous_cluster_size', ClusterSize), previous_disk_size=d.get('previous_disk_size', None), reason=_from_dict(d, 'reason', TerminationReason), target_num_vcpus=d.get('target_num_vcpus', None), target_num_workers=d.get('target_num_workers', None), user=d.get('user', None)) - - + return cls( + attributes=_from_dict(d, "attributes", ClusterAttributes), + cause=_enum(d, "cause", EventDetailsCause), + cluster_size=_from_dict(d, "cluster_size", ClusterSize), + current_num_vcpus=d.get("current_num_vcpus", None), + current_num_workers=d.get("current_num_workers", None), + did_not_expand_reason=d.get("did_not_expand_reason", None), + disk_size=d.get("disk_size", None), + driver_state_message=d.get("driver_state_message", None), + enable_termination_for_node_blocklisted=d.get("enable_termination_for_node_blocklisted", None), + free_space=d.get("free_space", None), + init_scripts=_from_dict(d, "init_scripts", InitScriptEventDetails), + instance_id=d.get("instance_id", None), + job_run_name=d.get("job_run_name", None), + previous_attributes=_from_dict(d, "previous_attributes", ClusterAttributes), + previous_cluster_size=_from_dict(d, "previous_cluster_size", ClusterSize), + previous_disk_size=d.get("previous_disk_size", None), + 
reason=_from_dict(d, "reason", TerminationReason), + target_num_vcpus=d.get("target_num_vcpus", None), + target_num_workers=d.get("target_num_workers", None), + user=d.get("user", None), + ) class EventDetailsCause(Enum): """The cause of a change in target size.""" - - AUTORECOVERY = 'AUTORECOVERY' - AUTOSCALE = 'AUTOSCALE' - REPLACE_BAD_NODES = 'REPLACE_BAD_NODES' - USER_REQUEST = 'USER_REQUEST' + + AUTORECOVERY = "AUTORECOVERY" + AUTOSCALE = "AUTOSCALE" + REPLACE_BAD_NODES = "REPLACE_BAD_NODES" + USER_REQUEST = "USER_REQUEST" + class EventType(Enum): - - - ADD_NODES_FAILED = 'ADD_NODES_FAILED' - AUTOMATIC_CLUSTER_UPDATE = 'AUTOMATIC_CLUSTER_UPDATE' - AUTOSCALING_BACKOFF = 'AUTOSCALING_BACKOFF' - AUTOSCALING_FAILED = 'AUTOSCALING_FAILED' - AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT' - CLUSTER_MIGRATED = 'CLUSTER_MIGRATED' - CREATING = 'CREATING' - DBFS_DOWN = 'DBFS_DOWN' - DID_NOT_EXPAND_DISK = 'DID_NOT_EXPAND_DISK' - DRIVER_HEALTHY = 'DRIVER_HEALTHY' - DRIVER_NOT_RESPONDING = 'DRIVER_NOT_RESPONDING' - DRIVER_UNAVAILABLE = 'DRIVER_UNAVAILABLE' - EDITED = 'EDITED' - EXPANDED_DISK = 'EXPANDED_DISK' - FAILED_TO_EXPAND_DISK = 'FAILED_TO_EXPAND_DISK' - INIT_SCRIPTS_FINISHED = 'INIT_SCRIPTS_FINISHED' - INIT_SCRIPTS_STARTED = 'INIT_SCRIPTS_STARTED' - METASTORE_DOWN = 'METASTORE_DOWN' - NODES_LOST = 'NODES_LOST' - NODE_BLACKLISTED = 'NODE_BLACKLISTED' - NODE_EXCLUDED_DECOMMISSIONED = 'NODE_EXCLUDED_DECOMMISSIONED' - PINNED = 'PINNED' - RESIZING = 'RESIZING' - RESTARTING = 'RESTARTING' - RUNNING = 'RUNNING' - SPARK_EXCEPTION = 'SPARK_EXCEPTION' - STARTING = 'STARTING' - TERMINATING = 'TERMINATING' - UNPINNED = 'UNPINNED' - UPSIZE_COMPLETED = 'UPSIZE_COMPLETED' + + ADD_NODES_FAILED = "ADD_NODES_FAILED" + AUTOMATIC_CLUSTER_UPDATE = "AUTOMATIC_CLUSTER_UPDATE" + AUTOSCALING_BACKOFF = "AUTOSCALING_BACKOFF" + AUTOSCALING_FAILED = "AUTOSCALING_FAILED" + AUTOSCALING_STATS_REPORT = "AUTOSCALING_STATS_REPORT" + CLUSTER_MIGRATED = "CLUSTER_MIGRATED" + CREATING = "CREATING" + DBFS_DOWN = "DBFS_DOWN" + DID_NOT_EXPAND_DISK = "DID_NOT_EXPAND_DISK" + DRIVER_HEALTHY = "DRIVER_HEALTHY" + DRIVER_NOT_RESPONDING = "DRIVER_NOT_RESPONDING" + DRIVER_UNAVAILABLE = "DRIVER_UNAVAILABLE" + EDITED = "EDITED" + EXPANDED_DISK = "EXPANDED_DISK" + FAILED_TO_EXPAND_DISK = "FAILED_TO_EXPAND_DISK" + INIT_SCRIPTS_FINISHED = "INIT_SCRIPTS_FINISHED" + INIT_SCRIPTS_STARTED = "INIT_SCRIPTS_STARTED" + METASTORE_DOWN = "METASTORE_DOWN" + NODES_LOST = "NODES_LOST" + NODE_BLACKLISTED = "NODE_BLACKLISTED" + NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" + PINNED = "PINNED" + RESIZING = "RESIZING" + RESTARTING = "RESTARTING" + RUNNING = "RUNNING" + SPARK_EXCEPTION = "SPARK_EXCEPTION" + STARTING = "STARTING" + TERMINATING = "TERMINATING" + UNPINNED = "UNPINNED" + UPSIZE_COMPLETED = "UPSIZE_COMPLETED" + @dataclass class GcpAttributes: """Attributes set during cluster creation which are related to GCP.""" - + availability: Optional[GcpAvailability] = None """This field determines whether the spark executors will be scheduled to run on preemptible VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.""" - + boot_disk_size: Optional[int] = None """Boot disk size in GB""" - + google_service_account: Optional[str] = None """If provided, the cluster will impersonate the google service account when accessing gcloud services (like GCS). 
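A minimal sketch of constructing these attributes, assuming only the field semantics documented here ("AUTO" and the 375GB SSD size come from the docstrings themselves):

    gcp = GcpAttributes(
        availability=GcpAvailability.PREEMPTIBLE_WITH_FALLBACK_GCP,
        zone_id="AUTO",  # let Databricks pick the availability zone
        local_ssd_count=1,  # each local SSD is 375GB
    )
    body = gcp.as_dict()  # the availability enum is serialized as its string value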
The google service account must have previously been added to the Databricks environment by an account administrator.""" - + local_ssd_count: Optional[int] = None """If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached. Each local SSD is 375GB in size. Refer to [GCP documentation] for the supported number of local SSDs for each instance type. [GCP documentation]: https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds""" - + use_preemptible_executors: Optional[bool] = None """This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon to be deprecated, use the 'availability' field instead.""" - + zone_id: Optional[str] = None """Identifier for the availability zone in which the cluster resides. This can be one of the following: - "HA" => High availability, spread nodes across availability zones for a Databricks deployment region [default]. - "AUTO" => Databricks picks an availability zone to schedule the cluster on. - A GCP availability zone => Pick One of the available zones for (machine type + region) from https://cloud.google.com/compute/docs/regions-zones.""" - + def as_dict(self) -> dict: """Serializes the GcpAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: body['availability'] = self.availability.value - if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size - if self.google_service_account is not None: body['google_service_account'] = self.google_service_account - if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count - if self.use_preemptible_executors is not None: body['use_preemptible_executors'] = self.use_preemptible_executors - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.availability is not None: + body["availability"] = self.availability.value + if self.boot_disk_size is not None: + body["boot_disk_size"] = self.boot_disk_size + if self.google_service_account is not None: + body["google_service_account"] = self.google_service_account + if self.local_ssd_count is not None: + body["local_ssd_count"] = self.local_ssd_count + if self.use_preemptible_executors is not None: + body["use_preemptible_executors"] = self.use_preemptible_executors + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the GcpAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: body['availability'] = self.availability - if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size - if self.google_service_account is not None: body['google_service_account'] = self.google_service_account - if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count - if self.use_preemptible_executors is not None: body['use_preemptible_executors'] = self.use_preemptible_executors - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.availability is not None: + body["availability"] = self.availability + if self.boot_disk_size is not None: + body["boot_disk_size"] = self.boot_disk_size + if self.google_service_account is not None: + body["google_service_account"] = self.google_service_account + if self.local_ssd_count is not None: + body["local_ssd_count"] = self.local_ssd_count + if 
self.use_preemptible_executors is not None: + body["use_preemptible_executors"] = self.use_preemptible_executors + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpAttributes: """Deserializes the GcpAttributes from a dictionary.""" - return cls(availability=_enum(d, 'availability', GcpAvailability), boot_disk_size=d.get('boot_disk_size', None), google_service_account=d.get('google_service_account', None), local_ssd_count=d.get('local_ssd_count', None), use_preemptible_executors=d.get('use_preemptible_executors', None), zone_id=d.get('zone_id', None)) - - + return cls( + availability=_enum(d, "availability", GcpAvailability), + boot_disk_size=d.get("boot_disk_size", None), + google_service_account=d.get("google_service_account", None), + local_ssd_count=d.get("local_ssd_count", None), + use_preemptible_executors=d.get("use_preemptible_executors", None), + zone_id=d.get("zone_id", None), + ) class GcpAvailability(Enum): """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.""" - - ON_DEMAND_GCP = 'ON_DEMAND_GCP' - PREEMPTIBLE_GCP = 'PREEMPTIBLE_GCP' - PREEMPTIBLE_WITH_FALLBACK_GCP = 'PREEMPTIBLE_WITH_FALLBACK_GCP' + + ON_DEMAND_GCP = "ON_DEMAND_GCP" + PREEMPTIBLE_GCP = "PREEMPTIBLE_GCP" + PREEMPTIBLE_WITH_FALLBACK_GCP = "PREEMPTIBLE_WITH_FALLBACK_GCP" + @dataclass class GcsStorageInfo: """A storage location in Google Cloud Platform's GCS""" - + destination: str """GCS destination/URI, e.g. `gs://my-bucket/some-prefix`""" - + def as_dict(self) -> dict: """Serializes the GcsStorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the GcsStorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcsStorageInfo: """Deserializes the GcsStorageInfo from a dictionary.""" - return cls(destination=d.get('destination', None)) - - - - - + return cls(destination=d.get("destination", None)) @dataclass @@ -4249,259 +5176,280 @@ class GetClusterComplianceResponse: is_compliant: Optional[bool] = None """Whether the cluster is compliant with its policy or not. Clusters could be out of compliance if the policy was updated after the cluster was last edited.""" - - violations: Optional[Dict[str,str]] = None + + violations: Optional[Dict[str, str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. 
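A short sketch of consuming this response; the violation payload below is hypothetical:

    resp = GetClusterComplianceResponse.from_dict(
        {"is_compliant": False, "violations": {"spark_version": "value not allowed by policy"}}
    )
    if not resp.is_compliant:
        for path, message in (resp.violations or {}).items():
            print(path, "->", message)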
The values indicate an error message describing the policy validation error.""" - + def as_dict(self) -> dict: """Serializes the GetClusterComplianceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_compliant is not None: body['is_compliant'] = self.is_compliant - if self.violations: body['violations'] = self.violations + if self.is_compliant is not None: + body["is_compliant"] = self.is_compliant + if self.violations: + body["violations"] = self.violations return body def as_shallow_dict(self) -> dict: """Serializes the GetClusterComplianceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_compliant is not None: body['is_compliant'] = self.is_compliant - if self.violations: body['violations'] = self.violations + if self.is_compliant is not None: + body["is_compliant"] = self.is_compliant + if self.violations: + body["violations"] = self.violations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetClusterComplianceResponse: """Deserializes the GetClusterComplianceResponse from a dictionary.""" - return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None)) - - - - - + return cls(is_compliant=d.get("is_compliant", None), violations=d.get("violations", None)) @dataclass class GetClusterPermissionLevelsResponse: permission_levels: Optional[List[ClusterPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetClusterPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetClusterPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetClusterPermissionLevelsResponse: """Deserializes the GetClusterPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', ClusterPermissionsDescription)) - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", ClusterPermissionsDescription)) @dataclass class GetClusterPolicyPermissionLevelsResponse: permission_levels: Optional[List[ClusterPolicyPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetClusterPolicyPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetClusterPolicyPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetClusterPolicyPermissionLevelsResponse: 
"""Deserializes the GetClusterPolicyPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', ClusterPolicyPermissionsDescription)) - - - - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", ClusterPolicyPermissionsDescription)) @dataclass class GetEvents: cluster_id: str """The ID of the cluster to retrieve events about.""" - + end_time: Optional[int] = None """The end time in epoch milliseconds. If empty, returns events up to the current time.""" - + event_types: Optional[List[EventType]] = None """An optional set of event types to filter on. If empty, all event types are returned.""" - + limit: Optional[int] = None """Deprecated: use page_token in combination with page_size instead. The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed value is 500.""" - + offset: Optional[int] = None """Deprecated: use page_token in combination with page_size instead. The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results are requested in descending order, the end_time field is required.""" - + order: Optional[GetEventsOrder] = None """The order to list events in; either "ASC" or "DESC". Defaults to "DESC".""" - + page_size: Optional[int] = None """The maximum number of events to include in a page of events. The server may further constrain the maximum number of results returned in a single page. If the page_size is empty or 0, the server will decide the number of results to be returned. The field has to be in the range [0,500]. If the value is outside the range, the server enforces 0 or 500.""" - + page_token: Optional[str] = None """Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of events respectively. If page_token is empty, the first page is returned.""" - + start_time: Optional[int] = None """The start time in epoch milliseconds. 
If empty, returns events starting from the beginning of time.""" - + def as_dict(self) -> dict: """Serializes the GetEvents into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.end_time is not None: body['end_time'] = self.end_time - if self.event_types: body['event_types'] = [v.value for v in self.event_types] - if self.limit is not None: body['limit'] = self.limit - if self.offset is not None: body['offset'] = self.offset - if self.order is not None: body['order'] = self.order.value - if self.page_size is not None: body['page_size'] = self.page_size - if self.page_token is not None: body['page_token'] = self.page_token - if self.start_time is not None: body['start_time'] = self.start_time + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.end_time is not None: + body["end_time"] = self.end_time + if self.event_types: + body["event_types"] = [v.value for v in self.event_types] + if self.limit is not None: + body["limit"] = self.limit + if self.offset is not None: + body["offset"] = self.offset + if self.order is not None: + body["order"] = self.order.value + if self.page_size is not None: + body["page_size"] = self.page_size + if self.page_token is not None: + body["page_token"] = self.page_token + if self.start_time is not None: + body["start_time"] = self.start_time return body def as_shallow_dict(self) -> dict: """Serializes the GetEvents into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.end_time is not None: body['end_time'] = self.end_time - if self.event_types: body['event_types'] = self.event_types - if self.limit is not None: body['limit'] = self.limit - if self.offset is not None: body['offset'] = self.offset - if self.order is not None: body['order'] = self.order - if self.page_size is not None: body['page_size'] = self.page_size - if self.page_token is not None: body['page_token'] = self.page_token - if self.start_time is not None: body['start_time'] = self.start_time + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.end_time is not None: + body["end_time"] = self.end_time + if self.event_types: + body["event_types"] = self.event_types + if self.limit is not None: + body["limit"] = self.limit + if self.offset is not None: + body["offset"] = self.offset + if self.order is not None: + body["order"] = self.order + if self.page_size is not None: + body["page_size"] = self.page_size + if self.page_token is not None: + body["page_token"] = self.page_token + if self.start_time is not None: + body["start_time"] = self.start_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetEvents: """Deserializes the GetEvents from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), end_time=d.get('end_time', None), event_types=_repeated_enum(d, 'event_types', EventType), limit=d.get('limit', None), offset=d.get('offset', None), order=_enum(d, 'order', GetEventsOrder), page_size=d.get('page_size', None), page_token=d.get('page_token', None), start_time=d.get('start_time', None)) - - + return cls( + cluster_id=d.get("cluster_id", None), + end_time=d.get("end_time", None), + event_types=_repeated_enum(d, "event_types", EventType), + limit=d.get("limit", None), + offset=d.get("offset", None), + order=_enum(d, "order", GetEventsOrder), + page_size=d.get("page_size", None), + page_token=d.get("page_token", 
None), + start_time=d.get("start_time", None), + ) class GetEventsOrder(Enum): - - - ASC = 'ASC' - DESC = 'DESC' + + ASC = "ASC" + DESC = "DESC" + @dataclass class GetEventsResponse: events: Optional[List[ClusterEvent]] = None - + next_page: Optional[GetEvents] = None """Deprecated: use next_page_token or prev_page_token instead. The parameters required to retrieve the next page of events. Omitted if there are no more events to read.""" - + next_page_token: Optional[str] = None """This field represents the pagination token to retrieve the next page of results. If the value is "", it means no further results for the request.""" - + prev_page_token: Optional[str] = None """This field represents the pagination token to retrieve the previous page of results. If the value is "", it means no further results for the request.""" - + total_count: Optional[int] = None """Deprecated: Returns 0 when request uses page_token. Will start returning zero when request uses offset/limit soon. The total number of events filtered by the start_time, end_time, and event_types.""" - + def as_dict(self) -> dict: """Serializes the GetEventsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.events: body['events'] = [v.as_dict() for v in self.events] - if self.next_page: body['next_page'] = self.next_page.as_dict() - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token - if self.total_count is not None: body['total_count'] = self.total_count + if self.events: + body["events"] = [v.as_dict() for v in self.events] + if self.next_page: + body["next_page"] = self.next_page.as_dict() + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token + if self.total_count is not None: + body["total_count"] = self.total_count return body def as_shallow_dict(self) -> dict: """Serializes the GetEventsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.events: body['events'] = self.events - if self.next_page: body['next_page'] = self.next_page - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token - if self.total_count is not None: body['total_count'] = self.total_count + if self.events: + body["events"] = self.events + if self.next_page: + body["next_page"] = self.next_page + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token + if self.total_count is not None: + body["total_count"] = self.total_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetEventsResponse: """Deserializes the GetEventsResponse from a dictionary.""" - return cls(events=_repeated_dict(d, 'events', ClusterEvent), next_page=_from_dict(d, 'next_page', GetEvents), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None), total_count=d.get('total_count', None)) - - - - - + return cls( + events=_repeated_dict(d, "events", ClusterEvent), + next_page=_from_dict(d, "next_page", GetEvents), + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + total_count=d.get("total_count", None), + ) @dataclass class GetInstancePool: 
instance_pool_id: str """Canonical unique identifier for the pool.""" - + aws_attributes: Optional[InstancePoolAwsAttributes] = None """Attributes related to instance pools running on Amazon Web Services. If not specified at pool creation, a set of default values will be used.""" - + azure_attributes: Optional[InstancePoolAzureAttributes] = None """Attributes related to instance pools running on Azure. If not specified at pool creation, a set of default values will be used.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags""" - - default_tags: Optional[Dict[str,str]] = None + + default_tags: Optional[Dict[str, str]] = None """Tags that are added by Databricks regardless of any ``custom_tags``, including: - Vendor: Databricks @@ -4511,189 +5459,230 @@ class GetInstancePool: - InstancePoolName: - InstancePoolId: """ - + disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire additional disk space when their Spark workers are running low on disk space. In AWS, this feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + gcp_attributes: Optional[InstancePoolGcpAttributes] = None """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool creation, a set of default values will be used.""" - + idle_instance_autotermination_minutes: Optional[int] = None """Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if the min_idle_instances requirement is already met. If not set, the extra pool instances will be automatically terminated after a default timeout. If specified, the threshold must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances from the cache if the minimum cache size can still be met.""" - + instance_pool_name: Optional[str] = None """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters.""" - + max_capacity: Optional[int] = None """Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances. Clusters that require further instance provisioning will fail during upsize requests.""" - + min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory- or compute-intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + preloaded_docker_images: Optional[List[DockerImage]] = None """Custom Docker Image BYOC""" - + preloaded_spark_versions: Optional[List[str]] = None """A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster.
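A hedged sketch of inspecting a deserialized pool; the payload is illustrative and "pool-123" is a made-up ID:

    pool = GetInstancePool.from_dict(
        {"instance_pool_id": "pool-123", "state": "ACTIVE", "min_idle_instances": 2}
    )
    if pool.state is InstancePoolState.ACTIVE:
        print(pool.instance_pool_id, pool.min_idle_instances)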
A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + state: Optional[InstancePoolState] = None """Current state of the instance pool.""" - + stats: Optional[InstancePoolStats] = None """Usage statistics about the instance pool.""" - + status: Optional[InstancePoolStatus] = None """Status of failed pending instances in the pool.""" - + def as_dict(self) -> dict: """Serializes the GetInstancePool into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.default_tags: body['default_tags'] = self.default_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] - if self.state is not None: body['state'] = self.state.value - if self.stats: body['stats'] = self.stats.as_dict() - if self.status: body['status'] = self.status.as_dict() + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes.as_dict() + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes.as_dict() + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.default_tags: + body["default_tags"] = self.default_tags + if self.disk_spec: + body["disk_spec"] = self.disk_spec.as_dict() + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.preloaded_docker_images: + body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: + body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions] + if self.state is not None: + body["state"] = self.state.value + if self.stats: + body["stats"] = 
self.stats.as_dict() + if self.status: + body["status"] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetInstancePool into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.default_tags: body['default_tags'] = self.default_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images - if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions - if self.state is not None: body['state'] = self.state - if self.stats: body['stats'] = self.stats - if self.status: body['status'] = self.status + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.default_tags: + body["default_tags"] = self.default_tags + if self.disk_spec: + body["disk_spec"] = self.disk_spec + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.preloaded_docker_images: + body["preloaded_docker_images"] = self.preloaded_docker_images + if self.preloaded_spark_versions: + body["preloaded_spark_versions"] = self.preloaded_spark_versions + if self.state is not None: + body["state"] = self.state + if self.stats: + body["stats"] = self.stats + if self.status: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetInstancePool: """Deserializes the GetInstancePool from a dictionary.""" - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), default_tags=d.get('default_tags', None), disk_spec=_from_dict(d, 
'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None), state=_enum(d, 'state', InstancePoolState), stats=_from_dict(d, 'stats', InstancePoolStats), status=_from_dict(d, 'status', InstancePoolStatus)) - - - - - + return cls( + aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes), + custom_tags=d.get("custom_tags", None), + default_tags=d.get("default_tags", None), + disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_elastic_disk=d.get("enable_elastic_disk", None), + gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), + idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), + instance_pool_id=d.get("instance_pool_id", None), + instance_pool_name=d.get("instance_pool_name", None), + max_capacity=d.get("max_capacity", None), + min_idle_instances=d.get("min_idle_instances", None), + node_type_id=d.get("node_type_id", None), + preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), + preloaded_spark_versions=d.get("preloaded_spark_versions", None), + state=_enum(d, "state", InstancePoolState), + stats=_from_dict(d, "stats", InstancePoolStats), + status=_from_dict(d, "status", InstancePoolStatus), + ) @dataclass class GetInstancePoolPermissionLevelsResponse: permission_levels: Optional[List[InstancePoolPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetInstancePoolPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetInstancePoolPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetInstancePoolPermissionLevelsResponse: """Deserializes the GetInstancePoolPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', InstancePoolPermissionsDescription)) - - - - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", InstancePoolPermissionsDescription)) @dataclass class GetSparkVersionsResponse: versions: Optional[List[SparkVersion]] = None """All the available Spark versions.""" - + def as_dict(self) -> dict: """Serializes the GetSparkVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.versions: body['versions'] = [v.as_dict() for v in self.versions] + if 
self.versions: + body["versions"] = [v.as_dict() for v in self.versions] return body def as_shallow_dict(self) -> dict: """Serializes the GetSparkVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.versions: body['versions'] = self.versions + if self.versions: + body["versions"] = self.versions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetSparkVersionsResponse: """Deserializes the GetSparkVersionsResponse from a dictionary.""" - return cls(versions=_repeated_dict(d, 'versions', SparkVersion)) - - + return cls(versions=_repeated_dict(d, "versions", SparkVersion)) @dataclass class GlobalInitScriptCreateRequest: name: str """The name of the script""" - + script: str """The Base64-encoded content of the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. The script runs only if enabled.""" - + position: Optional[int] = None """The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. @@ -4704,171 +5693,231 @@ class GlobalInitScriptCreateRequest: 0, 1, and 2. Any position of (3) or greater puts the script in the last position. If an explicit position value conflicts with an existing script value, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1.""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptCreateRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script is not None: body['script'] = self.script + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script is not None: + body["script"] = self.script return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script is not None: body['script'] = self.script + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script is not None: + body["script"] = self.script return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptCreateRequest: """Deserializes the GlobalInitScriptCreateRequest from a dictionary.""" - return cls(enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None)) - - + return cls( + enabled=d.get("enabled", None), + name=d.get("name", None), + position=d.get("position", None), + script=d.get("script", None), + ) @dataclass class GlobalInitScriptDetails: created_at: Optional[int] = None """Time when the script was created, represented as a Unix timestamp in milliseconds.""" - + created_by: Optional[str] = None """The username of the user who created the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. 
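Because `script` carries Base64-encoded text and `position` follows the ordering rules described above, a minimal sketch of building a create request; the name and script body are illustrative:

    import base64

    script_text = "#!/bin/bash\necho hello"
    req = GlobalInitScriptCreateRequest(
        name="my-init-script",
        script=base64.b64encode(script_text.encode("utf-8")).decode("ascii"),
        enabled=True,
        position=0,  # run first; existing scripts shift down by one
    )
    body = req.as_dict()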
The script runs only if enabled.""" - + name: Optional[str] = None """The name of the script""" - + position: Optional[int] = None """The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.""" - + script_id: Optional[str] = None """The global init script ID.""" - + updated_at: Optional[int] = None """Time when the script was updated, represented as a Unix timestamp in milliseconds.""" - + updated_by: Optional[str] = None """The username of the user who last updated the script""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script_id is not None: body['script_id'] = self.script_id - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script_id is not None: + body["script_id"] = self.script_id + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script_id is not None: body['script_id'] = self.script_id - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script_id is not None: + body["script_id"] = self.script_id + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptDetails: """Deserializes the GlobalInitScriptDetails from a dictionary.""" - return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script_id=d.get('script_id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + created_at=d.get("created_at", None), + created_by=d.get("created_by", 
None), + enabled=d.get("enabled", None), + name=d.get("name", None), + position=d.get("position", None), + script_id=d.get("script_id", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class GlobalInitScriptDetailsWithContent: created_at: Optional[int] = None """Time when the script was created, represented as a Unix timestamp in milliseconds.""" - + created_by: Optional[str] = None """The username of the user who created the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. The script runs only if enabled.""" - + name: Optional[str] = None """The name of the script""" - + position: Optional[int] = None """The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.""" - + script: Optional[str] = None """The Base64-encoded content of the script.""" - + script_id: Optional[str] = None """The global init script ID.""" - + updated_at: Optional[int] = None """Time when the script was updated, represented as a Unix timestamp in milliseconds.""" - + updated_by: Optional[str] = None """The username of the user who last updated the script""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptDetailsWithContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script is not None: body['script'] = self.script - if self.script_id is not None: body['script_id'] = self.script_id - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script is not None: + body["script"] = self.script + if self.script_id is not None: + body["script_id"] = self.script_id + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptDetailsWithContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script is not None: body['script'] = self.script - if self.script_id is not None: body['script_id'] = self.script_id - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not 
None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script is not None: + body["script"] = self.script + if self.script_id is not None: + body["script_id"] = self.script_id + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptDetailsWithContent: """Deserializes the GlobalInitScriptDetailsWithContent from a dictionary.""" - return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None), script_id=d.get('script_id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + enabled=d.get("enabled", None), + name=d.get("name", None), + position=d.get("position", None), + script=d.get("script", None), + script_id=d.get("script_id", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class GlobalInitScriptUpdateRequest: name: str """The name of the script""" - + script: str """The Base64-encoded content of the script.""" - + enabled: Optional[bool] = None """Specifies whether the script is enabled. The script runs only if enabled.""" - + position: Optional[int] = None """The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. To move the script to run first, set its position to 0. @@ -4879,150 +5928,193 @@ class GlobalInitScriptUpdateRequest: If an explicit position value conflicts with an existing script, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1.""" - + script_id: Optional[str] = None """The ID of the global init script.""" - + def as_dict(self) -> dict: """Serializes the GlobalInitScriptUpdateRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script is not None: body['script'] = self.script - if self.script_id is not None: body['script_id'] = self.script_id + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script is not None: + body["script"] = self.script + if self.script_id is not None: + body["script_id"] = self.script_id return body def as_shallow_dict(self) -> dict: """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.script is not None: body['script'] = self.script - if self.script_id is not None: body['script_id'] = self.script_id + if self.enabled is not None: + body["enabled"] = self.enabled + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.script is not None: + body["script"] = self.script + if self.script_id 
is not None: + body["script_id"] = self.script_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptUpdateRequest: """Deserializes the GlobalInitScriptUpdateRequest from a dictionary.""" - return cls(enabled=d.get('enabled', None), name=d.get('name', None), position=d.get('position', None), script=d.get('script', None), script_id=d.get('script_id', None)) - - + return cls( + enabled=d.get("enabled", None), + name=d.get("name", None), + position=d.get("position", None), + script=d.get("script", None), + script_id=d.get("script_id", None), + ) @dataclass class InitScriptEventDetails: cluster: Optional[List[InitScriptInfoAndExecutionDetails]] = None """The cluster scoped init scripts associated with this cluster event.""" - + global_: Optional[List[InitScriptInfoAndExecutionDetails]] = None """The global init scripts associated with this cluster event.""" - + reported_for_node: Optional[str] = None """The private IP of the node we are reporting init script execution details for (we will select the execution details from only one node rather than reporting the execution details from every node to keep these event details small). This should only be defined for the INIT_SCRIPTS_FINISHED event.""" - + def as_dict(self) -> dict: """Serializes the InitScriptEventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster: body['cluster'] = [v.as_dict() for v in self.cluster] - if self.global_: body['global'] = [v.as_dict() for v in self.global_] - if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node + if self.cluster: + body["cluster"] = [v.as_dict() for v in self.cluster] + if self.global_: + body["global"] = [v.as_dict() for v in self.global_] + if self.reported_for_node is not None: + body["reported_for_node"] = self.reported_for_node return body def as_shallow_dict(self) -> dict: """Serializes the InitScriptEventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster: body['cluster'] = self.cluster - if self.global_: body['global'] = self.global_ - if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node + if self.cluster: + body["cluster"] = self.cluster + if self.global_: + body["global"] = self.global_ + if self.reported_for_node is not None: + body["reported_for_node"] = self.reported_for_node return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InitScriptEventDetails: """Deserializes the InitScriptEventDetails from a dictionary.""" - return cls(cluster=_repeated_dict(d, 'cluster', InitScriptInfoAndExecutionDetails), global_=_repeated_dict(d, 'global', InitScriptInfoAndExecutionDetails), reported_for_node=d.get('reported_for_node', None)) - - + return cls( + cluster=_repeated_dict(d, "cluster", InitScriptInfoAndExecutionDetails), + global_=_repeated_dict(d, "global", InitScriptInfoAndExecutionDetails), + reported_for_node=d.get("reported_for_node", None), + ) class InitScriptExecutionDetailsInitScriptExecutionStatus(Enum): """Result of attempted script execution""" - - FAILED_EXECUTION = 'FAILED_EXECUTION' - FAILED_FETCH = 'FAILED_FETCH' - FUSE_MOUNT_FAILED = 'FUSE_MOUNT_FAILED' - NOT_EXECUTED = 'NOT_EXECUTED' - SKIPPED = 'SKIPPED' - SUCCEEDED = 'SUCCEEDED' - UNKNOWN = 'UNKNOWN' + + FAILED_EXECUTION = "FAILED_EXECUTION" + FAILED_FETCH = "FAILED_FETCH" + FUSE_MOUNT_FAILED = "FUSE_MOUNT_FAILED" + NOT_EXECUTED = "NOT_EXECUTED" + SKIPPED = "SKIPPED" + SUCCEEDED = "SUCCEEDED" + UNKNOWN = "UNKNOWN" +
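For reference, a minimal round-trip through the serialization helpers above (a sketch, not part of the diff; it assumes these generated classes are importable from databricks.sdk.service.compute, and the payload values are illustrative):

from databricks.sdk.service.compute import (
    InitScriptEventDetails,
    InitScriptExecutionDetailsInitScriptExecutionStatus,
)

# Hypothetical event payload: from_dict maps the reserved JSON key "global"
# onto the Python-safe attribute `global_` and parses the status string into
# the enum defined above.
payload = {
    "reported_for_node": "10.0.0.12",
    "global": [{"status": "SUCCEEDED", "execution_duration_seconds": 4}],
}
details = InitScriptEventDetails.from_dict(payload)
assert details.global_[0].status is InitScriptExecutionDetailsInitScriptExecutionStatus.SUCCEEDED

# as_dict serializes enums by value and skips unset fields, so the original
# wire form is recovered.
assert details.as_dict() == payload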
@dataclass class InitScriptInfo: """Config for an individual init script Next ID: 11""" - + abfss: Optional[Adlsgen2Info] = None """destination needs to be provided, e.g. `abfss://@.dfs.core.windows.net/`""" - + dbfs: Optional[DbfsStorageInfo] = None """destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`""" - + file: Optional[LocalFileInfo] = None """destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } }`""" - + gcs: Optional[GcsStorageInfo] = None """destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }`""" - + s3: Optional[S3StorageInfo] = None """destination and either the region or endpoint need to be provided. e.g. `{ \"s3\": { \"destination\": \"s3://cluster_log_bucket/prefix\", \"region\": \"us-west-2\" } }` Cluster iam role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to write data to the s3 destination.""" - + volumes: Optional[VolumesStorageInfo] = None """destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : \"/Volumes/my-init.sh\" } }`""" - + workspace: Optional[WorkspaceStorageInfo] = None """destination needs to be provided, e.g. `{ "workspace": { "destination": "/cluster-init-scripts/setup-datadog.sh" } }`""" - + def as_dict(self) -> dict: """Serializes the InitScriptInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.abfss: body['abfss'] = self.abfss.as_dict() - if self.dbfs: body['dbfs'] = self.dbfs.as_dict() - if self.file: body['file'] = self.file.as_dict() - if self.gcs: body['gcs'] = self.gcs.as_dict() - if self.s3: body['s3'] = self.s3.as_dict() - if self.volumes: body['volumes'] = self.volumes.as_dict() - if self.workspace: body['workspace'] = self.workspace.as_dict() + if self.abfss: + body["abfss"] = self.abfss.as_dict() + if self.dbfs: + body["dbfs"] = self.dbfs.as_dict() + if self.file: + body["file"] = self.file.as_dict() + if self.gcs: + body["gcs"] = self.gcs.as_dict() + if self.s3: + body["s3"] = self.s3.as_dict() + if self.volumes: + body["volumes"] = self.volumes.as_dict() + if self.workspace: + body["workspace"] = self.workspace.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the InitScriptInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.abfss: body['abfss'] = self.abfss - if self.dbfs: body['dbfs'] = self.dbfs - if self.file: body['file'] = self.file - if self.gcs: body['gcs'] = self.gcs - if self.s3: body['s3'] = self.s3 - if self.volumes: body['volumes'] = self.volumes - if self.workspace: body['workspace'] = self.workspace + if self.abfss: + body["abfss"] = self.abfss + if self.dbfs: + body["dbfs"] = self.dbfs + if self.file: + body["file"] = self.file + if self.gcs: + body["gcs"] = self.gcs + if self.s3: + body["s3"] = self.s3 + if self.volumes: + body["volumes"] = self.volumes + if self.workspace: + body["workspace"] = self.workspace return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfo: """Deserializes the InitScriptInfo from a dictionary.""" - return cls(abfss=_from_dict(d, 'abfss', Adlsgen2Info), dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), file=_from_dict(d, 'file', LocalFileInfo), gcs=_from_dict(d, 'gcs', GcsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo), volumes=_from_dict(d, 'volumes', VolumesStorageInfo), workspace=_from_dict(d, 'workspace', WorkspaceStorageInfo)) - - + return cls( + abfss=_from_dict(d, "abfss", 
Adlsgen2Info), + dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), + file=_from_dict(d, "file", LocalFileInfo), + gcs=_from_dict(d, "gcs", GcsStorageInfo), + s3=_from_dict(d, "s3", S3StorageInfo), + volumes=_from_dict(d, "volumes", VolumesStorageInfo), + workspace=_from_dict(d, "workspace", WorkspaceStorageInfo), + ) @dataclass @@ -5030,108 +6122,139 @@ class InitScriptInfoAndExecutionDetails: abfss: Optional[Adlsgen2Info] = None """destination needs to be provided, e.g. `abfss://@.dfs.core.windows.net/`""" - + dbfs: Optional[DbfsStorageInfo] = None """destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`""" - + error_message: Optional[str] = None """Additional details regarding errors (such as a file not found message if the status is FAILED_FETCH). This field should only be used to provide *additional* information to the status field, not duplicate it.""" - + execution_duration_seconds: Optional[int] = None """The duration of the script execution, in seconds""" - + file: Optional[LocalFileInfo] = None """destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } }`""" - + gcs: Optional[GcsStorageInfo] = None """destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }`""" - + s3: Optional[S3StorageInfo] = None """destination and either the region or endpoint need to be provided. e.g. `{ \"s3\": { \"destination\": \"s3://cluster_log_bucket/prefix\", \"region\": \"us-west-2\" } }` Cluster iam role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to write data to the s3 destination.""" - + status: Optional[InitScriptExecutionDetailsInitScriptExecutionStatus] = None """The current status of the script""" - + volumes: Optional[VolumesStorageInfo] = None """destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : \"/Volumes/my-init.sh\" } }`""" - + workspace: Optional[WorkspaceStorageInfo] = None """destination needs to be provided, e.g.
`{ "workspace": { "destination": "/cluster-init-scripts/setup-datadog.sh" } }`""" - + def as_dict(self) -> dict: """Serializes the InitScriptInfoAndExecutionDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.abfss: body['abfss'] = self.abfss.as_dict() - if self.dbfs: body['dbfs'] = self.dbfs.as_dict() - if self.error_message is not None: body['error_message'] = self.error_message - if self.execution_duration_seconds is not None: body['execution_duration_seconds'] = self.execution_duration_seconds - if self.file: body['file'] = self.file.as_dict() - if self.gcs: body['gcs'] = self.gcs.as_dict() - if self.s3: body['s3'] = self.s3.as_dict() - if self.status is not None: body['status'] = self.status.value - if self.volumes: body['volumes'] = self.volumes.as_dict() - if self.workspace: body['workspace'] = self.workspace.as_dict() + if self.abfss: + body["abfss"] = self.abfss.as_dict() + if self.dbfs: + body["dbfs"] = self.dbfs.as_dict() + if self.error_message is not None: + body["error_message"] = self.error_message + if self.execution_duration_seconds is not None: + body["execution_duration_seconds"] = self.execution_duration_seconds + if self.file: + body["file"] = self.file.as_dict() + if self.gcs: + body["gcs"] = self.gcs.as_dict() + if self.s3: + body["s3"] = self.s3.as_dict() + if self.status is not None: + body["status"] = self.status.value + if self.volumes: + body["volumes"] = self.volumes.as_dict() + if self.workspace: + body["workspace"] = self.workspace.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the InitScriptInfoAndExecutionDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.abfss: body['abfss'] = self.abfss - if self.dbfs: body['dbfs'] = self.dbfs - if self.error_message is not None: body['error_message'] = self.error_message - if self.execution_duration_seconds is not None: body['execution_duration_seconds'] = self.execution_duration_seconds - if self.file: body['file'] = self.file - if self.gcs: body['gcs'] = self.gcs - if self.s3: body['s3'] = self.s3 - if self.status is not None: body['status'] = self.status - if self.volumes: body['volumes'] = self.volumes - if self.workspace: body['workspace'] = self.workspace + if self.abfss: + body["abfss"] = self.abfss + if self.dbfs: + body["dbfs"] = self.dbfs + if self.error_message is not None: + body["error_message"] = self.error_message + if self.execution_duration_seconds is not None: + body["execution_duration_seconds"] = self.execution_duration_seconds + if self.file: + body["file"] = self.file + if self.gcs: + body["gcs"] = self.gcs + if self.s3: + body["s3"] = self.s3 + if self.status is not None: + body["status"] = self.status + if self.volumes: + body["volumes"] = self.volumes + if self.workspace: + body["workspace"] = self.workspace return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfoAndExecutionDetails: """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary.""" - return cls(abfss=_from_dict(d, 'abfss', Adlsgen2Info), dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), error_message=d.get('error_message', None), execution_duration_seconds=d.get('execution_duration_seconds', None), file=_from_dict(d, 'file', LocalFileInfo), gcs=_from_dict(d, 'gcs', GcsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo), status=_enum(d, 'status', InitScriptExecutionDetailsInitScriptExecutionStatus), volumes=_from_dict(d, 'volumes', VolumesStorageInfo), workspace=_from_dict(d, 'workspace', 
WorkspaceStorageInfo)) - - + return cls( + abfss=_from_dict(d, "abfss", Adlsgen2Info), + dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), + error_message=d.get("error_message", None), + execution_duration_seconds=d.get("execution_duration_seconds", None), + file=_from_dict(d, "file", LocalFileInfo), + gcs=_from_dict(d, "gcs", GcsStorageInfo), + s3=_from_dict(d, "s3", S3StorageInfo), + status=_enum(d, "status", InitScriptExecutionDetailsInitScriptExecutionStatus), + volumes=_from_dict(d, "volumes", VolumesStorageInfo), + workspace=_from_dict(d, "workspace", WorkspaceStorageInfo), + ) @dataclass class InstallLibraries: cluster_id: str """Unique identifier for the cluster on which to install these libraries.""" - + libraries: List[Library] """The libraries to install.""" - + def as_dict(self) -> dict: """Serializes the InstallLibraries into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] return body def as_shallow_dict(self) -> dict: """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.libraries: body['libraries'] = self.libraries + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.libraries: + body["libraries"] = self.libraries return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstallLibraries: """Deserializes the InstallLibraries from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated_dict(d, 'libraries', Library)) - - + return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) @dataclass @@ -5150,93 +6273,116 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> InstallLibrariesResponse: """Deserializes the InstallLibrariesResponse from a dictionary.""" return cls() - - @dataclass class InstancePoolAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[InstancePoolPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = 
{} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAccessControlRequest: """Deserializes the InstancePoolAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class InstancePoolAccessControlResponse: all_permissions: Optional[List[InstancePoolPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = 
self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAccessControlResponse: """Deserializes the InstancePoolAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', InstancePoolPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", InstancePoolPermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass @@ -5244,18 +6390,18 @@ class InstancePoolAndStats: aws_attributes: Optional[InstancePoolAwsAttributes] = None """Attributes related to instance pools running on Amazon Web Services. If not specified at pool creation, a set of default values will be used.""" - + azure_attributes: Optional[InstancePoolAzureAttributes] = None """Attributes related to instance pools running on Azure. If not specified at pool creation, a set of default values will be used.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - Currently, Databricks allows at most 45 custom tags""" - - default_tags: Optional[Dict[str,str]] = None + + default_tags: Optional[Dict[str, str]] = None """Tags that are added by Databricks regardless of any ``custom_tags``, including: - Vendor: Databricks @@ -5265,126 +6411,179 @@ class InstancePoolAndStats: - InstancePoolName: - InstancePoolId: """ - + disk_spec: Optional[DiskSpec] = None """Defines the specification of the disks that will be attached to all spark containers.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire additional disk space when their Spark workers are running low on disk space. In AWS, this feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + gcp_attributes: Optional[InstancePoolGcpAttributes] = None """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool creation, a set of default values will be used.""" - + idle_instance_autotermination_minutes: Optional[int] = None """Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances will be automatically terminated after a default timeout. If specified, the threshold must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances from the cache if min cache size could still hold.""" - + instance_pool_id: Optional[str] = None """Canonical unique identifier for the pool.""" - + instance_pool_name: Optional[str] = None """Pool name requested by the user. Pool name must be unique.
Length must be between 1 and 100 characters.""" - + max_capacity: Optional[int] = None """Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances. Clusters that require further instance provisioning will fail during upsize requests.""" - + min_idle_instances: Optional[int] = None """Minimum number of idle instances to keep in the instance pool""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + preloaded_docker_images: Optional[List[DockerImage]] = None """Custom Docker Image BYOC""" - + preloaded_spark_versions: Optional[List[str]] = None """A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + state: Optional[InstancePoolState] = None """Current state of the instance pool.""" - + stats: Optional[InstancePoolStats] = None """Usage statistics about the instance pool.""" - + status: Optional[InstancePoolStatus] = None """Status of failed pending instances in the pool.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAndStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.default_tags: body['default_tags'] = self.default_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec.as_dict() - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: body['preloaded_docker_images'] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions] - if self.state is not None: body['state'] = self.state.value - if self.stats: body['stats'] = self.stats.as_dict() - if self.status: body['status'] = self.status.as_dict() + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes.as_dict() + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes.as_dict() + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.default_tags: + body["default_tags"] = self.default_tags + if self.disk_spec: + body["disk_spec"] = self.disk_spec.as_dict() + if 
self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.preloaded_docker_images: + body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images] + if self.preloaded_spark_versions: + body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions] + if self.state is not None: + body["state"] = self.state.value + if self.stats: + body["stats"] = self.stats.as_dict() + if self.status: + body["status"] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAndStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.default_tags: body['default_tags'] = self.default_tags - if self.disk_spec: body['disk_spec'] = self.disk_spec - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name - if self.max_capacity is not None: body['max_capacity'] = self.max_capacity - if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images - if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions - if self.state is not None: body['state'] = self.state - if self.stats: body['stats'] = self.stats - if self.status: body['status'] = self.status + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.default_tags: + body["default_tags"] = self.default_tags + if self.disk_spec: + body["disk_spec"] = self.disk_spec + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if 
self.instance_pool_name is not None: + body["instance_pool_name"] = self.instance_pool_name + if self.max_capacity is not None: + body["max_capacity"] = self.max_capacity + if self.min_idle_instances is not None: + body["min_idle_instances"] = self.min_idle_instances + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.preloaded_docker_images: + body["preloaded_docker_images"] = self.preloaded_docker_images + if self.preloaded_spark_versions: + body["preloaded_spark_versions"] = self.preloaded_spark_versions + if self.state is not None: + body["state"] = self.state + if self.stats: + body["stats"] = self.stats + if self.status: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAndStats: """Deserializes the InstancePoolAndStats from a dictionary.""" - return cls(aws_attributes=_from_dict(d, 'aws_attributes', InstancePoolAwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', InstancePoolAzureAttributes), custom_tags=d.get('custom_tags', None), default_tags=d.get('default_tags', None), disk_spec=_from_dict(d, 'disk_spec', DiskSpec), enable_elastic_disk=d.get('enable_elastic_disk', None), gcp_attributes=_from_dict(d, 'gcp_attributes', InstancePoolGcpAttributes), idle_instance_autotermination_minutes=d.get('idle_instance_autotermination_minutes', None), instance_pool_id=d.get('instance_pool_id', None), instance_pool_name=d.get('instance_pool_name', None), max_capacity=d.get('max_capacity', None), min_idle_instances=d.get('min_idle_instances', None), node_type_id=d.get('node_type_id', None), preloaded_docker_images=_repeated_dict(d, 'preloaded_docker_images', DockerImage), preloaded_spark_versions=d.get('preloaded_spark_versions', None), state=_enum(d, 'state', InstancePoolState), stats=_from_dict(d, 'stats', InstancePoolStats), status=_from_dict(d, 'status', InstancePoolStatus)) - - + return cls( + aws_attributes=_from_dict(d, "aws_attributes", InstancePoolAwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes), + custom_tags=d.get("custom_tags", None), + default_tags=d.get("default_tags", None), + disk_spec=_from_dict(d, "disk_spec", DiskSpec), + enable_elastic_disk=d.get("enable_elastic_disk", None), + gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), + idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), + instance_pool_id=d.get("instance_pool_id", None), + instance_pool_name=d.get("instance_pool_name", None), + max_capacity=d.get("max_capacity", None), + min_idle_instances=d.get("min_idle_instances", None), + node_type_id=d.get("node_type_id", None), + preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), + preloaded_spark_versions=d.get("preloaded_spark_versions", None), + state=_enum(d, "state", InstancePoolState), + stats=_from_dict(d, "stats", InstancePoolStats), + status=_from_dict(d, "status", InstancePoolStatus), + ) @dataclass class InstancePoolAwsAttributes: """Attributes set during instance pool creation which are related to Amazon Web Services.""" - + availability: Optional[InstancePoolAwsAttributesAvailability] = None """Availability type used for the spot nodes.""" - + spot_bid_price_percent: Optional[int] = None """Calculates the bid price for AWS spot instances, as a percentage of the corresponding instance type's on-demand price. 
For example, if this field is set to 50, and the cluster needs a new @@ -5393,7 +6592,7 @@ class InstancePoolAwsAttributes: `r3.xlarge` instances. If not specified, the default value is 100. When spot instances are requested for this cluster, only spot instances whose bid price percentage matches this field will be considered. Note that, for safety, we enforce this field to be no more than 10000.""" - + zone_id: Optional[str] = None """Identifier for the availability zone/datacenter in which the cluster resides. This string will be of a form like "us-west-2a". The provided availability zone must be in the same region as the @@ -5401,93 +6600,108 @@ class InstancePoolAwsAttributes: deployment resides in the "us-east-1" region. This is an optional field at cluster creation, and if not specified, a default zone will be used. The list of available zones as well as the default value can be found by using the `List Zones` method.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAwsAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: body['availability'] = self.availability.value - if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.availability is not None: + body["availability"] = self.availability.value + if self.spot_bid_price_percent is not None: + body["spot_bid_price_percent"] = self.spot_bid_price_percent + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAwsAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: body['availability'] = self.availability - if self.spot_bid_price_percent is not None: body['spot_bid_price_percent'] = self.spot_bid_price_percent - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.availability is not None: + body["availability"] = self.availability + if self.spot_bid_price_percent is not None: + body["spot_bid_price_percent"] = self.spot_bid_price_percent + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAwsAttributes: """Deserializes the InstancePoolAwsAttributes from a dictionary.""" - return cls(availability=_enum(d, 'availability', InstancePoolAwsAttributesAvailability), spot_bid_price_percent=d.get('spot_bid_price_percent', None), zone_id=d.get('zone_id', None)) - - + return cls( + availability=_enum(d, "availability", InstancePoolAwsAttributesAvailability), + spot_bid_price_percent=d.get("spot_bid_price_percent", None), + zone_id=d.get("zone_id", None), + ) class InstancePoolAwsAttributesAvailability(Enum): """The set of AWS availability types supported when setting up nodes for a cluster.""" - - ON_DEMAND = 'ON_DEMAND' - SPOT = 'SPOT' + + ON_DEMAND = "ON_DEMAND" + SPOT = "SPOT" + @dataclass class InstancePoolAzureAttributes: """Attributes set during instance pool creation which are related to Azure.""" - + availability: Optional[InstancePoolAzureAttributesAvailability] = None """Availability type used for the spot nodes.""" - + spot_bid_max_price: Optional[float] = None """With variable pricing, you have the option to set a max price, in US dollars (USD). For example, the value 2 would be a max price of $2.00 USD per hour. If you set the max price to be -1, the VM won't be evicted based on price.
The price for the VM will be the current price for spot or the price for a standard VM, whichever is less, as long as there is capacity and quota available.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolAzureAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.availability is not None: body['availability'] = self.availability.value - if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price + if self.availability is not None: + body["availability"] = self.availability.value + if self.spot_bid_max_price is not None: + body["spot_bid_max_price"] = self.spot_bid_max_price return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolAzureAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.availability is not None: body['availability'] = self.availability - if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price + if self.availability is not None: + body["availability"] = self.availability + if self.spot_bid_max_price is not None: + body["spot_bid_max_price"] = self.spot_bid_max_price return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolAzureAttributes: """Deserializes the InstancePoolAzureAttributes from a dictionary.""" - return cls(availability=_enum(d, 'availability', InstancePoolAzureAttributesAvailability), spot_bid_max_price=d.get('spot_bid_max_price', None)) - - + return cls( + availability=_enum(d, "availability", InstancePoolAzureAttributesAvailability), + spot_bid_max_price=d.get("spot_bid_max_price", None), + ) class InstancePoolAzureAttributesAvailability(Enum): """The set of Azure availability types supported when setting up nodes for a cluster.""" - - ON_DEMAND_AZURE = 'ON_DEMAND_AZURE' - SPOT_AZURE = 'SPOT_AZURE' + + ON_DEMAND_AZURE = "ON_DEMAND_AZURE" + SPOT_AZURE = "SPOT_AZURE" + @dataclass class InstancePoolGcpAttributes: """Attributes set during instance pool creation which are related to GCP.""" - + gcp_availability: Optional[GcpAvailability] = None """This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.""" - + local_ssd_count: Optional[int] = None """If provided, each node in the instance pool will have this number of local SSDs attached. Each local SSD is 375GB in size. Refer to [GCP documentation] for the supported number of local SSDs for each instance type. [GCP documentation]: https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds""" - + zone_id: Optional[str] = None """Identifier for the availability zone/datacenter in which the cluster resides. This string will be of a form like "us-west1-a". The provided availability zone must be in the same region as the @@ -5501,208 +6715,255 @@ class InstancePoolGcpAttributes: https://cloud.google.com/compute/docs/regions-zones (e.g. "us-west1-a").
If empty, Databricks picks an availability zone to schedule the cluster on.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolGcpAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability.value - if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.gcp_availability is not None: + body["gcp_availability"] = self.gcp_availability.value + if self.local_ssd_count is not None: + body["local_ssd_count"] = self.local_ssd_count + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolGcpAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability - if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count - if self.zone_id is not None: body['zone_id'] = self.zone_id + if self.gcp_availability is not None: + body["gcp_availability"] = self.gcp_availability + if self.local_ssd_count is not None: + body["local_ssd_count"] = self.local_ssd_count + if self.zone_id is not None: + body["zone_id"] = self.zone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolGcpAttributes: """Deserializes the InstancePoolGcpAttributes from a dictionary.""" - return cls(gcp_availability=_enum(d, 'gcp_availability', GcpAvailability), local_ssd_count=d.get('local_ssd_count', None), zone_id=d.get('zone_id', None)) - - + return cls( + gcp_availability=_enum(d, "gcp_availability", GcpAvailability), + local_ssd_count=d.get("local_ssd_count", None), + zone_id=d.get("zone_id", None), + ) @dataclass class InstancePoolPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[InstancePoolPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the InstancePoolPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermission: """Deserializes the InstancePoolPermission from a 
dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel), + ) class InstancePoolPermissionLevel(Enum): """Permission level""" - - CAN_ATTACH_TO = 'CAN_ATTACH_TO' - CAN_MANAGE = 'CAN_MANAGE' + + CAN_ATTACH_TO = "CAN_ATTACH_TO" + CAN_MANAGE = "CAN_MANAGE" + @dataclass class InstancePoolPermissions: access_control_list: Optional[List[InstancePoolAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the InstancePoolPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissions: """Deserializes the InstancePoolPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', InstancePoolAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class InstancePoolPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[InstancePoolPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the InstancePoolPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not 
None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsDescription: """Deserializes the InstancePoolPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', InstancePoolPermissionLevel)) - - + return cls( + description=d.get("description", None), + permission_level=_enum(d, "permission_level", InstancePoolPermissionLevel), + ) @dataclass class InstancePoolPermissionsRequest: access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - + instance_pool_id: Optional[str] = None """The instance pool for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsRequest: """Deserializes the InstancePoolPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', InstancePoolAccessControlRequest), instance_pool_id=d.get('instance_pool_id', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlRequest), + instance_pool_id=d.get("instance_pool_id", None), + ) class InstancePoolState(Enum): """The state of a Cluster. 
The current allowable state transitions are as follows: - + - ``ACTIVE`` -> ``STOPPED`` - ``ACTIVE`` -> ``DELETED`` - ``STOPPED`` -> ``ACTIVE`` - ``STOPPED`` -> ``DELETED``""" - - ACTIVE = 'ACTIVE' - DELETED = 'DELETED' - STOPPED = 'STOPPED' + + ACTIVE = "ACTIVE" + DELETED = "DELETED" + STOPPED = "STOPPED" + @dataclass class InstancePoolStats: idle_count: Optional[int] = None """Number of active instances in the pool that are NOT part of a cluster.""" - + pending_idle_count: Optional[int] = None """Number of pending instances in the pool that are NOT part of a cluster.""" - + pending_used_count: Optional[int] = None """Number of pending instances in the pool that are part of a cluster.""" - + used_count: Optional[int] = None """Number of active instances in the pool that are part of a cluster.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.idle_count is not None: body['idle_count'] = self.idle_count - if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count - if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count - if self.used_count is not None: body['used_count'] = self.used_count + if self.idle_count is not None: + body["idle_count"] = self.idle_count + if self.pending_idle_count is not None: + body["pending_idle_count"] = self.pending_idle_count + if self.pending_used_count is not None: + body["pending_used_count"] = self.pending_used_count + if self.used_count is not None: + body["used_count"] = self.used_count return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.idle_count is not None: body['idle_count'] = self.idle_count - if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count - if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count - if self.used_count is not None: body['used_count'] = self.used_count + if self.idle_count is not None: + body["idle_count"] = self.idle_count + if self.pending_idle_count is not None: + body["pending_idle_count"] = self.pending_idle_count + if self.pending_used_count is not None: + body["pending_used_count"] = self.pending_used_count + if self.used_count is not None: + body["used_count"] = self.used_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolStats: """Deserializes the InstancePoolStats from a dictionary.""" - return cls(idle_count=d.get('idle_count', None), pending_idle_count=d.get('pending_idle_count', None), pending_used_count=d.get('pending_used_count', None), used_count=d.get('used_count', None)) - - + return cls( + idle_count=d.get("idle_count", None), + pending_idle_count=d.get("pending_idle_count", None), + pending_used_count=d.get("pending_used_count", None), + used_count=d.get("used_count", None), + ) @dataclass @@ -5711,32 +6972,32 @@ class InstancePoolStatus: """List of error messages for the failed pending instances. The pending_instance_errors follows FIFO with maximum length of the min_idle of the pool. 
The pending_instance_errors is emptied once the number of existing available instances reaches the min_idle of the pool.""" - + def as_dict(self) -> dict: """Serializes the InstancePoolStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pending_instance_errors: body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors] + if self.pending_instance_errors: + body["pending_instance_errors"] = [v.as_dict() for v in self.pending_instance_errors] return body def as_shallow_dict(self) -> dict: """Serializes the InstancePoolStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.pending_instance_errors: body['pending_instance_errors'] = self.pending_instance_errors + if self.pending_instance_errors: + body["pending_instance_errors"] = self.pending_instance_errors return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstancePoolStatus: """Deserializes the InstancePoolStatus from a dictionary.""" - return cls(pending_instance_errors=_repeated_dict(d, 'pending_instance_errors', PendingInstanceError)) - - + return cls(pending_instance_errors=_repeated_dict(d, "pending_instance_errors", PendingInstanceError)) @dataclass class InstanceProfile: instance_profile_arn: str """The AWS ARN of the instance profile to register with Databricks. This field is required.""" - + iam_role_arn: Optional[str] = None """The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile @@ -5745,526 +7006,589 @@ class InstanceProfile: Otherwise, this field is optional. [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html""" - + is_meta_instance_profile: Optional[bool] = None """Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains a meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization.
This field is optional, the default value is `false`.""" - + def as_dict(self) -> dict: """Serializes the InstanceProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile + if self.iam_role_arn is not None: + body["iam_role_arn"] = self.iam_role_arn + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: + body["is_meta_instance_profile"] = self.is_meta_instance_profile return body def as_shallow_dict(self) -> dict: """Serializes the InstanceProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: body['is_meta_instance_profile'] = self.is_meta_instance_profile + if self.iam_role_arn is not None: + body["iam_role_arn"] = self.iam_role_arn + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.is_meta_instance_profile is not None: + body["is_meta_instance_profile"] = self.is_meta_instance_profile return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InstanceProfile: """Deserializes the InstanceProfile from a dictionary.""" - return cls(iam_role_arn=d.get('iam_role_arn', None), instance_profile_arn=d.get('instance_profile_arn', None), is_meta_instance_profile=d.get('is_meta_instance_profile', None)) - - + return cls( + iam_role_arn=d.get("iam_role_arn", None), + instance_profile_arn=d.get("instance_profile_arn", None), + is_meta_instance_profile=d.get("is_meta_instance_profile", None), + ) class Kind(Enum): """The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html""" - - CLASSIC_PREVIEW = 'CLASSIC_PREVIEW' + + CLASSIC_PREVIEW = "CLASSIC_PREVIEW" + class Language(Enum): - - - PYTHON = 'python' - SCALA = 'scala' - SQL = 'sql' + + PYTHON = "python" + SCALA = "scala" + SQL = "sql" + @dataclass class Library: cran: Optional[RCranLibrary] = None """Specification of a CRAN library to be installed as part of the library""" - + egg: Optional[str] = None """Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.""" - + jar: Optional[str] = None """URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. 
For example: `{ "jar": "/Workspace/path/to/library.jar" }`, `{ "jar" : "/Volumes/path/to/library.jar" }` or `{ "jar": "s3://my-bucket/library.jar" }`. If S3 is used, please make sure the cluster has read access on the library. You may need to launch the cluster with an IAM role to access the S3 URI.""" - + maven: Optional[MavenLibrary] = None """Specification of a Maven library to be installed. For example: `{ "coordinates": "org.jsoup:jsoup:1.7.2" }`""" - + pypi: Optional[PythonPyPiLibrary] = None """Specification of a PyPI library to be installed. For example: `{ "package": "simplejson" }`""" - + requirements: Optional[str] = None """URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes paths are supported. For example: `{ "requirements": "/Workspace/path/to/requirements.txt" }` or `{ "requirements" : "/Volumes/path/to/requirements.txt" }`""" - + whl: Optional[str] = None """URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: `{ "whl": "/Workspace/path/to/library.whl" }`, `{ "whl" : "/Volumes/path/to/library.whl" }` or `{ "whl": "s3://my-bucket/library.whl" }`. If S3 is used, please make sure the cluster has read access on the library. You may need to launch the cluster with an IAM role to access the S3 URI.""" - + def as_dict(self) -> dict: """Serializes the Library into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cran: body['cran'] = self.cran.as_dict() - if self.egg is not None: body['egg'] = self.egg - if self.jar is not None: body['jar'] = self.jar - if self.maven: body['maven'] = self.maven.as_dict() - if self.pypi: body['pypi'] = self.pypi.as_dict() - if self.requirements is not None: body['requirements'] = self.requirements - if self.whl is not None: body['whl'] = self.whl + if self.cran: + body["cran"] = self.cran.as_dict() + if self.egg is not None: + body["egg"] = self.egg + if self.jar is not None: + body["jar"] = self.jar + if self.maven: + body["maven"] = self.maven.as_dict() + if self.pypi: + body["pypi"] = self.pypi.as_dict() + if self.requirements is not None: + body["requirements"] = self.requirements + if self.whl is not None: + body["whl"] = self.whl return body def as_shallow_dict(self) -> dict: """Serializes the Library into a shallow dictionary of its immediate attributes.""" body = {} - if self.cran: body['cran'] = self.cran - if self.egg is not None: body['egg'] = self.egg - if self.jar is not None: body['jar'] = self.jar - if self.maven: body['maven'] = self.maven - if self.pypi: body['pypi'] = self.pypi - if self.requirements is not None: body['requirements'] = self.requirements - if self.whl is not None: body['whl'] = self.whl + if self.cran: + body["cran"] = self.cran + if self.egg is not None: + body["egg"] = self.egg + if self.jar is not None: + body["jar"] = self.jar + if self.maven: + body["maven"] = self.maven + if self.pypi: + body["pypi"] = self.pypi + if self.requirements is not None: + body["requirements"] = self.requirements + if self.whl is not None: + body["whl"] = self.whl return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Library: """Deserializes the Library from a dictionary.""" - return cls(cran=_from_dict(d, 'cran', RCranLibrary), egg=d.get('egg', None), jar=d.get('jar', None), maven=_from_dict(d, 'maven', MavenLibrary), pypi=_from_dict(d, 'pypi', PythonPyPiLibrary), requirements=d.get('requirements', None), whl=d.get('whl', None)) - - + return cls( + cran=_from_dict(d, "cran",
RCranLibrary), + egg=d.get("egg", None), + jar=d.get("jar", None), + maven=_from_dict(d, "maven", MavenLibrary), + pypi=_from_dict(d, "pypi", PythonPyPiLibrary), + requirements=d.get("requirements", None), + whl=d.get("whl", None), + ) @dataclass class LibraryFullStatus: """The status of the library on a specific cluster.""" - + is_library_for_all_clusters: Optional[bool] = None """Whether the library was set to be installed on all clusters via the libraries UI.""" - + library: Optional[Library] = None """Unique identifier for the library.""" - + messages: Optional[List[str]] = None """All the info and warning messages that have occurred so far for this library.""" - + status: Optional[LibraryInstallStatus] = None """Status of installing the library on the cluster.""" - + def as_dict(self) -> dict: """Serializes the LibraryFullStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_library_for_all_clusters is not None: body['is_library_for_all_clusters'] = self.is_library_for_all_clusters - if self.library: body['library'] = self.library.as_dict() - if self.messages: body['messages'] = [v for v in self.messages] - if self.status is not None: body['status'] = self.status.value + if self.is_library_for_all_clusters is not None: + body["is_library_for_all_clusters"] = self.is_library_for_all_clusters + if self.library: + body["library"] = self.library.as_dict() + if self.messages: + body["messages"] = [v for v in self.messages] + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the LibraryFullStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_library_for_all_clusters is not None: body['is_library_for_all_clusters'] = self.is_library_for_all_clusters - if self.library: body['library'] = self.library - if self.messages: body['messages'] = self.messages - if self.status is not None: body['status'] = self.status + if self.is_library_for_all_clusters is not None: + body["is_library_for_all_clusters"] = self.is_library_for_all_clusters + if self.library: + body["library"] = self.library + if self.messages: + body["messages"] = self.messages + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LibraryFullStatus: """Deserializes the LibraryFullStatus from a dictionary.""" - return cls(is_library_for_all_clusters=d.get('is_library_for_all_clusters', None), library=_from_dict(d, 'library', Library), messages=d.get('messages', None), status=_enum(d, 'status', LibraryInstallStatus)) - - + return cls( + is_library_for_all_clusters=d.get("is_library_for_all_clusters", None), + library=_from_dict(d, "library", Library), + messages=d.get("messages", None), + status=_enum(d, "status", LibraryInstallStatus), + ) class LibraryInstallStatus(Enum): """The status of a library on a specific cluster.""" - - FAILED = 'FAILED' - INSTALLED = 'INSTALLED' - INSTALLING = 'INSTALLING' - PENDING = 'PENDING' - RESOLVING = 'RESOLVING' - RESTORED = 'RESTORED' - SKIPPED = 'SKIPPED' - UNINSTALL_ON_RESTART = 'UNINSTALL_ON_RESTART' + + FAILED = "FAILED" + INSTALLED = "INSTALLED" + INSTALLING = "INSTALLING" + PENDING = "PENDING" + RESOLVING = "RESOLVING" + RESTORED = "RESTORED" + SKIPPED = "SKIPPED" + UNINSTALL_ON_RESTART = "UNINSTALL_ON_RESTART" + @dataclass class ListAllClusterLibraryStatusesResponse: statuses: Optional[List[ClusterLibraryStatuses]] = None """A list of cluster statuses.""" - + def 
as_dict(self) -> dict: """Serializes the ListAllClusterLibraryStatusesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses] + if self.statuses: + body["statuses"] = [v.as_dict() for v in self.statuses] return body def as_shallow_dict(self) -> dict: """Serializes the ListAllClusterLibraryStatusesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.statuses: body['statuses'] = self.statuses + if self.statuses: + body["statuses"] = self.statuses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAllClusterLibraryStatusesResponse: """Deserializes the ListAllClusterLibraryStatusesResponse from a dictionary.""" - return cls(statuses=_repeated_dict(d, 'statuses', ClusterLibraryStatuses)) - - + return cls(statuses=_repeated_dict(d, "statuses", ClusterLibraryStatuses)) @dataclass class ListAvailableZonesResponse: default_zone: Optional[str] = None """The availability zone if no ``zone_id`` is provided in the cluster creation request.""" - + zones: Optional[List[str]] = None """The list of available zones (e.g., ['us-west-2c', 'us-east-2']).""" - + def as_dict(self) -> dict: """Serializes the ListAvailableZonesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_zone is not None: body['default_zone'] = self.default_zone - if self.zones: body['zones'] = [v for v in self.zones] + if self.default_zone is not None: + body["default_zone"] = self.default_zone + if self.zones: + body["zones"] = [v for v in self.zones] return body def as_shallow_dict(self) -> dict: """Serializes the ListAvailableZonesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_zone is not None: body['default_zone'] = self.default_zone - if self.zones: body['zones'] = self.zones + if self.default_zone is not None: + body["default_zone"] = self.default_zone + if self.zones: + body["zones"] = self.zones return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAvailableZonesResponse: """Deserializes the ListAvailableZonesResponse from a dictionary.""" - return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None)) - - - - - + return cls(default_zone=d.get("default_zone", None), zones=d.get("zones", None)) @dataclass class ListClusterCompliancesResponse: clusters: Optional[List[ClusterCompliance]] = None """A list of clusters and their policy compliance statuses.""" - + next_page_token: Optional[str] = None """This field represents the pagination token to retrieve the next page of results. If the value is "", it means no further results for the request.""" - + prev_page_token: Optional[str] = None """This field represents the pagination token to retrieve the previous page of results. 
If the value is "", it means no further results for the request.""" - + def as_dict(self) -> dict: """Serializes the ListClusterCompliancesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.clusters: + body["clusters"] = [v.as_dict() for v in self.clusters] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListClusterCompliancesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.clusters: body['clusters'] = self.clusters - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.clusters: + body["clusters"] = self.clusters + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListClusterCompliancesResponse: """Deserializes the ListClusterCompliancesResponse from a dictionary.""" - return cls(clusters=_repeated_dict(d, 'clusters', ClusterCompliance), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) - - - - - + return cls( + clusters=_repeated_dict(d, "clusters", ClusterCompliance), + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + ) @dataclass class ListClustersFilterBy: cluster_sources: Optional[List[ClusterSource]] = None """The source of cluster creation.""" - + cluster_states: Optional[List[State]] = None """The current state of the clusters.""" - + is_pinned: Optional[bool] = None """Whether the clusters are pinned or not.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + def as_dict(self) -> dict: """Serializes the ListClustersFilterBy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_sources: body['cluster_sources'] = [v.value for v in self.cluster_sources] - if self.cluster_states: body['cluster_states'] = [v.value for v in self.cluster_states] - if self.is_pinned is not None: body['is_pinned'] = self.is_pinned - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.cluster_sources: + body["cluster_sources"] = [v.value for v in self.cluster_sources] + if self.cluster_states: + body["cluster_states"] = [v.value for v in self.cluster_states] + if self.is_pinned is not None: + body["is_pinned"] = self.is_pinned + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the ListClustersFilterBy into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_sources: body['cluster_sources'] = self.cluster_sources - if self.cluster_states: body['cluster_states'] = self.cluster_states - if self.is_pinned is not None: body['is_pinned'] = self.is_pinned - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.cluster_sources: + 
body["cluster_sources"] = self.cluster_sources + if self.cluster_states: + body["cluster_states"] = self.cluster_states + if self.is_pinned is not None: + body["is_pinned"] = self.is_pinned + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListClustersFilterBy: """Deserializes the ListClustersFilterBy from a dictionary.""" - return cls(cluster_sources=_repeated_enum(d, 'cluster_sources', ClusterSource), cluster_states=_repeated_enum(d, 'cluster_states', State), is_pinned=d.get('is_pinned', None), policy_id=d.get('policy_id', None)) - - - - - + return cls( + cluster_sources=_repeated_enum(d, "cluster_sources", ClusterSource), + cluster_states=_repeated_enum(d, "cluster_states", State), + is_pinned=d.get("is_pinned", None), + policy_id=d.get("policy_id", None), + ) @dataclass class ListClustersResponse: clusters: Optional[List[ClusterDetails]] = None - + next_page_token: Optional[str] = None """This field represents the pagination token to retrieve the next page of results. If the value is "", it means no further results for the request.""" - + prev_page_token: Optional[str] = None """This field represents the pagination token to retrieve the previous page of results. If the value is "", it means no further results for the request.""" - + def as_dict(self) -> dict: """Serializes the ListClustersResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.clusters: + body["clusters"] = [v.as_dict() for v in self.clusters] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListClustersResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.clusters: body['clusters'] = self.clusters - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.clusters: + body["clusters"] = self.clusters + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListClustersResponse: """Deserializes the ListClustersResponse from a dictionary.""" - return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) - - + return cls( + clusters=_repeated_dict(d, "clusters", ClusterDetails), + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + ) @dataclass class ListClustersSortBy: direction: Optional[ListClustersSortByDirection] = None """The direction to sort by.""" - + field: Optional[ListClustersSortByField] = None """The sorting criteria. 
By default, clusters are sorted by 3 columns from highest to lowest precedence: cluster state, pinned or unpinned, then cluster name.""" - + def as_dict(self) -> dict: """Serializes the ListClustersSortBy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.direction is not None: body['direction'] = self.direction.value - if self.field is not None: body['field'] = self.field.value + if self.direction is not None: + body["direction"] = self.direction.value + if self.field is not None: + body["field"] = self.field.value return body def as_shallow_dict(self) -> dict: """Serializes the ListClustersSortBy into a shallow dictionary of its immediate attributes.""" body = {} - if self.direction is not None: body['direction'] = self.direction - if self.field is not None: body['field'] = self.field + if self.direction is not None: + body["direction"] = self.direction + if self.field is not None: + body["field"] = self.field return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListClustersSortBy: """Deserializes the ListClustersSortBy from a dictionary.""" - return cls(direction=_enum(d, 'direction', ListClustersSortByDirection), field=_enum(d, 'field', ListClustersSortByField)) - - + return cls( + direction=_enum(d, "direction", ListClustersSortByDirection), + field=_enum(d, "field", ListClustersSortByField), + ) class ListClustersSortByDirection(Enum): - - - ASC = 'ASC' - DESC = 'DESC' + + ASC = "ASC" + DESC = "DESC" + class ListClustersSortByField(Enum): - - - CLUSTER_NAME = 'CLUSTER_NAME' - DEFAULT = 'DEFAULT' + + CLUSTER_NAME = "CLUSTER_NAME" + DEFAULT = "DEFAULT" + @dataclass class ListGlobalInitScriptsResponse: scripts: Optional[List[GlobalInitScriptDetails]] = None - + def as_dict(self) -> dict: """Serializes the ListGlobalInitScriptsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts] + if self.scripts: + body["scripts"] = [v.as_dict() for v in self.scripts] return body def as_shallow_dict(self) -> dict: """Serializes the ListGlobalInitScriptsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.scripts: body['scripts'] = self.scripts + if self.scripts: + body["scripts"] = self.scripts return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListGlobalInitScriptsResponse: """Deserializes the ListGlobalInitScriptsResponse from a dictionary.""" - return cls(scripts=_repeated_dict(d, 'scripts', GlobalInitScriptDetails)) - - + return cls(scripts=_repeated_dict(d, "scripts", GlobalInitScriptDetails)) @dataclass class ListInstancePools: instance_pools: Optional[List[InstancePoolAndStats]] = None - + def as_dict(self) -> dict: """Serializes the ListInstancePools into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools] + if self.instance_pools: + body["instance_pools"] = [v.as_dict() for v in self.instance_pools] return body def as_shallow_dict(self) -> dict: """Serializes the ListInstancePools into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_pools: body['instance_pools'] = self.instance_pools + if self.instance_pools: + body["instance_pools"] = self.instance_pools return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListInstancePools: """Deserializes the ListInstancePools from a dictionary.""" - return cls(instance_pools=_repeated_dict(d, 'instance_pools', 
InstancePoolAndStats)) - - + return cls(instance_pools=_repeated_dict(d, "instance_pools", InstancePoolAndStats)) @dataclass class ListInstanceProfilesResponse: instance_profiles: Optional[List[InstanceProfile]] = None """A list of instance profiles that the user can access.""" - + def as_dict(self) -> dict: """Serializes the ListInstanceProfilesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_profiles: body['instance_profiles'] = [v.as_dict() for v in self.instance_profiles] + if self.instance_profiles: + body["instance_profiles"] = [v.as_dict() for v in self.instance_profiles] return body def as_shallow_dict(self) -> dict: """Serializes the ListInstanceProfilesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_profiles: body['instance_profiles'] = self.instance_profiles + if self.instance_profiles: + body["instance_profiles"] = self.instance_profiles return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListInstanceProfilesResponse: """Deserializes the ListInstanceProfilesResponse from a dictionary.""" - return cls(instance_profiles=_repeated_dict(d, 'instance_profiles', InstanceProfile)) - - + return cls(instance_profiles=_repeated_dict(d, "instance_profiles", InstanceProfile)) @dataclass class ListNodeTypesResponse: node_types: Optional[List[NodeType]] = None """The list of available Spark node types.""" - + def as_dict(self) -> dict: """Serializes the ListNodeTypesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.node_types: body['node_types'] = [v.as_dict() for v in self.node_types] + if self.node_types: + body["node_types"] = [v.as_dict() for v in self.node_types] return body def as_shallow_dict(self) -> dict: """Serializes the ListNodeTypesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.node_types: body['node_types'] = self.node_types + if self.node_types: + body["node_types"] = self.node_types return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNodeTypesResponse: """Deserializes the ListNodeTypesResponse from a dictionary.""" - return cls(node_types=_repeated_dict(d, 'node_types', NodeType)) - - + return cls(node_types=_repeated_dict(d, "node_types", NodeType)) @dataclass class ListPoliciesResponse: policies: Optional[List[Policy]] = None """List of policies.""" - + def as_dict(self) -> dict: """Serializes the ListPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.policies: body['policies'] = [v.as_dict() for v in self.policies] + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] return body def as_shallow_dict(self) -> dict: """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.policies: body['policies'] = self.policies + if self.policies: + body["policies"] = self.policies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListPoliciesResponse: """Deserializes the ListPoliciesResponse from a dictionary.""" - return cls(policies=_repeated_dict(d, 'policies', Policy)) - - - - - + return cls(policies=_repeated_dict(d, "policies", Policy)) @dataclass @@ -6272,171 +7596,189 @@ class ListPolicyFamiliesResponse: next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
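# Illustrative pagination sketch over responses like ListPolicyFamiliesResponse;
# `list_page` is a hypothetical stand-in for the corresponding API call:
families = []
token = None
while True:
    resp = list_page(page_token=token)
    families.extend(resp.policy_families or [])
    token = resp.next_page_token
    if not token:  # absent or "" means no further results
        break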
If not present, there are no more results to show.""" - + policy_families: Optional[List[PolicyFamily]] = None """List of policy families.""" - + def as_dict(self) -> dict: """Serializes the ListPolicyFamiliesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policy_families: + body["policy_families"] = [v.as_dict() for v in self.policy_families] return body def as_shallow_dict(self) -> dict: """Serializes the ListPolicyFamiliesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policy_families: body['policy_families'] = self.policy_families + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policy_families: + body["policy_families"] = self.policy_families return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListPolicyFamiliesResponse: """Deserializes the ListPolicyFamiliesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), policy_families=_repeated_dict(d, 'policy_families', PolicyFamily)) - - + return cls( + next_page_token=d.get("next_page_token", None), + policy_families=_repeated_dict(d, "policy_families", PolicyFamily), + ) class ListSortColumn(Enum): - - - POLICY_CREATION_TIME = 'POLICY_CREATION_TIME' - POLICY_NAME = 'POLICY_NAME' + + POLICY_CREATION_TIME = "POLICY_CREATION_TIME" + POLICY_NAME = "POLICY_NAME" + class ListSortOrder(Enum): - - - ASC = 'ASC' - DESC = 'DESC' + + ASC = "ASC" + DESC = "DESC" + @dataclass class LocalFileInfo: destination: str """local file destination, e.g. 
`file:/my/local/file.sh`""" - + def as_dict(self) -> dict: """Serializes the LocalFileInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the LocalFileInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LocalFileInfo: """Deserializes the LocalFileInfo from a dictionary.""" - return cls(destination=d.get('destination', None)) - - + return cls(destination=d.get("destination", None)) @dataclass class LogAnalyticsInfo: log_analytics_primary_key: Optional[str] = None - + log_analytics_workspace_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the LogAnalyticsInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_analytics_primary_key is not None: body['log_analytics_primary_key'] = self.log_analytics_primary_key - if self.log_analytics_workspace_id is not None: body['log_analytics_workspace_id'] = self.log_analytics_workspace_id + if self.log_analytics_primary_key is not None: + body["log_analytics_primary_key"] = self.log_analytics_primary_key + if self.log_analytics_workspace_id is not None: + body["log_analytics_workspace_id"] = self.log_analytics_workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the LogAnalyticsInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_analytics_primary_key is not None: body['log_analytics_primary_key'] = self.log_analytics_primary_key - if self.log_analytics_workspace_id is not None: body['log_analytics_workspace_id'] = self.log_analytics_workspace_id + if self.log_analytics_primary_key is not None: + body["log_analytics_primary_key"] = self.log_analytics_primary_key + if self.log_analytics_workspace_id is not None: + body["log_analytics_workspace_id"] = self.log_analytics_workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogAnalyticsInfo: """Deserializes the LogAnalyticsInfo from a dictionary.""" - return cls(log_analytics_primary_key=d.get('log_analytics_primary_key', None), log_analytics_workspace_id=d.get('log_analytics_workspace_id', None)) - - + return cls( + log_analytics_primary_key=d.get("log_analytics_primary_key", None), + log_analytics_workspace_id=d.get("log_analytics_workspace_id", None), + ) @dataclass class LogSyncStatus: """The log delivery status""" - + last_attempted: Optional[int] = None """The timestamp of last attempt. 
If the last attempt fails, `last_exception` will contain the exception in the last attempt.""" - + last_exception: Optional[str] = None """The exception thrown in the last attempt; it will be null (omitted in the response) if there was no exception in the last attempt.""" - + def as_dict(self) -> dict: """Serializes the LogSyncStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_attempted is not None: body['last_attempted'] = self.last_attempted - if self.last_exception is not None: body['last_exception'] = self.last_exception + if self.last_attempted is not None: + body["last_attempted"] = self.last_attempted + if self.last_exception is not None: + body["last_exception"] = self.last_exception return body def as_shallow_dict(self) -> dict: """Serializes the LogSyncStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_attempted is not None: body['last_attempted'] = self.last_attempted - if self.last_exception is not None: body['last_exception'] = self.last_exception + if self.last_attempted is not None: + body["last_attempted"] = self.last_attempted + if self.last_exception is not None: + body["last_exception"] = self.last_exception return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogSyncStatus: """Deserializes the LogSyncStatus from a dictionary.""" - return cls(last_attempted=d.get('last_attempted', None), last_exception=d.get('last_exception', None)) - + return cls(last_attempted=d.get("last_attempted", None), last_exception=d.get("last_exception", None)) - -MapAny = Dict[str,Any] +MapAny = Dict[str, Any] @dataclass class MavenLibrary: coordinates: str """Gradle-style Maven coordinates. For example: "org.jsoup:jsoup:1.7.2".""" - + exclusions: Optional[List[str]] = None """List of dependencies to exclude. For example: `["slf4j:slf4j", "*:hadoop-client"]`. Maven dependency exclusions: https://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html.""" - + repo: Optional[str] = None """Maven repo to install the Maven package from.
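# Illustrative sketch combining the library specs defined above (the
# coordinates, exclusions, and package names are example values taken from
# the docstrings):
maven_spec = MavenLibrary(coordinates="org.jsoup:jsoup:1.7.2", exclusions=["slf4j:slf4j"])
lib = Library(maven=maven_spec, pypi=PythonPyPiLibrary(package="simplejson"))
# lib.as_dict() nests each spec, e.g. {"maven": {...}, "pypi": {"package": "simplejson"}}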
If omitted, both Maven Central Repository and Spark Packages are searched.""" - + def as_dict(self) -> dict: """Serializes the MavenLibrary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.coordinates is not None: body['coordinates'] = self.coordinates - if self.exclusions: body['exclusions'] = [v for v in self.exclusions] - if self.repo is not None: body['repo'] = self.repo + if self.coordinates is not None: + body["coordinates"] = self.coordinates + if self.exclusions: + body["exclusions"] = [v for v in self.exclusions] + if self.repo is not None: + body["repo"] = self.repo return body def as_shallow_dict(self) -> dict: """Serializes the MavenLibrary into a shallow dictionary of its immediate attributes.""" body = {} - if self.coordinates is not None: body['coordinates'] = self.coordinates - if self.exclusions: body['exclusions'] = self.exclusions - if self.repo is not None: body['repo'] = self.repo + if self.coordinates is not None: + body["coordinates"] = self.coordinates + if self.exclusions: + body["exclusions"] = self.exclusions + if self.repo is not None: + body["repo"] = self.repo return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MavenLibrary: """Deserializes the MavenLibrary from a dictionary.""" - return cls(coordinates=d.get('coordinates', None), exclusions=d.get('exclusions', None), repo=d.get('repo', None)) - - + return cls( + coordinates=d.get("coordinates", None), exclusions=d.get("exclusions", None), repo=d.get("repo", None) + ) @dataclass @@ -6444,230 +7786,305 @@ class NodeInstanceType: """This structure embodies the machine type that hosts Spark containers. Note: this should be an internal data structure for now. It is defined in proto in case we want to send it over the wire in the future (which is likely).""" - + instance_type_id: str """Unique identifier across instance types.""" - + local_disk_size_gb: Optional[int] = None """Size of the individual local disks attached to this instance (i.e. per local disk).""" - + local_disks: Optional[int] = None """Number of local disks that are present on this instance.""" - + local_nvme_disk_size_gb: Optional[int] = None """Size of the individual local nvme disks attached to this instance (i.e.
per local disk).""" - + local_nvme_disks: Optional[int] = None """Number of local nvme disks that are present on this instance.""" - + def as_dict(self) -> dict: """Serializes the NodeInstanceType into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id - if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb - if self.local_disks is not None: body['local_disks'] = self.local_disks - if self.local_nvme_disk_size_gb is not None: body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb - if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks + if self.instance_type_id is not None: + body["instance_type_id"] = self.instance_type_id + if self.local_disk_size_gb is not None: + body["local_disk_size_gb"] = self.local_disk_size_gb + if self.local_disks is not None: + body["local_disks"] = self.local_disks + if self.local_nvme_disk_size_gb is not None: + body["local_nvme_disk_size_gb"] = self.local_nvme_disk_size_gb + if self.local_nvme_disks is not None: + body["local_nvme_disks"] = self.local_nvme_disks return body def as_shallow_dict(self) -> dict: """Serializes the NodeInstanceType into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id - if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb - if self.local_disks is not None: body['local_disks'] = self.local_disks - if self.local_nvme_disk_size_gb is not None: body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb - if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks + if self.instance_type_id is not None: + body["instance_type_id"] = self.instance_type_id + if self.local_disk_size_gb is not None: + body["local_disk_size_gb"] = self.local_disk_size_gb + if self.local_disks is not None: + body["local_disks"] = self.local_disks + if self.local_nvme_disk_size_gb is not None: + body["local_nvme_disk_size_gb"] = self.local_nvme_disk_size_gb + if self.local_nvme_disks is not None: + body["local_nvme_disks"] = self.local_nvme_disks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NodeInstanceType: """Deserializes the NodeInstanceType from a dictionary.""" - return cls(instance_type_id=d.get('instance_type_id', None), local_disk_size_gb=d.get('local_disk_size_gb', None), local_disks=d.get('local_disks', None), local_nvme_disk_size_gb=d.get('local_nvme_disk_size_gb', None), local_nvme_disks=d.get('local_nvme_disks', None)) - - + return cls( + instance_type_id=d.get("instance_type_id", None), + local_disk_size_gb=d.get("local_disk_size_gb", None), + local_disks=d.get("local_disks", None), + local_nvme_disk_size_gb=d.get("local_nvme_disk_size_gb", None), + local_nvme_disks=d.get("local_nvme_disks", None), + ) @dataclass class NodeType: """A description of a Spark node type including both the dimensions of the node and the instance type on which it will be hosted.""" - + node_type_id: str """Unique identifier for this node type.""" - + memory_mb: int """Memory (in MB) available for this node type.""" - + num_cores: float """Number of CPU cores available for this node type. 
Note that this can be fractional, e.g., 2.5 cores, if the number of cores on a machine instance is not divisible by the number of Spark nodes on that machine.""" - + description: str """A string description associated with this node type, e.g., "r3.xlarge".""" - + instance_type_id: str """An identifier for the type of hardware that this node runs on, e.g., "r3.2xlarge" in AWS.""" - + category: str """A descriptive category for this node type. Examples include "Memory Optimized" and "Compute Optimized".""" - + display_order: Optional[int] = None """An optional hint at the display order of node types in the UI. Within a node type category, lowest numbers come first.""" - + is_deprecated: Optional[bool] = None """Whether the node type is deprecated. Non-deprecated node types offer greater performance.""" - + is_encrypted_in_transit: Optional[bool] = None """AWS specific, whether this instance supports encryption in transit, used for HIPAA and PCI workloads.""" - + is_graviton: Optional[bool] = None """Whether this is an Arm-based instance.""" - + is_hidden: Optional[bool] = None """Whether this node is hidden from presentation in the UI.""" - + is_io_cache_enabled: Optional[bool] = None """Whether this node comes with IO cache enabled by default.""" - + node_info: Optional[CloudProviderNodeInfo] = None """A collection of node type info reported by the cloud provider.""" - + node_instance_type: Optional[NodeInstanceType] = None """The NodeInstanceType object corresponding to instance_type_id.""" - + num_gpus: Optional[int] = None """Number of GPUs available for this node type.""" - + photon_driver_capable: Optional[bool] = None - + photon_worker_capable: Optional[bool] = None - + support_cluster_tags: Optional[bool] = None """Whether this node type supports cluster tags.""" - + support_ebs_volumes: Optional[bool] = None """Whether this node type supports EBS volumes.
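# Illustrative sketch: picking the smallest non-deprecated node type from a
# ListNodeTypesResponse (`resp` is assumed to come from the node-types API):
candidates = [nt for nt in (resp.node_types or []) if not nt.is_deprecated]
smallest = min(candidates, key=lambda nt: (nt.memory_mb, nt.num_cores))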
EBS volumes are disabled for node types where multiple corresponding containers could be placed on the same hosting instance.""" - + support_port_forwarding: Optional[bool] = None """Whether this node type supports port forwarding.""" - + def as_dict(self) -> dict: """Serializes the NodeType into a dictionary suitable for use as a JSON request body.""" body = {} - if self.category is not None: body['category'] = self.category - if self.description is not None: body['description'] = self.description - if self.display_order is not None: body['display_order'] = self.display_order - if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id - if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated - if self.is_encrypted_in_transit is not None: body['is_encrypted_in_transit'] = self.is_encrypted_in_transit - if self.is_graviton is not None: body['is_graviton'] = self.is_graviton - if self.is_hidden is not None: body['is_hidden'] = self.is_hidden - if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled - if self.memory_mb is not None: body['memory_mb'] = self.memory_mb - if self.node_info: body['node_info'] = self.node_info.as_dict() - if self.node_instance_type: body['node_instance_type'] = self.node_instance_type.as_dict() - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_cores is not None: body['num_cores'] = self.num_cores - if self.num_gpus is not None: body['num_gpus'] = self.num_gpus - if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable - if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable - if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags - if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes - if self.support_port_forwarding is not None: body['support_port_forwarding'] = self.support_port_forwarding + if self.category is not None: + body["category"] = self.category + if self.description is not None: + body["description"] = self.description + if self.display_order is not None: + body["display_order"] = self.display_order + if self.instance_type_id is not None: + body["instance_type_id"] = self.instance_type_id + if self.is_deprecated is not None: + body["is_deprecated"] = self.is_deprecated + if self.is_encrypted_in_transit is not None: + body["is_encrypted_in_transit"] = self.is_encrypted_in_transit + if self.is_graviton is not None: + body["is_graviton"] = self.is_graviton + if self.is_hidden is not None: + body["is_hidden"] = self.is_hidden + if self.is_io_cache_enabled is not None: + body["is_io_cache_enabled"] = self.is_io_cache_enabled + if self.memory_mb is not None: + body["memory_mb"] = self.memory_mb + if self.node_info: + body["node_info"] = self.node_info.as_dict() + if self.node_instance_type: + body["node_instance_type"] = self.node_instance_type.as_dict() + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_cores is not None: + body["num_cores"] = self.num_cores + if self.num_gpus is not None: + body["num_gpus"] = self.num_gpus + if self.photon_driver_capable is not None: + body["photon_driver_capable"] = self.photon_driver_capable + if self.photon_worker_capable is not None: + body["photon_worker_capable"] = self.photon_worker_capable + if self.support_cluster_tags is not None: + body["support_cluster_tags"] =
self.support_cluster_tags + if self.support_ebs_volumes is not None: + body["support_ebs_volumes"] = self.support_ebs_volumes + if self.support_port_forwarding is not None: + body["support_port_forwarding"] = self.support_port_forwarding return body def as_shallow_dict(self) -> dict: """Serializes the NodeType into a shallow dictionary of its immediate attributes.""" body = {} - if self.category is not None: body['category'] = self.category - if self.description is not None: body['description'] = self.description - if self.display_order is not None: body['display_order'] = self.display_order - if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id - if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated - if self.is_encrypted_in_transit is not None: body['is_encrypted_in_transit'] = self.is_encrypted_in_transit - if self.is_graviton is not None: body['is_graviton'] = self.is_graviton - if self.is_hidden is not None: body['is_hidden'] = self.is_hidden - if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled - if self.memory_mb is not None: body['memory_mb'] = self.memory_mb - if self.node_info: body['node_info'] = self.node_info - if self.node_instance_type: body['node_instance_type'] = self.node_instance_type - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_cores is not None: body['num_cores'] = self.num_cores - if self.num_gpus is not None: body['num_gpus'] = self.num_gpus - if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable - if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable - if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags - if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes - if self.support_port_forwarding is not None: body['support_port_forwarding'] = self.support_port_forwarding + if self.category is not None: + body["category"] = self.category + if self.description is not None: + body["description"] = self.description + if self.display_order is not None: + body["display_order"] = self.display_order + if self.instance_type_id is not None: + body["instance_type_id"] = self.instance_type_id + if self.is_deprecated is not None: + body["is_deprecated"] = self.is_deprecated + if self.is_encrypted_in_transit is not None: + body["is_encrypted_in_transit"] = self.is_encrypted_in_transit + if self.is_graviton is not None: + body["is_graviton"] = self.is_graviton + if self.is_hidden is not None: + body["is_hidden"] = self.is_hidden + if self.is_io_cache_enabled is not None: + body["is_io_cache_enabled"] = self.is_io_cache_enabled + if self.memory_mb is not None: + body["memory_mb"] = self.memory_mb + if self.node_info: + body["node_info"] = self.node_info + if self.node_instance_type: + body["node_instance_type"] = self.node_instance_type + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_cores is not None: + body["num_cores"] = self.num_cores + if self.num_gpus is not None: + body["num_gpus"] = self.num_gpus + if self.photon_driver_capable is not None: + body["photon_driver_capable"] = self.photon_driver_capable + if self.photon_worker_capable is not None: + body["photon_worker_capable"] = self.photon_worker_capable + if self.support_cluster_tags is not None: + body["support_cluster_tags"] = self.support_cluster_tags + 
if self.support_ebs_volumes is not None: + body["support_ebs_volumes"] = self.support_ebs_volumes + if self.support_port_forwarding is not None: + body["support_port_forwarding"] = self.support_port_forwarding return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NodeType: """Deserializes the NodeType from a dictionary.""" - return cls(category=d.get('category', None), description=d.get('description', None), display_order=d.get('display_order', None), instance_type_id=d.get('instance_type_id', None), is_deprecated=d.get('is_deprecated', None), is_encrypted_in_transit=d.get('is_encrypted_in_transit', None), is_graviton=d.get('is_graviton', None), is_hidden=d.get('is_hidden', None), is_io_cache_enabled=d.get('is_io_cache_enabled', None), memory_mb=d.get('memory_mb', None), node_info=_from_dict(d, 'node_info', CloudProviderNodeInfo), node_instance_type=_from_dict(d, 'node_instance_type', NodeInstanceType), node_type_id=d.get('node_type_id', None), num_cores=d.get('num_cores', None), num_gpus=d.get('num_gpus', None), photon_driver_capable=d.get('photon_driver_capable', None), photon_worker_capable=d.get('photon_worker_capable', None), support_cluster_tags=d.get('support_cluster_tags', None), support_ebs_volumes=d.get('support_ebs_volumes', None), support_port_forwarding=d.get('support_port_forwarding', None)) - - + return cls( + category=d.get("category", None), + description=d.get("description", None), + display_order=d.get("display_order", None), + instance_type_id=d.get("instance_type_id", None), + is_deprecated=d.get("is_deprecated", None), + is_encrypted_in_transit=d.get("is_encrypted_in_transit", None), + is_graviton=d.get("is_graviton", None), + is_hidden=d.get("is_hidden", None), + is_io_cache_enabled=d.get("is_io_cache_enabled", None), + memory_mb=d.get("memory_mb", None), + node_info=_from_dict(d, "node_info", CloudProviderNodeInfo), + node_instance_type=_from_dict(d, "node_instance_type", NodeInstanceType), + node_type_id=d.get("node_type_id", None), + num_cores=d.get("num_cores", None), + num_gpus=d.get("num_gpus", None), + photon_driver_capable=d.get("photon_driver_capable", None), + photon_worker_capable=d.get("photon_worker_capable", None), + support_cluster_tags=d.get("support_cluster_tags", None), + support_ebs_volumes=d.get("support_ebs_volumes", None), + support_port_forwarding=d.get("support_port_forwarding", None), + ) @dataclass class PendingInstanceError: """Error message of a failed pending instance""" - + instance_id: Optional[str] = None - + message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the PendingInstanceError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_id is not None: body['instance_id'] = self.instance_id - if self.message is not None: body['message'] = self.message + if self.instance_id is not None: + body["instance_id"] = self.instance_id + if self.message is not None: + body["message"] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the PendingInstanceError into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_id is not None: body['instance_id'] = self.instance_id - if self.message is not None: body['message'] = self.message + if self.instance_id is not None: + body["instance_id"] = self.instance_id + if self.message is not None: + body["message"] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PendingInstanceError: """Deserializes the PendingInstanceError from a dictionary.""" -
return cls(instance_id=d.get('instance_id', None), message=d.get('message', None)) - - + return cls(instance_id=d.get("instance_id", None), message=d.get("message", None)) @dataclass class PermanentDeleteCluster: cluster_id: str """The cluster to be deleted.""" - + def as_dict(self) -> dict: """Serializes the PermanentDeleteCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteCluster: """Deserializes the PermanentDeleteCluster from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None)) - - + return cls(cluster_id=d.get("cluster_id", None)) @dataclass @@ -6686,32 +8103,30 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteClusterResponse: """Deserializes the PermanentDeleteClusterResponse from a dictionary.""" return cls() - - @dataclass class PinCluster: cluster_id: str - + def as_dict(self) -> dict: """Serializes the PinCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the PinCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PinCluster: """Deserializes the PinCluster from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None)) - - + return cls(cluster_id=d.get("cluster_id", None)) @dataclass @@ -6730,45 +8145,43 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PinClusterResponse: """Deserializes the PinClusterResponse from a dictionary.""" return cls() - - @dataclass class Policy: """Describes a Cluster Policy entity.""" - + created_at_timestamp: Optional[int] = None """Creation time. The timestamp (in milliseconds) when this Cluster Policy was created.""" - + creator_user_name: Optional[str] = None """Creator user name. The field won't be included in the response if the user has already been deleted.""" - + definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + description: Optional[str] = None """Additional human-readable description of the cluster policy.""" - + is_default: Optional[bool] = None """If true, policy is a default policy created and managed by Databricks. Default policies cannot be deleted, and their policy families cannot be changed.""" - + libraries: Optional[List[Library]] = None """A list of libraries to be installed on the next cluster restart that uses this policy.
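# Illustrative sketch of a Policy carrying a library to install on restart;
# the definition document is a hypothetical example of the policy-definition DSL:
policy = Policy(
    name="shared-jobs-policy",
    definition='{"spark_version": {"type": "unlimited"}}',
    libraries=[Library(pypi=PythonPyPiLibrary(package="simplejson"))],
    max_clusters_per_user=5,
)
# policy.as_dict()["libraries"] == [{"pypi": {"package": "simplejson"}}]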
The maximum number of libraries is 500.""" - + max_clusters_per_user: Optional[int] = None """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" - + name: Optional[str] = None """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 characters.""" - + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -6777,55 +8190,87 @@ class Policy: rules specified here are merged into the inherited policy definition. [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + policy_family_id: Optional[str] = None """ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition.""" - + policy_id: Optional[str] = None """Canonical unique identifier for the Cluster Policy.""" - + def as_dict(self) -> dict: """Serializes the Policy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.is_default is not None: body['is_default'] = self.is_default - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name is not None: body['name'] = self.name - if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.created_at_timestamp is not None: + body["created_at_timestamp"] = self.created_at_timestamp + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.is_default is not None: + body["is_default"] = self.is_default + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.max_clusters_per_user is not None: + body["max_clusters_per_user"] = self.max_clusters_per_user + if self.name is not None: + body["name"] = self.name + if self.policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = self.policy_family_definition_overrides + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: """Serializes the Policy into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp - if self.creator_user_name is not None: body['creator_user_name'] = 
self.creator_user_name - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.is_default is not None: body['is_default'] = self.is_default - if self.libraries: body['libraries'] = self.libraries - if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user - if self.name is not None: body['name'] = self.name - if self.policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = self.policy_family_definition_overrides - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id - if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.created_at_timestamp is not None: + body["created_at_timestamp"] = self.created_at_timestamp + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.is_default is not None: + body["is_default"] = self.is_default + if self.libraries: + body["libraries"] = self.libraries + if self.max_clusters_per_user is not None: + body["max_clusters_per_user"] = self.max_clusters_per_user + if self.name is not None: + body["name"] = self.name + if self.policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = self.policy_family_definition_overrides + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id + if self.policy_id is not None: + body["policy_id"] = self.policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Policy: """Deserializes the Policy from a dictionary.""" - return cls(created_at_timestamp=d.get('created_at_timestamp', None), creator_user_name=d.get('creator_user_name', None), definition=d.get('definition', None), description=d.get('description', None), is_default=d.get('is_default', None), libraries=_repeated_dict(d, 'libraries', Library), max_clusters_per_user=d.get('max_clusters_per_user', None), name=d.get('name', None), policy_family_definition_overrides=d.get('policy_family_definition_overrides', None), policy_family_id=d.get('policy_family_id', None), policy_id=d.get('policy_id', None)) - - + return cls( + created_at_timestamp=d.get("created_at_timestamp", None), + creator_user_name=d.get("creator_user_name", None), + definition=d.get("definition", None), + description=d.get("description", None), + is_default=d.get("is_default", None), + libraries=_repeated_dict(d, "libraries", Library), + max_clusters_per_user=d.get("max_clusters_per_user", None), + name=d.get("name", None), + policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), + policy_family_id=d.get("policy_family_id", None), + policy_id=d.get("policy_id", None), + ) @dataclass @@ -6834,40 +8279,51 @@ class PolicyFamily: """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. 
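The `Policy` payload above is mostly a response shape; the actual rules live in the `definition` JSON document written in the Cluster Policy Definition Language linked from the docstrings. A minimal sketch of creating a policy through the workspace client, with an illustrative two-rule definition (attribute names and values are examples, not a complete policy):

import json

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Illustrative rules: pin the node type and cap autoscaling.
definition = json.dumps(
    {
        "node_type_id": {"type": "fixed", "value": "i3.xlarge"},
        "autoscale.max_workers": {"type": "range", "maxValue": 10},
    }
)

created = w.cluster_policies.create(name="ci-small-clusters", definition=definition)
print(created.policy_id)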
[Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - + description: Optional[str] = None """Human-readable description of the purpose of the policy family.""" - + name: Optional[str] = None """Name of the policy family.""" - + policy_family_id: Optional[str] = None """Unique identifier for the policy family.""" - + def as_dict(self) -> dict: """Serializes the PolicyFamily into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id return body def as_shallow_dict(self) -> dict: """Serializes the PolicyFamily into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: body['definition'] = self.definition - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id + if self.definition is not None: + body["definition"] = self.definition + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.policy_family_id is not None: + body["policy_family_id"] = self.policy_family_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PolicyFamily: """Deserializes the PolicyFamily from a dictionary.""" - return cls(definition=d.get('definition', None), description=d.get('description', None), name=d.get('name', None), policy_family_id=d.get('policy_family_id', None)) - - + return cls( + definition=d.get("definition", None), + description=d.get("description", None), + name=d.get("name", None), + policy_family_id=d.get("policy_family_id", None), + ) @dataclass @@ -6875,85 +8331,89 @@ class PythonPyPiLibrary: package: str """The name of the pypi package to install. An optional exact version specification is also supported. Examples: "simplejson" and "simplejson==3.8.0".""" - + repo: Optional[str] = None """The repository where the package can be found. 
If not specified, the default pip index is used.""" - + def as_dict(self) -> dict: """Serializes the PythonPyPiLibrary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.package is not None: body['package'] = self.package - if self.repo is not None: body['repo'] = self.repo + if self.package is not None: + body["package"] = self.package + if self.repo is not None: + body["repo"] = self.repo return body def as_shallow_dict(self) -> dict: """Serializes the PythonPyPiLibrary into a shallow dictionary of its immediate attributes.""" body = {} - if self.package is not None: body['package'] = self.package - if self.repo is not None: body['repo'] = self.repo + if self.package is not None: + body["package"] = self.package + if self.repo is not None: + body["repo"] = self.repo return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PythonPyPiLibrary: """Deserializes the PythonPyPiLibrary from a dictionary.""" - return cls(package=d.get('package', None), repo=d.get('repo', None)) - - + return cls(package=d.get("package", None), repo=d.get("repo", None)) @dataclass class RCranLibrary: package: str """The name of the CRAN package to install.""" - + repo: Optional[str] = None """The repository where the package can be found. If not specified, the default CRAN repo is used.""" - + def as_dict(self) -> dict: """Serializes the RCranLibrary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.package is not None: body['package'] = self.package - if self.repo is not None: body['repo'] = self.repo + if self.package is not None: + body["package"] = self.package + if self.repo is not None: + body["repo"] = self.repo return body def as_shallow_dict(self) -> dict: """Serializes the RCranLibrary into a shallow dictionary of its immediate attributes.""" body = {} - if self.package is not None: body['package'] = self.package - if self.repo is not None: body['repo'] = self.repo + if self.package is not None: + body["package"] = self.package + if self.repo is not None: + body["repo"] = self.repo return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary: """Deserializes the RCranLibrary from a dictionary.""" - return cls(package=d.get('package', None), repo=d.get('repo', None)) - - + return cls(package=d.get("package", None), repo=d.get("repo", None)) @dataclass class RemoveInstanceProfile: instance_profile_arn: str """The ARN of the instance profile to remove. 
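`PythonPyPiLibrary` and `RCranLibrary` are normally wrapped in a `Library` and sent through the Libraries API rather than used directly. A short sketch, assuming a configured client; the cluster ID is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Library, PythonPyPiLibrary, RCranLibrary

w = WorkspaceClient()
cluster_id = "0123-456789-abcdefgh"  # placeholder

w.libraries.install(
    cluster_id=cluster_id,
    libraries=[
        # Exact-version pin, matching the docstring example "simplejson==3.8.0".
        Library(pypi=PythonPyPiLibrary(package="simplejson==3.8.0")),
        # CRAN package from the default repository (no `repo` set).
        Library(cran=RCranLibrary(package="data.table")),
    ],
)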
This field is required.""" - + def as_dict(self) -> dict: """Serializes the RemoveInstanceProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn return body def as_shallow_dict(self) -> dict: """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RemoveInstanceProfile: """Deserializes the RemoveInstanceProfile from a dictionary.""" - return cls(instance_profile_arn=d.get('instance_profile_arn', None)) - - + return cls(instance_profile_arn=d.get("instance_profile_arn", None)) @dataclass @@ -6972,19 +8432,17 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RemoveResponse: """Deserializes the RemoveResponse from a dictionary.""" return cls() - - @dataclass class ResizeCluster: cluster_id: str """The cluster to be resized.""" - + autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. @@ -6994,29 +8452,37 @@ class ResizeCluster: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + def as_dict(self) -> dict: """Serializes the ResizeCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.autoscale: body['autoscale'] = self.autoscale.as_dict() - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.autoscale: + body["autoscale"] = self.autoscale.as_dict() + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers return body def as_shallow_dict(self) -> dict: """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.autoscale: body['autoscale'] = self.autoscale - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.autoscale: + body["autoscale"] = self.autoscale + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResizeCluster: """Deserializes the ResizeCluster from a dictionary.""" - return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), cluster_id=d.get('cluster_id', None), num_workers=d.get('num_workers', None)) - - + return cls( + autoscale=_from_dict(d, "autoscale", AutoScale), + cluster_id=d.get("cluster_id", None), + num_workers=d.get("num_workers", None), + ) @dataclass @@ -7035,37 
+8501,37 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ResizeClusterResponse: """Deserializes the ResizeClusterResponse from a dictionary.""" return cls() - - @dataclass class RestartCluster: cluster_id: str """The cluster to be started.""" - + restart_user: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the RestartCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.restart_user is not None: body['restart_user'] = self.restart_user + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.restart_user is not None: + body["restart_user"] = self.restart_user return body def as_shallow_dict(self) -> dict: """Serializes the RestartCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.restart_user is not None: body['restart_user'] = self.restart_user + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.restart_user is not None: + body["restart_user"] = self.restart_user return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestartCluster: """Deserializes the RestartCluster from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), restart_user=d.get('restart_user', None)) - - + return cls(cluster_id=d.get("cluster_id", None), restart_user=d.get("restart_user", None)) @dataclass @@ -7084,102 +8550,129 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestartClusterResponse: """Deserializes the RestartClusterResponse from a dictionary.""" return cls() - - class ResultType(Enum): - - - ERROR = 'error' - IMAGE = 'image' - IMAGES = 'images' - TABLE = 'table' - TEXT = 'text' + + ERROR = "error" + IMAGE = "image" + IMAGES = "images" + TABLE = "table" + TEXT = "text" + @dataclass class Results: cause: Optional[str] = None """The cause of the error""" - + data: Optional[Any] = None - + file_name: Optional[str] = None """The image filename""" - + file_names: Optional[List[str]] = None - + is_json_schema: Optional[bool] = None """true if a JSON schema is returned instead of a string representation of the Hive type.""" - + pos: Optional[int] = None """internal field used by SDK""" - + result_type: Optional[ResultType] = None - - schema: Optional[List[Dict[str,Any]]] = None + + schema: Optional[List[Dict[str, Any]]] = None """The table schema""" - + summary: Optional[str] = None """The summary of the error""" - + truncated: Optional[bool] = None """true if partial results are returned.""" - + def as_dict(self) -> dict: """Serializes the Results into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cause is not None: body['cause'] = self.cause - if self.data: body['data'] = self.data - if self.file_name is not None: body['fileName'] = self.file_name - if self.file_names: body['fileNames'] = [v for v in self.file_names] - if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema - if self.pos is not None: body['pos'] = self.pos - if self.result_type is not None: body['resultType'] = self.result_type.value - if self.schema: body['schema'] = [v for v in self.schema] - if self.summary is not None: body['summary'] = self.summary - if self.truncated is not None: body['truncated'] = self.truncated + if self.cause is not None: + body["cause"] = self.cause + if self.data: + body["data"] = self.data + 
if self.file_name is not None: + body["fileName"] = self.file_name + if self.file_names: + body["fileNames"] = [v for v in self.file_names] + if self.is_json_schema is not None: + body["isJsonSchema"] = self.is_json_schema + if self.pos is not None: + body["pos"] = self.pos + if self.result_type is not None: + body["resultType"] = self.result_type.value + if self.schema: + body["schema"] = [v for v in self.schema] + if self.summary is not None: + body["summary"] = self.summary + if self.truncated is not None: + body["truncated"] = self.truncated return body def as_shallow_dict(self) -> dict: """Serializes the Results into a shallow dictionary of its immediate attributes.""" body = {} - if self.cause is not None: body['cause'] = self.cause - if self.data: body['data'] = self.data - if self.file_name is not None: body['fileName'] = self.file_name - if self.file_names: body['fileNames'] = self.file_names - if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema - if self.pos is not None: body['pos'] = self.pos - if self.result_type is not None: body['resultType'] = self.result_type - if self.schema: body['schema'] = self.schema - if self.summary is not None: body['summary'] = self.summary - if self.truncated is not None: body['truncated'] = self.truncated + if self.cause is not None: + body["cause"] = self.cause + if self.data: + body["data"] = self.data + if self.file_name is not None: + body["fileName"] = self.file_name + if self.file_names: + body["fileNames"] = self.file_names + if self.is_json_schema is not None: + body["isJsonSchema"] = self.is_json_schema + if self.pos is not None: + body["pos"] = self.pos + if self.result_type is not None: + body["resultType"] = self.result_type + if self.schema: + body["schema"] = self.schema + if self.summary is not None: + body["summary"] = self.summary + if self.truncated is not None: + body["truncated"] = self.truncated return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Results: """Deserializes the Results from a dictionary.""" - return cls(cause=d.get('cause', None), data=d.get('data', None), file_name=d.get('fileName', None), file_names=d.get('fileNames', None), is_json_schema=d.get('isJsonSchema', None), pos=d.get('pos', None), result_type=_enum(d, 'resultType', ResultType), schema=d.get('schema', None), summary=d.get('summary', None), truncated=d.get('truncated', None)) - - + return cls( + cause=d.get("cause", None), + data=d.get("data", None), + file_name=d.get("fileName", None), + file_names=d.get("fileNames", None), + is_json_schema=d.get("isJsonSchema", None), + pos=d.get("pos", None), + result_type=_enum(d, "resultType", ResultType), + schema=d.get("schema", None), + summary=d.get("summary", None), + truncated=d.get("truncated", None), + ) class RuntimeEngine(Enum): - - - NULL = 'NULL' - PHOTON = 'PHOTON' - STANDARD = 'STANDARD' + + NULL = "NULL" + PHOTON = "PHOTON" + STANDARD = "STANDARD" + @dataclass class S3StorageInfo: """A storage location in Amazon S3""" - + destination: str """S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using cluster iam role, please make sure you set cluster iam role and the role has write access to the destination. Please also note that you cannot use AWS keys to deliver logs.""" - + canned_acl: Optional[str] = None """(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`. 
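`Results` (with `ResultType`) carries command-execution output, and is one of the few shapes here whose wire names are camelCase (`fileName`, `resultType`), as the serializers above show. A small sketch of branching on the result type, using a hand-built payload instead of a live call; the payload values are hypothetical:

from databricks.sdk.service.compute import Results, ResultType

# Hypothetical wire payload; key names match the from_dict mapping above.
payload = {
    "resultType": "table",
    "schema": [{"name": "id", "type": "int"}],
    "data": [[1], [2]],
    "truncated": False,
}

r = Results.from_dict(payload)
if r.result_type == ResultType.ERROR:
    print(f"command failed: {r.summary} (cause: {r.cause})")
elif r.result_type == ResultType.TABLE:
    print(f"{len(r.data)} rows, truncated={r.truncated}")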
If `canned_cal` is set, please make sure the cluster iam role has `s3:PutObjectAcl` permission on @@ -7188,143 +8681,183 @@ class S3StorageInfo: that by default only the object owner gets full controls. If you are using cross account role for writing data, you may want to set `bucket-owner-full-control` to make bucket owner able to read the logs.""" - + enable_encryption: Optional[bool] = None """(Optional) Flag to enable server side encryption, `false` by default.""" - + encryption_type: Optional[str] = None """(Optional) The encryption type, it could be `sse-s3` or `sse-kms`. It will be used only when encryption is enabled and the default type is `sse-s3`.""" - + endpoint: Optional[str] = None """S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be set. If both are set, endpoint will be used.""" - + kms_key: Optional[str] = None """(Optional) Kms key which will be used if encryption is enabled and encryption type is set to `sse-kms`.""" - + region: Optional[str] = None """S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set, endpoint will be used.""" - + def as_dict(self) -> dict: """Serializes the S3StorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.canned_acl is not None: body['canned_acl'] = self.canned_acl - if self.destination is not None: body['destination'] = self.destination - if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption - if self.encryption_type is not None: body['encryption_type'] = self.encryption_type - if self.endpoint is not None: body['endpoint'] = self.endpoint - if self.kms_key is not None: body['kms_key'] = self.kms_key - if self.region is not None: body['region'] = self.region + if self.canned_acl is not None: + body["canned_acl"] = self.canned_acl + if self.destination is not None: + body["destination"] = self.destination + if self.enable_encryption is not None: + body["enable_encryption"] = self.enable_encryption + if self.encryption_type is not None: + body["encryption_type"] = self.encryption_type + if self.endpoint is not None: + body["endpoint"] = self.endpoint + if self.kms_key is not None: + body["kms_key"] = self.kms_key + if self.region is not None: + body["region"] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the S3StorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.canned_acl is not None: body['canned_acl'] = self.canned_acl - if self.destination is not None: body['destination'] = self.destination - if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption - if self.encryption_type is not None: body['encryption_type'] = self.encryption_type - if self.endpoint is not None: body['endpoint'] = self.endpoint - if self.kms_key is not None: body['kms_key'] = self.kms_key - if self.region is not None: body['region'] = self.region + if self.canned_acl is not None: + body["canned_acl"] = self.canned_acl + if self.destination is not None: + body["destination"] = self.destination + if self.enable_encryption is not None: + body["enable_encryption"] = self.enable_encryption + if self.encryption_type is not None: + body["encryption_type"] = self.encryption_type + if self.endpoint is not None: + body["endpoint"] = self.endpoint + if self.kms_key is not None: + body["kms_key"] = self.kms_key + if self.region is not None: + body["region"] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) 
-> S3StorageInfo: """Deserializes the S3StorageInfo from a dictionary.""" - return cls(canned_acl=d.get('canned_acl', None), destination=d.get('destination', None), enable_encryption=d.get('enable_encryption', None), encryption_type=d.get('encryption_type', None), endpoint=d.get('endpoint', None), kms_key=d.get('kms_key', None), region=d.get('region', None)) - - + return cls( + canned_acl=d.get("canned_acl", None), + destination=d.get("destination", None), + enable_encryption=d.get("enable_encryption", None), + encryption_type=d.get("encryption_type", None), + endpoint=d.get("endpoint", None), + kms_key=d.get("kms_key", None), + region=d.get("region", None), + ) @dataclass class SparkNode: """Describes a specific Spark driver or executor.""" - + host_private_ip: Optional[str] = None """The private IP address of the host instance.""" - + instance_id: Optional[str] = None """Globally unique identifier for the host instance from the cloud provider.""" - + node_aws_attributes: Optional[SparkNodeAwsAttributes] = None """Attributes specific to AWS for a Spark node.""" - + node_id: Optional[str] = None """Globally unique identifier for this node.""" - + private_ip: Optional[str] = None """Private IP address (typically a 10.x.x.x address) of the Spark node. Note that this is different from the private IP address of the host instance.""" - + public_dns: Optional[str] = None """Public DNS address of this node. This address can be used to access the Spark JDBC server on the driver node. To communicate with the JDBC server, traffic must be manually authorized by adding security group rules to the "worker-unmanaged" security group via the AWS console.""" - + start_timestamp: Optional[int] = None """The timestamp (in millisecond) when the Spark node is launched.""" - + def as_dict(self) -> dict: """Serializes the SparkNode into a dictionary suitable for use as a JSON request body.""" body = {} - if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip - if self.instance_id is not None: body['instance_id'] = self.instance_id - if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes.as_dict() - if self.node_id is not None: body['node_id'] = self.node_id - if self.private_ip is not None: body['private_ip'] = self.private_ip - if self.public_dns is not None: body['public_dns'] = self.public_dns - if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp + if self.host_private_ip is not None: + body["host_private_ip"] = self.host_private_ip + if self.instance_id is not None: + body["instance_id"] = self.instance_id + if self.node_aws_attributes: + body["node_aws_attributes"] = self.node_aws_attributes.as_dict() + if self.node_id is not None: + body["node_id"] = self.node_id + if self.private_ip is not None: + body["private_ip"] = self.private_ip + if self.public_dns is not None: + body["public_dns"] = self.public_dns + if self.start_timestamp is not None: + body["start_timestamp"] = self.start_timestamp return body def as_shallow_dict(self) -> dict: """Serializes the SparkNode into a shallow dictionary of its immediate attributes.""" body = {} - if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip - if self.instance_id is not None: body['instance_id'] = self.instance_id - if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes - if self.node_id is not None: body['node_id'] = self.node_id - if self.private_ip is not None: body['private_ip'] = self.private_ip - if 
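`S3StorageInfo` usually appears as the `s3` arm of a `ClusterLogConf`. A sketch of wiring it into a cluster create call, assuming the cluster's instance profile can write to the bucket; the destination and region are placeholders:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import ClusterLogConf, S3StorageInfo

w = WorkspaceClient()

log_conf = ClusterLogConf(
    s3=S3StorageInfo(
        destination="s3://my-bucket/cluster-logs",  # placeholder bucket
        region="us-west-2",
        enable_encryption=True,  # server-side encryption; defaults to sse-s3
        canned_acl="bucket-owner-full-control",
    )
)

# Logs are then delivered every ~5 minutes under $destination/$clusterId/driver
# and $destination/$clusterId/executor, per the docstrings above.
cluster = w.clusters.create(
    cluster_name="with-s3-logs",
    spark_version=w.clusters.select_spark_version(long_term_support=True),
    node_type_id=w.clusters.select_node_type(local_disk=True),
    num_workers=1,
    cluster_log_conf=log_conf,
).result()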
self.public_dns is not None: body['public_dns'] = self.public_dns - if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp + if self.host_private_ip is not None: + body["host_private_ip"] = self.host_private_ip + if self.instance_id is not None: + body["instance_id"] = self.instance_id + if self.node_aws_attributes: + body["node_aws_attributes"] = self.node_aws_attributes + if self.node_id is not None: + body["node_id"] = self.node_id + if self.private_ip is not None: + body["private_ip"] = self.private_ip + if self.public_dns is not None: + body["public_dns"] = self.public_dns + if self.start_timestamp is not None: + body["start_timestamp"] = self.start_timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkNode: """Deserializes the SparkNode from a dictionary.""" - return cls(host_private_ip=d.get('host_private_ip', None), instance_id=d.get('instance_id', None), node_aws_attributes=_from_dict(d, 'node_aws_attributes', SparkNodeAwsAttributes), node_id=d.get('node_id', None), private_ip=d.get('private_ip', None), public_dns=d.get('public_dns', None), start_timestamp=d.get('start_timestamp', None)) - - + return cls( + host_private_ip=d.get("host_private_ip", None), + instance_id=d.get("instance_id", None), + node_aws_attributes=_from_dict(d, "node_aws_attributes", SparkNodeAwsAttributes), + node_id=d.get("node_id", None), + private_ip=d.get("private_ip", None), + public_dns=d.get("public_dns", None), + start_timestamp=d.get("start_timestamp", None), + ) @dataclass class SparkNodeAwsAttributes: """Attributes specific to AWS for a Spark node.""" - + is_spot: Optional[bool] = None """Whether this node is on an Amazon spot instance.""" - + def as_dict(self) -> dict: """Serializes the SparkNodeAwsAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_spot is not None: body['is_spot'] = self.is_spot + if self.is_spot is not None: + body["is_spot"] = self.is_spot return body def as_shallow_dict(self) -> dict: """Serializes the SparkNodeAwsAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_spot is not None: body['is_spot'] = self.is_spot + if self.is_spot is not None: + body["is_spot"] = self.is_spot return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkNodeAwsAttributes: """Deserializes the SparkNodeAwsAttributes from a dictionary.""" - return cls(is_spot=d.get('is_spot', None)) - - + return cls(is_spot=d.get("is_spot", None)) @dataclass @@ -7334,55 +8867,57 @@ class SparkVersion: the "spark_version" when creating a new cluster. 
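`SparkNode` and `SparkNodeAwsAttributes` surface on a running cluster's details as the `driver` and `executors` fields. A read-only sketch, assuming an existing cluster ID (placeholder below):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
info = w.clusters.get(cluster_id="0123-456789-abcdefgh")  # placeholder

if info.driver:
    print("driver:", info.driver.node_id, info.driver.private_ip)
for node in info.executors or []:
    is_spot = node.node_aws_attributes.is_spot if node.node_aws_attributes else None
    print("executor:", node.node_id, "spot:", is_spot)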
Note that the exact Spark version may change over time for a "wildcard" version (i.e., "2.1.x-scala2.11" is a "wildcard" version) with minor bug fixes.""" - + name: Optional[str] = None """A descriptive name for this Spark version, for example "Spark 2.1".""" - + def as_dict(self) -> dict: """Serializes the SparkVersion into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.name is not None: body['name'] = self.name + if self.key is not None: + body["key"] = self.key + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the SparkVersion into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.name is not None: body['name'] = self.name + if self.key is not None: + body["key"] = self.key + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkVersion: """Deserializes the SparkVersion from a dictionary.""" - return cls(key=d.get('key', None), name=d.get('name', None)) - - + return cls(key=d.get("key", None), name=d.get("name", None)) @dataclass class StartCluster: cluster_id: str """The cluster to be started.""" - + def as_dict(self) -> dict: """Serializes the StartCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the StartCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StartCluster: """Deserializes the StartCluster from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None)) - - + return cls(cluster_id=d.get("cluster_id", None)) @dataclass @@ -7401,274 +8936,285 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> StartClusterResponse: """Deserializes the StartClusterResponse from a dictionary.""" return cls() - - class State(Enum): """The state of a Cluster. 
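Because `SparkVersion.key` values are wildcarded and can shift over time, hard-coding a key is fragile; the SDK's selector helper resolves one at runtime. A sketch that picks the latest LTS runtime and starts a stopped cluster (the cluster ID is a placeholder):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Resolve a concrete spark_version key instead of hard-coding "x.y.z-scala2.12".
key = w.clusters.select_spark_version(latest=True, long_term_support=True)
print("selected spark_version:", key)

# StartCluster carries only the cluster_id; .result() blocks until RUNNING.
w.clusters.start(cluster_id="0123-456789-abcdefgh").result()  # placeholder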
The current allowable state transitions are as follows: - + - `PENDING` -> `RUNNING` - `PENDING` -> `TERMINATING` - `RUNNING` -> `RESIZING` - `RUNNING` -> `RESTARTING` - `RUNNING` -> `TERMINATING` - `RESTARTING` -> `RUNNING` - `RESTARTING` -> `TERMINATING` - `RESIZING` -> `RUNNING` - `RESIZING` -> `TERMINATING` - `TERMINATING` -> `TERMINATED`""" - - ERROR = 'ERROR' - PENDING = 'PENDING' - RESIZING = 'RESIZING' - RESTARTING = 'RESTARTING' - RUNNING = 'RUNNING' - TERMINATED = 'TERMINATED' - TERMINATING = 'TERMINATING' - UNKNOWN = 'UNKNOWN' + + ERROR = "ERROR" + PENDING = "PENDING" + RESIZING = "RESIZING" + RESTARTING = "RESTARTING" + RUNNING = "RUNNING" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + UNKNOWN = "UNKNOWN" + @dataclass class TerminationReason: code: Optional[TerminationReasonCode] = None """status code indicating why the cluster was terminated""" - - parameters: Optional[Dict[str,str]] = None + + parameters: Optional[Dict[str, str]] = None """list of parameters that provide additional information about why the cluster was terminated""" - + type: Optional[TerminationReasonType] = None """type of the termination""" - + def as_dict(self) -> dict: """Serializes the TerminationReason into a dictionary suitable for use as a JSON request body.""" body = {} - if self.code is not None: body['code'] = self.code.value - if self.parameters: body['parameters'] = self.parameters - if self.type is not None: body['type'] = self.type.value + if self.code is not None: + body["code"] = self.code.value + if self.parameters: + body["parameters"] = self.parameters + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the TerminationReason into a shallow dictionary of its immediate attributes.""" body = {} - if self.code is not None: body['code'] = self.code - if self.parameters: body['parameters'] = self.parameters - if self.type is not None: body['type'] = self.type + if self.code is not None: + body["code"] = self.code + if self.parameters: + body["parameters"] = self.parameters + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TerminationReason: """Deserializes the TerminationReason from a dictionary.""" - return cls(code=_enum(d, 'code', TerminationReasonCode), parameters=d.get('parameters', None), type=_enum(d, 'type', TerminationReasonType)) - - + return cls( + code=_enum(d, "code", TerminationReasonCode), + parameters=d.get("parameters", None), + type=_enum(d, "type", TerminationReasonType), + ) class TerminationReasonCode(Enum): """The status code indicating why the cluster was terminated""" - - ABUSE_DETECTED = 'ABUSE_DETECTED' - ACCESS_TOKEN_FAILURE = 'ACCESS_TOKEN_FAILURE' - ALLOCATION_TIMEOUT = 'ALLOCATION_TIMEOUT' - ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY = 'ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY' - ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS' - ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS' - ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS' - ALLOCATION_TIMEOUT_NO_READY_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_READY_CLUSTERS' - ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS' - ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS = 'ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS' - ATTACH_PROJECT_FAILURE = 'ATTACH_PROJECT_FAILURE' - AWS_AUTHORIZATION_FAILURE = 'AWS_AUTHORIZATION_FAILURE' 
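The `State` docstring above is effectively a small state machine: `PENDING`, `RESTARTING`, `RESIZING`, and `TERMINATING` are transient, and only `TERMINATED` (or the error states) is final. A hand-rolled polling sketch; in practice the `Wait` objects returned by `create`/`start`/`resize` do this for you:

import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import State

w = WorkspaceClient()
cluster_id = "0123-456789-abcdefgh"  # placeholder

while True:
    state = w.clusters.get(cluster_id=cluster_id).state
    if state == State.RUNNING:
        break
    if state in (State.TERMINATED, State.ERROR, State.UNKNOWN):
        raise RuntimeError(f"cluster did not come up: {state}")
    time.sleep(10)  # transient states: PENDING, RESTARTING, RESIZING, TERMINATING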
- AWS_INACCESSIBLE_KMS_KEY_FAILURE = 'AWS_INACCESSIBLE_KMS_KEY_FAILURE' - AWS_INSTANCE_PROFILE_UPDATE_FAILURE = 'AWS_INSTANCE_PROFILE_UPDATE_FAILURE' - AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = 'AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE' - AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = 'AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE' - AWS_INVALID_KEY_PAIR = 'AWS_INVALID_KEY_PAIR' - AWS_INVALID_KMS_KEY_STATE = 'AWS_INVALID_KMS_KEY_STATE' - AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = 'AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE' - AWS_REQUEST_LIMIT_EXCEEDED = 'AWS_REQUEST_LIMIT_EXCEEDED' - AWS_RESOURCE_QUOTA_EXCEEDED = 'AWS_RESOURCE_QUOTA_EXCEEDED' - AWS_UNSUPPORTED_FAILURE = 'AWS_UNSUPPORTED_FAILURE' - AZURE_BYOK_KEY_PERMISSION_FAILURE = 'AZURE_BYOK_KEY_PERMISSION_FAILURE' - AZURE_EPHEMERAL_DISK_FAILURE = 'AZURE_EPHEMERAL_DISK_FAILURE' - AZURE_INVALID_DEPLOYMENT_TEMPLATE = 'AZURE_INVALID_DEPLOYMENT_TEMPLATE' - AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = 'AZURE_OPERATION_NOT_ALLOWED_EXCEPTION' - AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE = 'AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE' - AZURE_QUOTA_EXCEEDED_EXCEPTION = 'AZURE_QUOTA_EXCEEDED_EXCEPTION' - AZURE_RESOURCE_MANAGER_THROTTLING = 'AZURE_RESOURCE_MANAGER_THROTTLING' - AZURE_RESOURCE_PROVIDER_THROTTLING = 'AZURE_RESOURCE_PROVIDER_THROTTLING' - AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = 'AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE' - AZURE_VM_EXTENSION_FAILURE = 'AZURE_VM_EXTENSION_FAILURE' - AZURE_VNET_CONFIGURATION_FAILURE = 'AZURE_VNET_CONFIGURATION_FAILURE' - BOOTSTRAP_TIMEOUT = 'BOOTSTRAP_TIMEOUT' - BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = 'BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION' - BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = 'BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG' - BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = 'BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED' - BUDGET_POLICY_RESOLUTION_FAILURE = 'BUDGET_POLICY_RESOLUTION_FAILURE' - CLOUD_ACCOUNT_SETUP_FAILURE = 'CLOUD_ACCOUNT_SETUP_FAILURE' - CLOUD_OPERATION_CANCELLED = 'CLOUD_OPERATION_CANCELLED' - CLOUD_PROVIDER_DISK_SETUP_FAILURE = 'CLOUD_PROVIDER_DISK_SETUP_FAILURE' - CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED = 'CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED' - CLOUD_PROVIDER_LAUNCH_FAILURE = 'CLOUD_PROVIDER_LAUNCH_FAILURE' - CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG = 'CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG' - CLOUD_PROVIDER_RESOURCE_STOCKOUT = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT' - CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG' - CLOUD_PROVIDER_SHUTDOWN = 'CLOUD_PROVIDER_SHUTDOWN' - CLUSTER_OPERATION_THROTTLED = 'CLUSTER_OPERATION_THROTTLED' - CLUSTER_OPERATION_TIMEOUT = 'CLUSTER_OPERATION_TIMEOUT' - COMMUNICATION_LOST = 'COMMUNICATION_LOST' - CONTAINER_LAUNCH_FAILURE = 'CONTAINER_LAUNCH_FAILURE' - CONTROL_PLANE_REQUEST_FAILURE = 'CONTROL_PLANE_REQUEST_FAILURE' - CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG = 'CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG' - DATABASE_CONNECTION_FAILURE = 'DATABASE_CONNECTION_FAILURE' - DATA_ACCESS_CONFIG_CHANGED = 'DATA_ACCESS_CONFIG_CHANGED' - DBFS_COMPONENT_UNHEALTHY = 'DBFS_COMPONENT_UNHEALTHY' - DISASTER_RECOVERY_REPLICATION = 'DISASTER_RECOVERY_REPLICATION' - DNS_RESOLUTION_ERROR = 'DNS_RESOLUTION_ERROR' - DOCKER_CONTAINER_CREATION_EXCEPTION = 'DOCKER_CONTAINER_CREATION_EXCEPTION' - DOCKER_IMAGE_PULL_FAILURE = 'DOCKER_IMAGE_PULL_FAILURE' - DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = 'DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION' - DOCKER_INVALID_OS_EXCEPTION = 
'DOCKER_INVALID_OS_EXCEPTION' - DRIVER_EVICTION = 'DRIVER_EVICTION' - DRIVER_LAUNCH_TIMEOUT = 'DRIVER_LAUNCH_TIMEOUT' - DRIVER_NODE_UNREACHABLE = 'DRIVER_NODE_UNREACHABLE' - DRIVER_OUT_OF_DISK = 'DRIVER_OUT_OF_DISK' - DRIVER_OUT_OF_MEMORY = 'DRIVER_OUT_OF_MEMORY' - DRIVER_POD_CREATION_FAILURE = 'DRIVER_POD_CREATION_FAILURE' - DRIVER_UNEXPECTED_FAILURE = 'DRIVER_UNEXPECTED_FAILURE' - DRIVER_UNHEALTHY = 'DRIVER_UNHEALTHY' - DRIVER_UNREACHABLE = 'DRIVER_UNREACHABLE' - DRIVER_UNRESPONSIVE = 'DRIVER_UNRESPONSIVE' - DYNAMIC_SPARK_CONF_SIZE_EXCEEDED = 'DYNAMIC_SPARK_CONF_SIZE_EXCEEDED' - EOS_SPARK_IMAGE = 'EOS_SPARK_IMAGE' - EXECUTION_COMPONENT_UNHEALTHY = 'EXECUTION_COMPONENT_UNHEALTHY' - EXECUTOR_POD_UNSCHEDULED = 'EXECUTOR_POD_UNSCHEDULED' - GCP_API_RATE_QUOTA_EXCEEDED = 'GCP_API_RATE_QUOTA_EXCEEDED' - GCP_DENIED_BY_ORG_POLICY = 'GCP_DENIED_BY_ORG_POLICY' - GCP_FORBIDDEN = 'GCP_FORBIDDEN' - GCP_IAM_TIMEOUT = 'GCP_IAM_TIMEOUT' - GCP_INACCESSIBLE_KMS_KEY_FAILURE = 'GCP_INACCESSIBLE_KMS_KEY_FAILURE' - GCP_INSUFFICIENT_CAPACITY = 'GCP_INSUFFICIENT_CAPACITY' - GCP_IP_SPACE_EXHAUSTED = 'GCP_IP_SPACE_EXHAUSTED' - GCP_KMS_KEY_PERMISSION_DENIED = 'GCP_KMS_KEY_PERMISSION_DENIED' - GCP_NOT_FOUND = 'GCP_NOT_FOUND' - GCP_QUOTA_EXCEEDED = 'GCP_QUOTA_EXCEEDED' - GCP_RESOURCE_QUOTA_EXCEEDED = 'GCP_RESOURCE_QUOTA_EXCEEDED' - GCP_SERVICE_ACCOUNT_ACCESS_DENIED = 'GCP_SERVICE_ACCOUNT_ACCESS_DENIED' - GCP_SERVICE_ACCOUNT_DELETED = 'GCP_SERVICE_ACCOUNT_DELETED' - GCP_SERVICE_ACCOUNT_NOT_FOUND = 'GCP_SERVICE_ACCOUNT_NOT_FOUND' - GCP_SUBNET_NOT_READY = 'GCP_SUBNET_NOT_READY' - GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED = 'GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED' - GKE_BASED_CLUSTER_TERMINATION = 'GKE_BASED_CLUSTER_TERMINATION' - GLOBAL_INIT_SCRIPT_FAILURE = 'GLOBAL_INIT_SCRIPT_FAILURE' - HIVE_METASTORE_PROVISIONING_FAILURE = 'HIVE_METASTORE_PROVISIONING_FAILURE' - IMAGE_PULL_PERMISSION_DENIED = 'IMAGE_PULL_PERMISSION_DENIED' - INACTIVITY = 'INACTIVITY' - INIT_CONTAINER_NOT_FINISHED = 'INIT_CONTAINER_NOT_FINISHED' - INIT_SCRIPT_FAILURE = 'INIT_SCRIPT_FAILURE' - INSTANCE_POOL_CLUSTER_FAILURE = 'INSTANCE_POOL_CLUSTER_FAILURE' - INSTANCE_POOL_MAX_CAPACITY_REACHED = 'INSTANCE_POOL_MAX_CAPACITY_REACHED' - INSTANCE_POOL_NOT_FOUND = 'INSTANCE_POOL_NOT_FOUND' - INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE' - INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG = 'INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG' - INTERNAL_CAPACITY_FAILURE = 'INTERNAL_CAPACITY_FAILURE' - INTERNAL_ERROR = 'INTERNAL_ERROR' - INVALID_ARGUMENT = 'INVALID_ARGUMENT' - INVALID_AWS_PARAMETER = 'INVALID_AWS_PARAMETER' - INVALID_INSTANCE_PLACEMENT_PROTOCOL = 'INVALID_INSTANCE_PLACEMENT_PROTOCOL' - INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE' - INVALID_WORKER_IMAGE_FAILURE = 'INVALID_WORKER_IMAGE_FAILURE' - IN_PENALTY_BOX = 'IN_PENALTY_BOX' - IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE' - JOB_FINISHED = 'JOB_FINISHED' - K8S_AUTOSCALING_FAILURE = 'K8S_AUTOSCALING_FAILURE' - K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = 'K8S_DBR_CLUSTER_LAUNCH_TIMEOUT' - LAZY_ALLOCATION_TIMEOUT = 'LAZY_ALLOCATION_TIMEOUT' - MAINTENANCE_MODE = 'MAINTENANCE_MODE' - METASTORE_COMPONENT_UNHEALTHY = 'METASTORE_COMPONENT_UNHEALTHY' - NEPHOS_RESOURCE_MANAGEMENT = 'NEPHOS_RESOURCE_MANAGEMENT' - NETVISOR_SETUP_TIMEOUT = 'NETVISOR_SETUP_TIMEOUT' - NETWORK_CHECK_CONTROL_PLANE_FAILURE = 'NETWORK_CHECK_CONTROL_PLANE_FAILURE' - NETWORK_CHECK_DNS_SERVER_FAILURE = 'NETWORK_CHECK_DNS_SERVER_FAILURE' - NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = 'NETWORK_CHECK_METADATA_ENDPOINT_FAILURE' - NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = 
'NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE' - NETWORK_CHECK_NIC_FAILURE = 'NETWORK_CHECK_NIC_FAILURE' - NETWORK_CHECK_STORAGE_FAILURE = 'NETWORK_CHECK_STORAGE_FAILURE' - NETWORK_CONFIGURATION_FAILURE = 'NETWORK_CONFIGURATION_FAILURE' - NFS_MOUNT_FAILURE = 'NFS_MOUNT_FAILURE' - NO_MATCHED_K8S = 'NO_MATCHED_K8S' - NO_MATCHED_K8S_TESTING_TAG = 'NO_MATCHED_K8S_TESTING_TAG' - NPIP_TUNNEL_SETUP_FAILURE = 'NPIP_TUNNEL_SETUP_FAILURE' - NPIP_TUNNEL_TOKEN_FAILURE = 'NPIP_TUNNEL_TOKEN_FAILURE' - POD_ASSIGNMENT_FAILURE = 'POD_ASSIGNMENT_FAILURE' - POD_SCHEDULING_FAILURE = 'POD_SCHEDULING_FAILURE' - REQUEST_REJECTED = 'REQUEST_REJECTED' - REQUEST_THROTTLED = 'REQUEST_THROTTLED' - RESOURCE_USAGE_BLOCKED = 'RESOURCE_USAGE_BLOCKED' - SECRET_CREATION_FAILURE = 'SECRET_CREATION_FAILURE' - SECRET_PERMISSION_DENIED = 'SECRET_PERMISSION_DENIED' - SECRET_RESOLUTION_ERROR = 'SECRET_RESOLUTION_ERROR' - SECURITY_DAEMON_REGISTRATION_EXCEPTION = 'SECURITY_DAEMON_REGISTRATION_EXCEPTION' - SELF_BOOTSTRAP_FAILURE = 'SELF_BOOTSTRAP_FAILURE' - SERVERLESS_LONG_RUNNING_TERMINATED = 'SERVERLESS_LONG_RUNNING_TERMINATED' - SKIPPED_SLOW_NODES = 'SKIPPED_SLOW_NODES' - SLOW_IMAGE_DOWNLOAD = 'SLOW_IMAGE_DOWNLOAD' - SPARK_ERROR = 'SPARK_ERROR' - SPARK_IMAGE_DOWNLOAD_FAILURE = 'SPARK_IMAGE_DOWNLOAD_FAILURE' - SPARK_IMAGE_DOWNLOAD_THROTTLED = 'SPARK_IMAGE_DOWNLOAD_THROTTLED' - SPARK_IMAGE_NOT_FOUND = 'SPARK_IMAGE_NOT_FOUND' - SPARK_STARTUP_FAILURE = 'SPARK_STARTUP_FAILURE' - SPOT_INSTANCE_TERMINATION = 'SPOT_INSTANCE_TERMINATION' - SSH_BOOTSTRAP_FAILURE = 'SSH_BOOTSTRAP_FAILURE' - STORAGE_DOWNLOAD_FAILURE = 'STORAGE_DOWNLOAD_FAILURE' - STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG = 'STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG' - STORAGE_DOWNLOAD_FAILURE_SLOW = 'STORAGE_DOWNLOAD_FAILURE_SLOW' - STORAGE_DOWNLOAD_FAILURE_THROTTLED = 'STORAGE_DOWNLOAD_FAILURE_THROTTLED' - STS_CLIENT_SETUP_FAILURE = 'STS_CLIENT_SETUP_FAILURE' - SUBNET_EXHAUSTED_FAILURE = 'SUBNET_EXHAUSTED_FAILURE' - TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE' - TRIAL_EXPIRED = 'TRIAL_EXPIRED' - UNEXPECTED_LAUNCH_FAILURE = 'UNEXPECTED_LAUNCH_FAILURE' - UNEXPECTED_POD_RECREATION = 'UNEXPECTED_POD_RECREATION' - UNKNOWN = 'UNKNOWN' - UNSUPPORTED_INSTANCE_TYPE = 'UNSUPPORTED_INSTANCE_TYPE' - UPDATE_INSTANCE_PROFILE_FAILURE = 'UPDATE_INSTANCE_PROFILE_FAILURE' - USER_INITIATED_VM_TERMINATION = 'USER_INITIATED_VM_TERMINATION' - USER_REQUEST = 'USER_REQUEST' - WORKER_SETUP_FAILURE = 'WORKER_SETUP_FAILURE' - WORKSPACE_CANCELLED_ERROR = 'WORKSPACE_CANCELLED_ERROR' - WORKSPACE_CONFIGURATION_ERROR = 'WORKSPACE_CONFIGURATION_ERROR' - WORKSPACE_UPDATE = 'WORKSPACE_UPDATE' + + ABUSE_DETECTED = "ABUSE_DETECTED" + ACCESS_TOKEN_FAILURE = "ACCESS_TOKEN_FAILURE" + ALLOCATION_TIMEOUT = "ALLOCATION_TIMEOUT" + ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY = "ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY" + ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_AND_WARMED_UP_CLUSTERS" + ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS" + ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS = "ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS" + ALLOCATION_TIMEOUT_NO_READY_CLUSTERS = "ALLOCATION_TIMEOUT_NO_READY_CLUSTERS" + ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS = "ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS" + ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS = "ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS" + ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE" + AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE" + AWS_INACCESSIBLE_KMS_KEY_FAILURE = 
"AWS_INACCESSIBLE_KMS_KEY_FAILURE" + AWS_INSTANCE_PROFILE_UPDATE_FAILURE = "AWS_INSTANCE_PROFILE_UPDATE_FAILURE" + AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" + AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" + AWS_INVALID_KEY_PAIR = "AWS_INVALID_KEY_PAIR" + AWS_INVALID_KMS_KEY_STATE = "AWS_INVALID_KMS_KEY_STATE" + AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" + AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED" + AWS_RESOURCE_QUOTA_EXCEEDED = "AWS_RESOURCE_QUOTA_EXCEEDED" + AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE" + AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE" + AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE" + AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE" + AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" + AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE = "AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE" + AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION" + AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING" + AZURE_RESOURCE_PROVIDER_THROTTLING = "AZURE_RESOURCE_PROVIDER_THROTTLING" + AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE" + AZURE_VM_EXTENSION_FAILURE = "AZURE_VM_EXTENSION_FAILURE" + AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE" + BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT" + BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" + BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG = "BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG" + BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED = "BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED" + BUDGET_POLICY_RESOLUTION_FAILURE = "BUDGET_POLICY_RESOLUTION_FAILURE" + CLOUD_ACCOUNT_SETUP_FAILURE = "CLOUD_ACCOUNT_SETUP_FAILURE" + CLOUD_OPERATION_CANCELLED = "CLOUD_OPERATION_CANCELLED" + CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE" + CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED = "CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED" + CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE" + CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG" + CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT" + CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG = "CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG" + CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN" + CLUSTER_OPERATION_THROTTLED = "CLUSTER_OPERATION_THROTTLED" + CLUSTER_OPERATION_TIMEOUT = "CLUSTER_OPERATION_TIMEOUT" + COMMUNICATION_LOST = "COMMUNICATION_LOST" + CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE" + CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE" + CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG = "CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG" + DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" + DATA_ACCESS_CONFIG_CHANGED = "DATA_ACCESS_CONFIG_CHANGED" + DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" + DISASTER_RECOVERY_REPLICATION = "DISASTER_RECOVERY_REPLICATION" + DNS_RESOLUTION_ERROR = "DNS_RESOLUTION_ERROR" + DOCKER_CONTAINER_CREATION_EXCEPTION = "DOCKER_CONTAINER_CREATION_EXCEPTION" + DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" + DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = "DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" + DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" + DRIVER_EVICTION = 
"DRIVER_EVICTION" + DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" + DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" + DRIVER_OUT_OF_DISK = "DRIVER_OUT_OF_DISK" + DRIVER_OUT_OF_MEMORY = "DRIVER_OUT_OF_MEMORY" + DRIVER_POD_CREATION_FAILURE = "DRIVER_POD_CREATION_FAILURE" + DRIVER_UNEXPECTED_FAILURE = "DRIVER_UNEXPECTED_FAILURE" + DRIVER_UNHEALTHY = "DRIVER_UNHEALTHY" + DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE" + DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE" + DYNAMIC_SPARK_CONF_SIZE_EXCEEDED = "DYNAMIC_SPARK_CONF_SIZE_EXCEEDED" + EOS_SPARK_IMAGE = "EOS_SPARK_IMAGE" + EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY" + EXECUTOR_POD_UNSCHEDULED = "EXECUTOR_POD_UNSCHEDULED" + GCP_API_RATE_QUOTA_EXCEEDED = "GCP_API_RATE_QUOTA_EXCEEDED" + GCP_DENIED_BY_ORG_POLICY = "GCP_DENIED_BY_ORG_POLICY" + GCP_FORBIDDEN = "GCP_FORBIDDEN" + GCP_IAM_TIMEOUT = "GCP_IAM_TIMEOUT" + GCP_INACCESSIBLE_KMS_KEY_FAILURE = "GCP_INACCESSIBLE_KMS_KEY_FAILURE" + GCP_INSUFFICIENT_CAPACITY = "GCP_INSUFFICIENT_CAPACITY" + GCP_IP_SPACE_EXHAUSTED = "GCP_IP_SPACE_EXHAUSTED" + GCP_KMS_KEY_PERMISSION_DENIED = "GCP_KMS_KEY_PERMISSION_DENIED" + GCP_NOT_FOUND = "GCP_NOT_FOUND" + GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED" + GCP_RESOURCE_QUOTA_EXCEEDED = "GCP_RESOURCE_QUOTA_EXCEEDED" + GCP_SERVICE_ACCOUNT_ACCESS_DENIED = "GCP_SERVICE_ACCOUNT_ACCESS_DENIED" + GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED" + GCP_SERVICE_ACCOUNT_NOT_FOUND = "GCP_SERVICE_ACCOUNT_NOT_FOUND" + GCP_SUBNET_NOT_READY = "GCP_SUBNET_NOT_READY" + GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED = "GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED" + GKE_BASED_CLUSTER_TERMINATION = "GKE_BASED_CLUSTER_TERMINATION" + GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE" + HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE" + IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED" + INACTIVITY = "INACTIVITY" + INIT_CONTAINER_NOT_FINISHED = "INIT_CONTAINER_NOT_FINISHED" + INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE" + INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE" + INSTANCE_POOL_MAX_CAPACITY_REACHED = "INSTANCE_POOL_MAX_CAPACITY_REACHED" + INSTANCE_POOL_NOT_FOUND = "INSTANCE_POOL_NOT_FOUND" + INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE" + INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG = "INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG" + INTERNAL_CAPACITY_FAILURE = "INTERNAL_CAPACITY_FAILURE" + INTERNAL_ERROR = "INTERNAL_ERROR" + INVALID_ARGUMENT = "INVALID_ARGUMENT" + INVALID_AWS_PARAMETER = "INVALID_AWS_PARAMETER" + INVALID_INSTANCE_PLACEMENT_PROTOCOL = "INVALID_INSTANCE_PLACEMENT_PROTOCOL" + INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE" + INVALID_WORKER_IMAGE_FAILURE = "INVALID_WORKER_IMAGE_FAILURE" + IN_PENALTY_BOX = "IN_PENALTY_BOX" + IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE" + JOB_FINISHED = "JOB_FINISHED" + K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE" + K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" + LAZY_ALLOCATION_TIMEOUT = "LAZY_ALLOCATION_TIMEOUT" + MAINTENANCE_MODE = "MAINTENANCE_MODE" + METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY" + NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT" + NETVISOR_SETUP_TIMEOUT = "NETVISOR_SETUP_TIMEOUT" + NETWORK_CHECK_CONTROL_PLANE_FAILURE = "NETWORK_CHECK_CONTROL_PLANE_FAILURE" + NETWORK_CHECK_DNS_SERVER_FAILURE = "NETWORK_CHECK_DNS_SERVER_FAILURE" + NETWORK_CHECK_METADATA_ENDPOINT_FAILURE = "NETWORK_CHECK_METADATA_ENDPOINT_FAILURE" + NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE = "NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE" + 
NETWORK_CHECK_NIC_FAILURE = "NETWORK_CHECK_NIC_FAILURE" + NETWORK_CHECK_STORAGE_FAILURE = "NETWORK_CHECK_STORAGE_FAILURE" + NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" + NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" + NO_MATCHED_K8S = "NO_MATCHED_K8S" + NO_MATCHED_K8S_TESTING_TAG = "NO_MATCHED_K8S_TESTING_TAG" + NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" + NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE" + POD_ASSIGNMENT_FAILURE = "POD_ASSIGNMENT_FAILURE" + POD_SCHEDULING_FAILURE = "POD_SCHEDULING_FAILURE" + REQUEST_REJECTED = "REQUEST_REJECTED" + REQUEST_THROTTLED = "REQUEST_THROTTLED" + RESOURCE_USAGE_BLOCKED = "RESOURCE_USAGE_BLOCKED" + SECRET_CREATION_FAILURE = "SECRET_CREATION_FAILURE" + SECRET_PERMISSION_DENIED = "SECRET_PERMISSION_DENIED" + SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" + SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" + SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" + SERVERLESS_LONG_RUNNING_TERMINATED = "SERVERLESS_LONG_RUNNING_TERMINATED" + SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES" + SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD" + SPARK_ERROR = "SPARK_ERROR" + SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE" + SPARK_IMAGE_DOWNLOAD_THROTTLED = "SPARK_IMAGE_DOWNLOAD_THROTTLED" + SPARK_IMAGE_NOT_FOUND = "SPARK_IMAGE_NOT_FOUND" + SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE" + SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION" + SSH_BOOTSTRAP_FAILURE = "SSH_BOOTSTRAP_FAILURE" + STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE" + STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG = "STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG" + STORAGE_DOWNLOAD_FAILURE_SLOW = "STORAGE_DOWNLOAD_FAILURE_SLOW" + STORAGE_DOWNLOAD_FAILURE_THROTTLED = "STORAGE_DOWNLOAD_FAILURE_THROTTLED" + STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE" + SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE" + TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" + TRIAL_EXPIRED = "TRIAL_EXPIRED" + UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE" + UNEXPECTED_POD_RECREATION = "UNEXPECTED_POD_RECREATION" + UNKNOWN = "UNKNOWN" + UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE" + UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE" + USER_INITIATED_VM_TERMINATION = "USER_INITIATED_VM_TERMINATION" + USER_REQUEST = "USER_REQUEST" + WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE" + WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR" + WORKSPACE_CONFIGURATION_ERROR = "WORKSPACE_CONFIGURATION_ERROR" + WORKSPACE_UPDATE = "WORKSPACE_UPDATE" + class TerminationReasonType(Enum): """type of the termination""" - - CLIENT_ERROR = 'CLIENT_ERROR' - CLOUD_FAILURE = 'CLOUD_FAILURE' - SERVICE_FAULT = 'SERVICE_FAULT' - SUCCESS = 'SUCCESS' + + CLIENT_ERROR = "CLIENT_ERROR" + CLOUD_FAILURE = "CLOUD_FAILURE" + SERVICE_FAULT = "SERVICE_FAULT" + SUCCESS = "SUCCESS" + @dataclass class UninstallLibraries: cluster_id: str """Unique identifier for the cluster on which to uninstall these libraries.""" - + libraries: List[Library] """The libraries to uninstall.""" - + def as_dict(self) -> dict: """Serializes the UninstallLibraries into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] return body def 
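`TerminationReason` pairs a fine-grained `code` with a coarse `type`; the `type` is often enough to decide whether retrying makes sense (a `CLIENT_ERROR` will not fix itself, while `CLOUD_FAILURE` or `SERVICE_FAULT` might). A diagnostic sketch over a terminated cluster (placeholder ID):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import TerminationReasonType

w = WorkspaceClient()
info = w.clusters.get(cluster_id="0123-456789-abcdefgh")  # placeholder

reason = info.termination_reason
if reason is not None:
    maybe_transient = reason.type in (
        TerminationReasonType.CLOUD_FAILURE,
        TerminationReasonType.SERVICE_FAULT,
    )
    print(f"code={reason.code}, type={reason.type}, retryable={maybe_transient}")
    print("details:", reason.parameters or {})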
as_shallow_dict(self) -> dict: """Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.libraries: body['libraries'] = self.libraries + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.libraries: + body["libraries"] = self.libraries return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UninstallLibraries: """Deserializes the UninstallLibraries from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), libraries=_repeated_dict(d, 'libraries', Library)) - - + return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) @dataclass @@ -7687,32 +9233,30 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UninstallLibrariesResponse: """Deserializes the UninstallLibrariesResponse from a dictionary.""" return cls() - - @dataclass class UnpinCluster: cluster_id: str - + def as_dict(self) -> dict: """Serializes the UnpinCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body def as_shallow_dict(self) -> dict: """Serializes the UnpinCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UnpinCluster: """Deserializes the UnpinCluster from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None)) - - + return cls(cluster_id=d.get("cluster_id", None)) @dataclass @@ -7731,15 +9275,13 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UnpinClusterResponse: """Deserializes the UnpinClusterResponse from a dictionary.""" return cls() - - @dataclass class UpdateCluster: cluster_id: str """ID of the cluster.""" - + update_mask: str """Used to specify which cluster attributes and size fields to update. See https://google.aip.dev/161 for more details. @@ -7753,32 +9295,40 @@ class UpdateCluster: A field mask of `*` indicates full replacement. 
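`PinCluster` and `UnpinCluster` are thin request bodies around a single `cluster_id`, so the corresponding calls are one-liners; pinning keeps a terminated cluster's configuration from aging out of the cluster list. A sketch with a placeholder ID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
cluster_id = "0123-456789-abcdefgh"  # placeholder

w.clusters.pin(cluster_id=cluster_id)
# ... later, once the configuration no longer needs to be retained:
w.clusters.unpin(cluster_id=cluster_id)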
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + cluster: Optional[UpdateClusterResource] = None """The cluster to be updated.""" - + def as_dict(self) -> dict: """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster: body['cluster'] = self.cluster.as_dict() - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.cluster: + body["cluster"] = self.cluster.as_dict() + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster: body['cluster'] = self.cluster - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.cluster: + body["cluster"] = self.cluster + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCluster: """Deserializes the UpdateCluster from a dictionary.""" - return cls(cluster=_from_dict(d, 'cluster', UpdateClusterResource), cluster_id=d.get('cluster_id', None), update_mask=d.get('update_mask', None)) - - + return cls( + cluster=_from_dict(d, "cluster", UpdateClusterResource), + cluster_id=d.get("cluster_id", None), + update_mask=d.get("update_mask", None), + ) @dataclass @@ -7786,34 +9336,34 @@ class UpdateClusterResource: autoscale: Optional[AutoScale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + autotermination_minutes: Optional[int] = None """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this cluster will not be automatically terminated. If specified, the threshold must be between 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.""" - + aws_attributes: Optional[AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + cluster_log_conf: Optional[ClusterLogConf] = None """The configuration for delivering spark logs to a long-term storage destination. Three kinds of destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - + cluster_name: Optional[str] = None """Cluster name requested by the user. This doesn't have to be unique. If not specified at creation, the cluster name will be an empty string. 
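As an illustration of the field-mask semantics documented above, a minimal sketch, assuming a configured WorkspaceClient and an existing cluster (the cluster ID is a placeholder):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import UpdateClusterResource

w = WorkspaceClient()
# Only the field named in `update_mask` is changed; every other cluster
# attribute keeps its current value.
w.clusters.update(
    cluster_id="1234-567890-abcde123",  # placeholder cluster ID
    update_mask="autotermination_minutes",
    cluster=UpdateClusterResource(autotermination_minutes=60),
).result()  # block until the cluster reports RUNNING; drop .result() for fire-and-forget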
For job clusters, the cluster name is automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -7821,7 +9371,7 @@ class UpdateClusterResource: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + data_security_mode: Optional[DataSecurityMode] = None """Data security mode decides what data governance model to use when accessing data from a cluster. @@ -7847,14 +9397,14 @@ class UpdateClusterResource: concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.""" - + docker_image: Optional[DockerImage] = None """Custom docker image BYOC""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the driver of the cluster belongs. The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. @@ -7862,33 +9412,33 @@ class UpdateClusterResource: This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence.""" - + enable_elastic_disk: Optional[bool] = None """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space. This feature requires specific AWS permissions to function correctly - refer to the User Guide for more details.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable LUKS on cluster VMs' local disks""" - + gcp_attributes: Optional[GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + is_single_node: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers`""" - + kind: Optional[Kind] = None """The kind of compute described by this compute specification. @@ -7903,13 +9453,13 @@ class UpdateClusterResource: By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. [simple form]: https://docs.databricks.com/compute/simple-form.html""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.
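As a hedged example of the `init_scripts` field above, wired to the UC Volumes storage type defined later in this file (the volume path is illustrative):

from databricks.sdk.service.compute import InitScriptInfo, VolumesStorageInfo

init_scripts = [
    InitScriptInfo(
        volumes=VolumesStorageInfo(
            destination="/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh"
        )
    )
]
# Serializes to {"volumes": {"destination": ...}} for the request body.
print(init_scripts[0].as_dict())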
For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. @@ -7919,10 +9469,10 @@ class UpdateClusterResource: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - + runtime_engine: Optional[RuntimeEngine] = None """Determines the cluster's runtime engine, either standard or Photon. @@ -7931,16 +9481,16 @@ class UpdateClusterResource: If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used.""" - + single_user_name: Optional[str] = None """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str,str]] = None + + spark_conf: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. Users can also pass in a string of extra JVM options to the driver and the executors via `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - - spark_env_vars: Optional[Dict[str,str]] = None + + spark_env_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -7952,99 +9502,185 @@ class UpdateClusterResource: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + spark_version: Optional[str] = None """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call.""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + use_ml_runtime: Optional[bool] = None """This field can only be used when `kind = CLASSIC_PREVIEW`. 
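A short, non-authoritative sketch of the `runtime_engine` guidance above: Photon is requested through the enum rather than a legacy `-photon-` version string (the version string and Spark conf value are illustrative):

from databricks.sdk.service.compute import RuntimeEngine, UpdateClusterResource

spec = UpdateClusterResource(
    spark_version="15.4.x-scala2.12",  # no "-photon-" suffix
    runtime_engine=RuntimeEngine.PHOTON,
    spark_conf={"spark.sql.shuffle.partitions": "200"},  # hypothetical tuning
)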
`effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" - + workload_type: Optional[WorkloadType] = None """Cluster Attributes showing for clusters workload types.""" - + def as_dict(self) -> dict: """Serializes the UpdateClusterResource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.autoscale: body['autoscale'] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: body['cluster_name'] = self.cluster_name - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value - if self.docker_image: body['docker_image'] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.is_single_node is not None: body['is_single_node'] = self.is_single_node - if self.kind is not None: body['kind'] = self.kind.value - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value - if self.single_user_name is not None: body['single_user_name'] = self.single_user_name - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.spark_version is not None: body['spark_version'] = self.spark_version - if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] - if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime - if self.workload_type: body['workload_type'] = self.workload_type.as_dict() + if self.autoscale: + body["autoscale"] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: + body["autotermination_minutes"] = self.autotermination_minutes + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes.as_dict() + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes.as_dict() + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: + body["cluster_name"] = self.cluster_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.data_security_mode is not None: + body["data_security_mode"] = self.data_security_mode.value + if self.docker_image: + 
body["docker_image"] = self.docker_image.as_dict() + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.init_scripts: + body["init_scripts"] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind.value + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine.value + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = [v for v in self.ssh_public_keys] + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateClusterResource into a shallow dictionary of its immediate attributes.""" body = {} - if self.autoscale: body['autoscale'] = self.autoscale - if self.autotermination_minutes is not None: body['autotermination_minutes'] = self.autotermination_minutes - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf - if self.cluster_name is not None: body['cluster_name'] = self.cluster_name - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode - if self.docker_image: body['docker_image'] = self.docker_image - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.init_scripts: body['init_scripts'] = self.init_scripts - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.is_single_node is not None: body['is_single_node'] = self.is_single_node - if self.kind is not None: body['kind'] = self.kind - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = 
self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine - if self.single_user_name is not None: body['single_user_name'] = self.single_user_name - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.spark_version is not None: body['spark_version'] = self.spark_version - if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys - if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime - if self.workload_type: body['workload_type'] = self.workload_type + if self.autoscale: + body["autoscale"] = self.autoscale + if self.autotermination_minutes is not None: + body["autotermination_minutes"] = self.autotermination_minutes + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf + if self.cluster_name is not None: + body["cluster_name"] = self.cluster_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.data_security_mode is not None: + body["data_security_mode"] = self.data_security_mode + if self.docker_image: + body["docker_image"] = self.docker_image + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_elastic_disk is not None: + body["enable_elastic_disk"] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.init_scripts: + body["init_scripts"] = self.init_scripts + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.is_single_node is not None: + body["is_single_node"] = self.is_single_node + if self.kind is not None: + body["kind"] = self.kind + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.runtime_engine is not None: + body["runtime_engine"] = self.runtime_engine + if self.single_user_name is not None: + body["single_user_name"] = self.single_user_name + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.spark_version is not None: + body["spark_version"] = self.spark_version + if self.ssh_public_keys: + body["ssh_public_keys"] = self.ssh_public_keys + if self.use_ml_runtime is not None: + body["use_ml_runtime"] = self.use_ml_runtime + if self.workload_type: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateClusterResource: """Deserializes the UpdateClusterResource from a dictionary.""" - return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), autotermination_minutes=d.get('autotermination_minutes', None), aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), 
cluster_name=d.get('cluster_name', None), custom_tags=d.get('custom_tags', None), data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), docker_image=_from_dict(d, 'docker_image', DockerImage), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_elastic_disk=d.get('enable_elastic_disk', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), is_single_node=d.get('is_single_node', None), kind=_enum(d, 'kind', Kind), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), single_user_name=d.get('single_user_name', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), spark_version=d.get('spark_version', None), ssh_public_keys=d.get('ssh_public_keys', None), use_ml_runtime=d.get('use_ml_runtime', None), workload_type=_from_dict(d, 'workload_type', WorkloadType)) - - + return cls( + autoscale=_from_dict(d, "autoscale", AutoScale), + autotermination_minutes=d.get("autotermination_minutes", None), + aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), + cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), + cluster_name=d.get("cluster_name", None), + custom_tags=d.get("custom_tags", None), + data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), + docker_image=_from_dict(d, "docker_image", DockerImage), + driver_instance_pool_id=d.get("driver_instance_pool_id", None), + driver_node_type_id=d.get("driver_node_type_id", None), + enable_elastic_disk=d.get("enable_elastic_disk", None), + enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), + gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), + init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), + instance_pool_id=d.get("instance_pool_id", None), + is_single_node=d.get("is_single_node", None), + kind=_enum(d, "kind", Kind), + node_type_id=d.get("node_type_id", None), + num_workers=d.get("num_workers", None), + policy_id=d.get("policy_id", None), + runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), + single_user_name=d.get("single_user_name", None), + spark_conf=d.get("spark_conf", None), + spark_env_vars=d.get("spark_env_vars", None), + spark_version=d.get("spark_version", None), + ssh_public_keys=d.get("ssh_public_keys", None), + use_ml_runtime=d.get("use_ml_runtime", None), + workload_type=_from_dict(d, "workload_type", WorkloadType), + ) @dataclass @@ -8063,8 +9699,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateClusterResponse: """Deserializes the UpdateClusterResponse from a dictionary.""" return cls() - - @dataclass @@ -8083,135 +9717,130 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() - - @dataclass class VolumesStorageInfo: """A storage location backed by UC Volumes.""" - + destination: str """UC Volumes destination, e.g.
`/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh` or `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh`""" - + def as_dict(self) -> dict: """Serializes the VolumesStorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the VolumesStorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VolumesStorageInfo: """Deserializes the VolumesStorageInfo from a dictionary.""" - return cls(destination=d.get('destination', None)) - - + return cls(destination=d.get("destination", None)) @dataclass class WorkloadType: """Cluster Attributes showing for clusters workload types.""" - + clients: ClientsTypes """Defines what type of clients can use the cluster, e.g. Notebooks, Jobs""" - + def as_dict(self) -> dict: """Serializes the WorkloadType into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clients: body['clients'] = self.clients.as_dict() + if self.clients: + body["clients"] = self.clients.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the WorkloadType into a shallow dictionary of its immediate attributes.""" body = {} - if self.clients: body['clients'] = self.clients + if self.clients: + body["clients"] = self.clients return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkloadType: """Deserializes the WorkloadType from a dictionary.""" - return cls(clients=_from_dict(d, 'clients', ClientsTypes)) - - + return cls(clients=_from_dict(d, "clients", ClientsTypes)) @dataclass class WorkspaceStorageInfo: """A storage location in Workspace Filesystem (WSFS)""" - + destination: str """wsfs destination, e.g. `workspace:/cluster-init-scripts/setup-datadog.sh`""" - + def as_dict(self) -> dict: """Serializes the WorkspaceStorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceStorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination + if self.destination is not None: + body["destination"] = self.destination return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceStorageInfo: """Deserializes the WorkspaceStorageInfo from a dictionary.""" - return cls(destination=d.get('destination', None)) - - - - + return cls(destination=d.get("destination", None)) class ClusterPoliciesAPI: """You can use cluster policies to control users' ability to configure clusters based on a set of rules. These rules specify which attributes or attribute values can be used during cluster creation. Cluster policies have ACLs that limit their use to specific users and groups. - + With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed settings.
- Simplify the user interface, enabling more users to create clusters, by fixing and hiding some fields. - Manage costs by setting limits on attributes that impact the hourly rate. - + Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted policy and create fully-configurable clusters. - A user who has both unrestricted cluster create permission and access to cluster policies can select the Unrestricted policy and policies they have access to. - A user who has access only to cluster policies can select the policies they have access to. - + If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create, edit, and delete policies. Admin users also have access to all policies.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None) -> CreatePolicyResponse: + def create( + self, + *, + definition: Optional[str] = None, + description: Optional[str] = None, + libraries: Optional[List[Library]] = None, + max_clusters_per_user: Optional[int] = None, + name: Optional[str] = None, + policy_family_definition_overrides: Optional[str] = None, + policy_family_id: Optional[str] = None, + ) -> CreatePolicyResponse: """Create a new policy. - + Creates a new policy with prescribed settings. - + :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - + [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param description: str (optional) Additional human-readable description of the cluster policy. @@ -8227,80 +9856,85 @@ def create(self :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. - + You can use this to customize the policy definition inherited from the policy family. Policy rules specified here are merged into the inherited policy definition. - + [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param policy_family_id: str (optional) ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. - + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition.
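For example, a hedged sketch of creating a policy from a definition document, assuming a configured WorkspaceClient (the two rules shown are illustrative, not prescribed by this SDK):

import json

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Pin the DBR version and cap auto-termination via the policy definition language.
definition = json.dumps(
    {
        "spark_version": {"type": "fixed", "value": "15.4.x-scala2.12"},
        "autotermination_minutes": {"type": "range", "maxValue": 120},
    }
)
created = w.cluster_policies.create(name="cost-capped-clusters", definition=definition)
print(created.policy_id)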
- + :returns: :class:`CreatePolicyResponse` """ body = {} - if definition is not None: body['definition'] = definition - if description is not None: body['description'] = description - if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user - if name is not None: body['name'] = name - if policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = policy_family_definition_overrides - if policy_family_id is not None: body['policy_family_id'] = policy_family_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/policies/clusters/create', body=body - - , headers=headers - ) + if definition is not None: + body["definition"] = definition + if description is not None: + body["description"] = description + if libraries is not None: + body["libraries"] = [v.as_dict() for v in libraries] + if max_clusters_per_user is not None: + body["max_clusters_per_user"] = max_clusters_per_user + if name is not None: + body["name"] = name + if policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = policy_family_definition_overrides + if policy_family_id is not None: + body["policy_family_id"] = policy_family_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/policies/clusters/create", body=body, headers=headers) return CreatePolicyResponse.from_dict(res) - - - - - def delete(self - , policy_id: str - ): + def delete(self, policy_id: str): """Delete a cluster policy. - + Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited. - + :param policy_id: str The ID of the policy to delete. - - - """ - body = {} - if policy_id is not None: body['policy_id'] = policy_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/policies/clusters/delete', body=body - - , headers=headers - ) - - - - - def edit(self - , policy_id: str - , * - , definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None): + """ + body = {} + if policy_id is not None: + body["policy_id"] = policy_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/policies/clusters/delete", body=body, headers=headers) + + def edit( + self, + policy_id: str, + *, + definition: Optional[str] = None, + description: Optional[str] = None, + libraries: Optional[List[Library]] = None, + max_clusters_per_user: Optional[int] = None, + name: Optional[str] = None, + policy_family_definition_overrides: Optional[str] = None, + policy_family_id: Optional[str] = None, + ): """Update a cluster policy. - + Update an existing policy for cluster. This operation may make some clusters governed by the previous policy invalid. - + :param policy_id: str The ID of the policy to update. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. 
- + [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param description: str (optional) Additional human-readable description of the cluster policy. @@ -8316,350 +9950,358 @@ def edit(self :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. - + You can use this to customize the policy definition inherited from the policy family. Policy rules specified here are merged into the inherited policy definition. - + [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param policy_family_id: str (optional) ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. - + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition. - - - """ - body = {} - if definition is not None: body['definition'] = definition - if description is not None: body['description'] = description - if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - if max_clusters_per_user is not None: body['max_clusters_per_user'] = max_clusters_per_user - if name is not None: body['name'] = name - if policy_family_definition_overrides is not None: body['policy_family_definition_overrides'] = policy_family_definition_overrides - if policy_family_id is not None: body['policy_family_id'] = policy_family_id - if policy_id is not None: body['policy_id'] = policy_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/policies/clusters/edit', body=body - - , headers=headers - ) - - - - - def get(self - , policy_id: str - ) -> Policy: + """ + body = {} + if definition is not None: + body["definition"] = definition + if description is not None: + body["description"] = description + if libraries is not None: + body["libraries"] = [v.as_dict() for v in libraries] + if max_clusters_per_user is not None: + body["max_clusters_per_user"] = max_clusters_per_user + if name is not None: + body["name"] = name + if policy_family_definition_overrides is not None: + body["policy_family_definition_overrides"] = policy_family_definition_overrides + if policy_family_id is not None: + body["policy_family_id"] = policy_family_id + if policy_id is not None: + body["policy_id"] = policy_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/policies/clusters/edit", body=body, headers=headers) + + def get(self, policy_id: str) -> Policy: """Get a cluster policy. - + Get a cluster policy entity. Creation and editing is available to admins only. - + :param policy_id: str Canonical unique identifier for the Cluster Policy. 
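Continuing the sketch above, a get/edit round trip; since `edit` replaces the policy, fields to keep must be re-sent (reuses `w` and `created` from the earlier example):

policy = w.cluster_policies.get(policy_id=created.policy_id)
w.cluster_policies.edit(
    policy_id=policy.policy_id,
    name=policy.name,  # re-send fields that should stay the same
    definition=policy.definition,
    max_clusters_per_user=5,  # the one change being made
)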
- + :returns: :class:`Policy` """ - + query = {} - if policy_id is not None: query['policy_id'] = policy_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/policies/clusters/get', query=query - - , headers=headers - ) - return Policy.from_dict(res) + if policy_id is not None: + query["policy_id"] = policy_id + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/policies/clusters/get", query=query, headers=headers) + return Policy.from_dict(res) - def get_permission_levels(self - , cluster_policy_id: str - ) -> GetClusterPolicyPermissionLevelsResponse: + def get_permission_levels(self, cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse: """Get cluster policy permission levels. - + Gets the permission levels that a user can have on an object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. - + :returns: :class:`GetClusterPolicyPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels' - - , headers=headers - ) - return GetClusterPolicyPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_permissions(self - , cluster_policy_id: str - ) -> ClusterPolicyPermissions: + res = self._api.do( + "GET", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}/permissionLevels", headers=headers + ) + return GetClusterPolicyPermissionLevelsResponse.from_dict(res) + + def get_permissions(self, cluster_policy_id: str) -> ClusterPolicyPermissions: """Get cluster policy permissions. - + Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. - + :returns: :class:`ClusterPolicyPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}' - - , headers=headers - ) - return ClusterPolicyPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}", headers=headers) + return ClusterPolicyPermissions.from_dict(res) - def list(self - - , * - , sort_column: Optional[ListSortColumn] = None, sort_order: Optional[ListSortOrder] = None) -> Iterator[Policy]: + def list( + self, *, sort_column: Optional[ListSortColumn] = None, sort_order: Optional[ListSortOrder] = None + ) -> Iterator[Policy]: """List cluster policies. - + Returns a list of policies accessible by the requesting user. - + :param sort_column: :class:`ListSortColumn` (optional) The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy creation time. * `POLICY_NAME` - Sort result list by policy name. :param sort_order: :class:`ListSortOrder` (optional) The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC` - Sort result list in ascending order. 
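A small usage sketch of the sort parameters described above (reusing the `w` client from the earlier examples):

from databricks.sdk.service.compute import ListSortColumn, ListSortOrder

for policy in w.cluster_policies.list(
    sort_column=ListSortColumn.POLICY_NAME,
    sort_order=ListSortOrder.ASC,
):
    print(policy.policy_id, policy.name)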
- + :returns: Iterator over :class:`Policy` """ - + query = {} - if sort_column is not None: query['sort_column'] = sort_column.value - if sort_order is not None: query['sort_order'] = sort_order.value - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/policies/clusters/list', query=query - - , headers=headers - ) + if sort_column is not None: + query["sort_column"] = sort_column.value + if sort_order is not None: + query["sort_order"] = sort_order.value + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/policies/clusters/list", query=query, headers=headers) parsed = ListPoliciesResponse.from_dict(json).policies return parsed if parsed is not None else [] - - - - - - def set_permissions(self - , cluster_policy_id: str - , * - , access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None) -> ClusterPolicyPermissions: + def set_permissions( + self, cluster_policy_id: str, *, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None + ) -> ClusterPolicyPermissions: """Set cluster policy permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional) - + :returns: :class:`ClusterPolicyPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}', body=body - - , headers=headers - ) - return ClusterPolicyPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PUT", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}", body=body, headers=headers + ) + return ClusterPolicyPermissions.from_dict(res) - def update_permissions(self - , cluster_policy_id: str - , * - , access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None) -> ClusterPolicyPermissions: + def update_permissions( + self, cluster_policy_id: str, *, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None + ) -> ClusterPolicyPermissions: """Update cluster policy permissions. - + Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. 
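As a hedged sketch of replacing a policy's ACL in full (the group name is hypothetical; reuses `w` and `created` from earlier):

from databricks.sdk.service.compute import (
    ClusterPolicyAccessControlRequest,
    ClusterPolicyPermissionLevel,
)

# set_permissions replaces any existing direct permissions on the policy.
w.cluster_policies.set_permissions(
    cluster_policy_id=created.policy_id,
    access_control_list=[
        ClusterPolicyAccessControlRequest(
            group_name="data-engineers",  # hypothetical group
            permission_level=ClusterPolicyPermissionLevel.CAN_USE,
        )
    ],
)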
:param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional) - + :returns: :class:`ClusterPolicyPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/cluster-policies/{cluster_policy_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/permissions/cluster-policies/{cluster_policy_id}", body=body, headers=headers + ) return ClusterPolicyPermissions.from_dict(res) - - + class ClustersAPI: """The Clusters API allows you to create, start, edit, list, terminate, and delete clusters. - + Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing page for a list of the supported instance types and their corresponding DBUs. - + A Databricks cluster is a set of computation resources and configurations on which you run data engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming analytics, ad-hoc analytics, and machine learning. - + You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs. - + You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_cluster_running(self, cluster_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: - deadline = time.time() + timeout.total_seconds() - target_states = (State.RUNNING, ) - failure_states = (State.ERROR, State.TERMINATED, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get(cluster_id=cluster_id) - status = poll.state - status_message = poll.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach RUNNING, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_get_cluster_terminated(self, cluster_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[ClusterDetails], None]] = None) -> ClusterDetails: - deadline = time.time() + timeout.total_seconds() - target_states = (State.TERMINATED, ) - failure_states = (State.ERROR, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(cluster_id=cluster_id) - status = poll.state - status_message = poll.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach TERMINATED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - - def change_owner(self - , cluster_id: str, owner_username: str - ): + + def wait_get_cluster_running( + self, + cluster_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[ClusterDetails], None]] = None, + ) -> ClusterDetails: + deadline = time.time() + timeout.total_seconds() + target_states = (State.RUNNING,) + failure_states = ( + State.ERROR, + State.TERMINATED, + ) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get(cluster_id=cluster_id) + status = poll.state + status_message = poll.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach RUNNING, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def wait_get_cluster_terminated( + self, + cluster_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[ClusterDetails], None]] = None, + ) -> ClusterDetails: + deadline = time.time() + timeout.total_seconds() + target_states = (State.TERMINATED,) + failure_states = (State.ERROR,) + status_message = "polling..." 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(cluster_id=cluster_id) + status = poll.state + status_message = poll.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach TERMINATED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def change_owner(self, cluster_id: str, owner_username: str): """Change cluster owner. - + Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform this operation. The service principal application ID can be supplied as an argument to `owner_username`. - + :param cluster_id: str :param owner_username: str New owner of the cluster_id after this RPC. - - - """ - body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - if owner_username is not None: body['owner_username'] = owner_username - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.1/clusters/change-owner', body=body - - , headers=headers - ) - - - - - def create(self - , spark_version: str - , * - , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, clone_from: Optional[CloneCluster] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: + """ + body = {} + if cluster_id is not None: + body["cluster_id"] = cluster_id + if owner_username is not None: + body["owner_username"] = owner_username + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.1/clusters/change-owner", body=body, headers=headers) + + def create( + self, + spark_version: str, + *, + apply_policy_default_values: Optional[bool] = None, + autoscale: Optional[AutoScale] = None, + autotermination_minutes: Optional[int] = None, + aws_attributes: Optional[AwsAttributes] = None, + azure_attributes: Optional[AzureAttributes] = None, + clone_from: Optional[CloneCluster] = None, + cluster_log_conf: Optional[ClusterLogConf] = None, + cluster_name: 
Optional[str] = None, + custom_tags: Optional[Dict[str, str]] = None, + data_security_mode: Optional[DataSecurityMode] = None, + docker_image: Optional[DockerImage] = None, + driver_instance_pool_id: Optional[str] = None, + driver_node_type_id: Optional[str] = None, + enable_elastic_disk: Optional[bool] = None, + enable_local_disk_encryption: Optional[bool] = None, + gcp_attributes: Optional[GcpAttributes] = None, + init_scripts: Optional[List[InitScriptInfo]] = None, + instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, + node_type_id: Optional[str] = None, + num_workers: Optional[int] = None, + policy_id: Optional[str] = None, + runtime_engine: Optional[RuntimeEngine] = None, + single_user_name: Optional[str] = None, + spark_conf: Optional[Dict[str, str]] = None, + spark_env_vars: Optional[Dict[str, str]] = None, + ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, + workload_type: Optional[WorkloadType] = None, + ) -> Wait[ClusterDetails]: """Create new cluster. - + Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud provider limitations (account limits, spot price, etc.) or transient network issues. - + If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed. Otherwise the cluster will terminate with an informative error message. - + Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out the [create compute UI] and then copying the generated JSON definition from the UI. - + [create compute UI]: https://docs.databricks.com/compute/configure.html - + :param spark_version: str The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. @@ -8694,18 +10336,18 @@ def create(self :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -8714,10 +10356,10 @@ def create(self fully isolated so that they cannot see each other's data and credentials. 
Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -8731,7 +10373,7 @@ def create(self :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -8752,22 +10394,22 @@ def create(self The optional ID of the instance pool to which the cluster belongs. :param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -8777,7 +10419,7 @@ def create(self :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -8787,10 +10429,10 @@ def create(self The ID of the cluster policy used to create the cluster if applicable. :param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. :param single_user_name: str (optional) @@ -8803,11 +10445,11 @@ def create(self An object containing a set of optional, user-specified environment variable key-value pairs. 
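Pulling the parameters above together, a hedged end-to-end sketch that creates a small autoscaling cluster and blocks until it is RUNNING (the DBR version and node type strings are illustrative, not defaults of this SDK):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import AutoScale

w = WorkspaceClient()
details = w.clusters.create_and_wait(
    spark_version="15.4.x-scala2.12",
    node_type_id="i3.xlarge",  # AWS example; pick a type valid in your workspace
    autoscale=AutoScale(min_workers=1, max_workers=3),
    autotermination_minutes=30,
    cluster_name="sdk-example",
)
print(details.cluster_id, details.state)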
Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -8817,125 +10459,242 @@ def create(self specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ body = {} - if apply_policy_default_values is not None: body['apply_policy_default_values'] = apply_policy_default_values - if autoscale is not None: body['autoscale'] = autoscale.as_dict() - if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes - if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() - if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict() - if clone_from is not None: body['clone_from'] = clone_from.as_dict() - if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict() - if cluster_name is not None: body['cluster_name'] = cluster_name - if custom_tags is not None: body['custom_tags'] = custom_tags - if data_security_mode is not None: body['data_security_mode'] = data_security_mode.value - if docker_image is not None: body['docker_image'] = docker_image.as_dict() - if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id - if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id - if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk - if enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = enable_local_disk_encryption - if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() - if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] - if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id - if is_single_node is not None: body['is_single_node'] = is_single_node - if kind is not None: body['kind'] = kind.value - if node_type_id is not None: body['node_type_id'] = node_type_id - if num_workers is not None: body['num_workers'] = num_workers - if policy_id is not None: body['policy_id'] = policy_id - if runtime_engine is not None: body['runtime_engine'] = runtime_engine.value - if single_user_name is not None: body['single_user_name'] = single_user_name - if spark_conf is not None: body['spark_conf'] = spark_conf - if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars - if spark_version is not None: body['spark_version'] = spark_version - if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] - if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime - if 
workload_type is not None: body['workload_type'] = workload_type.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.1/clusters/create', body=body - - , headers=headers - ) - return Wait(self.wait_get_cluster_running - , response = CreateClusterResponse.from_dict(op_response) - , cluster_id=op_response['cluster_id']) - - - def create_and_wait(self - , spark_version: str - , * - , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, clone_from: Optional[CloneCluster] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, - timeout=timedelta(minutes=20)) -> ClusterDetails: - return self.create(apply_policy_default_values=apply_policy_default_values, autoscale=autoscale, autotermination_minutes=autotermination_minutes, aws_attributes=aws_attributes, azure_attributes=azure_attributes, clone_from=clone_from, cluster_log_conf=cluster_log_conf, cluster_name=cluster_name, custom_tags=custom_tags, data_security_mode=data_security_mode, docker_image=docker_image, driver_instance_pool_id=driver_instance_pool_id, driver_node_type_id=driver_node_type_id, enable_elastic_disk=enable_elastic_disk, enable_local_disk_encryption=enable_local_disk_encryption, gcp_attributes=gcp_attributes, init_scripts=init_scripts, instance_pool_id=instance_pool_id, is_single_node=is_single_node, kind=kind, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, runtime_engine=runtime_engine, single_user_name=single_user_name, spark_conf=spark_conf, spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, use_ml_runtime=use_ml_runtime, workload_type=workload_type).result(timeout=timeout) - - - - - def delete(self - , cluster_id: str - ) -> Wait[ClusterDetails]: + if apply_policy_default_values is not None: + body["apply_policy_default_values"] = apply_policy_default_values + if autoscale is not None: + body["autoscale"] = autoscale.as_dict() + if autotermination_minutes is not None: + body["autotermination_minutes"] = autotermination_minutes + if aws_attributes is not None: + body["aws_attributes"] = aws_attributes.as_dict() + if azure_attributes is not None: + body["azure_attributes"] = azure_attributes.as_dict() + if clone_from is not None: + body["clone_from"] = clone_from.as_dict() + if cluster_log_conf is not None: + 
body["cluster_log_conf"] = cluster_log_conf.as_dict() + if cluster_name is not None: + body["cluster_name"] = cluster_name + if custom_tags is not None: + body["custom_tags"] = custom_tags + if data_security_mode is not None: + body["data_security_mode"] = data_security_mode.value + if docker_image is not None: + body["docker_image"] = docker_image.as_dict() + if driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = driver_instance_pool_id + if driver_node_type_id is not None: + body["driver_node_type_id"] = driver_node_type_id + if enable_elastic_disk is not None: + body["enable_elastic_disk"] = enable_elastic_disk + if enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = enable_local_disk_encryption + if gcp_attributes is not None: + body["gcp_attributes"] = gcp_attributes.as_dict() + if init_scripts is not None: + body["init_scripts"] = [v.as_dict() for v in init_scripts] + if instance_pool_id is not None: + body["instance_pool_id"] = instance_pool_id + if is_single_node is not None: + body["is_single_node"] = is_single_node + if kind is not None: + body["kind"] = kind.value + if node_type_id is not None: + body["node_type_id"] = node_type_id + if num_workers is not None: + body["num_workers"] = num_workers + if policy_id is not None: + body["policy_id"] = policy_id + if runtime_engine is not None: + body["runtime_engine"] = runtime_engine.value + if single_user_name is not None: + body["single_user_name"] = single_user_name + if spark_conf is not None: + body["spark_conf"] = spark_conf + if spark_env_vars is not None: + body["spark_env_vars"] = spark_env_vars + if spark_version is not None: + body["spark_version"] = spark_version + if ssh_public_keys is not None: + body["ssh_public_keys"] = [v for v in ssh_public_keys] + if use_ml_runtime is not None: + body["use_ml_runtime"] = use_ml_runtime + if workload_type is not None: + body["workload_type"] = workload_type.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.1/clusters/create", body=body, headers=headers) + return Wait( + self.wait_get_cluster_running, + response=CreateClusterResponse.from_dict(op_response), + cluster_id=op_response["cluster_id"], + ) + + def create_and_wait( + self, + spark_version: str, + *, + apply_policy_default_values: Optional[bool] = None, + autoscale: Optional[AutoScale] = None, + autotermination_minutes: Optional[int] = None, + aws_attributes: Optional[AwsAttributes] = None, + azure_attributes: Optional[AzureAttributes] = None, + clone_from: Optional[CloneCluster] = None, + cluster_log_conf: Optional[ClusterLogConf] = None, + cluster_name: Optional[str] = None, + custom_tags: Optional[Dict[str, str]] = None, + data_security_mode: Optional[DataSecurityMode] = None, + docker_image: Optional[DockerImage] = None, + driver_instance_pool_id: Optional[str] = None, + driver_node_type_id: Optional[str] = None, + enable_elastic_disk: Optional[bool] = None, + enable_local_disk_encryption: Optional[bool] = None, + gcp_attributes: Optional[GcpAttributes] = None, + init_scripts: Optional[List[InitScriptInfo]] = None, + instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, + node_type_id: Optional[str] = None, + num_workers: Optional[int] = None, + policy_id: Optional[str] = None, + runtime_engine: Optional[RuntimeEngine] = None, + single_user_name: Optional[str] = None, + spark_conf: Optional[Dict[str, str]] = None, + 
spark_env_vars: Optional[Dict[str, str]] = None, + ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, + workload_type: Optional[WorkloadType] = None, + timeout=timedelta(minutes=20), + ) -> ClusterDetails: + return self.create( + apply_policy_default_values=apply_policy_default_values, + autoscale=autoscale, + autotermination_minutes=autotermination_minutes, + aws_attributes=aws_attributes, + azure_attributes=azure_attributes, + clone_from=clone_from, + cluster_log_conf=cluster_log_conf, + cluster_name=cluster_name, + custom_tags=custom_tags, + data_security_mode=data_security_mode, + docker_image=docker_image, + driver_instance_pool_id=driver_instance_pool_id, + driver_node_type_id=driver_node_type_id, + enable_elastic_disk=enable_elastic_disk, + enable_local_disk_encryption=enable_local_disk_encryption, + gcp_attributes=gcp_attributes, + init_scripts=init_scripts, + instance_pool_id=instance_pool_id, + is_single_node=is_single_node, + kind=kind, + node_type_id=node_type_id, + num_workers=num_workers, + policy_id=policy_id, + runtime_engine=runtime_engine, + single_user_name=single_user_name, + spark_conf=spark_conf, + spark_env_vars=spark_env_vars, + spark_version=spark_version, + ssh_public_keys=ssh_public_keys, + use_ml_runtime=use_ml_runtime, + workload_type=workload_type, + ).result(timeout=timeout) + + def delete(self, cluster_id: str) -> Wait[ClusterDetails]: """Terminate cluster. - + Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a `TERMINATING` or `TERMINATED` state, nothing will happen. - + :param cluster_id: str The cluster to be terminated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_terminated for more details. 
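Example (not part of this diff): `create()` and `delete()` above both return a `Wait[ClusterDetails]` whose `.result()` blocks until the target state, and the generated `*_and_wait` wrappers collapse the two steps into one call. A minimal sketch, assuming a configured `WorkspaceClient` named `w`; the cluster name, DBR release, and node type are placeholders:

    from datetime import timedelta
    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # create() returns immediately with a waiter; .result() polls until RUNNING.
    info = w.clusters.create(
        cluster_name="sdk-example",        # placeholder
        spark_version="13.3.x-scala2.12",  # placeholder DBR release
        node_type_id="i3.xlarge",          # placeholder node type
        num_workers=1,
        autotermination_minutes=15,
    ).result(timeout=timedelta(minutes=25))

    # delete() follows the same pattern, resolving once the cluster is TERMINATED.
    w.clusters.delete(cluster_id=info.cluster_id).result()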
""" body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.1/clusters/delete', body=body - - , headers=headers - ) - return Wait(self.wait_get_cluster_terminated - , response = DeleteClusterResponse.from_dict(op_response) - , cluster_id=cluster_id) - - - def delete_and_wait(self - , cluster_id: str - , - timeout=timedelta(minutes=20)) -> ClusterDetails: + if cluster_id is not None: + body["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.1/clusters/delete", body=body, headers=headers) + return Wait( + self.wait_get_cluster_terminated, + response=DeleteClusterResponse.from_dict(op_response), + cluster_id=cluster_id, + ) + + def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.delete(cluster_id=cluster_id).result(timeout=timeout) - - - - def edit(self - , cluster_id: str, spark_version: str - , * - , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]: + def edit( + self, + cluster_id: str, + spark_version: str, + *, + apply_policy_default_values: Optional[bool] = None, + autoscale: Optional[AutoScale] = None, + autotermination_minutes: Optional[int] = None, + aws_attributes: Optional[AwsAttributes] = None, + azure_attributes: Optional[AzureAttributes] = None, + cluster_log_conf: Optional[ClusterLogConf] = None, + cluster_name: Optional[str] = None, + custom_tags: Optional[Dict[str, str]] = None, + data_security_mode: Optional[DataSecurityMode] = None, + docker_image: Optional[DockerImage] = None, + driver_instance_pool_id: Optional[str] = None, + driver_node_type_id: Optional[str] = None, + enable_elastic_disk: Optional[bool] = None, + enable_local_disk_encryption: Optional[bool] = None, + gcp_attributes: Optional[GcpAttributes] = None, + init_scripts: Optional[List[InitScriptInfo]] = None, + instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, + node_type_id: Optional[str] = None, + num_workers: Optional[int] = None, + policy_id: Optional[str] = None, + runtime_engine: Optional[RuntimeEngine] = None, + 
single_user_name: Optional[str] = None, + spark_conf: Optional[Dict[str, str]] = None, + spark_env_vars: Optional[Dict[str, str]] = None, + ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, + workload_type: Optional[WorkloadType] = None, + ) -> Wait[ClusterDetails]: """Update cluster configuration. - + Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. - + Clusters created by the Databricks Jobs service cannot be edited. - + :param cluster_id: str ID of the cluster :param spark_version: str @@ -8970,18 +10729,18 @@ def edit(self :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -8990,10 +10749,10 @@ def edit(self fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -9007,7 +10766,7 @@ def edit(self :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -9028,22 +10787,22 @@ def edit(self The optional ID of the instance pool to which the cluster belongs. 
:param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -9053,7 +10812,7 @@ def edit(self :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -9063,10 +10822,10 @@ def edit(self The ID of the cluster policy used to create the cluster if applicable. :param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. :param single_user_name: str (optional) @@ -9079,11 +10838,11 @@ def edit(self An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -9093,78 +10852,177 @@ def edit(self specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
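Example (not part of this diff): a sketch of the edit semantics above. `edit()` reconfigures the cluster to match the full provided spec, and editing a `RUNNING` cluster restarts it, so the waiter matters; this reuses `w` and `info` from the create() sketch earlier and the values are placeholders:

    w.clusters.edit(
        cluster_id=info.cluster_id,
        spark_version="13.3.x-scala2.12",  # spark_version is required on edit too
        node_type_id="i3.xlarge",
        num_workers=2,                     # e.g. grow the cluster from 1 to 2 workers
    ).result()  # blocks until the restarted cluster is RUNNING again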
""" body = {} - if apply_policy_default_values is not None: body['apply_policy_default_values'] = apply_policy_default_values - if autoscale is not None: body['autoscale'] = autoscale.as_dict() - if autotermination_minutes is not None: body['autotermination_minutes'] = autotermination_minutes - if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() - if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict() - if cluster_id is not None: body['cluster_id'] = cluster_id - if cluster_log_conf is not None: body['cluster_log_conf'] = cluster_log_conf.as_dict() - if cluster_name is not None: body['cluster_name'] = cluster_name - if custom_tags is not None: body['custom_tags'] = custom_tags - if data_security_mode is not None: body['data_security_mode'] = data_security_mode.value - if docker_image is not None: body['docker_image'] = docker_image.as_dict() - if driver_instance_pool_id is not None: body['driver_instance_pool_id'] = driver_instance_pool_id - if driver_node_type_id is not None: body['driver_node_type_id'] = driver_node_type_id - if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk - if enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = enable_local_disk_encryption - if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() - if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts] - if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id - if is_single_node is not None: body['is_single_node'] = is_single_node - if kind is not None: body['kind'] = kind.value - if node_type_id is not None: body['node_type_id'] = node_type_id - if num_workers is not None: body['num_workers'] = num_workers - if policy_id is not None: body['policy_id'] = policy_id - if runtime_engine is not None: body['runtime_engine'] = runtime_engine.value - if single_user_name is not None: body['single_user_name'] = single_user_name - if spark_conf is not None: body['spark_conf'] = spark_conf - if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars - if spark_version is not None: body['spark_version'] = spark_version - if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys] - if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime - if workload_type is not None: body['workload_type'] = workload_type.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.1/clusters/edit', body=body - - , headers=headers - ) - return Wait(self.wait_get_cluster_running - , response = EditClusterResponse.from_dict(op_response) - , cluster_id=cluster_id) - - - def edit_and_wait(self - , cluster_id: str, spark_version: str - , * - , apply_policy_default_values: Optional[bool] = None, autoscale: Optional[AutoScale] = None, autotermination_minutes: Optional[int] = None, aws_attributes: Optional[AwsAttributes] = None, azure_attributes: Optional[AzureAttributes] = None, cluster_log_conf: Optional[ClusterLogConf] = None, cluster_name: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, data_security_mode: Optional[DataSecurityMode] = None, docker_image: Optional[DockerImage] = None, driver_instance_pool_id: Optional[str] = None, driver_node_type_id: Optional[str] = None, enable_elastic_disk: Optional[bool] = None, enable_local_disk_encryption: Optional[bool] = None, gcp_attributes: 
Optional[GcpAttributes] = None, init_scripts: Optional[List[InitScriptInfo]] = None, instance_pool_id: Optional[str] = None, is_single_node: Optional[bool] = None, kind: Optional[Kind] = None, node_type_id: Optional[str] = None, num_workers: Optional[int] = None, policy_id: Optional[str] = None, runtime_engine: Optional[RuntimeEngine] = None, single_user_name: Optional[str] = None, spark_conf: Optional[Dict[str,str]] = None, spark_env_vars: Optional[Dict[str,str]] = None, ssh_public_keys: Optional[List[str]] = None, use_ml_runtime: Optional[bool] = None, workload_type: Optional[WorkloadType] = None, - timeout=timedelta(minutes=20)) -> ClusterDetails: - return self.edit(apply_policy_default_values=apply_policy_default_values, autoscale=autoscale, autotermination_minutes=autotermination_minutes, aws_attributes=aws_attributes, azure_attributes=azure_attributes, cluster_id=cluster_id, cluster_log_conf=cluster_log_conf, cluster_name=cluster_name, custom_tags=custom_tags, data_security_mode=data_security_mode, docker_image=docker_image, driver_instance_pool_id=driver_instance_pool_id, driver_node_type_id=driver_node_type_id, enable_elastic_disk=enable_elastic_disk, enable_local_disk_encryption=enable_local_disk_encryption, gcp_attributes=gcp_attributes, init_scripts=init_scripts, instance_pool_id=instance_pool_id, is_single_node=is_single_node, kind=kind, node_type_id=node_type_id, num_workers=num_workers, policy_id=policy_id, runtime_engine=runtime_engine, single_user_name=single_user_name, spark_conf=spark_conf, spark_env_vars=spark_env_vars, spark_version=spark_version, ssh_public_keys=ssh_public_keys, use_ml_runtime=use_ml_runtime, workload_type=workload_type).result(timeout=timeout) - - - - - def events(self - , cluster_id: str - , * - , end_time: Optional[int] = None, event_types: Optional[List[EventType]] = None, limit: Optional[int] = None, offset: Optional[int] = None, order: Optional[GetEventsOrder] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, start_time: Optional[int] = None) -> Iterator[ClusterEvent]: + if apply_policy_default_values is not None: + body["apply_policy_default_values"] = apply_policy_default_values + if autoscale is not None: + body["autoscale"] = autoscale.as_dict() + if autotermination_minutes is not None: + body["autotermination_minutes"] = autotermination_minutes + if aws_attributes is not None: + body["aws_attributes"] = aws_attributes.as_dict() + if azure_attributes is not None: + body["azure_attributes"] = azure_attributes.as_dict() + if cluster_id is not None: + body["cluster_id"] = cluster_id + if cluster_log_conf is not None: + body["cluster_log_conf"] = cluster_log_conf.as_dict() + if cluster_name is not None: + body["cluster_name"] = cluster_name + if custom_tags is not None: + body["custom_tags"] = custom_tags + if data_security_mode is not None: + body["data_security_mode"] = data_security_mode.value + if docker_image is not None: + body["docker_image"] = docker_image.as_dict() + if driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = driver_instance_pool_id + if driver_node_type_id is not None: + body["driver_node_type_id"] = driver_node_type_id + if enable_elastic_disk is not None: + body["enable_elastic_disk"] = enable_elastic_disk + if enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = enable_local_disk_encryption + if gcp_attributes is not None: + body["gcp_attributes"] = gcp_attributes.as_dict() + if init_scripts is not None: + body["init_scripts"] = [v.as_dict() for v 
in init_scripts] + if instance_pool_id is not None: + body["instance_pool_id"] = instance_pool_id + if is_single_node is not None: + body["is_single_node"] = is_single_node + if kind is not None: + body["kind"] = kind.value + if node_type_id is not None: + body["node_type_id"] = node_type_id + if num_workers is not None: + body["num_workers"] = num_workers + if policy_id is not None: + body["policy_id"] = policy_id + if runtime_engine is not None: + body["runtime_engine"] = runtime_engine.value + if single_user_name is not None: + body["single_user_name"] = single_user_name + if spark_conf is not None: + body["spark_conf"] = spark_conf + if spark_env_vars is not None: + body["spark_env_vars"] = spark_env_vars + if spark_version is not None: + body["spark_version"] = spark_version + if ssh_public_keys is not None: + body["ssh_public_keys"] = [v for v in ssh_public_keys] + if use_ml_runtime is not None: + body["use_ml_runtime"] = use_ml_runtime + if workload_type is not None: + body["workload_type"] = workload_type.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.1/clusters/edit", body=body, headers=headers) + return Wait( + self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id + ) + + def edit_and_wait( + self, + cluster_id: str, + spark_version: str, + *, + apply_policy_default_values: Optional[bool] = None, + autoscale: Optional[AutoScale] = None, + autotermination_minutes: Optional[int] = None, + aws_attributes: Optional[AwsAttributes] = None, + azure_attributes: Optional[AzureAttributes] = None, + cluster_log_conf: Optional[ClusterLogConf] = None, + cluster_name: Optional[str] = None, + custom_tags: Optional[Dict[str, str]] = None, + data_security_mode: Optional[DataSecurityMode] = None, + docker_image: Optional[DockerImage] = None, + driver_instance_pool_id: Optional[str] = None, + driver_node_type_id: Optional[str] = None, + enable_elastic_disk: Optional[bool] = None, + enable_local_disk_encryption: Optional[bool] = None, + gcp_attributes: Optional[GcpAttributes] = None, + init_scripts: Optional[List[InitScriptInfo]] = None, + instance_pool_id: Optional[str] = None, + is_single_node: Optional[bool] = None, + kind: Optional[Kind] = None, + node_type_id: Optional[str] = None, + num_workers: Optional[int] = None, + policy_id: Optional[str] = None, + runtime_engine: Optional[RuntimeEngine] = None, + single_user_name: Optional[str] = None, + spark_conf: Optional[Dict[str, str]] = None, + spark_env_vars: Optional[Dict[str, str]] = None, + ssh_public_keys: Optional[List[str]] = None, + use_ml_runtime: Optional[bool] = None, + workload_type: Optional[WorkloadType] = None, + timeout=timedelta(minutes=20), + ) -> ClusterDetails: + return self.edit( + apply_policy_default_values=apply_policy_default_values, + autoscale=autoscale, + autotermination_minutes=autotermination_minutes, + aws_attributes=aws_attributes, + azure_attributes=azure_attributes, + cluster_id=cluster_id, + cluster_log_conf=cluster_log_conf, + cluster_name=cluster_name, + custom_tags=custom_tags, + data_security_mode=data_security_mode, + docker_image=docker_image, + driver_instance_pool_id=driver_instance_pool_id, + driver_node_type_id=driver_node_type_id, + enable_elastic_disk=enable_elastic_disk, + enable_local_disk_encryption=enable_local_disk_encryption, + gcp_attributes=gcp_attributes, + init_scripts=init_scripts, + instance_pool_id=instance_pool_id, + 
is_single_node=is_single_node, + kind=kind, + node_type_id=node_type_id, + num_workers=num_workers, + policy_id=policy_id, + runtime_engine=runtime_engine, + single_user_name=single_user_name, + spark_conf=spark_conf, + spark_env_vars=spark_env_vars, + spark_version=spark_version, + ssh_public_keys=ssh_public_keys, + use_ml_runtime=use_ml_runtime, + workload_type=workload_type, + ).result(timeout=timeout) + + def events( + self, + cluster_id: str, + *, + end_time: Optional[int] = None, + event_types: Optional[List[EventType]] = None, + limit: Optional[int] = None, + offset: Optional[int] = None, + order: Optional[GetEventsOrder] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + start_time: Optional[int] = None, + ) -> Iterator[ClusterEvent]: """List cluster activity events. - + Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more events to read, the response includes all the parameters necessary to request the next page of events. - + :param cluster_id: str The ID of the cluster to retrieve events about. :param end_time: int (optional) @@ -9173,12 +11031,12 @@ def events(self An optional set of event types to filter on. If empty, all event types are returned. :param limit: int (optional) Deprecated: use page_token in combination with page_size instead. - + The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed value is 500. :param offset: int (optional) Deprecated: use page_token in combination with page_size instead. - + The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results are requested in descending order, the end_time field is required. :param order: :class:`GetEventsOrder` (optional) @@ -9193,127 +11051,113 @@ def events(self previous page of events respectively. If page_token is empty, the first page is returned. :param start_time: int (optional) The start time in epoch milliseconds. If empty, returns events starting from the beginning of time. 
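Example (not part of this diff): the generated `events()` shown just below is an iterator that follows `next_page` internally, so callers never touch pagination fields. A sketch reusing `w` and `info` from above; the `EventType` filter values are assumed from this module's enum:

    from databricks.sdk.service.compute import EventType

    # Newest-first by default; page requests happen inside the generator.
    for event in w.clusters.events(
        cluster_id=info.cluster_id,
        event_types=[EventType.RUNNING, EventType.TERMINATING],
    ):
        print(event.timestamp, event.type)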
- + :returns: Iterator over :class:`ClusterEvent` """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - if end_time is not None: body['end_time'] = end_time - if event_types is not None: body['event_types'] = [v.value for v in event_types] - if limit is not None: body['limit'] = limit - if offset is not None: body['offset'] = offset - if order is not None: body['order'] = order.value - if page_size is not None: body['page_size'] = page_size - if page_token is not None: body['page_token'] = page_token - if start_time is not None: body['start_time'] = start_time - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - - - while True: - json = self._api.do('POST','/api/2.1/clusters/events', body=body - - , headers=headers - ) - if 'events' in json: - for v in json['events']: - yield ClusterEvent.from_dict(v) - if 'next_page' not in json or not json['next_page']: - return - body = json['next_page'] - - - - - + if cluster_id is not None: + body["cluster_id"] = cluster_id + if end_time is not None: + body["end_time"] = end_time + if event_types is not None: + body["event_types"] = [v.value for v in event_types] + if limit is not None: + body["limit"] = limit + if offset is not None: + body["offset"] = offset + if order is not None: + body["order"] = order.value + if page_size is not None: + body["page_size"] = page_size + if page_token is not None: + body["page_token"] = page_token + if start_time is not None: + body["start_time"] = start_time + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - def get(self - , cluster_id: str - ) -> ClusterDetails: + while True: + json = self._api.do("POST", "/api/2.1/clusters/events", body=body, headers=headers) + if "events" in json: + for v in json["events"]: + yield ClusterEvent.from_dict(v) + if "next_page" not in json or not json["next_page"]: + return + body = json["next_page"] + + def get(self, cluster_id: str) -> ClusterDetails: """Get cluster info. - + Retrieves the information for a cluster given its identifier. Clusters can be described while they are running, or up to 60 days after they are terminated. - + :param cluster_id: str The cluster about which to retrieve information. - + :returns: :class:`ClusterDetails` """ - + query = {} - if cluster_id is not None: query['cluster_id'] = cluster_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/clusters/get', query=query - - , headers=headers - ) - return ClusterDetails.from_dict(res) + if cluster_id is not None: + query["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.1/clusters/get", query=query, headers=headers) + return ClusterDetails.from_dict(res) - def get_permission_levels(self - , cluster_id: str - ) -> GetClusterPermissionLevelsResponse: + def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse: """Get cluster permission levels. - + Gets the permission levels that a user can have on an object. - + :param cluster_id: str The cluster for which to get or manage permissions. 
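Example (not part of this diff): pairing `get()` with `get_permission_levels()`. A sketch assuming the response's `permission_levels` entries expose `permission_level` and `description`, per this module's dataclasses:

    detail = w.clusters.get(cluster_id=info.cluster_id)
    levels = w.clusters.get_permission_levels(cluster_id=info.cluster_id)
    print(detail.state)
    for d in levels.permission_levels or []:
        print(d.permission_level, d.description)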
- + :returns: :class:`GetClusterPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/clusters/{cluster_id}/permissionLevels' - - , headers=headers - ) - return GetClusterPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/clusters/{cluster_id}/permissionLevels", headers=headers) + return GetClusterPermissionLevelsResponse.from_dict(res) - def get_permissions(self - , cluster_id: str - ) -> ClusterPermissions: + def get_permissions(self, cluster_id: str) -> ClusterPermissions: """Get cluster permissions. - + Gets the permissions of a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. - + :returns: :class:`ClusterPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/clusters/{cluster_id}' - - , headers=headers - ) - return ClusterPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/clusters/{cluster_id}", headers=headers) + return ClusterPermissions.from_dict(res) - def list(self - - , * - , filter_by: Optional[ListClustersFilterBy] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, sort_by: Optional[ListClustersSortBy] = None) -> Iterator[ClusterDetails]: + def list( + self, + *, + filter_by: Optional[ListClustersFilterBy] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sort_by: Optional[ListClustersSortBy] = None, + ) -> Iterator[ClusterDetails]: """List clusters. - + Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. - + :param filter_by: :class:`ListClustersFilterBy` (optional) Filters to apply to the list of clusters. :param page_size: int (optional) @@ -9324,140 +11168,115 @@ def list(self previous page of clusters respectively. :param sort_by: :class:`ListClustersSortBy` (optional) Sort the list of clusters by a specific criteria. 
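Example (not part of this diff): a sketch of the paginated listing described above. The iterator follows `next_page_token` on its own; `ListClustersFilterBy` taking a `cluster_states` list is assumed from this module's dataclasses:

    from databricks.sdk.service.compute import ListClustersFilterBy, State

    # Only RUNNING clusters; without filter_by, clusters terminated within the
    # last 30 days are included as well.
    running = list(w.clusters.list(filter_by=ListClustersFilterBy(cluster_states=[State.RUNNING])))
    print(f"{len(running)} running clusters")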
- + :returns: Iterator over :class:`ClusterDetails` """ - + query = {} - if filter_by is not None: query['filter_by'] = filter_by.as_dict() - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - if sort_by is not None: query['sort_by'] = sort_by.as_dict() - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.1/clusters/list', query=query - - , headers=headers - ) - if 'clusters' in json: - for v in json['clusters']: - yield ClusterDetails.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - + if filter_by is not None: + query["filter_by"] = filter_by.as_dict() + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if sort_by is not None: + query["sort_by"] = sort_by.as_dict() + headers = { + "Accept": "application/json", + } - - - + while True: + json = self._api.do("GET", "/api/2.1/clusters/list", query=query, headers=headers) + if "clusters" in json: + for v in json["clusters"]: + yield ClusterDetails.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] def list_node_types(self) -> ListNodeTypesResponse: """List node types. - + Returns a list of supported Spark node types. These node types can be used to launch a cluster. - + :returns: :class:`ListNodeTypesResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/clusters/list-node-types' - , headers=headers - ) - return ListNodeTypesResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.1/clusters/list-node-types", headers=headers) + return ListNodeTypesResponse.from_dict(res) def list_zones(self) -> ListAvailableZonesResponse: """List availability zones. - + Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These zones can be used to launch a cluster. - + :returns: :class:`ListAvailableZonesResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/clusters/list-zones' - , headers=headers - ) - return ListAvailableZonesResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.1/clusters/list-zones", headers=headers) + return ListAvailableZonesResponse.from_dict(res) - def permanent_delete(self - , cluster_id: str - ): + def permanent_delete(self, cluster_id: str): """Permanently delete cluster. - + Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. - + In addition, users will no longer see permanently deleted clusters in the cluster list, and API users can no longer perform any action on permanently deleted clusters. - + :param cluster_id: str The cluster to be deleted. 
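Example (not part of this diff): terminate versus permanently delete, per the docstrings above. The guard flag is purely illustrative, since permanent deletion cannot be undone:

    # Terminate only: the cluster stays listed for 30 days and can be started again.
    w.clusters.delete(cluster_id=info.cluster_id).result()

    REALLY_DELETE = False  # illustrative safety switch
    if REALLY_DELETE:
        # Removes the cluster entirely; it disappears from listings and the API.
        w.clusters.permanent_delete(cluster_id=info.cluster_id)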
- - + + """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.1/clusters/permanent-delete', body=body - - , headers=headers - ) - + if cluster_id is not None: + body["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.1/clusters/permanent-delete", body=body, headers=headers) - def pin(self - , cluster_id: str - ): + def pin(self, cluster_id: str): """Pin cluster. - + Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.1/clusters/pin', body=body - - , headers=headers - ) - + if cluster_id is not None: + body["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.1/clusters/pin", body=body, headers=headers) - def resize(self - , cluster_id: str - , * - , autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None) -> Wait[ClusterDetails]: + def resize( + self, cluster_id: str, *, autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None + ) -> Wait[ClusterDetails]: """Resize cluster. - + Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a `RUNNING` state. - + :param cluster_id: str The cluster to be resized. :param autoscale: :class:`AutoScale` (optional) @@ -9466,204 +11285,176 @@ def resize(self :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
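Example (not part of this diff): the two resize shapes described above, a fixed worker count or an autoscaling range; the numbers are arbitrary:

    from databricks.sdk.service.compute import AutoScale

    # Fixed size: one driver plus exactly four workers once the resize completes.
    w.clusters.resize(cluster_id=info.cluster_id, num_workers=4).result()

    # Or delegate to the autoscaler within a range.
    w.clusters.resize(
        cluster_id=info.cluster_id,
        autoscale=AutoScale(min_workers=2, max_workers=8),
    ).result()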
""" body = {} - if autoscale is not None: body['autoscale'] = autoscale.as_dict() - if cluster_id is not None: body['cluster_id'] = cluster_id - if num_workers is not None: body['num_workers'] = num_workers - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.1/clusters/resize', body=body - - , headers=headers - ) - return Wait(self.wait_get_cluster_running - , response = ResizeClusterResponse.from_dict(op_response) - , cluster_id=cluster_id) - - - def resize_and_wait(self - , cluster_id: str - , * - , autoscale: Optional[AutoScale] = None, num_workers: Optional[int] = None, - timeout=timedelta(minutes=20)) -> ClusterDetails: + if autoscale is not None: + body["autoscale"] = autoscale.as_dict() + if cluster_id is not None: + body["cluster_id"] = cluster_id + if num_workers is not None: + body["num_workers"] = num_workers + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.1/clusters/resize", body=body, headers=headers) + return Wait( + self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id + ) + + def resize_and_wait( + self, + cluster_id: str, + *, + autoscale: Optional[AutoScale] = None, + num_workers: Optional[int] = None, + timeout=timedelta(minutes=20), + ) -> ClusterDetails: return self.resize(autoscale=autoscale, cluster_id=cluster_id, num_workers=num_workers).result(timeout=timeout) - - - - def restart(self - , cluster_id: str - , * - , restart_user: Optional[str] = None) -> Wait[ClusterDetails]: + def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wait[ClusterDetails]: """Restart cluster. - + Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state, nothing will happen. - + :param cluster_id: str The cluster to be started. :param restart_user: str (optional) - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. 
""" body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - if restart_user is not None: body['restart_user'] = restart_user - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.1/clusters/restart', body=body - - , headers=headers - ) - return Wait(self.wait_get_cluster_running - , response = RestartClusterResponse.from_dict(op_response) - , cluster_id=cluster_id) - - - def restart_and_wait(self - , cluster_id: str - , * - , restart_user: Optional[str] = None, - timeout=timedelta(minutes=20)) -> ClusterDetails: + if cluster_id is not None: + body["cluster_id"] = cluster_id + if restart_user is not None: + body["restart_user"] = restart_user + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.1/clusters/restart", body=body, headers=headers) + return Wait( + self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id + ) + + def restart_and_wait( + self, cluster_id: str, *, restart_user: Optional[str] = None, timeout=timedelta(minutes=20) + ) -> ClusterDetails: return self.restart(cluster_id=cluster_id, restart_user=restart_user).result(timeout=timeout) - - - - def set_permissions(self - , cluster_id: str - , * - , access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: + def set_permissions( + self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None + ) -> ClusterPermissions: """Set cluster permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/clusters/{cluster_id}', body=body - - , headers=headers - ) - return ClusterPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", f"/api/2.0/permissions/clusters/{cluster_id}", body=body, headers=headers) + return ClusterPermissions.from_dict(res) def spark_versions(self) -> GetSparkVersionsResponse: """List available Spark versions. - + Returns the list of available Spark versions. These versions can be used to launch a cluster. - + :returns: :class:`GetSparkVersionsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/clusters/spark-versions' - , headers=headers - ) - return GetSparkVersionsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.1/clusters/spark-versions", headers=headers) + return GetSparkVersionsResponse.from_dict(res) - def start(self - , cluster_id: str - ) -> Wait[ClusterDetails]: + def start(self, cluster_id: str) -> Wait[ClusterDetails]: """Start terminated cluster. 
- + Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster starts with the last specified cluster size. - If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job cannot be started. - + :param cluster_id: str The cluster to be started. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.1/clusters/start', body=body - - , headers=headers - ) - return Wait(self.wait_get_cluster_running - , response = StartClusterResponse.from_dict(op_response) - , cluster_id=cluster_id) + if cluster_id is not None: + body["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - def start_and_wait(self - , cluster_id: str - , - timeout=timedelta(minutes=20)) -> ClusterDetails: + op_response = self._api.do("POST", "/api/2.1/clusters/start", body=body, headers=headers) + return Wait( + self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id + ) + + def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails: return self.start(cluster_id=cluster_id).result(timeout=timeout) - - - - def unpin(self - , cluster_id: str - ): + def unpin(self, cluster_id: str): """Unpin cluster. - + Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.1/clusters/unpin', body=body - - , headers=headers - ) - + if cluster_id is not None: + body["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.1/clusters/unpin", body=body, headers=headers) - def update(self - , cluster_id: str, update_mask: str - , * - , cluster: Optional[UpdateClusterResource] = None) -> Wait[ClusterDetails]: + def update( + self, cluster_id: str, update_mask: str, *, cluster: Optional[UpdateClusterResource] = None + ) -> Wait[ClusterDetails]: """Update cluster configuration (partial). - + Updates the configuration of a cluster to match the partial set of attributes and size. Denote which fields to update using the `update_mask` field in the request body. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be @@ -9672,363 +11463,372 @@ def update(self is started using the `clusters/start` API. Attempts to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be updated. - + :param cluster_id: str ID of the cluster. :param update_mask: str Used to specify which cluster attributes and size fields to update. 
See https://google.aip.dev/161 for more details. - + The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. :param cluster: :class:`UpdateClusterResource` (optional) The cluster to be updated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. """ body = {} - if cluster is not None: body['cluster'] = cluster.as_dict() - if cluster_id is not None: body['cluster_id'] = cluster_id - if update_mask is not None: body['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.1/clusters/update', body=body - - , headers=headers - ) - return Wait(self.wait_get_cluster_running - , response = UpdateClusterResponse.from_dict(op_response) - , cluster_id=cluster_id) - - - def update_and_wait(self - , cluster_id: str, update_mask: str - , * - , cluster: Optional[UpdateClusterResource] = None, - timeout=timedelta(minutes=20)) -> ClusterDetails: + if cluster is not None: + body["cluster"] = cluster.as_dict() + if cluster_id is not None: + body["cluster_id"] = cluster_id + if update_mask is not None: + body["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.1/clusters/update", body=body, headers=headers) + return Wait( + self.wait_get_cluster_running, response=UpdateClusterResponse.from_dict(op_response), cluster_id=cluster_id + ) + + def update_and_wait( + self, + cluster_id: str, + update_mask: str, + *, + cluster: Optional[UpdateClusterResource] = None, + timeout=timedelta(minutes=20), + ) -> ClusterDetails: return self.update(cluster=cluster, cluster_id=cluster_id, update_mask=update_mask).result(timeout=timeout) - - - - def update_permissions(self - , cluster_id: str - , * - , access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions: + def update_permissions( + self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None + ) -> ClusterPermissions: """Update cluster permissions. - + Updates the permissions on a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. 
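Example (not part of this diff): a partial update that names exactly one field in `update_mask`, leaving every other attribute untouched; `UpdateClusterResource` carrying an `autotermination_minutes` field is assumed from this module:

    from databricks.sdk.service.compute import UpdateClusterResource

    # Only the masked field is applied; per the docstring above, a RUNNING
    # cluster is restarted so the new value takes effect.
    w.clusters.update(
        cluster_id=info.cluster_id,
        update_mask="autotermination_minutes",
        cluster=UpdateClusterResource(autotermination_minutes=60),
    ).result()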
:param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/clusters/{cluster_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/permissions/clusters/{cluster_id}", body=body, headers=headers) return ClusterPermissions.from_dict(res) - - + class CommandExecutionAPI: """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API only supports (classic) all-purpose clusters. Serverless compute is not supported.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_command_status_command_execution_cancelled(self, cluster_id: str, command_id: str, context_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.CANCELLED, ) - failure_states = (CommandStatus.ERROR, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if poll.results: - status_message = poll.results.cause - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Cancelled, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_context_status_command_execution_running(self, cluster_id: str, context_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (ContextStatus.RUNNING, ) - failure_states = (ContextStatus.ERROR, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.context_status(cluster_id=cluster_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Running, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_command_status_command_execution_finished_or_error(self, cluster_id: str, command_id: str, context_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, ) - failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach Finished or Error, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - - def cancel(self - - , * - , cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None) -> Wait[CommandStatusResponse]: + + def wait_command_status_command_execution_cancelled( + self, + cluster_id: str, + command_id: str, + context_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[CommandStatusResponse], None]] = None, + ) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (CommandStatus.CANCELLED,) + failure_states = (CommandStatus.ERROR,) + status_message = "polling..." 
+ attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f"current status: {status}" + if poll.results: + status_message = poll.results.cause + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach Cancelled, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def wait_context_status_command_execution_running( + self, + cluster_id: str, + context_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[ContextStatusResponse], None]] = None, + ) -> ContextStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (ContextStatus.RUNNING,) + failure_states = (ContextStatus.ERROR,) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.context_status(cluster_id=cluster_id, context_id=context_id) + status = poll.status + status_message = f"current status: {status}" + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach Running, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def wait_command_status_command_execution_finished_or_error( + self, + cluster_id: str, + command_id: str, + context_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[CommandStatusResponse], None]] = None, + ) -> CommandStatusResponse: + deadline = time.time() + timeout.total_seconds() + target_states = ( + CommandStatus.FINISHED, + CommandStatus.ERROR, + ) + failure_states = ( + CommandStatus.CANCELLED, + CommandStatus.CANCELLING, + ) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id) + status = poll.status + status_message = f"current status: {status}" + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach Finished or Error, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def cancel( + self, *, cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None + ) -> Wait[CommandStatusResponse]: """Cancel a command. - + Cancels a currently running command within an execution context. 
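The three waiters above share one polling loop: a linearly growing sleep capped at 10 seconds, plus up to a second of jitter per attempt. A sketch of observing that loop through the optional `callback` hook, assuming a configured `WorkspaceClient` named `w`, a hypothetical running cluster, and that `Wait.result` forwards `callback` to the waiter:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()

    def on_poll(status: compute.ContextStatusResponse) -> None:
        # Invoked once per poll until the context reaches RUNNING.
        print(f"context is {status.status}")

    ctx = w.command_execution.create(
        cluster_id="1234-567890-abcde123",  # hypothetical cluster ID
        language=compute.Language.PYTHON,
    ).result(callback=on_poll)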
- + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str (optional) :param command_id: str (optional) :param context_id: str (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_cancelled for more details. """ body = {} - if cluster_id is not None: body['clusterId'] = cluster_id - if command_id is not None: body['commandId'] = command_id - if context_id is not None: body['contextId'] = context_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/1.2/commands/cancel', body=body - - , headers=headers - ) - return Wait(self.wait_command_status_command_execution_cancelled - , response = CancelResponse.from_dict(op_response) - , cluster_id=cluster_id, command_id=command_id, context_id=context_id) - - - def cancel_and_wait(self - - , * - , cluster_id: Optional[str] = None, command_id: Optional[str] = None, context_id: Optional[str] = None, - timeout=timedelta(minutes=20)) -> CommandStatusResponse: + if cluster_id is not None: + body["clusterId"] = cluster_id + if command_id is not None: + body["commandId"] = command_id + if context_id is not None: + body["contextId"] = context_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/1.2/commands/cancel", body=body, headers=headers) + return Wait( + self.wait_command_status_command_execution_cancelled, + response=CancelResponse.from_dict(op_response), + cluster_id=cluster_id, + command_id=command_id, + context_id=context_id, + ) + + def cancel_and_wait( + self, + *, + cluster_id: Optional[str] = None, + command_id: Optional[str] = None, + context_id: Optional[str] = None, + timeout=timedelta(minutes=20), + ) -> CommandStatusResponse: return self.cancel(cluster_id=cluster_id, command_id=command_id, context_id=context_id).result(timeout=timeout) - - - - def command_status(self - , cluster_id: str, context_id: str, command_id: str - ) -> CommandStatusResponse: + def command_status(self, cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse: """Get command info. - + Gets the status of and, if available, the results from a currently executing command. - + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str :param context_id: str :param command_id: str - + :returns: :class:`CommandStatusResponse` """ - + query = {} - if cluster_id is not None: query['clusterId'] = cluster_id - if command_id is not None: query['commandId'] = command_id - if context_id is not None: query['contextId'] = context_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/1.2/commands/status', query=query - - , headers=headers - ) + if cluster_id is not None: + query["clusterId"] = cluster_id + if command_id is not None: + query["commandId"] = command_id + if context_id is not None: + query["contextId"] = context_id + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/1.2/commands/status", query=query, headers=headers) return CommandStatusResponse.from_dict(res) - - - - - def context_status(self - , cluster_id: str, context_id: str - ) -> ContextStatusResponse: + def context_status(self, cluster_id: str, context_id: str) -> ContextStatusResponse: """Get status. - + Gets the status for an execution context. 
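For a one-off check, the status endpoint can also be called directly instead of going through a waiter. A sketch, assuming `w` is a configured `WorkspaceClient` and both IDs were returned by earlier `create`/`execute` calls:

    # Both IDs are placeholders obtained from prior create()/execute() calls.
    status = w.command_execution.command_status(
        cluster_id="1234-567890-abcde123",
        context_id="ctx-placeholder",
        command_id="cmd-placeholder",
    )
    print(status.status)
    if status.results is not None:
        print(status.results.data)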
- + :param cluster_id: str :param context_id: str - + :returns: :class:`ContextStatusResponse` """ - + query = {} - if cluster_id is not None: query['clusterId'] = cluster_id - if context_id is not None: query['contextId'] = context_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/1.2/contexts/status', query=query - - , headers=headers - ) + if cluster_id is not None: + query["clusterId"] = cluster_id + if context_id is not None: + query["contextId"] = context_id + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/1.2/contexts/status", query=query, headers=headers) return ContextStatusResponse.from_dict(res) - - - - - def create(self - - , * - , cluster_id: Optional[str] = None, language: Optional[Language] = None) -> Wait[ContextStatusResponse]: + def create( + self, *, cluster_id: Optional[str] = None, language: Optional[Language] = None + ) -> Wait[ContextStatusResponse]: """Create an execution context. - + Creates an execution context for running cluster commands. - + If successful, this method returns the ID of the new execution context. - + :param cluster_id: str (optional) Running cluster id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`ContextStatusResponse`. See :method:wait_context_status_command_execution_running for more details. """ body = {} - if cluster_id is not None: body['clusterId'] = cluster_id - if language is not None: body['language'] = language.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/1.2/contexts/create', body=body - - , headers=headers - ) - return Wait(self.wait_context_status_command_execution_running - , response = Created.from_dict(op_response) - , cluster_id=cluster_id, context_id=op_response['id']) - - - def create_and_wait(self - - , * - , cluster_id: Optional[str] = None, language: Optional[Language] = None, - timeout=timedelta(minutes=20)) -> ContextStatusResponse: + if cluster_id is not None: + body["clusterId"] = cluster_id + if language is not None: + body["language"] = language.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/1.2/contexts/create", body=body, headers=headers) + return Wait( + self.wait_context_status_command_execution_running, + response=Created.from_dict(op_response), + cluster_id=cluster_id, + context_id=op_response["id"], + ) + + def create_and_wait( + self, *, cluster_id: Optional[str] = None, language: Optional[Language] = None, timeout=timedelta(minutes=20) + ) -> ContextStatusResponse: return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout) - - - - def destroy(self - , cluster_id: str, context_id: str - ): + def destroy(self, cluster_id: str, context_id: str): """Delete an execution context. - + Deletes an execution context. 
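Putting the context lifecycle together with `execute` (defined just below in this hunk), a minimal end-to-end sketch; the cluster ID is hypothetical and the cluster must be running:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    cluster_id = "1234-567890-abcde123"  # hypothetical running cluster

    # Create a context, run one command in it, then clean the context up.
    ctx = w.command_execution.create_and_wait(cluster_id=cluster_id, language=compute.Language.PYTHON)
    try:
        res = w.command_execution.execute_and_wait(
            cluster_id=cluster_id,
            context_id=ctx.id,
            language=compute.Language.PYTHON,
            command="print(40 + 2)",
        )
        if res.results is not None:
            print(res.results.data)
    finally:
        w.command_execution.destroy(cluster_id, ctx.id)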
- + :param cluster_id: str :param context_id: str - - - """ - body = {} - if cluster_id is not None: body['clusterId'] = cluster_id - if context_id is not None: body['contextId'] = context_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/1.2/contexts/destroy', body=body - - , headers=headers - ) - - - - - def execute(self - - , * - , cluster_id: Optional[str] = None, command: Optional[str] = None, context_id: Optional[str] = None, language: Optional[Language] = None) -> Wait[CommandStatusResponse]: + """ + body = {} + if cluster_id is not None: + body["clusterId"] = cluster_id + if context_id is not None: + body["contextId"] = context_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/1.2/contexts/destroy", body=body, headers=headers) + + def execute( + self, + *, + cluster_id: Optional[str] = None, + command: Optional[str] = None, + context_id: Optional[str] = None, + language: Optional[Language] = None, + ) -> Wait[CommandStatusResponse]: """Run a command. - + Runs a cluster command in the given execution context, using the provided language. - + If successful, it returns an ID for tracking the status of the command's execution. - + :param cluster_id: str (optional) Running cluster id :param command: str (optional) @@ -10036,63 +11836,67 @@ def execute(self :param context_id: str (optional) Running context id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_finished_or_error for more details. """ body = {} - if cluster_id is not None: body['clusterId'] = cluster_id - if command is not None: body['command'] = command - if context_id is not None: body['contextId'] = context_id - if language is not None: body['language'] = language.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/1.2/commands/execute', body=body - - , headers=headers - ) - return Wait(self.wait_command_status_command_execution_finished_or_error - , response = Created.from_dict(op_response) - , cluster_id=cluster_id, command_id=op_response['id'], context_id=context_id) + if cluster_id is not None: + body["clusterId"] = cluster_id + if command is not None: + body["command"] = command + if context_id is not None: + body["contextId"] = context_id + if language is not None: + body["language"] = language.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/1.2/commands/execute", body=body, headers=headers) + return Wait( + self.wait_command_status_command_execution_finished_or_error, + response=Created.from_dict(op_response), + cluster_id=cluster_id, + command_id=op_response["id"], + context_id=context_id, + ) + + def execute_and_wait( + self, + *, + cluster_id: Optional[str] = None, + command: Optional[str] = None, + context_id: Optional[str] = None, + language: Optional[Language] = None, + timeout=timedelta(minutes=20), + ) -> CommandStatusResponse: + return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, language=language).result( + timeout=timeout + ) + - - def execute_and_wait(self - - , * - , cluster_id: Optional[str] = None, command: Optional[str] = None, context_id: Optional[str] = None, language: Optional[Language] = None, - timeout=timedelta(minutes=20)) -> 
CommandStatusResponse: - return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, language=language).result(timeout=timeout) - - class GlobalInitScriptsAPI: """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace. These scripts run on every node in every cluster in the workspace. - + **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts. Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark container fails to launch and init scripts with later position are skipped. If enough containers fail, the entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str, script: str - , * - , enabled: Optional[bool] = None, position: Optional[int] = None) -> CreateResponse: + def create( + self, name: str, script: str, *, enabled: Optional[bool] = None, position: Optional[int] = None + ) -> CreateResponse: """Create init script. - + Creates a new global init script in this workspace. - + :param name: str The name of the script :param script: str @@ -10102,115 +11906,94 @@ def create(self :param position: int (optional) The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. - + If you omit the numeric position for a new global init script, it defaults to last position. It will run after all current scripts. Setting any value greater than the position of the last script is equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2. Any position of (3) or greater puts the script in the last position. If an explicit position value conflicts with an existing script value, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1. - + :returns: :class:`CreateResponse` """ body = {} - if enabled is not None: body['enabled'] = enabled - if name is not None: body['name'] = name - if position is not None: body['position'] = position - if script is not None: body['script'] = script - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/global-init-scripts', body=body - - , headers=headers - ) + if enabled is not None: + body["enabled"] = enabled + if name is not None: + body["name"] = name + if position is not None: + body["position"] = position + if script is not None: + body["script"] = script + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/global-init-scripts", body=body, headers=headers) return CreateResponse.from_dict(res) - - - - - def delete(self - , script_id: str - ): + def delete(self, script_id: str): """Delete init script. - + Deletes a global init script. - + :param script_id: str The ID of the global init script. 
- - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/global-init-scripts/{script_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , script_id: str - ) -> GlobalInitScriptDetailsWithContent: + self._api.do("DELETE", f"/api/2.0/global-init-scripts/{script_id}", headers=headers) + + def get(self, script_id: str) -> GlobalInitScriptDetailsWithContent: """Get an init script. - + Gets all the details of a script, including its Base64-encoded contents. - + :param script_id: str The ID of the global init script. - + :returns: :class:`GlobalInitScriptDetailsWithContent` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/global-init-scripts/{script_id}' - - , headers=headers - ) - return GlobalInitScriptDetailsWithContent.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/global-init-scripts/{script_id}", headers=headers) + return GlobalInitScriptDetailsWithContent.from_dict(res) def list(self) -> Iterator[GlobalInitScriptDetails]: """Get init scripts. - + Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. To retrieve the contents of a script, use the [get a global init script](:method:globalinitscripts/get) operation. - + :returns: Iterator over :class:`GlobalInitScriptDetails` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/global-init-scripts' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/global-init-scripts", headers=headers) parsed = ListGlobalInitScriptsResponse.from_dict(json).scripts return parsed if parsed is not None else [] - - - - - - def update(self - , script_id: str, name: str, script: str - , * - , enabled: Optional[bool] = None, position: Optional[int] = None): + def update( + self, script_id: str, name: str, script: str, *, enabled: Optional[bool] = None, position: Optional[int] = None + ): """Update init script. - + Updates a global init script, specifying only the fields to change. All fields are optional. Unspecified fields retain their current value. - + :param script_id: str The ID of the global init script. :param name: str @@ -10222,66 +12005,73 @@ def update(self :param position: int (optional) The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. To move the script to run first, set its position to 0. - + To move the script to the end, set its position to any value greater or equal to the position of the last script. Example, three existing scripts with positions 0, 1, and 2. Any position value of 2 or greater puts the script in the last position (2). - + If an explicit position value conflicts with an existing script, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1. 
- - + + """ body = {} - if enabled is not None: body['enabled'] = enabled - if name is not None: body['name'] = name - if position is not None: body['position'] = position - if script is not None: body['script'] = script - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/global-init-scripts/{script_id}', body=body - - , headers=headers - ) - + if enabled is not None: + body["enabled"] = enabled + if name is not None: + body["name"] = name + if position is not None: + body["position"] = position + if script is not None: + body["script"] = script + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/global-init-scripts/{script_id}", body=body, headers=headers) + - - class InstancePoolsAPI: """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times. - + Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle instances. If the pool has no idle instances, the pool expands by allocating a new instance from the instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that pool’s idle instances. - + You can specify a different pool for the driver node and worker nodes, or use the same pool for both. - + Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does apply. See pricing.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , instance_pool_name: str, node_type_id: str - , * - , aws_attributes: Optional[InstancePoolAwsAttributes] = None, azure_attributes: Optional[InstancePoolAzureAttributes] = None, custom_tags: Optional[Dict[str,str]] = None, disk_spec: Optional[DiskSpec] = None, enable_elastic_disk: Optional[bool] = None, gcp_attributes: Optional[InstancePoolGcpAttributes] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None, preloaded_docker_images: Optional[List[DockerImage]] = None, preloaded_spark_versions: Optional[List[str]] = None) -> CreateInstancePoolResponse: + def create( + self, + instance_pool_name: str, + node_type_id: str, + *, + aws_attributes: Optional[InstancePoolAwsAttributes] = None, + azure_attributes: Optional[InstancePoolAzureAttributes] = None, + custom_tags: Optional[Dict[str, str]] = None, + disk_spec: Optional[DiskSpec] = None, + enable_elastic_disk: Optional[bool] = None, + gcp_attributes: Optional[InstancePoolGcpAttributes] = None, + idle_instance_autotermination_minutes: Optional[int] = None, + max_capacity: Optional[int] = None, + min_idle_instances: Optional[int] = None, + preloaded_docker_images: Optional[List[DockerImage]] = None, + preloaded_spark_versions: Optional[List[str]] = None, + ) -> CreateInstancePoolResponse: """Create a new instance pool. - + Creates a new instance pool using idle and ready-to-use cloud instances. - + :param instance_pool_name: str Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 characters. 
@@ -10299,7 +12089,7 @@ def create(self :param custom_tags: Dict[str,str] (optional) Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags :param disk_spec: :class:`DiskSpec` (optional) Defines the specification of the disks that will be attached to all spark containers. @@ -10328,69 +12118,79 @@ def create(self A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters started with the preloaded Spark version will start faster. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. - + :returns: :class:`CreateInstancePoolResponse` """ body = {} - if aws_attributes is not None: body['aws_attributes'] = aws_attributes.as_dict() - if azure_attributes is not None: body['azure_attributes'] = azure_attributes.as_dict() - if custom_tags is not None: body['custom_tags'] = custom_tags - if disk_spec is not None: body['disk_spec'] = disk_spec.as_dict() - if enable_elastic_disk is not None: body['enable_elastic_disk'] = enable_elastic_disk - if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict() - if idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes - if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name - if max_capacity is not None: body['max_capacity'] = max_capacity - if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances - if node_type_id is not None: body['node_type_id'] = node_type_id - if preloaded_docker_images is not None: body['preloaded_docker_images'] = [v.as_dict() for v in preloaded_docker_images] - if preloaded_spark_versions is not None: body['preloaded_spark_versions'] = [v for v in preloaded_spark_versions] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/instance-pools/create', body=body - - , headers=headers - ) + if aws_attributes is not None: + body["aws_attributes"] = aws_attributes.as_dict() + if azure_attributes is not None: + body["azure_attributes"] = azure_attributes.as_dict() + if custom_tags is not None: + body["custom_tags"] = custom_tags + if disk_spec is not None: + body["disk_spec"] = disk_spec.as_dict() + if enable_elastic_disk is not None: + body["enable_elastic_disk"] = enable_elastic_disk + if gcp_attributes is not None: + body["gcp_attributes"] = gcp_attributes.as_dict() + if idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes + if instance_pool_name is not None: + body["instance_pool_name"] = instance_pool_name + if max_capacity is not None: + body["max_capacity"] = max_capacity + if min_idle_instances is not None: + body["min_idle_instances"] = min_idle_instances + if node_type_id is not None: + body["node_type_id"] = node_type_id + if preloaded_docker_images is not None: + body["preloaded_docker_images"] = [v.as_dict() for v in preloaded_docker_images] + if preloaded_spark_versions is not None: + body["preloaded_spark_versions"] = [v for v in preloaded_spark_versions] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/instance-pools/create", body=body, headers=headers) return 
CreateInstancePoolResponse.from_dict(res) - - - - - def delete(self - , instance_pool_id: str - ): + def delete(self, instance_pool_id: str): """Delete an instance pool. - + Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously. - + :param instance_pool_id: str The instance pool to be terminated. - - - """ - body = {} - if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/instance-pools/delete', body=body - - , headers=headers - ) - - - - - def edit(self - , instance_pool_id: str, instance_pool_name: str, node_type_id: str - , * - , custom_tags: Optional[Dict[str,str]] = None, idle_instance_autotermination_minutes: Optional[int] = None, max_capacity: Optional[int] = None, min_idle_instances: Optional[int] = None): + """ + body = {} + if instance_pool_id is not None: + body["instance_pool_id"] = instance_pool_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/instance-pools/delete", body=body, headers=headers) + + def edit( + self, + instance_pool_id: str, + instance_pool_name: str, + node_type_id: str, + *, + custom_tags: Optional[Dict[str, str]] = None, + idle_instance_autotermination_minutes: Optional[int] = None, + max_capacity: Optional[int] = None, + min_idle_instances: Optional[int] = None, + ): """Edit an existing instance pool. - + Modifies the configuration of an existing instance pool. - + :param instance_pool_id: str Instance pool ID :param instance_pool_name: str @@ -10404,7 +12204,7 @@ def edit(self :param custom_tags: Dict[str,str] (optional) Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time @@ -10418,222 +12218,195 @@ def edit(self upsize requests. 
:param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool - - - """ - body = {} - if custom_tags is not None: body['custom_tags'] = custom_tags - if idle_instance_autotermination_minutes is not None: body['idle_instance_autotermination_minutes'] = idle_instance_autotermination_minutes - if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id - if instance_pool_name is not None: body['instance_pool_name'] = instance_pool_name - if max_capacity is not None: body['max_capacity'] = max_capacity - if min_idle_instances is not None: body['min_idle_instances'] = min_idle_instances - if node_type_id is not None: body['node_type_id'] = node_type_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/instance-pools/edit', body=body - - , headers=headers - ) - - - - - def get(self - , instance_pool_id: str - ) -> GetInstancePool: + """ + body = {} + if custom_tags is not None: + body["custom_tags"] = custom_tags + if idle_instance_autotermination_minutes is not None: + body["idle_instance_autotermination_minutes"] = idle_instance_autotermination_minutes + if instance_pool_id is not None: + body["instance_pool_id"] = instance_pool_id + if instance_pool_name is not None: + body["instance_pool_name"] = instance_pool_name + if max_capacity is not None: + body["max_capacity"] = max_capacity + if min_idle_instances is not None: + body["min_idle_instances"] = min_idle_instances + if node_type_id is not None: + body["node_type_id"] = node_type_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/instance-pools/edit", body=body, headers=headers) + + def get(self, instance_pool_id: str) -> GetInstancePool: """Get instance pool information. - + Retrieve the information for an instance pool based on its identifier. - + :param instance_pool_id: str The canonical unique identifier for the instance pool. - + :returns: :class:`GetInstancePool` """ - + query = {} - if instance_pool_id is not None: query['instance_pool_id'] = instance_pool_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/instance-pools/get', query=query - - , headers=headers - ) - return GetInstancePool.from_dict(res) + if instance_pool_id is not None: + query["instance_pool_id"] = instance_pool_id + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/instance-pools/get", query=query, headers=headers) + return GetInstancePool.from_dict(res) - def get_permission_levels(self - , instance_pool_id: str - ) -> GetInstancePoolPermissionLevelsResponse: + def get_permission_levels(self, instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse: """Get instance pool permission levels. - + Gets the permission levels that a user can have on an object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. 
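A minimal pool lifecycle under the methods above, assuming a configured `WorkspaceClient` named `w`; the pool name and node type are illustrative, and the node type must be valid for your cloud:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    created = w.instance_pools.create(
        instance_pool_name="dev-pool",  # hypothetical pool name
        node_type_id="i3.xlarge",  # hypothetical AWS node type
        min_idle_instances=0,
        max_capacity=10,
        idle_instance_autotermination_minutes=15,
    )

    pool = w.instance_pools.get(instance_pool_id=created.instance_pool_id)
    print(pool.instance_pool_name, pool.state)

    # Deleting the pool terminates its idle instances asynchronously.
    w.instance_pools.delete(instance_pool_id=created.instance_pool_id)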
- + :returns: :class:`GetInstancePoolPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels' - - , headers=headers - ) - return GetInstancePoolPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_permissions(self - , instance_pool_id: str - ) -> InstancePoolPermissions: + res = self._api.do( + "GET", f"/api/2.0/permissions/instance-pools/{instance_pool_id}/permissionLevels", headers=headers + ) + return GetInstancePoolPermissionLevelsResponse.from_dict(res) + + def get_permissions(self, instance_pool_id: str) -> InstancePoolPermissions: """Get instance pool permissions. - + Gets the permissions of an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. - + :returns: :class:`InstancePoolPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/instance-pools/{instance_pool_id}' - - , headers=headers - ) - return InstancePoolPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/instance-pools/{instance_pool_id}", headers=headers) + return InstancePoolPermissions.from_dict(res) def list(self) -> Iterator[InstancePoolAndStats]: """List instance pool info. - + Gets a list of instance pools with their statistics. - + :returns: Iterator over :class:`InstancePoolAndStats` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/instance-pools/list' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/instance-pools/list", headers=headers) parsed = ListInstancePools.from_dict(json).instance_pools return parsed if parsed is not None else [] - - - - - - def set_permissions(self - , instance_pool_id: str - , * - , access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None) -> InstancePoolPermissions: + def set_permissions( + self, instance_pool_id: str, *, access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None + ) -> InstancePoolPermissions: """Set instance pool permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. 
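Continuing the pool sketch above, a sketch of granting a group attach rights on the pool; the group name is illustrative:

    from databricks.sdk.service import compute

    w.instance_pools.update_permissions(
        instance_pool_id=created.instance_pool_id,  # from the earlier create() sketch
        access_control_list=[
            compute.InstancePoolAccessControlRequest(
                group_name="data-engineers",  # hypothetical group
                permission_level=compute.InstancePoolPermissionLevel.CAN_ATTACH_TO,
            )
        ],
    )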
:param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/instance-pools/{instance_pool_id}', body=body - - , headers=headers - ) - return InstancePoolPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", f"/api/2.0/permissions/instance-pools/{instance_pool_id}", body=body, headers=headers) + return InstancePoolPermissions.from_dict(res) - def update_permissions(self - , instance_pool_id: str - , * - , access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None) -> InstancePoolPermissions: + def update_permissions( + self, instance_pool_id: str, *, access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None + ) -> InstancePoolPermissions: """Update instance pool permissions. - + Updates the permissions on an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/instance-pools/{instance_pool_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/permissions/instance-pools/{instance_pool_id}", body=body, headers=headers + ) return InstancePoolPermissions.from_dict(res) - - + class InstanceProfilesAPI: """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 buckets] using instance profiles for more information. - - [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html""" - - def __init__(self, api_client): - self._api = api_client - - + [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html + """ - - - - + def __init__(self, api_client): + self._api = api_client - def add(self - , instance_profile_arn: str - , * - , iam_role_arn: Optional[str] = None, is_meta_instance_profile: Optional[bool] = None, skip_validation: Optional[bool] = None): + def add( + self, + instance_profile_arn: str, + *, + iam_role_arn: Optional[str] = None, + is_meta_instance_profile: Optional[bool] = None, + skip_validation: Optional[bool] = None, + ): """Register an instance profile. - + Registers an instance profile in Databricks. 
In the UI, you can then give users the permission to use this instance profile when launching clusters. - + This API is only available to admin users. - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains a meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. This field is optional; the default value is `false`. :param skip_validation: bool (optional) By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation fails with an error message that does not indicate an IAM-related permission issue (e.g. “Your requested instance type is not supported in your requested availability zone”), you can pass this flag to skip the validation and forcibly add the instance profile. - - - """ - body = {} - if iam_role_arn is not None: body['iam_role_arn'] = iam_role_arn - if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn - if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile - if skip_validation is not None: body['skip_validation'] = skip_validation - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/instance-profiles/add', body=body - - , headers=headers - ) - - - - - def edit(self - , instance_profile_arn: str - , * - , iam_role_arn: Optional[str] = None, is_meta_instance_profile: Optional[bool] = None): + """ + body = {} + if iam_role_arn is not None: + body["iam_role_arn"] = iam_role_arn + if instance_profile_arn is not None: + body["instance_profile_arn"] = instance_profile_arn + if is_meta_instance_profile is not None: + body["is_meta_instance_profile"] = is_meta_instance_profile + if skip_validation is not None: + body["skip_validation"] = skip_validation + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/instance-profiles/add", body=body, headers=headers) + + def edit( + self, + instance_profile_arn: str, + *, + iam_role_arn: Optional[str] = None, + is_meta_instance_profile: Optional[bool] = None, + ): """Edit an instance profile. - + The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both of the following are true: - + * Your role name and instance profile name do not match. The name is the part after the last slash in each ARN. * You want to use the instance profile with [Databricks SQL Serverless]. - + To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses]. - + This API is only available to admin users. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile.
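A registration sketch for the add/list flow above, assuming a configured `WorkspaceClient` named `w`; the ARN is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    arn = "arn:aws:iam::123456789012:instance-profile/my-profile"  # placeholder ARN
    w.instance_profiles.add(instance_profile_arn=arn, skip_validation=False)

    for profile in w.instance_profiles.list():
        print(profile.instance_profile_arn)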
This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains a meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. This field is optional; the default value is `false`. - - + + """ body = {} - if iam_role_arn is not None: body['iam_role_arn'] = iam_role_arn - if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn - if is_meta_instance_profile is not None: body['is_meta_instance_profile'] = is_meta_instance_profile - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/instance-profiles/edit', body=body - - , headers=headers - ) - + if iam_role_arn is not None: + body["iam_role_arn"] = iam_role_arn + if instance_profile_arn is not None: + body["instance_profile_arn"] = instance_profile_arn + if is_meta_instance_profile is not None: + body["is_meta_instance_profile"] = is_meta_instance_profile + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/instance-profiles/edit", body=body, headers=headers) def list(self) -> Iterator[InstanceProfile]: """List available instance profiles. - + List the instance profiles that the calling user can use to launch a cluster. - + This API is available to all users. - + :returns: Iterator over :class:`InstanceProfile` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/instance-profiles/list' , headers=headers ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/instance-profiles/list", headers=headers) parsed = ListInstanceProfilesResponse.from_dict(json).instance_profiles return parsed if parsed is not None else [] - - - - - def remove(self - , instance_profile_arn: str - ): + def remove(self, instance_profile_arn: str): """Remove the instance profile. - + Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. - + This API is only accessible to admin users. - + :param instance_profile_arn: str The ARN of the instance profile to remove. This field is required. - - + + """ body = {} - if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/instance-profiles/remove', body=body - - , headers=headers - ) - + if instance_profile_arn is not None: + body["instance_profile_arn"] = instance_profile_arn + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/instance-profiles/remove", body=body, headers=headers) + - - class LibrariesAPI: """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster. - + To make third-party or custom code available to notebooks and jobs running on your clusters, you can install a library. Libraries can be written in Python, Java, Scala, and R.
You can upload Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories. - + Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library directly from a public repository such as PyPI or Maven, using a previously installed workspace library, or using an init script. - + When you uninstall a library from a cluster, the library is removed only when you restart the cluster. Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def all_cluster_statuses(self) -> Iterator[ClusterLibraryStatuses]: """Get all statuses. - + Get the status of all libraries on all clusters. A status is returned for all libraries installed on this cluster via the API or the libraries UI. - + :returns: Iterator over :class:`ClusterLibraryStatuses` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/libraries/all-cluster-statuses' , headers=headers ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/libraries/all-cluster-statuses", headers=headers) parsed = ListAllClusterLibraryStatusesResponse.from_dict(json).statuses return parsed if parsed is not None else [] - - - - - def cluster_status(self , cluster_id: str ) -> Iterator[LibraryFullStatus]: + def cluster_status(self, cluster_id: str) -> Iterator[LibraryFullStatus]: """Get status. - + Get the status of libraries on a cluster. A status is returned for all libraries installed on this cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries set to be installed on this cluster, in the order that the libraries were added to the cluster, are returned first. 2. Libraries that were previously requested to be installed on this cluster but are now marked for removal are returned last, in no particular order. - + :param cluster_id: str Unique identifier of the cluster whose status should be retrieved. - + :returns: Iterator over :class:`LibraryFullStatus` """ - + query = {} - if cluster_id is not None: query['cluster_id'] = cluster_id - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/libraries/cluster-status', query=query - - , headers=headers - ) + query = {} + if cluster_id is not None: + query["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/libraries/cluster-status", query=query, headers=headers) parsed = ClusterLibraryStatuses.from_dict(json).library_statuses return parsed if parsed is not None else [] - - - - - def install(self , cluster_id: str, libraries: List[Library] ): + def install(self, cluster_id: str, libraries: List[Library]): """Add a library. - + Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. - + :param cluster_id: str Unique identifier for the cluster on which to install these libraries. :param libraries: List[:class:`Library`] The libraries to install.
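A sketch pairing `cluster_status` with `install` (whose request body follows just below); installation is asynchronous, so polling the statuses is how progress is observed. Assumes a configured `WorkspaceClient` named `w` and a hypothetical cluster ID:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    cluster_id = "1234-567890-abcde123"  # hypothetical cluster ID

    w.libraries.install(
        cluster_id=cluster_id,
        libraries=[compute.Library(pypi=compute.PythonPyPiLibrary(package="simplejson"))],
    )

    # install() returns immediately; poll the statuses to watch progress.
    for lib in w.libraries.cluster_status(cluster_id=cluster_id):
        print(lib.status, lib.library)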
- - + + """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/libraries/install', body=body - - , headers=headers - ) - + if cluster_id is not None: + body["cluster_id"] = cluster_id + if libraries is not None: + body["libraries"] = [v.as_dict() for v in libraries] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/libraries/install", body=body, headers=headers) - def uninstall(self - , cluster_id: str, libraries: List[Library] - ): + def uninstall(self, cluster_id: str, libraries: List[Library]): """Uninstall libraries. - + Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. - + :param cluster_id: str Unique identifier for the cluster on which to uninstall these libraries. :param libraries: List[:class:`Library`] The libraries to uninstall. - - + + """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/libraries/uninstall', body=body - - , headers=headers - ) - + if cluster_id is not None: + body["cluster_id"] = cluster_id + if libraries is not None: + body["libraries"] = [v.as_dict() for v in libraries] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/libraries/uninstall", body=body, headers=headers) + - - class PolicyComplianceForClustersAPI: """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace. - + A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce compliance API allows you to update a cluster to be compliant with the current version of its policy.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def enforce_compliance(self - , cluster_id: str - , * - , validate_only: Optional[bool] = None) -> EnforceClusterComplianceResponse: + def enforce_compliance( + self, cluster_id: str, *, validate_only: Optional[bool] = None + ) -> EnforceClusterComplianceResponse: """Enforce cluster policy compliance. - + Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the cluster is started, the new attributes will take effect. - + Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. - + :param cluster_id: str The ID of the cluster you want to enforce policy compliance on. 
:param validate_only: bool (optional) If set, previews the changes that would be made to a cluster to enforce compliance but does not update the cluster. - + :returns: :class:`EnforceClusterComplianceResponse` """ body = {} - if cluster_id is not None: body['cluster_id'] = cluster_id - if validate_only is not None: body['validate_only'] = validate_only - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/policies/clusters/enforce-compliance', body=body - - , headers=headers - ) - return EnforceClusterComplianceResponse.from_dict(res) + if cluster_id is not None: + body["cluster_id"] = cluster_id + if validate_only is not None: + body["validate_only"] = validate_only + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/policies/clusters/enforce-compliance", body=body, headers=headers) + return EnforceClusterComplianceResponse.from_dict(res) - def get_compliance(self - , cluster_id: str - ) -> GetClusterComplianceResponse: + def get_compliance(self, cluster_id: str) -> GetClusterComplianceResponse: """Get cluster policy compliance. - + Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param cluster_id: str The ID of the cluster for which to get the compliance status - + :returns: :class:`GetClusterComplianceResponse` """ - + query = {} - if cluster_id is not None: query['cluster_id'] = cluster_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/policies/clusters/get-compliance', query=query - - , headers=headers - ) - return GetClusterComplianceResponse.from_dict(res) + if cluster_id is not None: + query["cluster_id"] = cluster_id + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/policies/clusters/get-compliance", query=query, headers=headers) + return GetClusterComplianceResponse.from_dict(res) - def list_compliance(self - , policy_id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ClusterCompliance]: + def list_compliance( + self, policy_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ClusterCompliance]: """List cluster policy compliance. - + Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param policy_id: str Canonical unique identifier for the cluster policy. :param page_size: int (optional) @@ -11015,121 +12735,96 @@ def list_compliance(self :param page_token: str (optional) A page token that can be used to navigate to the next page or previous page as returned by `next_page_token` or `prev_page_token`.
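A short usage sketch for the compliance endpoints above, assuming a configured WorkspaceClient and placeholder IDs; attribute names follow the response dataclasses generated in this module:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Preview what enforcement would change without applying it.
    preview = w.policy_compliance_for_clusters.enforce_compliance(
        cluster_id="0123-456789-abcdefgh", validate_only=True
    )
    print(preview.has_changes)

    # Walk every cluster governed by one policy; the generated iterator follows
    # next_page_token internally.
    for compliance in w.policy_compliance_for_clusters.list_compliance(policy_id="ABC123DEF4567890"):
        print(compliance.cluster_id, compliance.is_compliant)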
- + :returns: Iterator over :class:`ClusterCompliance` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - if policy_id is not None: query['policy_id'] = policy_id - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if policy_id is not None: + query["policy_id"] = policy_id + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/policies/clusters/list-compliance', query=query - - , headers=headers - ) - if 'clusters' in json: - for v in json['clusters']: - yield ClusterCompliance.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - + json = self._api.do("GET", "/api/2.0/policies/clusters/list-compliance", query=query, headers=headers) + if "clusters" in json: + for v in json["clusters"]: + yield ClusterCompliance.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + - - class PolicyFamiliesAPI: """View available policy families. A policy family contains a policy definition providing best practices for configuring clusters for a particular use case. - + Databricks manages and provides policy families for several common cluster use cases. You cannot create, edit, or delete policy families. - + Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a policy family. Cluster policies created using a policy family inherit the policy family's policy definition.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - , policy_family_id: str - , * - , version: Optional[int] = None) -> PolicyFamily: + def get(self, policy_family_id: str, *, version: Optional[int] = None) -> PolicyFamily: """Get policy family information. - + Retrieve the information for a policy family based on its identifier and version - + :param policy_family_id: str The family ID about which to retrieve information. :param version: int (optional) The version number for the family to fetch. Defaults to the latest version. - + :returns: :class:`PolicyFamily` """ - + query = {} - if version is not None: query['version'] = version - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/policy-families/{policy_family_id}', query=query - - , headers=headers - ) - return PolicyFamily.from_dict(res) + if version is not None: + query["version"] = version + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", f"/api/2.0/policy-families/{policy_family_id}", query=query, headers=headers) + return PolicyFamily.from_dict(res) - def list(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PolicyFamily]: + def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PolicyFamily]: """List policy families. - + Returns the list of policy definition types available to use at their latest version. This API is paginated. - + :param max_results: int (optional) Maximum number of policy families to return. :param page_token: str (optional) A token that can be used to get the next page of results.
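The policy-families endpoints follow the same shape; a brief sketch, again assuming a configured WorkspaceClient (the family ID is a placeholder):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # List every family at its latest version, then fetch one definition by ID.
    for family in w.policy_families.list():
        print(family.policy_family_id, family.name)

    family = w.policy_families.get(policy_family_id="personal-vm")
    print(family.definition)  # the policy definition that derived cluster policies inherit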
- + :returns: Iterator over :class:`PolicyFamily` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/policy-families', query=query - - , headers=headers - ) - if 'policy_families' in json: - for v in json['policy_families']: - yield PolicyFamily.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - - \ No newline at end of file + while True: + json = self._api.do("GET", "/api/2.0/policy-families", query=query, headers=headers) + if "policy_families" in json: + for v in json["policy_families"]: + yield PolicyFamily.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 1aab1390b..eb0bfbf16 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -1,68 +1,78 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, Callable, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict -_LOG = logging.getLogger('databricks.sdk') +_LOG = logging.getLogger("databricks.sdk") from databricks.sdk.service import sql # all definitions in this file are in alphabetical order + @dataclass class AuthorizationDetails: grant_rules: Optional[List[AuthorizationDetailsGrantRule]] = None """Represents downscoped permission rules with specific access rights. This field is specific to `workspace_rule_set` constraint.""" - + resource_legacy_acl_path: Optional[str] = None """The acl path of the tree store resource.""" - + resource_name: Optional[str] = None """The resource name to which the authorization rule applies. This field is specific to `workspace_rule_set` constraint. Format: `workspaces/{workspace_id}/dashboards/{dashboard_id}`""" - + type: Optional[str] = None """The type of authorization downscoping policy.
Ex: `workspace_rule_set` defines access rules for a specific workspace resource""" - + def as_dict(self) -> dict: """Serializes the AuthorizationDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules] - if self.resource_legacy_acl_path is not None: body['resource_legacy_acl_path'] = self.resource_legacy_acl_path - if self.resource_name is not None: body['resource_name'] = self.resource_name - if self.type is not None: body['type'] = self.type + if self.grant_rules: + body["grant_rules"] = [v.as_dict() for v in self.grant_rules] + if self.resource_legacy_acl_path is not None: + body["resource_legacy_acl_path"] = self.resource_legacy_acl_path + if self.resource_name is not None: + body["resource_name"] = self.resource_name + if self.type is not None: + body["type"] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the AuthorizationDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.grant_rules: body['grant_rules'] = self.grant_rules - if self.resource_legacy_acl_path is not None: body['resource_legacy_acl_path'] = self.resource_legacy_acl_path - if self.resource_name is not None: body['resource_name'] = self.resource_name - if self.type is not None: body['type'] = self.type + if self.grant_rules: + body["grant_rules"] = self.grant_rules + if self.resource_legacy_acl_path is not None: + body["resource_legacy_acl_path"] = self.resource_legacy_acl_path + if self.resource_name is not None: + body["resource_name"] = self.resource_name + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetails: """Deserializes the AuthorizationDetails from a dictionary.""" - return cls(grant_rules=_repeated_dict(d, 'grant_rules', AuthorizationDetailsGrantRule), resource_legacy_acl_path=d.get('resource_legacy_acl_path', None), resource_name=d.get('resource_name', None), type=d.get('type', None)) - - + return cls( + grant_rules=_repeated_dict(d, "grant_rules", AuthorizationDetailsGrantRule), + resource_legacy_acl_path=d.get("resource_legacy_acl_path", None), + resource_name=d.get("resource_name", None), + type=d.get("type", None), + ) @dataclass @@ -71,34 +81,25 @@ class AuthorizationDetailsGrantRule: """Permission sets for dashboard are defined in iam-common/rbac-common/permission-sets/definitions/TreeStoreBasePermissionSets Ex: `permissionSets/dashboard.runner`""" - + def as_dict(self) -> dict: """Serializes the AuthorizationDetailsGrantRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_set is not None: body['permission_set'] = self.permission_set + if self.permission_set is not None: + body["permission_set"] = self.permission_set return body def as_shallow_dict(self) -> dict: """Serializes the AuthorizationDetailsGrantRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_set is not None: body['permission_set'] = self.permission_set + if self.permission_set is not None: + body["permission_set"] = self.permission_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AuthorizationDetailsGrantRule: """Deserializes the AuthorizationDetailsGrantRule from a dictionary.""" - return cls(permission_set=d.get('permission_set', None)) - - - - - - - - - - - + return cls(permission_set=d.get("permission_set", None)) @dataclass @@ -108,61 +109,63 @@ class CronSchedule: Trigger] for 
details. [Cron Trigger]: http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - + timezone_id: str """A Java timezone id. The schedule will be resolved with respect to this timezone. See [Java TimeZone] for details. [Java TimeZone]: https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html""" - + def as_dict(self) -> dict: """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" - return cls(quartz_cron_expression=d.get('quartz_cron_expression', None), timezone_id=d.get('timezone_id', None)) - - + return cls(quartz_cron_expression=d.get("quartz_cron_expression", None), timezone_id=d.get("timezone_id", None)) @dataclass class Dashboard: create_time: Optional[str] = None """The timestamp of when the dashboard was created.""" - + dashboard_id: Optional[str] = None """UUID identifying the dashboard.""" - + display_name: Optional[str] = None """The display name of the dashboard.""" - + etag: Optional[str] = None """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has not been modified since the last read. This field is excluded in List Dashboards responses.""" - + lifecycle_state: Optional[LifecycleState] = None """The state of the dashboard resource. Used for tracking trashed status.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the dashboard. Includes leading slash and no trailing slash. This field is excluded in List Dashboards responses.""" - + path: Optional[str] = None """The workspace path of the dashboard asset, including the file name. Exported dashboards always have the file extension `.lvdash.json`. This field is excluded in List Dashboards responses.""" - + serialized_dashboard: Optional[str] = None """The contents of the dashboard in serialized string form. This field is excluded in List Dashboards responses. Use the [get dashboard API] to retrieve an example response, which @@ -170,58 +173,84 @@ class Dashboard: that represents the dashboard's layout and components. [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get""" - + update_time: Optional[str] = None """The timestamp of when the dashboard was last updated by the user. 
This field is excluded in List Dashboards responses.""" - + warehouse_id: Optional[str] = None """The warehouse ID used to run the dashboard.""" - + def as_dict(self) -> dict: """Serializes the Dashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.etag is not None: body['etag'] = self.etag - if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.path is not None: body['path'] = self.path - if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard - if self.update_time is not None: body['update_time'] = self.update_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.etag is not None: + body["etag"] = self.etag + if self.lifecycle_state is not None: + body["lifecycle_state"] = self.lifecycle_state.value + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.path is not None: + body["path"] = self.path + if self.serialized_dashboard is not None: + body["serialized_dashboard"] = self.serialized_dashboard + if self.update_time is not None: + body["update_time"] = self.update_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the Dashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.etag is not None: body['etag'] = self.etag - if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.path is not None: body['path'] = self.path - if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard - if self.update_time is not None: body['update_time'] = self.update_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.etag is not None: + body["etag"] = self.etag + if self.lifecycle_state is not None: + body["lifecycle_state"] = self.lifecycle_state + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.path is not None: + body["path"] = self.path + if self.serialized_dashboard is not None: + body["serialized_dashboard"] = self.serialized_dashboard + if self.update_time is not None: + body["update_time"] = self.update_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Dashboard: """Deserializes the 
Dashboard from a dictionary.""" - return cls(create_time=d.get('create_time', None), dashboard_id=d.get('dashboard_id', None), display_name=d.get('display_name', None), etag=d.get('etag', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), parent_path=d.get('parent_path', None), path=d.get('path', None), serialized_dashboard=d.get('serialized_dashboard', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + create_time=d.get("create_time", None), + dashboard_id=d.get("dashboard_id", None), + display_name=d.get("display_name", None), + etag=d.get("etag", None), + lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), + parent_path=d.get("parent_path", None), + path=d.get("path", None), + serialized_dashboard=d.get("serialized_dashboard", None), + update_time=d.get("update_time", None), + warehouse_id=d.get("warehouse_id", None), + ) class DashboardView(Enum): - - - DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC' - + DASHBOARD_VIEW_BASIC = "DASHBOARD_VIEW_BASIC" @dataclass @@ -240,11 +269,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteScheduleResponse: """Deserializes the DeleteScheduleResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -263,175 +287,194 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteSubscriptionResponse: """Deserializes the DeleteSubscriptionResponse from a dictionary.""" return cls() - - @dataclass class GenieAttachment: """Genie AI Response""" - + attachment_id: Optional[str] = None """Attachment ID""" - + query: Optional[GenieQueryAttachment] = None """Query Attachment if Genie responds with a SQL query""" - + text: Optional[TextAttachment] = None """Text Attachment if Genie responds with text""" - + def as_dict(self) -> dict: """Serializes the GenieAttachment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attachment_id is not None: body['attachment_id'] = self.attachment_id - if self.query: body['query'] = self.query.as_dict() - if self.text: body['text'] = self.text.as_dict() + if self.attachment_id is not None: + body["attachment_id"] = self.attachment_id + if self.query: + body["query"] = self.query.as_dict() + if self.text: + body["text"] = self.text.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GenieAttachment into a shallow dictionary of its immediate attributes.""" body = {} - if self.attachment_id is not None: body['attachment_id'] = self.attachment_id - if self.query: body['query'] = self.query - if self.text: body['text'] = self.text + if self.attachment_id is not None: + body["attachment_id"] = self.attachment_id + if self.query: + body["query"] = self.query + if self.text: + body["text"] = self.text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieAttachment: """Deserializes the GenieAttachment from a dictionary.""" - return cls(attachment_id=d.get('attachment_id', None), query=_from_dict(d, 'query', GenieQueryAttachment), text=_from_dict(d, 'text', TextAttachment)) - - + return cls( + attachment_id=d.get("attachment_id", None), + query=_from_dict(d, "query", GenieQueryAttachment), + text=_from_dict(d, "text", TextAttachment), + ) @dataclass class GenieConversation: id: str """Conversation ID. 
Legacy identifier, use conversation_id instead""" - + space_id: str """Genie space ID""" - + user_id: int """ID of the user who created the conversation""" - + title: str """Conversation title""" - + conversation_id: str """Conversation ID""" - + created_timestamp: Optional[int] = None """Timestamp when the message was created""" - + last_updated_timestamp: Optional[int] = None """Timestamp when the message was last updated""" - + def as_dict(self) -> dict: """Serializes the GenieConversation into a dictionary suitable for use as a JSON request body.""" body = {} - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.space_id is not None: body['space_id'] = self.space_id - if self.title is not None: body['title'] = self.title - if self.user_id is not None: body['user_id'] = self.user_id + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.space_id is not None: + body["space_id"] = self.space_id + if self.title is not None: + body["title"] = self.title + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieConversation into a shallow dictionary of its immediate attributes.""" body = {} - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.space_id is not None: body['space_id'] = self.space_id - if self.title is not None: body['title'] = self.title - if self.user_id is not None: body['user_id'] = self.user_id + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.space_id is not None: + body["space_id"] = self.space_id + if self.title is not None: + body["title"] = self.title + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieConversation: """Deserializes the GenieConversation from a dictionary.""" - return cls(conversation_id=d.get('conversation_id', None), created_timestamp=d.get('created_timestamp', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), space_id=d.get('space_id', None), title=d.get('title', None), user_id=d.get('user_id', None)) - - + return cls( + conversation_id=d.get("conversation_id", None), + created_timestamp=d.get("created_timestamp", None), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + space_id=d.get("space_id", None), + title=d.get("title", None), + user_id=d.get("user_id", None), 
+ ) @dataclass class GenieCreateConversationMessageRequest: content: str """User message content.""" - + conversation_id: Optional[str] = None """The ID associated with the conversation.""" - + space_id: Optional[str] = None """The ID associated with the Genie space where the conversation is started.""" - + def as_dict(self) -> dict: """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: body['content'] = self.content - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.space_id is not None: body['space_id'] = self.space_id + if self.content is not None: + body["content"] = self.content + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.space_id is not None: + body["space_id"] = self.space_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: body['content'] = self.content - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.space_id is not None: body['space_id'] = self.space_id + if self.content is not None: + body["content"] = self.content + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.space_id is not None: + body["space_id"] = self.space_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieCreateConversationMessageRequest: """Deserializes the GenieCreateConversationMessageRequest from a dictionary.""" - return cls(content=d.get('content', None), conversation_id=d.get('conversation_id', None), space_id=d.get('space_id', None)) - - - - - - - - - - - + return cls( + content=d.get("content", None), + conversation_id=d.get("conversation_id", None), + space_id=d.get("space_id", None), + ) @dataclass class GenieGenerateDownloadFullQueryResultResponse: download_id: Optional[str] = None """Download ID. Use this ID to track the download request in subsequent polling calls""" - + def as_dict(self) -> dict: """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.download_id is not None: body['download_id'] = self.download_id + if self.download_id is not None: + body["download_id"] = self.download_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.download_id is not None: body['download_id'] = self.download_id + if self.download_id is not None: + body["download_id"] = self.download_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse: """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary.""" - return cls(download_id=d.get('download_id', None)) - - - - - - - - + return cls(download_id=d.get("download_id", None)) @dataclass @@ -439,31 +482,25 @@ class GenieGetDownloadFullQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None """SQL Statement Execution response. 
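All of these generated dataclasses share one wire-format contract, which a quick round-trip makes concrete (a sketch using the two types defined just above; note that None-valued fields are omitted from the serialized body):

    req = GenieCreateConversationMessageRequest(
        content="How many rows?", conversation_id="c1", space_id="s1"
    )
    assert req.as_dict() == {"content": "How many rows?", "conversation_id": "c1", "space_id": "s1"}

    resp = GenieGenerateDownloadFullQueryResultResponse.from_dict({"download_id": "dl-42"})
    assert resp.download_id == "dl-42"
    assert resp.as_dict() == {"download_id": "dl-42"}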
See [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) for more details.""" - + def as_dict(self) -> dict: """Serializes the GenieGetDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.statement_response: body['statement_response'] = self.statement_response.as_dict() + if self.statement_response: + body["statement_response"] = self.statement_response.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GenieGetDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.statement_response: body['statement_response'] = self.statement_response + if self.statement_response: + body["statement_response"] = self.statement_response return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieGetDownloadFullQueryResultResponse: """Deserializes the GenieGetDownloadFullQueryResultResponse from a dictionary.""" - return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse)) - - - - - - - - + return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) @dataclass @@ -471,99 +508,92 @@ class GenieGetMessageQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None """SQL Statement Execution response. See [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) for more details.""" - + def as_dict(self) -> dict: """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.statement_response: body['statement_response'] = self.statement_response.as_dict() + if self.statement_response: + body["statement_response"] = self.statement_response.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.statement_response: body['statement_response'] = self.statement_response + if self.statement_response: + body["statement_response"] = self.statement_response return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieGetMessageQueryResultResponse: """Deserializes the GenieGetMessageQueryResultResponse from a dictionary.""" - return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse)) - - - - - - - - - - - + return cls(statement_response=_from_dict(d, "statement_response", sql.StatementResponse)) @dataclass class GenieListSpacesResponse: next_page_token: Optional[str] = None """Token to get the next page of results""" - + spaces: Optional[List[GenieSpace]] = None """List of Genie spaces""" - + def as_dict(self) -> dict: """Serializes the GenieListSpacesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.spaces: body['spaces'] = [v.as_dict() for v in self.spaces] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.spaces: + body["spaces"] = [v.as_dict() for v in self.spaces] return body def as_shallow_dict(self) -> dict: """Serializes the GenieListSpacesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.spaces: body['spaces'] = self.spaces + if self.next_page_token is not None: + body["next_page_token"] 
= self.next_page_token + if self.spaces: + body["spaces"] = self.spaces return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieListSpacesResponse: """Deserializes the GenieListSpacesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), spaces=_repeated_dict(d, 'spaces', GenieSpace)) - - + return cls(next_page_token=d.get("next_page_token", None), spaces=_repeated_dict(d, "spaces", GenieSpace)) @dataclass class GenieMessage: id: str """Message ID. Legacy identifier, use message_id instead""" - + space_id: str """Genie space ID""" - + conversation_id: str """Conversation ID""" - + content: str """User message content""" - + message_id: str """Message ID""" - + attachments: Optional[List[GenieAttachment]] = None """AI-generated response to the message""" - + created_timestamp: Optional[int] = None """Timestamp when the message was created""" - + error: Optional[MessageError] = None """Error message if Genie failed to respond to the message""" - + last_updated_timestamp: Optional[int] = None """Timestamp when the message was last updated""" - + query_result: Optional[Result] = None """The result of SQL query if the message includes a query attachment. Deprecated. Use `query_result_metadata` in `GenieQueryAttachment` instead.""" - + status: Optional[MessageStatus] = None """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * @@ -579,247 +609,312 @@ class GenieMessage: anymore. The user needs to rerun the query. Rerun the SQL query result by calling [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`: Message has been cancelled.""" - + user_id: Optional[int] = None """ID of the user who created the message""" - + def as_dict(self) -> dict: """Serializes the GenieMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attachments: body['attachments'] = [v.as_dict() for v in self.attachments] - if self.content is not None: body['content'] = self.content - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp - if self.error: body['error'] = self.error.as_dict() - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.message_id is not None: body['message_id'] = self.message_id - if self.query_result: body['query_result'] = self.query_result.as_dict() - if self.space_id is not None: body['space_id'] = self.space_id - if self.status is not None: body['status'] = self.status.value - if self.user_id is not None: body['user_id'] = self.user_id + if self.attachments: + body["attachments"] = [v.as_dict() for v in self.attachments] + if self.content is not None: + body["content"] = self.content + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.error: + body["error"] = self.error.as_dict() + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.message_id is not None: + body["message_id"] = self.message_id + if self.query_result: + body["query_result"] = 
self.query_result.as_dict() + if self.space_id is not None: + body["space_id"] = self.space_id + if self.status is not None: + body["status"] = self.status.value + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.attachments: body['attachments'] = self.attachments - if self.content is not None: body['content'] = self.content - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp - if self.error: body['error'] = self.error - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.message_id is not None: body['message_id'] = self.message_id - if self.query_result: body['query_result'] = self.query_result - if self.space_id is not None: body['space_id'] = self.space_id - if self.status is not None: body['status'] = self.status - if self.user_id is not None: body['user_id'] = self.user_id + if self.attachments: + body["attachments"] = self.attachments + if self.content is not None: + body["content"] = self.content + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.created_timestamp is not None: + body["created_timestamp"] = self.created_timestamp + if self.error: + body["error"] = self.error + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.message_id is not None: + body["message_id"] = self.message_id + if self.query_result: + body["query_result"] = self.query_result + if self.space_id is not None: + body["space_id"] = self.space_id + if self.status is not None: + body["status"] = self.status + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieMessage: """Deserializes the GenieMessage from a dictionary.""" - return cls(attachments=_repeated_dict(d, 'attachments', GenieAttachment), content=d.get('content', None), conversation_id=d.get('conversation_id', None), created_timestamp=d.get('created_timestamp', None), error=_from_dict(d, 'error', MessageError), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), message_id=d.get('message_id', None), query_result=_from_dict(d, 'query_result', Result), space_id=d.get('space_id', None), status=_enum(d, 'status', MessageStatus), user_id=d.get('user_id', None)) - - + return cls( + attachments=_repeated_dict(d, "attachments", GenieAttachment), + content=d.get("content", None), + conversation_id=d.get("conversation_id", None), + created_timestamp=d.get("created_timestamp", None), + error=_from_dict(d, "error", MessageError), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + message_id=d.get("message_id", None), + query_result=_from_dict(d, "query_result", Result), + space_id=d.get("space_id", None), + status=_enum(d, "status", MessageStatus), + user_id=d.get("user_id", None), + ) @dataclass class GenieQueryAttachment: description: Optional[str] = None """Description of the query""" - + id: Optional[str] = None - + last_updated_timestamp: Optional[int] = None """Time when the user updated the query last""" - + query: Optional[str] = None """AI generated 
SQL query""" - + query_result_metadata: Optional[GenieResultMetadata] = None """Metadata associated with the query result.""" - + statement_id: Optional[str] = None """Statement Execution API statement id. Use [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) to get the full result data.""" - + title: Optional[str] = None """Name of the query""" - + def as_dict(self) -> dict: """Serializes the GenieQueryAttachment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.query is not None: body['query'] = self.query - if self.query_result_metadata: body['query_result_metadata'] = self.query_result_metadata.as_dict() - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.title is not None: body['title'] = self.title + if self.description is not None: + body["description"] = self.description + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.query is not None: + body["query"] = self.query + if self.query_result_metadata: + body["query_result_metadata"] = self.query_result_metadata.as_dict() + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.title is not None: + body["title"] = self.title return body def as_shallow_dict(self) -> dict: """Serializes the GenieQueryAttachment into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.query is not None: body['query'] = self.query - if self.query_result_metadata: body['query_result_metadata'] = self.query_result_metadata - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.title is not None: body['title'] = self.title + if self.description is not None: + body["description"] = self.description + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.query is not None: + body["query"] = self.query + if self.query_result_metadata: + body["query_result_metadata"] = self.query_result_metadata + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.title is not None: + body["title"] = self.title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieQueryAttachment: """Deserializes the GenieQueryAttachment from a dictionary.""" - return cls(description=d.get('description', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), query=d.get('query', None), query_result_metadata=_from_dict(d, 'query_result_metadata', GenieResultMetadata), statement_id=d.get('statement_id', None), title=d.get('title', None)) - - + return cls( + description=d.get("description", None), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + query=d.get("query", None), + query_result_metadata=_from_dict(d, "query_result_metadata", GenieResultMetadata), + statement_id=d.get("statement_id", None), + 
title=d.get("title", None), + ) @dataclass class GenieResultMetadata: is_truncated: Optional[bool] = None """Indicates whether the result set is truncated.""" - + row_count: Optional[int] = None """The number of rows in the result set.""" - + def as_dict(self) -> dict: """Serializes the GenieResultMetadata into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_truncated is not None: body['is_truncated'] = self.is_truncated - if self.row_count is not None: body['row_count'] = self.row_count + if self.is_truncated is not None: + body["is_truncated"] = self.is_truncated + if self.row_count is not None: + body["row_count"] = self.row_count return body def as_shallow_dict(self) -> dict: """Serializes the GenieResultMetadata into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_truncated is not None: body['is_truncated'] = self.is_truncated - if self.row_count is not None: body['row_count'] = self.row_count + if self.is_truncated is not None: + body["is_truncated"] = self.is_truncated + if self.row_count is not None: + body["row_count"] = self.row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieResultMetadata: """Deserializes the GenieResultMetadata from a dictionary.""" - return cls(is_truncated=d.get('is_truncated', None), row_count=d.get('row_count', None)) - - + return cls(is_truncated=d.get("is_truncated", None), row_count=d.get("row_count", None)) @dataclass class GenieSpace: space_id: str """Genie space ID""" - + title: str """Title of the Genie Space""" - + description: Optional[str] = None """Description of the Genie Space""" - + def as_dict(self) -> dict: """Serializes the GenieSpace into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.space_id is not None: body['space_id'] = self.space_id - if self.title is not None: body['title'] = self.title + if self.description is not None: + body["description"] = self.description + if self.space_id is not None: + body["space_id"] = self.space_id + if self.title is not None: + body["title"] = self.title return body def as_shallow_dict(self) -> dict: """Serializes the GenieSpace into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.space_id is not None: body['space_id'] = self.space_id - if self.title is not None: body['title'] = self.title + if self.description is not None: + body["description"] = self.description + if self.space_id is not None: + body["space_id"] = self.space_id + if self.title is not None: + body["title"] = self.title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieSpace: """Deserializes the GenieSpace from a dictionary.""" - return cls(description=d.get('description', None), space_id=d.get('space_id', None), title=d.get('title', None)) - - + return cls(description=d.get("description", None), space_id=d.get("space_id", None), title=d.get("title", None)) @dataclass class GenieStartConversationMessageRequest: content: str """The text of the message that starts the conversation.""" - + space_id: Optional[str] = None """The ID associated with the Genie space where you want to start a conversation.""" - + def as_dict(self) -> dict: """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: body['content'] = self.content - if 
self.space_id is not None: body['space_id'] = self.space_id + if self.content is not None: + body["content"] = self.content + if self.space_id is not None: + body["space_id"] = self.space_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: body['content'] = self.content - if self.space_id is not None: body['space_id'] = self.space_id + if self.content is not None: + body["content"] = self.content + if self.space_id is not None: + body["space_id"] = self.space_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationMessageRequest: """Deserializes the GenieStartConversationMessageRequest from a dictionary.""" - return cls(content=d.get('content', None), space_id=d.get('space_id', None)) - - + return cls(content=d.get("content", None), space_id=d.get("space_id", None)) @dataclass class GenieStartConversationResponse: message_id: str """Message ID""" - + conversation_id: str """Conversation ID""" - + conversation: Optional[GenieConversation] = None - + message: Optional[GenieMessage] = None - + def as_dict(self) -> dict: """Serializes the GenieStartConversationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.conversation: body['conversation'] = self.conversation.as_dict() - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.message: body['message'] = self.message.as_dict() - if self.message_id is not None: body['message_id'] = self.message_id + if self.conversation: + body["conversation"] = self.conversation.as_dict() + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.message: + body["message"] = self.message.as_dict() + if self.message_id is not None: + body["message_id"] = self.message_id return body def as_shallow_dict(self) -> dict: """Serializes the GenieStartConversationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.conversation: body['conversation'] = self.conversation - if self.conversation_id is not None: body['conversation_id'] = self.conversation_id - if self.message: body['message'] = self.message - if self.message_id is not None: body['message_id'] = self.message_id + if self.conversation: + body["conversation"] = self.conversation + if self.conversation_id is not None: + body["conversation_id"] = self.conversation_id + if self.message: + body["message"] = self.message + if self.message_id is not None: + body["message_id"] = self.message_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationResponse: """Deserializes the GenieStartConversationResponse from a dictionary.""" - return cls(conversation=_from_dict(d, 'conversation', GenieConversation), conversation_id=d.get('conversation_id', None), message=_from_dict(d, 'message', GenieMessage), message_id=d.get('message_id', None)) - - - - - - - - - - - + return cls( + conversation=_from_dict(d, "conversation", GenieConversation), + conversation_id=d.get("conversation_id", None), + message=_from_dict(d, "message", GenieMessage), + message_id=d.get("message_id", None), + ) @dataclass @@ -828,84 +923,84 @@ class GetPublishedDashboardTokenInfoResponse: """Authorization constraints for accessing the published dashboard. 
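Tying the Genie request and response types together, a sketch of the blocking conversation flow, assuming a configured WorkspaceClient, a placeholder space ID, and the *_and_wait waiter that the generator emits alongside these types:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    message = w.genie.start_conversation_and_wait(
        space_id="0123456789abcdef", content="Which region sold the most last quarter?"
    )
    for attachment in message.attachments or []:
        if attachment.query:
            print(attachment.query.query)  # the AI-generated SQL
        if attachment.text:
            print(attachment.text.content)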
Currently includes `workspace_rule_set` and could be enriched with `unity_catalog_privileges` before oAuth token generation.""" - + custom_claim: Optional[str] = None """Custom claim generated from external_value and external_viewer_id. Format: `urn:aibi:external_data:::`""" - + scope: Optional[str] = None """Scope defining access permissions.""" - + def as_dict(self) -> dict: """Serializes the GetPublishedDashboardTokenInfoResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authorization_details: body['authorization_details'] = [v.as_dict() for v in self.authorization_details] - if self.custom_claim is not None: body['custom_claim'] = self.custom_claim - if self.scope is not None: body['scope'] = self.scope + if self.authorization_details: + body["authorization_details"] = [v.as_dict() for v in self.authorization_details] + if self.custom_claim is not None: + body["custom_claim"] = self.custom_claim + if self.scope is not None: + body["scope"] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedDashboardTokenInfoResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.authorization_details: body['authorization_details'] = self.authorization_details - if self.custom_claim is not None: body['custom_claim'] = self.custom_claim - if self.scope is not None: body['scope'] = self.scope + if self.authorization_details: + body["authorization_details"] = self.authorization_details + if self.custom_claim is not None: + body["custom_claim"] = self.custom_claim + if self.scope is not None: + body["scope"] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedDashboardTokenInfoResponse: """Deserializes the GetPublishedDashboardTokenInfoResponse from a dictionary.""" - return cls(authorization_details=_repeated_dict(d, 'authorization_details', AuthorizationDetails), custom_claim=d.get('custom_claim', None), scope=d.get('scope', None)) - - - - - - - - + return cls( + authorization_details=_repeated_dict(d, "authorization_details", AuthorizationDetails), + custom_claim=d.get("custom_claim", None), + scope=d.get("scope", None), + ) class LifecycleState(Enum): - - - ACTIVE = 'ACTIVE' - TRASHED = 'TRASHED' - + ACTIVE = "ACTIVE" + TRASHED = "TRASHED" @dataclass class ListDashboardsResponse: dashboards: Optional[List[Dashboard]] = None - + next_page_token: Optional[str] = None """A token, which can be sent as `page_token` to retrieve the next page. 
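The `next_page_token` contract described here is the same one the generated list wrappers consume; a sketch of listing dashboards through it, assuming a configured WorkspaceClient:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Yields Dashboard objects across pages, following next_page_token internally.
    for dashboard in w.lakeview.list():
        print(dashboard.dashboard_id, dashboard.display_name, dashboard.lifecycle_state)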
If this field is omitted, there are no subsequent dashboards.""" - + def as_dict(self) -> dict: """Serializes the ListDashboardsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboards: body['dashboards'] = [v.as_dict() for v in self.dashboards] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.dashboards: + body["dashboards"] = [v.as_dict() for v in self.dashboards] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListDashboardsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboards: body['dashboards'] = self.dashboards - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.dashboards: + body["dashboards"] = self.dashboards + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListDashboardsResponse: """Deserializes the ListDashboardsResponse from a dictionary.""" - return cls(dashboards=_repeated_dict(d, 'dashboards', Dashboard), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + dashboards=_repeated_dict(d, "dashboards", Dashboard), next_page_token=d.get("next_page_token", None) + ) @dataclass @@ -913,32 +1008,31 @@ class ListSchedulesResponse: next_page_token: Optional[str] = None """A token that can be used as a `page_token` in subsequent requests to retrieve the next page of results. If this field is omitted, there are no subsequent schedules.""" - + schedules: Optional[List[Schedule]] = None - + def as_dict(self) -> dict: """Serializes the ListSchedulesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.schedules: body['schedules'] = [v.as_dict() for v in self.schedules] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schedules: + body["schedules"] = [v.as_dict() for v in self.schedules] return body def as_shallow_dict(self) -> dict: """Serializes the ListSchedulesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.schedules: body['schedules'] = self.schedules + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schedules: + body["schedules"] = self.schedules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListSchedulesResponse: """Deserializes the ListSchedulesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), schedules=_repeated_dict(d, 'schedules', Schedule)) - - - - - + return cls(next_page_token=d.get("next_page_token", None), schedules=_repeated_dict(d, "schedules", Schedule)) @dataclass @@ -946,115 +1040,122 @@ class ListSubscriptionsResponse: next_page_token: Optional[str] = None """A token that can be used as a `page_token` in subsequent requests to retrieve the next page of results. 
If this field is omitted, there are no subsequent subscriptions.""" - + subscriptions: Optional[List[Subscription]] = None - + def as_dict(self) -> dict: """Serializes the ListSubscriptionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.subscriptions: + body["subscriptions"] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> dict: """Serializes the ListSubscriptionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.subscriptions: body['subscriptions'] = self.subscriptions + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.subscriptions: + body["subscriptions"] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListSubscriptionsResponse: """Deserializes the ListSubscriptionsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), subscriptions=_repeated_dict(d, 'subscriptions', Subscription)) - - + return cls( + next_page_token=d.get("next_page_token", None), + subscriptions=_repeated_dict(d, "subscriptions", Subscription), + ) @dataclass class MessageError: error: Optional[str] = None - + type: Optional[MessageErrorType] = None - + def as_dict(self) -> dict: """Serializes the MessageError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error is not None: body['error'] = self.error - if self.type is not None: body['type'] = self.type.value + if self.error is not None: + body["error"] = self.error + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the MessageError into a shallow dictionary of its immediate attributes.""" body = {} - if self.error is not None: body['error'] = self.error - if self.type is not None: body['type'] = self.type + if self.error is not None: + body["error"] = self.error + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MessageError: """Deserializes the MessageError from a dictionary.""" - return cls(error=d.get('error', None), type=_enum(d, 'type', MessageErrorType)) - - + return cls(error=d.get("error", None), type=_enum(d, "type", MessageErrorType)) class MessageErrorType(Enum): - - - BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = 'BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION' - CHAT_COMPLETION_CLIENT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_EXCEPTION' - CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION' - CHAT_COMPLETION_NETWORK_EXCEPTION = 'CHAT_COMPLETION_NETWORK_EXCEPTION' - CONTENT_FILTER_EXCEPTION = 'CONTENT_FILTER_EXCEPTION' - CONTEXT_EXCEEDED_EXCEPTION = 'CONTEXT_EXCEEDED_EXCEPTION' - COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION = 'COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION' - COULD_NOT_GET_UC_SCHEMA_EXCEPTION = 'COULD_NOT_GET_UC_SCHEMA_EXCEPTION' - DEPLOYMENT_NOT_FOUND_EXCEPTION = 'DEPLOYMENT_NOT_FOUND_EXCEPTION' - DESCRIBE_QUERY_INVALID_SQL_ERROR = 'DESCRIBE_QUERY_INVALID_SQL_ERROR' - DESCRIBE_QUERY_TIMEOUT = 'DESCRIBE_QUERY_TIMEOUT' - DESCRIBE_QUERY_UNEXPECTED_FAILURE = 
'DESCRIBE_QUERY_UNEXPECTED_FAILURE' - FUNCTIONS_NOT_AVAILABLE_EXCEPTION = 'FUNCTIONS_NOT_AVAILABLE_EXCEPTION' - FUNCTION_ARGUMENTS_INVALID_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_EXCEPTION' - FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION' - FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION' - FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = 'FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION' - GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION = 'GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION' - GENERIC_CHAT_COMPLETION_EXCEPTION = 'GENERIC_CHAT_COMPLETION_EXCEPTION' - GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = 'GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION' - GENERIC_SQL_EXEC_API_CALL_EXCEPTION = 'GENERIC_SQL_EXEC_API_CALL_EXCEPTION' - ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = 'ILLEGAL_PARAMETER_DEFINITION_EXCEPTION' - INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION' - INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION' - INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION' - INVALID_CHAT_COMPLETION_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_JSON_EXCEPTION' - INVALID_COMPLETION_REQUEST_EXCEPTION = 'INVALID_COMPLETION_REQUEST_EXCEPTION' - INVALID_FUNCTION_CALL_EXCEPTION = 'INVALID_FUNCTION_CALL_EXCEPTION' - INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION = 'INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION' - INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION = 'INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION' - INVALID_SQL_UNKNOWN_TABLE_EXCEPTION = 'INVALID_SQL_UNKNOWN_TABLE_EXCEPTION' - INVALID_TABLE_IDENTIFIER_EXCEPTION = 'INVALID_TABLE_IDENTIFIER_EXCEPTION' - LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION' - MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION' - MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION' - MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION' - MISSING_SQL_QUERY_EXCEPTION = 'MISSING_SQL_QUERY_EXCEPTION' - NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = 'NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE' - NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION' - NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION' - RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION' - RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION' - REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION' - RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION' - SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION' - STOP_PROCESS_DUE_TO_AUTO_REGENERATE = 'STOP_PROCESS_DUE_TO_AUTO_REGENERATE' - TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION' - TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION' - TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION' - UNEXPECTED_REPLY_PROCESS_EXCEPTION = 'UNEXPECTED_REPLY_PROCESS_EXCEPTION' - UNKNOWN_AI_MODEL = 'UNKNOWN_AI_MODEL' - WAREHOUSE_ACCESS_MISSING_EXCEPTION = 'WAREHOUSE_ACCESS_MISSING_EXCEPTION' - WAREHOUSE_NOT_FOUND_EXCEPTION = 'WAREHOUSE_NOT_FOUND_EXCEPTION' + + BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = "BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION" + CHAT_COMPLETION_CLIENT_EXCEPTION = "CHAT_COMPLETION_CLIENT_EXCEPTION" + CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = "CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION" + CHAT_COMPLETION_NETWORK_EXCEPTION = 
"CHAT_COMPLETION_NETWORK_EXCEPTION" + CONTENT_FILTER_EXCEPTION = "CONTENT_FILTER_EXCEPTION" + CONTEXT_EXCEEDED_EXCEPTION = "CONTEXT_EXCEEDED_EXCEPTION" + COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION = "COULD_NOT_GET_MODEL_DEPLOYMENTS_EXCEPTION" + COULD_NOT_GET_UC_SCHEMA_EXCEPTION = "COULD_NOT_GET_UC_SCHEMA_EXCEPTION" + DEPLOYMENT_NOT_FOUND_EXCEPTION = "DEPLOYMENT_NOT_FOUND_EXCEPTION" + DESCRIBE_QUERY_INVALID_SQL_ERROR = "DESCRIBE_QUERY_INVALID_SQL_ERROR" + DESCRIBE_QUERY_TIMEOUT = "DESCRIBE_QUERY_TIMEOUT" + DESCRIBE_QUERY_UNEXPECTED_FAILURE = "DESCRIBE_QUERY_UNEXPECTED_FAILURE" + FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION" + FUNCTION_ARGUMENTS_INVALID_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_EXCEPTION" + FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION" + FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION" + FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = "FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION" + GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION = "GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION" + GENERIC_CHAT_COMPLETION_EXCEPTION = "GENERIC_CHAT_COMPLETION_EXCEPTION" + GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION" + GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION" + ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION" + INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" + INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" + INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION" + INVALID_CHAT_COMPLETION_JSON_EXCEPTION = "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" + INVALID_COMPLETION_REQUEST_EXCEPTION = "INVALID_COMPLETION_REQUEST_EXCEPTION" + INVALID_FUNCTION_CALL_EXCEPTION = "INVALID_FUNCTION_CALL_EXCEPTION" + INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION = "INVALID_SQL_MULTIPLE_DATASET_REFERENCES_EXCEPTION" + INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION = "INVALID_SQL_MULTIPLE_STATEMENTS_EXCEPTION" + INVALID_SQL_UNKNOWN_TABLE_EXCEPTION = "INVALID_SQL_UNKNOWN_TABLE_EXCEPTION" + INVALID_TABLE_IDENTIFIER_EXCEPTION = "INVALID_TABLE_IDENTIFIER_EXCEPTION" + LOCAL_CONTEXT_EXCEEDED_EXCEPTION = "LOCAL_CONTEXT_EXCEEDED_EXCEPTION" + MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION" + MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION" + MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION" + MISSING_SQL_QUERY_EXCEPTION = "MISSING_SQL_QUERY_EXCEPTION" + NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = "NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE" + NO_QUERY_TO_VISUALIZE_EXCEPTION = "NO_QUERY_TO_VISUALIZE_EXCEPTION" + NO_TABLES_TO_QUERY_EXCEPTION = "NO_TABLES_TO_QUERY_EXCEPTION" + RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = "RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION" + RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = "RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION" + REPLY_PROCESS_TIMEOUT_EXCEPTION = "REPLY_PROCESS_TIMEOUT_EXCEPTION" + RETRYABLE_PROCESSING_EXCEPTION = "RETRYABLE_PROCESSING_EXCEPTION" + SQL_EXECUTION_EXCEPTION = "SQL_EXECUTION_EXCEPTION" + STOP_PROCESS_DUE_TO_AUTO_REGENERATE = "STOP_PROCESS_DUE_TO_AUTO_REGENERATE" + TABLES_MISSING_EXCEPTION = "TABLES_MISSING_EXCEPTION" + TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = "TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION" + TOO_MANY_TABLES_EXCEPTION = 
"TOO_MANY_TABLES_EXCEPTION" + UNEXPECTED_REPLY_PROCESS_EXCEPTION = "UNEXPECTED_REPLY_PROCESS_EXCEPTION" + UNKNOWN_AI_MODEL = "UNKNOWN_AI_MODEL" + WAREHOUSE_ACCESS_MISSING_EXCEPTION = "WAREHOUSE_ACCESS_MISSING_EXCEPTION" + WAREHOUSE_NOT_FOUND_EXCEPTION = "WAREHOUSE_NOT_FOUND_EXCEPTION" + class MessageStatus(Enum): """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data @@ -1071,418 +1172,510 @@ class MessageStatus(Enum): anymore. The user needs to rerun the query. Rerun the SQL query result by calling [executeMessageAttachmentQuery](:method:genie/executeMessageAttachmentQuery) API. * `CANCELLED`: Message has been cancelled.""" - - ASKING_AI = 'ASKING_AI' - CANCELLED = 'CANCELLED' - COMPLETED = 'COMPLETED' - EXECUTING_QUERY = 'EXECUTING_QUERY' - FAILED = 'FAILED' - FETCHING_METADATA = 'FETCHING_METADATA' - FILTERING_CONTEXT = 'FILTERING_CONTEXT' - PENDING_WAREHOUSE = 'PENDING_WAREHOUSE' - QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED' - SUBMITTED = 'SUBMITTED' + + ASKING_AI = "ASKING_AI" + CANCELLED = "CANCELLED" + COMPLETED = "COMPLETED" + EXECUTING_QUERY = "EXECUTING_QUERY" + FAILED = "FAILED" + FETCHING_METADATA = "FETCHING_METADATA" + FILTERING_CONTEXT = "FILTERING_CONTEXT" + PENDING_WAREHOUSE = "PENDING_WAREHOUSE" + QUERY_RESULT_EXPIRED = "QUERY_RESULT_EXPIRED" + SUBMITTED = "SUBMITTED" + @dataclass class MigrateDashboardRequest: source_dashboard_id: str """UUID of the dashboard to be migrated.""" - + display_name: Optional[str] = None """Display name for the new Lakeview dashboard.""" - + parent_path: Optional[str] = None """The workspace path of the folder to contain the migrated Lakeview dashboard.""" - + update_parameter_syntax: Optional[bool] = None """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax (:param) when converting datasets in the dashboard.""" - + def as_dict(self) -> dict: """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id - if self.update_parameter_syntax is not None: body['update_parameter_syntax'] = self.update_parameter_syntax + if self.display_name is not None: + body["display_name"] = self.display_name + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.source_dashboard_id is not None: + body["source_dashboard_id"] = self.source_dashboard_id + if self.update_parameter_syntax is not None: + body["update_parameter_syntax"] = self.update_parameter_syntax return body def as_shallow_dict(self) -> dict: """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id - if self.update_parameter_syntax is not None: body['update_parameter_syntax'] = self.update_parameter_syntax + if self.display_name is not None: + body["display_name"] = self.display_name + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.source_dashboard_id is not None: + body["source_dashboard_id"] = self.source_dashboard_id + if 
self.update_parameter_syntax is not None: + body["update_parameter_syntax"] = self.update_parameter_syntax return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MigrateDashboardRequest: """Deserializes the MigrateDashboardRequest from a dictionary.""" - return cls(display_name=d.get('display_name', None), parent_path=d.get('parent_path', None), source_dashboard_id=d.get('source_dashboard_id', None), update_parameter_syntax=d.get('update_parameter_syntax', None)) - - + return cls( + display_name=d.get("display_name", None), + parent_path=d.get("parent_path", None), + source_dashboard_id=d.get("source_dashboard_id", None), + update_parameter_syntax=d.get("update_parameter_syntax", None), + ) @dataclass class PublishRequest: dashboard_id: Optional[str] = None """UUID identifying the dashboard to be published.""" - + embed_credentials: Optional[bool] = None """Flag to indicate if the publisher's credentials should be embedded in the published dashboard. These embedded credentials will be used to execute the published dashboard's queries.""" - + warehouse_id: Optional[str] = None """The ID of the warehouse that can be used to override the warehouse which was set in the draft.""" - + def as_dict(self) -> dict: """Serializes the PublishRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.embed_credentials is not None: + body["embed_credentials"] = self.embed_credentials + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the PublishRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.embed_credentials is not None: + body["embed_credentials"] = self.embed_credentials + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PublishRequest: """Deserializes the PublishRequest from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), embed_credentials=d.get('embed_credentials', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + dashboard_id=d.get("dashboard_id", None), + embed_credentials=d.get("embed_credentials", None), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class PublishedDashboard: display_name: Optional[str] = None """The display name of the published dashboard.""" - + embed_credentials: Optional[bool] = None """Indicates whether credentials are embedded in the published dashboard.""" - + revision_create_time: Optional[str] = None """The timestamp of when the published dashboard was last revised.""" - + warehouse_id: Optional[str] = None """The warehouse ID used to run the published dashboard.""" - + def as_dict(self) -> dict: """Serializes the PublishedDashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if 
self.display_name is not None: body['display_name'] = self.display_name - if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials - if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.embed_credentials is not None: + body["embed_credentials"] = self.embed_credentials + if self.revision_create_time is not None: + body["revision_create_time"] = self.revision_create_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the PublishedDashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials - if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.embed_credentials is not None: + body["embed_credentials"] = self.embed_credentials + if self.revision_create_time is not None: + body["revision_create_time"] = self.revision_create_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PublishedDashboard: """Deserializes the PublishedDashboard from a dictionary.""" - return cls(display_name=d.get('display_name', None), embed_credentials=d.get('embed_credentials', None), revision_create_time=d.get('revision_create_time', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + display_name=d.get("display_name", None), + embed_credentials=d.get("embed_credentials", None), + revision_create_time=d.get("revision_create_time", None), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class Result: is_truncated: Optional[bool] = None """If result is truncated""" - + row_count: Optional[int] = None """Row count of the result""" - + statement_id: Optional[str] = None """Statement Execution API statement id. 
Use [Get status, manifest, and result first chunk](:method:statementexecution/getstatement) to get the full result data.""" - + def as_dict(self) -> dict: """Serializes the Result into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_truncated is not None: body['is_truncated'] = self.is_truncated - if self.row_count is not None: body['row_count'] = self.row_count - if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.is_truncated is not None: + body["is_truncated"] = self.is_truncated + if self.row_count is not None: + body["row_count"] = self.row_count + if self.statement_id is not None: + body["statement_id"] = self.statement_id return body def as_shallow_dict(self) -> dict: """Serializes the Result into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_truncated is not None: body['is_truncated'] = self.is_truncated - if self.row_count is not None: body['row_count'] = self.row_count - if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.is_truncated is not None: + body["is_truncated"] = self.is_truncated + if self.row_count is not None: + body["row_count"] = self.row_count + if self.statement_id is not None: + body["statement_id"] = self.statement_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Result: """Deserializes the Result from a dictionary.""" - return cls(is_truncated=d.get('is_truncated', None), row_count=d.get('row_count', None), statement_id=d.get('statement_id', None)) - - + return cls( + is_truncated=d.get("is_truncated", None), + row_count=d.get("row_count", None), + statement_id=d.get("statement_id", None), + ) @dataclass class Schedule: cron_schedule: CronSchedule """The cron expression describing the frequency of the periodic refresh for this schedule.""" - + create_time: Optional[str] = None """A timestamp indicating when the schedule was created.""" - + dashboard_id: Optional[str] = None """UUID identifying the dashboard to which the schedule belongs.""" - + display_name: Optional[str] = None """The display name for schedule.""" - + etag: Optional[str] = None """The etag for the schedule. 
Must be left empty on create, must be provided on updates to ensure that the schedule has not been modified since the last read, and can be optionally provided on delete.""" - + pause_status: Optional[SchedulePauseStatus] = None """The status indicates whether this schedule is paused or not.""" - + schedule_id: Optional[str] = None """UUID identifying the schedule.""" - + update_time: Optional[str] = None """A timestamp indicating when the schedule was last updated.""" - + warehouse_id: Optional[str] = None """The warehouse id to run the dashboard with for the schedule.""" - + def as_dict(self) -> dict: """Serializes the Schedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict() - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.etag is not None: body['etag'] = self.etag - if self.pause_status is not None: body['pause_status'] = self.pause_status.value - if self.schedule_id is not None: body['schedule_id'] = self.schedule_id - if self.update_time is not None: body['update_time'] = self.update_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.cron_schedule: + body["cron_schedule"] = self.cron_schedule.as_dict() + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.etag is not None: + body["etag"] = self.etag + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value + if self.schedule_id is not None: + body["schedule_id"] = self.schedule_id + if self.update_time is not None: + body["update_time"] = self.update_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the Schedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.cron_schedule: body['cron_schedule'] = self.cron_schedule - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.display_name is not None: body['display_name'] = self.display_name - if self.etag is not None: body['etag'] = self.etag - if self.pause_status is not None: body['pause_status'] = self.pause_status - if self.schedule_id is not None: body['schedule_id'] = self.schedule_id - if self.update_time is not None: body['update_time'] = self.update_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.cron_schedule: + body["cron_schedule"] = self.cron_schedule + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.display_name is not None: + body["display_name"] = self.display_name + if self.etag is not None: + body["etag"] = self.etag + if self.pause_status is not None: + body["pause_status"] = self.pause_status + if self.schedule_id is not None: + body["schedule_id"] = self.schedule_id + if self.update_time is not None: + body["update_time"] = self.update_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> Schedule: """Deserializes the Schedule from a dictionary.""" - return cls(create_time=d.get('create_time', None), cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule), dashboard_id=d.get('dashboard_id', None), display_name=d.get('display_name', None), etag=d.get('etag', None), pause_status=_enum(d, 'pause_status', SchedulePauseStatus), schedule_id=d.get('schedule_id', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + create_time=d.get("create_time", None), + cron_schedule=_from_dict(d, "cron_schedule", CronSchedule), + dashboard_id=d.get("dashboard_id", None), + display_name=d.get("display_name", None), + etag=d.get("etag", None), + pause_status=_enum(d, "pause_status", SchedulePauseStatus), + schedule_id=d.get("schedule_id", None), + update_time=d.get("update_time", None), + warehouse_id=d.get("warehouse_id", None), + ) class SchedulePauseStatus(Enum): - - - PAUSED = 'PAUSED' - UNPAUSED = 'UNPAUSED' + + PAUSED = "PAUSED" + UNPAUSED = "UNPAUSED" + @dataclass class Subscriber: destination_subscriber: Optional[SubscriptionSubscriberDestination] = None """The destination to receive the subscription email. This parameter is mutually exclusive with `user_subscriber`.""" - + user_subscriber: Optional[SubscriptionSubscriberUser] = None """The user to receive the subscription email. This parameter is mutually exclusive with `destination_subscriber`.""" - + def as_dict(self) -> dict: """Serializes the Subscriber into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber.as_dict() - if self.user_subscriber: body['user_subscriber'] = self.user_subscriber.as_dict() + if self.destination_subscriber: + body["destination_subscriber"] = self.destination_subscriber.as_dict() + if self.user_subscriber: + body["user_subscriber"] = self.user_subscriber.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Subscriber into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber - if self.user_subscriber: body['user_subscriber'] = self.user_subscriber + if self.destination_subscriber: + body["destination_subscriber"] = self.destination_subscriber + if self.user_subscriber: + body["user_subscriber"] = self.user_subscriber return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Subscriber: """Deserializes the Subscriber from a dictionary.""" - return cls(destination_subscriber=_from_dict(d, 'destination_subscriber', SubscriptionSubscriberDestination), user_subscriber=_from_dict(d, 'user_subscriber', SubscriptionSubscriberUser)) - - + return cls( + destination_subscriber=_from_dict(d, "destination_subscriber", SubscriptionSubscriberDestination), + user_subscriber=_from_dict(d, "user_subscriber", SubscriptionSubscriberUser), + ) @dataclass class Subscription: subscriber: Subscriber """Subscriber details for users and destinations to be added as subscribers to the schedule.""" - + create_time: Optional[str] = None """A timestamp indicating when the subscription was created.""" - + created_by_user_id: Optional[int] = None """UserId of the user who adds subscribers (users or notification destinations) to the dashboard's schedule.""" - + dashboard_id: Optional[str] = None """UUID identifying the dashboard to which the subscription belongs.""" - + etag: Optional[str] = None """The 
etag for the subscription. Must be left empty on create, can be optionally provided on delete to ensure that the subscription has not been deleted since the last read.""" - + schedule_id: Optional[str] = None """UUID identifying the schedule to which the subscription belongs.""" - + subscription_id: Optional[str] = None """UUID identifying the subscription.""" - + update_time: Optional[str] = None """A timestamp indicating when the subscription was last updated.""" - + def as_dict(self) -> dict: """Serializes the Subscription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.etag is not None: body['etag'] = self.etag - if self.schedule_id is not None: body['schedule_id'] = self.schedule_id - if self.subscriber: body['subscriber'] = self.subscriber.as_dict() - if self.subscription_id is not None: body['subscription_id'] = self.subscription_id - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by_user_id is not None: + body["created_by_user_id"] = self.created_by_user_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.etag is not None: + body["etag"] = self.etag + if self.schedule_id is not None: + body["schedule_id"] = self.schedule_id + if self.subscriber: + body["subscriber"] = self.subscriber.as_dict() + if self.subscription_id is not None: + body["subscription_id"] = self.subscription_id + if self.update_time is not None: + body["update_time"] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the Subscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.etag is not None: body['etag'] = self.etag - if self.schedule_id is not None: body['schedule_id'] = self.schedule_id - if self.subscriber: body['subscriber'] = self.subscriber - if self.subscription_id is not None: body['subscription_id'] = self.subscription_id - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by_user_id is not None: + body["created_by_user_id"] = self.created_by_user_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.etag is not None: + body["etag"] = self.etag + if self.schedule_id is not None: + body["schedule_id"] = self.schedule_id + if self.subscriber: + body["subscriber"] = self.subscriber + if self.subscription_id is not None: + body["subscription_id"] = self.subscription_id + if self.update_time is not None: + body["update_time"] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Subscription: """Deserializes the Subscription from a dictionary.""" - return cls(create_time=d.get('create_time', None), created_by_user_id=d.get('created_by_user_id', None), dashboard_id=d.get('dashboard_id', None), etag=d.get('etag', None), schedule_id=d.get('schedule_id', None), subscriber=_from_dict(d, 
'subscriber', Subscriber), subscription_id=d.get('subscription_id', None), update_time=d.get('update_time', None)) - - + return cls( + create_time=d.get("create_time", None), + created_by_user_id=d.get("created_by_user_id", None), + dashboard_id=d.get("dashboard_id", None), + etag=d.get("etag", None), + schedule_id=d.get("schedule_id", None), + subscriber=_from_dict(d, "subscriber", Subscriber), + subscription_id=d.get("subscription_id", None), + update_time=d.get("update_time", None), + ) @dataclass class SubscriptionSubscriberDestination: destination_id: str """The canonical identifier of the destination to receive email notification.""" - + def as_dict(self) -> dict: """Serializes the SubscriptionSubscriberDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.destination_id is not None: + body["destination_id"] = self.destination_id return body def as_shallow_dict(self) -> dict: """Serializes the SubscriptionSubscriberDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: body['destination_id'] = self.destination_id + if self.destination_id is not None: + body["destination_id"] = self.destination_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberDestination: """Deserializes the SubscriptionSubscriberDestination from a dictionary.""" - return cls(destination_id=d.get('destination_id', None)) - - + return cls(destination_id=d.get("destination_id", None)) @dataclass class SubscriptionSubscriberUser: user_id: int """UserId of the subscriber.""" - + def as_dict(self) -> dict: """Serializes the SubscriptionSubscriberUser into a dictionary suitable for use as a JSON request body.""" body = {} - if self.user_id is not None: body['user_id'] = self.user_id + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the SubscriptionSubscriberUser into a shallow dictionary of its immediate attributes.""" body = {} - if self.user_id is not None: body['user_id'] = self.user_id + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriberUser: """Deserializes the SubscriptionSubscriberUser from a dictionary.""" - return cls(user_id=d.get('user_id', None)) - - + return cls(user_id=d.get("user_id", None)) @dataclass class TextAttachment: content: Optional[str] = None """AI generated message""" - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: body['content'] = self.content - if self.id is not None: body['id'] = self.id + if self.content is not None: + body["content"] = self.content + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the TextAttachment into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: body['content'] = self.content - if self.id is not None: body['id'] = self.id + if self.content is not None: + body["content"] = self.content + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TextAttachment: """Deserializes the TextAttachment from a dictionary.""" - return cls(content=d.get('content', None), 
id=d.get('id', None)) - - - - - + return cls(content=d.get("content", None), id=d.get("id", None)) @dataclass @@ -1501,11 +1694,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> TrashDashboardResponse: """Deserializes the TrashDashboardResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1524,16 +1712,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UnpublishDashboardResponse: """Deserializes the UnpublishDashboardResponse from a dictionary.""" return cls() - - - - - - - - - - class GenieAPI: @@ -1541,95 +1719,98 @@ class GenieAPI: business users can use to ask questions using natural language. Genie uses data registered to Unity Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks Assistant must be enabled.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_message_genie_completed(self, conversation_id: str, message_id: str, space_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage: - deadline = time.time() + timeout.total_seconds() - target_states = (MessageStatus.COMPLETED, ) - failure_states = (MessageStatus.FAILED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) - status = poll.status - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach COMPLETED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - def create_message(self - , space_id: str, conversation_id: str, content: str - ) -> Wait[GenieMessage]: + def wait_get_message_genie_completed( + self, + conversation_id: str, + message_id: str, + space_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[GenieMessage], None]] = None, + ) -> GenieMessage: + deadline = time.time() + timeout.total_seconds() + target_states = (MessageStatus.COMPLETED,) + failure_states = (MessageStatus.FAILED,) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) + status = poll.status + status_message = f"current status: {status}" + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach COMPLETED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]: """Create conversation message. 
- + Create new message in a [conversation](:method:genie/startconversation). The AI response uses all previously created messages in the conversation to respond. - + :param space_id: str The ID associated with the Genie space where the conversation is started. :param conversation_id: str The ID associated with the conversation. :param content: str User message content. - + :returns: Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details. """ body = {} - if content is not None: body['content'] = content - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages', body=body - - , headers=headers - ) - return Wait(self.wait_get_message_genie_completed - , response = GenieMessage.from_dict(op_response) - , conversation_id=conversation_id, message_id=op_response['message_id'], space_id=space_id) - - - def create_message_and_wait(self - , space_id: str, conversation_id: str, content: str - , - timeout=timedelta(minutes=20)) -> GenieMessage: - return self.create_message(content=content, conversation_id=conversation_id, space_id=space_id).result(timeout=timeout) - - - - - def execute_message_attachment_query(self - , space_id: str, conversation_id: str, message_id: str, attachment_id: str - ) -> GenieGetMessageQueryResultResponse: + if content is not None: + body["content"] = content + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do( + "POST", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages", + body=body, + headers=headers, + ) + return Wait( + self.wait_get_message_genie_completed, + response=GenieMessage.from_dict(op_response), + conversation_id=conversation_id, + message_id=op_response["message_id"], + space_id=space_id, + ) + + def create_message_and_wait( + self, space_id: str, conversation_id: str, content: str, timeout=timedelta(minutes=20) + ) -> GenieMessage: + return self.create_message(content=content, conversation_id=conversation_id, space_id=space_id).result( + timeout=timeout + ) + + def execute_message_attachment_query( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str + ) -> GenieGetMessageQueryResultResponse: """Execute message attachment SQL query. - + Execute the SQL for a message query attachment. Use this API when the query attachment has expired and needs to be re-executed. 
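A minimal sketch of re-running an expired attachment (illustrative; assumes the standard `WorkspaceClient` entry point exposes this service as `w.genie`, and all IDs are placeholders)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Re-execute the SQL behind a query attachment whose result expired.
    result = w.genie.execute_message_attachment_query(
        space_id="<space-id>",
        conversation_id="<conversation-id>",
        message_id="<message-id>",
        attachment_id="<attachment-id>",
    )
    # `result` is a GenieGetMessageQueryResultResponse.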
- + :param space_id: str Genie space ID :param conversation_id: str @@ -1638,62 +1819,60 @@ def execute_message_attachment_query(self Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/execute-query' - - , headers=headers - ) - return GenieGetMessageQueryResultResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/execute-query", + headers=headers, + ) + return GenieGetMessageQueryResultResponse.from_dict(res) - def execute_message_query(self - , space_id: str, conversation_id: str, message_id: str - ) -> GenieGetMessageQueryResultResponse: + def execute_message_query( + self, space_id: str, conversation_id: str, message_id: str + ) -> GenieGetMessageQueryResultResponse: """[Deprecated] Execute SQL query in a conversation message. - + Execute the SQL query in the message. - + :param space_id: str Genie space ID :param conversation_id: str Conversation ID :param message_id: str Message ID - + :returns: :class:`GenieGetMessageQueryResultResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query' - - , headers=headers - ) - return GenieGetMessageQueryResultResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def generate_download_full_query_result(self - , space_id: str, conversation_id: str, message_id: str, attachment_id: str - ) -> GenieGenerateDownloadFullQueryResultResponse: + res = self._api.do( + "POST", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query", + headers=headers, + ) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def generate_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str + ) -> GenieGenerateDownloadFullQueryResultResponse: """Generate full query result download. - + Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of the download. The query result is stored in an external link and can be retrieved using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. See [Execute Statement](:method:statementexecution/executestatement) for more details. 
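A hedged sketch of the two-step download flow (`w` is a `WorkspaceClient`; the `download_id` attribute access is assumed from the description above, not verified)::

    # Step 1: initiate the SQL execution and obtain a download ID.
    gen = w.genie.generate_download_full_query_result(
        space_id="<space-id>",
        conversation_id="<conversation-id>",
        message_id="<message-id>",
        attachment_id="<attachment-id>",
    )
    # Step 2: poll for completion and the external result links.
    dl = w.genie.get_download_full_query_result(
        space_id="<space-id>",
        conversation_id="<conversation-id>",
        message_id="<message-id>",
        attachment_id="<attachment-id>",
        download_id=gen.download_id,  # assumed field name, per the docstring
    )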
- + :param space_id: str Genie space ID :param conversation_id: str @@ -1702,27 +1881,26 @@ def generate_download_full_query_result(self Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGenerateDownloadFullQueryResultResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads' - - , headers=headers - ) - return GenieGenerateDownloadFullQueryResultResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_download_full_query_result(self - , space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str - ) -> GenieGetDownloadFullQueryResultResponse: + res = self._api.do( + "POST", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads", + headers=headers, + ) + return GenieGenerateDownloadFullQueryResultResponse.from_dict(res) + + def get_download_full_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str + ) -> GenieGetDownloadFullQueryResultResponse: """Get download full query result. - + After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and successfully receiving a `download_id`, use this API to poll the download progress. When the download is complete, the API returns one or more external links to the query result files. Warning: Databricks @@ -1730,7 +1908,7 @@ def get_download_full_query_result(self You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute Statement](:method:statementexecution/executestatement) for more details. - + :param space_id: str Genie space ID :param conversation_id: str @@ -1742,59 +1920,55 @@ def get_download_full_query_result(self :param download_id: str Download ID. This ID is provided by the [Generate Download endpoint](:method:genie/generateDownloadFullQueryResult) - + :returns: :class:`GenieGetDownloadFullQueryResultResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}' - - , headers=headers - ) - return GenieGetDownloadFullQueryResultResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/downloads/{download_id}", + headers=headers, + ) + return GenieGetDownloadFullQueryResultResponse.from_dict(res) - def get_message(self - , space_id: str, conversation_id: str, message_id: str - ) -> GenieMessage: + def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: """Get conversation message. - + Get message from conversation. - + :param space_id: str The ID associated with the Genie space where the target conversation is located. :param conversation_id: str The ID associated with the target conversation. :param message_id: str The ID associated with the target message from the identified conversation. 
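For example (illustrative; `w` is a `WorkspaceClient` and the IDs are placeholders)::

    from databricks.sdk.service.dashboards import MessageStatus

    msg = w.genie.get_message(
        space_id="<space-id>",
        conversation_id="<conversation-id>",
        message_id="<message-id>",
    )
    # GenieMessage carries a status, as used by the polling helper above.
    if msg.status == MessageStatus.COMPLETED:
        print("Genie has finished answering")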
- + :returns: :class:`GenieMessage` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}' - - , headers=headers - ) - return GenieMessage.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}", + headers=headers, + ) + return GenieMessage.from_dict(res) - def get_message_attachment_query_result(self - , space_id: str, conversation_id: str, message_id: str, attachment_id: str - ) -> GenieGetMessageQueryResultResponse: + def get_message_attachment_query_result( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str + ) -> GenieGetMessageQueryResultResponse: """Get message attachment SQL query result. - + Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. - + :param space_id: str Genie space ID :param conversation_id: str @@ -1803,60 +1977,58 @@ def get_message_attachment_query_result(self Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/query-result' - - , headers=headers - ) - return GenieGetMessageQueryResultResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_message_query_result(self - , space_id: str, conversation_id: str, message_id: str - ) -> GenieGetMessageQueryResultResponse: + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/query-result", + headers=headers, + ) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def get_message_query_result( + self, space_id: str, conversation_id: str, message_id: str + ) -> GenieGetMessageQueryResultResponse: """[Deprecated] Get conversation message SQL query result. - + Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY`. - + :param space_id: str Genie space ID :param conversation_id: str Conversation ID :param message_id: str Message ID - + :returns: :class:`GenieGetMessageQueryResultResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result' - - , headers=headers - ) - return GenieGetMessageQueryResultResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_message_query_result_by_attachment(self - , space_id: str, conversation_id: str, message_id: str, attachment_id: str - ) -> GenieGetMessageQueryResultResponse: + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result", + headers=headers, + ) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def get_message_query_result_by_attachment( + self, space_id: str, conversation_id: str, message_id: str, attachment_id: str + ) -> GenieGetMessageQueryResultResponse: """[Deprecated] Get conversation message SQL query result. 
- + Get the result of SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` OR `COMPLETED`. - + :param space_id: str Genie space ID :param conversation_id: str @@ -1865,212 +2037,174 @@ def get_message_query_result_by_attachment(self Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}' - - , headers=headers - ) - return GenieGetMessageQueryResultResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}", + headers=headers, + ) + return GenieGetMessageQueryResultResponse.from_dict(res) - def get_space(self - , space_id: str - ) -> GenieSpace: + def get_space(self, space_id: str) -> GenieSpace: """Get Genie Space. - + Get details of a Genie Space. - + :param space_id: str The ID associated with the Genie space - + :returns: :class:`GenieSpace` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/genie/spaces/{space_id}' - - , headers=headers - ) - return GenieSpace.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/genie/spaces/{space_id}", headers=headers) + return GenieSpace.from_dict(res) - def list_spaces(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> GenieListSpacesResponse: + def list_spaces( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> GenieListSpacesResponse: """List Genie spaces. - + Get list of Genie Spaces. - + :param page_size: int (optional) Maximum number of spaces to return per page :param page_token: str (optional) Pagination token for getting the next page of results - + :returns: :class:`GenieListSpacesResponse` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/genie/spaces', query=query - - , headers=headers - ) + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/genie/spaces", query=query, headers=headers) return GenieListSpacesResponse.from_dict(res) - - - - - def start_conversation(self - , space_id: str, content: str - ) -> Wait[GenieMessage]: + def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: """Start conversation. - + Start a new conversation. - + :param space_id: str The ID associated with the Genie space where you want to start a conversation. :param content: str The text of the message that starts the conversation. - + :returns: Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details. 
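Illustrative usage (space ID and question are placeholders; `w` is a `WorkspaceClient`)::

    from datetime import timedelta

    # Fire-and-wait, via the generated convenience wrapper:
    message = w.genie.start_conversation_and_wait(
        space_id="<space-id>", content="<your question>"
    )

    # Or keep the waiter to control the timeout yourself:
    waiter = w.genie.start_conversation(space_id="<space-id>", content="<your question>")
    message = waiter.result(timeout=timedelta(minutes=5))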
""" body = {} - if content is not None: body['content'] = content - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/genie/spaces/{space_id}/start-conversation', body=body - - , headers=headers - ) - return Wait(self.wait_get_message_genie_completed - , response = GenieStartConversationResponse.from_dict(op_response) - , conversation_id=op_response['conversation_id'], message_id=op_response['message_id'], space_id=space_id) - - - def start_conversation_and_wait(self - , space_id: str, content: str - , - timeout=timedelta(minutes=20)) -> GenieMessage: + if content is not None: + body["content"] = content + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do( + "POST", f"/api/2.0/genie/spaces/{space_id}/start-conversation", body=body, headers=headers + ) + return Wait( + self.wait_get_message_genie_completed, + response=GenieStartConversationResponse.from_dict(op_response), + conversation_id=op_response["conversation_id"], + message_id=op_response["message_id"], + space_id=space_id, + ) + + def start_conversation_and_wait(self, space_id: str, content: str, timeout=timedelta(minutes=20)) -> GenieMessage: return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) - - + + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can be done with Workspace API (import, export, get-status, list, delete).""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , dashboard: Dashboard - ) -> Dashboard: + def create(self, dashboard: Dashboard) -> Dashboard: """Create dashboard. - + Create a draft dashboard. - + :param dashboard: :class:`Dashboard` - + :returns: :class:`Dashboard` """ body = dashboard.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/lakeview/dashboards', body=body - - , headers=headers - ) - return Dashboard.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/lakeview/dashboards", body=body, headers=headers) + return Dashboard.from_dict(res) - def create_schedule(self - , dashboard_id: str, schedule: Schedule - ) -> Schedule: + def create_schedule(self, dashboard_id: str, schedule: Schedule) -> Schedule: """Create dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule: :class:`Schedule` - + :returns: :class:`Schedule` """ body = schedule.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules', body=body - - , headers=headers - ) - return Schedule.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules", body=body, headers=headers) + return Schedule.from_dict(res) - def create_subscription(self - , dashboard_id: str, schedule_id: str, subscription: Subscription - ) -> Subscription: + def create_subscription(self, dashboard_id: str, schedule_id: str, subscription: Subscription) -> Subscription: """Create schedule subscription. 
- + :param dashboard_id: str UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str UUID identifying the schedule to which the subscription belongs. :param subscription: :class:`Subscription` - + :returns: :class:`Subscription` """ body = subscription.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions", + body=body, + headers=headers, + ) return Subscription.from_dict(res) - - - - - def delete_schedule(self - , dashboard_id: str, schedule_id: str - , * - , etag: Optional[str] = None): + def delete_schedule(self, dashboard_id: str, schedule_id: str, *, etag: Optional[str] = None): """Delete dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str @@ -2078,30 +2212,29 @@ def delete_schedule(self :param etag: str (optional) The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been modified from its last retrieval. - - - """ - - query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}', query=query - - , headers=headers - ) - - - - - def delete_subscription(self - , dashboard_id: str, schedule_id: str, subscription_id: str - , * - , etag: Optional[str] = None): + """ + + query = {} + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}", + query=query, + headers=headers, + ) + + def delete_subscription( + self, dashboard_id: str, schedule_id: str, subscription_id: str, *, etag: Optional[str] = None + ): """Delete schedule subscription. - + :param dashboard_id: str UUID identifying the dashboard which the subscription belongs. :param schedule_id: str @@ -2111,132 +2244,114 @@ def delete_subscription(self :param etag: str (optional) The etag for the subscription. Can be optionally provided to ensure that the subscription has not been modified since the last read. - - - """ - - query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}', query=query - - , headers=headers - ) - - - - - def get(self - , dashboard_id: str - ) -> Dashboard: + """ + + query = {} + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}", + query=query, + headers=headers, + ) + + def get(self, dashboard_id: str) -> Dashboard: """Get dashboard. - + Get a draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard. 
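A short example (illustrative; assumes the service is reachable as `w.lakeview` on a `WorkspaceClient`)::

    dashboard = w.lakeview.get(dashboard_id="<dashboard-id>")
    # Like all dataclasses in this module, the result can be serialized
    # back into a JSON-ready dict:
    payload = dashboard.as_dict()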
+    def get(self, dashboard_id: str) -> Dashboard:
        """Get dashboard.
-
+
        Get a draft dashboard.
-
+
        :param dashboard_id: str
          UUID identifying the dashboard.
-
+
        :returns: :class:`Dashboard`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}'
-
-        , headers=headers
-        )
-        return Dashboard.from_dict(res)
-
-
-
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}", headers=headers)
+        return Dashboard.from_dict(res)

-    def get_published(self
-        , dashboard_id: str
-        ) -> PublishedDashboard:
+    def get_published(self, dashboard_id: str) -> PublishedDashboard:
        """Get published dashboard.
-
+
        Get the current published dashboard.
-
+
        :param dashboard_id: str
          UUID identifying the published dashboard.
-
+
        :returns: :class:`PublishedDashboard`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published'
-
-        , headers=headers
-        )
-        return PublishedDashboard.from_dict(res)
-
-
-
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", headers=headers)
+        return PublishedDashboard.from_dict(res)

-    def get_schedule(self
-        , dashboard_id: str, schedule_id: str
-        ) -> Schedule:
+    def get_schedule(self, dashboard_id: str, schedule_id: str) -> Schedule:
        """Get dashboard schedule.
-
+
        :param dashboard_id: str
          UUID identifying the dashboard to which the schedule belongs.
        :param schedule_id: str
          UUID identifying the schedule.
-
+
        :returns: :class:`Schedule`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}'
-
-        , headers=headers
-        )
-        return Schedule.from_dict(res)
-
-
-
+        headers = {
+            "Accept": "application/json",
+        }

-    def get_subscription(self
-        , dashboard_id: str, schedule_id: str, subscription_id: str
-        ) -> Subscription:
+        res = self._api.do(
+            "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}", headers=headers
+        )
+        return Schedule.from_dict(res)
+
+    def get_subscription(self, dashboard_id: str, schedule_id: str, subscription_id: str) -> Subscription:
        """Get schedule subscription.
-
+
        :param dashboard_id: str
          UUID identifying the dashboard to which the subscription belongs.
        :param schedule_id: str
          UUID identifying the schedule to which the subscription belongs.
        :param subscription_id: str
          UUID identifying the subscription.
-
+
        :returns: :class:`Subscription`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}'
-
-        , headers=headers
-        )
-        return Subscription.from_dict(res)
-
-
-
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET",
+            f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions/{subscription_id}",
+            headers=headers,
+        )
+        return Subscription.from_dict(res)

-    def list(self
-
-        , *
-        , page_size: Optional[int] = None, page_token: Optional[str] = None, show_trashed: Optional[bool] = None, view: Optional[DashboardView] = None) -> Iterator[Dashboard]:
+    def list(
+        self,
+        *,
+        page_size: Optional[int] = None,
+        page_token: Optional[str] = None,
+        show_trashed: Optional[bool] = None,
+        view: Optional[DashboardView] = None,
+    ) -> Iterator[Dashboard]:
        """List dashboards.
-
+
        :param page_size: int (optional)
          The number of dashboards to return per page.
        :param page_token: str (optional)
@@ -2247,42 +2362,37 @@ def list(self
          returned.
        :param view: :class:`DashboardView` (optional)
          `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.
-
+
        :returns: Iterator over :class:`Dashboard`
        """
-
-        query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if show_trashed is not None: query['show_trashed'] = show_trashed
-        if view is not None: query['view'] = view.value
-        headers = {'Accept': 'application/json',}
-
-
-
-        while True:
-            json = self._api.do('GET','/api/2.0/lakeview/dashboards', query=query
-
-            , headers=headers
-            )
-            if 'dashboards' in json:
-                for v in json['dashboards']:
-                    yield Dashboard.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
-
-
-
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if show_trashed is not None:
+            query["show_trashed"] = show_trashed
+        if view is not None:
+            query["view"] = view.value
+        headers = {
+            "Accept": "application/json",
+        }

-    def list_schedules(self
-        , dashboard_id: str
-        , *
-        , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Schedule]:
+        while True:
+            json = self._api.do("GET", "/api/2.0/lakeview/dashboards", query=query, headers=headers)
+            if "dashboards" in json:
+                for v in json["dashboards"]:
+                    yield Dashboard.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
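+    # [Editor's illustrative sketch, not generated code] The generator above follows
+    # `next_page_token` transparently, so callers simply iterate. A minimal sketch,
+    # assuming this service is mounted as `w.lakeview` on a configured WorkspaceClient
+    # and that Dashboard exposes `dashboard_id`/`display_name`:
+    #
+    #   from databricks.sdk import WorkspaceClient
+    #
+    #   w = WorkspaceClient()
+    #   for dashboard in w.lakeview.list(page_size=100, show_trashed=False):
+    #       print(dashboard.dashboard_id, dashboard.display_name)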
+    def list_schedules(
+        self, dashboard_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[Schedule]:
        """List dashboard schedules.
-
+
        :param dashboard_id: str
          UUID identifying the dashboard to which the schedules belong.
        :param page_size: int (optional)
@@ -2290,40 +2400,35 @@ def list_schedules
        :param page_token: str (optional)
          A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent page.
-
+
        :returns: Iterator over :class:`Schedule`
        """
-
-        query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json',}
-
-
-
-        while True:
-            json = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules', query=query
-
-            , headers=headers
-            )
-            if 'schedules' in json:
-                for v in json['schedules']:
-                    yield Schedule.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
-
-
-
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }

-    def list_subscriptions(self
-        , dashboard_id: str, schedule_id: str
-        , *
-        , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Subscription]:
+        while True:
+            json = self._api.do(
+                "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules", query=query, headers=headers
+            )
+            if "schedules" in json:
+                for v in json["schedules"]:
+                    yield Schedule.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def list_subscriptions(
+        self, dashboard_id: str, schedule_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[Subscription]:
        """List schedule subscriptions.
-
+
        :param dashboard_id: str
          UUID identifying the dashboard to which the subscriptions belong.
        :param schedule_id: str
@@ -2333,42 +2438,45 @@ def list_subscriptions
        :param page_token: str (optional)
          A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent
          page.
-
+
        :returns: Iterator over :class:`Subscription`
        """
-
-        query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json',}
-
-
-
-        while True:
-            json = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions', query=query
-
-            , headers=headers
-            )
-            if 'subscriptions' in json:
-                for v in json['subscriptions']:
-                    yield Subscription.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
-
-
-
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }

-    def migrate(self
-        , source_dashboard_id: str
-        , *
-        , display_name: Optional[str] = None, parent_path: Optional[str] = None, update_parameter_syntax: Optional[bool] = None) -> Dashboard:
+        while True:
+            json = self._api.do(
+                "GET",
+                f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}/subscriptions",
+                query=query,
+                headers=headers,
+            )
+            if "subscriptions" in json:
+                for v in json["subscriptions"]:
+                    yield Subscription.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def migrate(
+        self,
+        source_dashboard_id: str,
+        *,
+        display_name: Optional[str] = None,
+        parent_path: Optional[str] = None,
+        update_parameter_syntax: Optional[bool] = None,
+    ) -> Dashboard:
        """Migrate dashboard.
-
+
        Migrates a classic SQL dashboard to Lakeview.
- + :param source_dashboard_id: str UUID of the dashboard to be migrated. :param display_name: str (optional) @@ -2378,34 +2486,33 @@ def migrate(self :param update_parameter_syntax: bool (optional) Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax (:param) when converting datasets in the dashboard. - + :returns: :class:`Dashboard` """ body = {} - if display_name is not None: body['display_name'] = display_name - if parent_path is not None: body['parent_path'] = parent_path - if source_dashboard_id is not None: body['source_dashboard_id'] = source_dashboard_id - if update_parameter_syntax is not None: body['update_parameter_syntax'] = update_parameter_syntax - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/lakeview/dashboards/migrate', body=body - - , headers=headers - ) + if display_name is not None: + body["display_name"] = display_name + if parent_path is not None: + body["parent_path"] = parent_path + if source_dashboard_id is not None: + body["source_dashboard_id"] = source_dashboard_id + if update_parameter_syntax is not None: + body["update_parameter_syntax"] = update_parameter_syntax + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/lakeview/dashboards/migrate", body=body, headers=headers) return Dashboard.from_dict(res) - - - - - def publish(self - , dashboard_id: str - , * - , embed_credentials: Optional[bool] = None, warehouse_id: Optional[str] = None) -> PublishedDashboard: + def publish( + self, dashboard_id: str, *, embed_credentials: Optional[bool] = None, warehouse_id: Optional[str] = None + ) -> PublishedDashboard: """Publish dashboard. - + Publish the current draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard to be published. :param embed_credentials: bool (optional) @@ -2413,173 +2520,137 @@ def publish(self embedded credentials will be used to execute the published dashboard's queries. :param warehouse_id: str (optional) The ID of the warehouse that can be used to override the warehouse which was set in the draft. - + :returns: :class:`PublishedDashboard` """ body = {} - if embed_credentials is not None: body['embed_credentials'] = embed_credentials - if warehouse_id is not None: body['warehouse_id'] = warehouse_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', body=body - - , headers=headers - ) - return PublishedDashboard.from_dict(res) + if embed_credentials is not None: + body["embed_credentials"] = embed_credentials + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", body=body, headers=headers) + return PublishedDashboard.from_dict(res) - def trash(self - , dashboard_id: str - ): + def trash(self, dashboard_id: str): """Trash dashboard. - + Trash a dashboard. - + :param dashboard_id: str UUID identifying the dashboard. 
-
-
+
+
        """
-
-        headers = {'Accept': 'application/json',}
-
-        self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}'
-
-        , headers=headers
-        )
-
-
-
-
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}", headers=headers)

-    def unpublish(self
-        , dashboard_id: str
-        ):
+    def unpublish(self, dashboard_id: str):
        """Unpublish dashboard.
-
+
        Unpublish the dashboard.
-
+
        :param dashboard_id: str
          UUID identifying the published dashboard.
-
-
+
+
        """
-
-        headers = {'Accept': 'application/json',}
-
-        self._api.do('DELETE',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published'
-
-        , headers=headers
-        )
-
-
-
-
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published", headers=headers)

-    def update(self
-        , dashboard_id: str, dashboard: Dashboard
-        ) -> Dashboard:
+    def update(self, dashboard_id: str, dashboard: Dashboard) -> Dashboard:
        """Update dashboard.
-
+
        Update a draft dashboard.
-
+
        :param dashboard_id: str
          UUID identifying the dashboard.
        :param dashboard: :class:`Dashboard`
-
+
        :returns: :class:`Dashboard`
        """
        body = dashboard.as_dict()
-        query = {}
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-
-        res = self._api.do('PATCH',f'/api/2.0/lakeview/dashboards/{dashboard_id}', body=body
-
-        , headers=headers
-        )
-        return Dashboard.from_dict(res)
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
-
-
-
+        res = self._api.do("PATCH", f"/api/2.0/lakeview/dashboards/{dashboard_id}", body=body, headers=headers)
+        return Dashboard.from_dict(res)

-    def update_schedule(self
-        , dashboard_id: str, schedule_id: str, schedule: Schedule
-        ) -> Schedule:
+    def update_schedule(self, dashboard_id: str, schedule_id: str, schedule: Schedule) -> Schedule:
        """Update dashboard schedule.
-
+
        :param dashboard_id: str
          UUID identifying the dashboard to which the schedule belongs.
        :param schedule_id: str
          UUID identifying the schedule.
        :param schedule: :class:`Schedule`
-
+
        :returns: :class:`Schedule`
        """
        body = schedule.as_dict()
-        query = {}
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-
-        res = self._api.do('PUT',f'/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}', body=body
-
-        , headers=headers
-        )
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "PUT", f"/api/2.0/lakeview/dashboards/{dashboard_id}/schedules/{schedule_id}", body=body, headers=headers
+        )
        return Schedule.from_dict(res)
-
-
+
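+# [Editor's illustrative sketch, not generated code] Typical draft -> publish flow
+# with the LakeviewAPI above; a minimal sketch, assuming the service is mounted as
+# `w.lakeview` on a configured WorkspaceClient, and that Dashboard accepts a
+# `serialized_dashboard` JSON string supplied by the caller (abbreviated here):
+#
+#   from databricks.sdk import WorkspaceClient
+#   from databricks.sdk.service.dashboards import Dashboard
+#
+#   w = WorkspaceClient()
+#   draft = w.lakeview.create(Dashboard(display_name="sales", serialized_dashboard="{...}"))
+#   published = w.lakeview.publish(draft.dashboard_id, embed_credentials=True)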
class LakeviewEmbeddedAPI:
    """Token-based Lakeview APIs for embedding dashboards in external applications."""
-
+
    def __init__(self, api_client):
        self._api = api_client
-
-
-
-
-
-
-
-
-    def get_published_dashboard_token_info(self
-        , dashboard_id: str
-        , *
-        , external_value: Optional[str] = None, external_viewer_id: Optional[str] = None) -> GetPublishedDashboardTokenInfoResponse:
+    def get_published_dashboard_token_info(
+        self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None
+    ) -> GetPublishedDashboardTokenInfoResponse:
        """Read information about a published dashboard to mint an OAuth token.
-
+
        Get the required authorization details and scopes of a published dashboard to mint an OAuth token.
        The `authorization_details` can be enriched to apply additional restrictions.
-
+
        Example: Adding the following `authorization_details` object to downscope the viewer permission to a
        specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], object_type: "TABLE",
        object_full_path: "main.default.testdata" } ```
-
+
        :param dashboard_id: str
          UUID identifying the published dashboard.
        :param external_value: str (optional)
          Provided external value to be included in the custom claim.
        :param external_viewer_id: str (optional)
          Provided external viewer id to be included in the custom claim.
-
+
        :returns: :class:`GetPublishedDashboardTokenInfoResponse`
        """
-
+
        query = {}
-        if external_value is not None: query['external_value'] = external_value
-        if external_viewer_id is not None: query['external_viewer_id'] = external_viewer_id
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/lakeview/dashboards/{dashboard_id}/published/tokeninfo', query=query
-
-        , headers=headers
-        )
+        if external_value is not None:
+            query["external_value"] = external_value
+        if external_viewer_id is not None:
+            query["external_viewer_id"] = external_viewer_id
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET", f"/api/2.0/lakeview/dashboards/{dashboard_id}/published/tokeninfo", query=query, headers=headers
+        )
        return GetPublishedDashboardTokenInfoResponse.from_dict(res)
-
-
-
\ No newline at end of file
diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py
index 0d5a2c1e1..c9a9c0ced 100755
--- a/databricks/sdk/service/database.py
+++ b/databricks/sdk/service/database.py
@@ -1,196 +1,218 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

 from __future__ import annotations
+
+import logging
 from dataclasses import dataclass
-from datetime import timedelta
 from enum import Enum
-from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
-import time
-import random
-import logging
-import requests
-import threading
-
-from ..errors import OperationTimeout, OperationFailed
-from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter
-from ..oauth import Token
+from typing import Any, Dict, Iterator, List, Optional

-_LOG = logging.getLogger('databricks.sdk')
+from ._internal import _enum, _from_dict, _repeated_dict

+_LOG = logging.getLogger("databricks.sdk")

 # all definitions in this file are in alphabetical order
-
-
-
-
-
-
-
-
-
-
-
 @dataclass
 class DatabaseCatalog:
    name: str
    """The name of the catalog in UC."""
-
+
    database_instance_name: str
    """The name of the DatabaseInstance housing the database."""
-
+
    database_name: str
    """The name of the database (in an instance) associated with the catalog."""
-
+
    create_database_if_not_exists: Optional[bool] = None
-
+
    uid: Optional[str] = None
-
+
    def as_dict(self) -> dict:
        """Serializes the DatabaseCatalog into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.create_database_if_not_exists is not None: body['create_database_if_not_exists'] = self.create_database_if_not_exists
-        if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name
-        if self.database_name is not None: body['database_name'] = self.database_name
-        if self.name is not None: body['name'] = self.name
-        if self.uid is not None: body['uid'] = self.uid
+        if self.create_database_if_not_exists is not None:
+            body["create_database_if_not_exists"] = self.create_database_if_not_exists
+        if self.database_instance_name
is not None: + body["database_instance_name"] = self.database_instance_name + if self.database_name is not None: + body["database_name"] = self.database_name + if self.name is not None: + body["name"] = self.name + if self.uid is not None: + body["uid"] = self.uid return body def as_shallow_dict(self) -> dict: """Serializes the DatabaseCatalog into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_database_if_not_exists is not None: body['create_database_if_not_exists'] = self.create_database_if_not_exists - if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name - if self.database_name is not None: body['database_name'] = self.database_name - if self.name is not None: body['name'] = self.name - if self.uid is not None: body['uid'] = self.uid + if self.create_database_if_not_exists is not None: + body["create_database_if_not_exists"] = self.create_database_if_not_exists + if self.database_instance_name is not None: + body["database_instance_name"] = self.database_instance_name + if self.database_name is not None: + body["database_name"] = self.database_name + if self.name is not None: + body["name"] = self.name + if self.uid is not None: + body["uid"] = self.uid return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog: """Deserializes the DatabaseCatalog from a dictionary.""" - return cls(create_database_if_not_exists=d.get('create_database_if_not_exists', None), database_instance_name=d.get('database_instance_name', None), database_name=d.get('database_name', None), name=d.get('name', None), uid=d.get('uid', None)) - - + return cls( + create_database_if_not_exists=d.get("create_database_if_not_exists", None), + database_instance_name=d.get("database_instance_name", None), + database_name=d.get("database_name", None), + name=d.get("name", None), + uid=d.get("uid", None), + ) @dataclass class DatabaseCredential: token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the DatabaseCredential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token is not None: body['token'] = self.token + if self.token is not None: + body["token"] = self.token return body def as_shallow_dict(self) -> dict: """Serializes the DatabaseCredential into a shallow dictionary of its immediate attributes.""" body = {} - if self.token is not None: body['token'] = self.token + if self.token is not None: + body["token"] = self.token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabaseCredential: """Deserializes the DatabaseCredential from a dictionary.""" - return cls(token=d.get('token', None)) - - + return cls(token=d.get("token", None)) @dataclass class DatabaseInstance: """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.""" - + name: str """The name of the instance. This is the unique identifier for the instance.""" - + capacity: Optional[str] = None """The sku of the instance. 
Valid values are "CU_1", "CU_2", "CU_4", "CU_8".""" - + creation_time: Optional[str] = None """The timestamp when the instance was created.""" - + creator: Optional[str] = None """The email of the creator of the instance.""" - + pg_version: Optional[str] = None """The version of Postgres running on the instance.""" - + read_write_dns: Optional[str] = None """The DNS endpoint to connect to the instance for read+write access.""" - + state: Optional[DatabaseInstanceState] = None """The current state of the instance.""" - + stopped: Optional[bool] = None """Whether the instance is stopped.""" - + uid: Optional[str] = None """An immutable UUID identifier for the instance.""" - + def as_dict(self) -> dict: """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body.""" body = {} - if self.capacity is not None: body['capacity'] = self.capacity - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.creator is not None: body['creator'] = self.creator - if self.name is not None: body['name'] = self.name - if self.pg_version is not None: body['pg_version'] = self.pg_version - if self.read_write_dns is not None: body['read_write_dns'] = self.read_write_dns - if self.state is not None: body['state'] = self.state.value - if self.stopped is not None: body['stopped'] = self.stopped - if self.uid is not None: body['uid'] = self.uid + if self.capacity is not None: + body["capacity"] = self.capacity + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.creator is not None: + body["creator"] = self.creator + if self.name is not None: + body["name"] = self.name + if self.pg_version is not None: + body["pg_version"] = self.pg_version + if self.read_write_dns is not None: + body["read_write_dns"] = self.read_write_dns + if self.state is not None: + body["state"] = self.state.value + if self.stopped is not None: + body["stopped"] = self.stopped + if self.uid is not None: + body["uid"] = self.uid return body def as_shallow_dict(self) -> dict: """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes.""" body = {} - if self.capacity is not None: body['capacity'] = self.capacity - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.creator is not None: body['creator'] = self.creator - if self.name is not None: body['name'] = self.name - if self.pg_version is not None: body['pg_version'] = self.pg_version - if self.read_write_dns is not None: body['read_write_dns'] = self.read_write_dns - if self.state is not None: body['state'] = self.state - if self.stopped is not None: body['stopped'] = self.stopped - if self.uid is not None: body['uid'] = self.uid + if self.capacity is not None: + body["capacity"] = self.capacity + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.creator is not None: + body["creator"] = self.creator + if self.name is not None: + body["name"] = self.name + if self.pg_version is not None: + body["pg_version"] = self.pg_version + if self.read_write_dns is not None: + body["read_write_dns"] = self.read_write_dns + if self.state is not None: + body["state"] = self.state + if self.stopped is not None: + body["stopped"] = self.stopped + if self.uid is not None: + body["uid"] = self.uid return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: """Deserializes the DatabaseInstance from a dictionary.""" - return cls(capacity=d.get('capacity', None), 
creation_time=d.get('creation_time', None), creator=d.get('creator', None), name=d.get('name', None), pg_version=d.get('pg_version', None), read_write_dns=d.get('read_write_dns', None), state=_enum(d, 'state', DatabaseInstanceState), stopped=d.get('stopped', None), uid=d.get('uid', None)) - - + return cls( + capacity=d.get("capacity", None), + creation_time=d.get("creation_time", None), + creator=d.get("creator", None), + name=d.get("name", None), + pg_version=d.get("pg_version", None), + read_write_dns=d.get("read_write_dns", None), + state=_enum(d, "state", DatabaseInstanceState), + stopped=d.get("stopped", None), + uid=d.get("uid", None), + ) class DatabaseInstanceState(Enum): - - - AVAILABLE = 'AVAILABLE' - DELETING = 'DELETING' - FAILING_OVER = 'FAILING_OVER' - STARTING = 'STARTING' - STOPPED = 'STOPPED' - UPDATING = 'UPDATING' + + AVAILABLE = "AVAILABLE" + DELETING = "DELETING" + FAILING_OVER = "FAILING_OVER" + STARTING = "STARTING" + STOPPED = "STOPPED" + UPDATING = "UPDATING" + @dataclass class DatabaseTable: """Next field marker: 13""" - + name: str """Full three-part (catalog, schema, table) name of the table.""" - + database_instance_name: Optional[str] = None """Name of the target database instance. This is required when creating database tables in standard catalogs. This is optional when creating database tables in registered catalogs. If this field is specified when creating database tables in registered catalogs, the database instance name MUST match that of the registered catalog (or the request will be rejected).""" - + logical_database_name: Optional[str] = None """Target Postgres database object (logical database) name for this table. This field is optional in all scenarios. @@ -204,37 +226,45 @@ class DatabaseTable: the standard catalog. In this scenario, specifying this field will allow targeting an arbitrary postgres database. 
    Note that this has implications for the `create_database_objects_if_missing` field in `spec`."""
-
+
    table_serving_url: Optional[str] = None
    """Data serving REST API URL for this table"""
-
+
    def as_dict(self) -> dict:
        """Serializes the DatabaseTable into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name
-        if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name
-        if self.name is not None: body['name'] = self.name
-        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.database_instance_name is not None:
+            body["database_instance_name"] = self.database_instance_name
+        if self.logical_database_name is not None:
+            body["logical_database_name"] = self.logical_database_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DatabaseTable into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name
-        if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name
-        if self.name is not None: body['name'] = self.name
-        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.database_instance_name is not None:
+            body["database_instance_name"] = self.database_instance_name
+        if self.logical_database_name is not None:
+            body["logical_database_name"] = self.logical_database_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DatabaseTable:
        """Deserializes the DatabaseTable from a dictionary."""
-        return cls(database_instance_name=d.get('database_instance_name', None), logical_database_name=d.get('logical_database_name', None), name=d.get('name', None), table_serving_url=d.get('table_serving_url', None))
-
-
-
-
-
+        return cls(
+            database_instance_name=d.get("database_instance_name", None),
+            logical_database_name=d.get("logical_database_name", None),
+            name=d.get("name", None),
+            table_serving_url=d.get("table_serving_url", None),
+        )


 @dataclass
@@ -253,11 +283,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse:
         """Deserializes the DeleteDatabaseCatalogResponse from a dictionary."""
         return cls()
-
-
-
-
-


 @dataclass
@@ -276,11 +301,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse:
         """Deserializes the DeleteDatabaseInstanceResponse from a dictionary."""
         return cls()
-
-
-
-
-


 @dataclass
@@ -299,11 +319,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseTableResponse:
         """Deserializes the DeleteDatabaseTableResponse from a dictionary."""
         return cls()
-
-
-
-
-


 @dataclass
@@ -322,151 +337,140 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse:
         """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary."""
         return cls()
-
-
-
-
-


 @dataclass
 class GenerateDatabaseCredentialRequest:
    """Generates a credential that can be used to access database
instances""" - + instance_names: Optional[List[str]] = None """Instances to which the token will be scoped.""" - + request_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_names: body['instance_names'] = [v for v in self.instance_names] - if self.request_id is not None: body['request_id'] = self.request_id + if self.instance_names: + body["instance_names"] = [v for v in self.instance_names] + if self.request_id is not None: + body["request_id"] = self.request_id return body def as_shallow_dict(self) -> dict: """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_names: body['instance_names'] = self.instance_names - if self.request_id is not None: body['request_id'] = self.request_id + if self.instance_names: + body["instance_names"] = self.instance_names + if self.request_id is not None: + body["request_id"] = self.request_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest: """Deserializes the GenerateDatabaseCredentialRequest from a dictionary.""" - return cls(instance_names=d.get('instance_names', None), request_id=d.get('request_id', None)) - - - - - - - - - - - - - - - - - + return cls(instance_names=d.get("instance_names", None), request_id=d.get("request_id", None)) @dataclass class ListDatabaseInstancesResponse: database_instances: Optional[List[DatabaseInstance]] = None """List of instances.""" - + next_page_token: Optional[str] = None """Pagination token to request the next page of instances.""" - + def as_dict(self) -> dict: """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.database_instances: body['database_instances'] = [v.as_dict() for v in self.database_instances] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.database_instances: + body["database_instances"] = [v.as_dict() for v in self.database_instances] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.database_instances: body['database_instances'] = self.database_instances - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.database_instances: + body["database_instances"] = self.database_instances + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse: """Deserializes the ListDatabaseInstancesResponse from a dictionary.""" - return cls(database_instances=_repeated_dict(d, 'database_instances', DatabaseInstance), next_page_token=d.get('next_page_token', None)) - - + return cls( + database_instances=_repeated_dict(d, "database_instances", DatabaseInstance), + next_page_token=d.get("next_page_token", None), + ) @dataclass class NewPipelineSpec: """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other fields of pipeline are still inferred by table def internally""" - + storage_catalog: Optional[str] = None """UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). 
This needs to be a standard catalog where the user has permissions to create Delta tables.""" - + storage_schema: Optional[str] = None """UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs to be in the standard catalog where the user has permissions to create Delta tables.""" - + def as_dict(self) -> dict: """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.storage_catalog is not None: body['storage_catalog'] = self.storage_catalog - if self.storage_schema is not None: body['storage_schema'] = self.storage_schema + if self.storage_catalog is not None: + body["storage_catalog"] = self.storage_catalog + if self.storage_schema is not None: + body["storage_schema"] = self.storage_schema return body def as_shallow_dict(self) -> dict: """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.storage_catalog is not None: body['storage_catalog'] = self.storage_catalog - if self.storage_schema is not None: body['storage_schema'] = self.storage_schema + if self.storage_catalog is not None: + body["storage_catalog"] = self.storage_catalog + if self.storage_schema is not None: + body["storage_schema"] = self.storage_schema return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec: """Deserializes the NewPipelineSpec from a dictionary.""" - return cls(storage_catalog=d.get('storage_catalog', None), storage_schema=d.get('storage_schema', None)) - - + return cls(storage_catalog=d.get("storage_catalog", None), storage_schema=d.get("storage_schema", None)) class ProvisioningInfoState(Enum): - - - ACTIVE = 'ACTIVE' - DEGRADED = 'DEGRADED' - DELETING = 'DELETING' - FAILED = 'FAILED' - PROVISIONING = 'PROVISIONING' - UPDATING = 'UPDATING' + + ACTIVE = "ACTIVE" + DEGRADED = "DEGRADED" + DELETING = "DELETING" + FAILED = "FAILED" + PROVISIONING = "PROVISIONING" + UPDATING = "UPDATING" + @dataclass class SyncedDatabaseTable: """Next field marker: 12""" - + name: str """Full three-part (catalog, schema, table) name of the table.""" - + data_synchronization_status: Optional[SyncedTableStatus] = None """Synced Table data synchronization status""" - + database_instance_name: Optional[str] = None """Name of the target database instance. This is required when creating synced database tables in standard catalogs. This is optional when creating synced database tables in registered catalogs. If this field is specified when creating synced database tables in registered catalogs, the database instance name MUST match that of the registered catalog (or the request will be rejected).""" - + logical_database_name: Optional[str] = None """Target Postgres database object (logical database) name for this table. This field is optional in all scenarios. @@ -479,521 +483,564 @@ class SyncedDatabaseTable: When creating a synced table in a standard catalog, the target database name is inferred to be that of the standard catalog. In this scenario, specifying this field will allow targeting an arbitrary postgres database.""" - + spec: Optional[SyncedTableSpec] = None """Specification of a synced database table.""" - + table_serving_url: Optional[str] = None """Data serving REST API URL for this table""" - + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline may be in "PROVISIONING" as it runs asynchronously).""" - + def as_dict(self) -> dict: """Serializes the SyncedDatabaseTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data_synchronization_status: body['data_synchronization_status'] = self.data_synchronization_status.as_dict() - if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name - if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name - if self.name is not None: body['name'] = self.name - if self.spec: body['spec'] = self.spec.as_dict() - if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value + if self.data_synchronization_status: + body["data_synchronization_status"] = self.data_synchronization_status.as_dict() + if self.database_instance_name is not None: + body["database_instance_name"] = self.database_instance_name + if self.logical_database_name is not None: + body["logical_database_name"] = self.logical_database_name + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body def as_shallow_dict(self) -> dict: """Serializes the SyncedDatabaseTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.data_synchronization_status: body['data_synchronization_status'] = self.data_synchronization_status - if self.database_instance_name is not None: body['database_instance_name'] = self.database_instance_name - if self.logical_database_name is not None: body['logical_database_name'] = self.logical_database_name - if self.name is not None: body['name'] = self.name - if self.spec: body['spec'] = self.spec - if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state + if self.data_synchronization_status: + body["data_synchronization_status"] = self.data_synchronization_status + if self.database_instance_name is not None: + body["database_instance_name"] = self.database_instance_name + if self.logical_database_name is not None: + body["logical_database_name"] = self.logical_database_name + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable: """Deserializes the SyncedDatabaseTable from a dictionary.""" - return cls(data_synchronization_status=_from_dict(d, 'data_synchronization_status', SyncedTableStatus), database_instance_name=d.get('database_instance_name', None), logical_database_name=d.get('logical_database_name', None), name=d.get('name', None), spec=_from_dict(d, 'spec', SyncedTableSpec), table_serving_url=d.get('table_serving_url', None), 
unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state', ProvisioningInfoState)) - - + return cls( + data_synchronization_status=_from_dict(d, "data_synchronization_status", SyncedTableStatus), + database_instance_name=d.get("database_instance_name", None), + logical_database_name=d.get("logical_database_name", None), + name=d.get("name", None), + spec=_from_dict(d, "spec", SyncedTableSpec), + table_serving_url=d.get("table_serving_url", None), + unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), + ) @dataclass class SyncedTableContinuousUpdateStatus: """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE or the SYNCED_UPDATING_PIPELINE_RESOURCES state.""" - + initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None """Progress of the initial data synchronization.""" - + last_processed_commit_version: Optional[int] = None """The last source table Delta version that was synced to the synced table. Note that this Delta version may not be completely synced to the synced table yet.""" - + timestamp: Optional[str] = None """The timestamp of the last time any data was synchronized from the source table to the synced table.""" - + def as_dict(self) -> dict: """Serializes the SyncedTableContinuousUpdateStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict() - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the SyncedTableContinuousUpdateStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedTableContinuousUpdateStatus: """Deserializes the SyncedTableContinuousUpdateStatus from a dictionary.""" - return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', SyncedTablePipelineProgress), last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None)) - - + return cls( + initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress), + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=d.get("timestamp", None), + ) 
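+# [Editor's illustrative sketch, not generated code] The as_dict()/from_dict() pair
+# on these dataclasses round-trips the JSON wire format, rebuilding nested messages
+# via _from_dict; keys are only emitted for fields that are set. A minimal sketch
+# with a hand-written payload:
+#
+#   raw = {
+#       "last_processed_commit_version": 42,
+#       "timestamp": "2025-06-05T09:53:01Z",
+#       "initial_pipeline_sync_progress": {"sync_progress_completion": 0.5},
+#   }
+#   status = SyncedTableContinuousUpdateStatus.from_dict(raw)
+#   assert status.as_dict() == raw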
@dataclass class SyncedTableFailedStatus: """Detailed status of a synced table. Shown if the synced table is in the OFFLINE_FAILED or the SYNCED_PIPELINE_FAILED state.""" - + last_processed_commit_version: Optional[int] = None """The last source table Delta version that was synced to the synced table. Note that this Delta version may only be partially synced to the synced table. Only populated if the table is still synced and available for serving.""" - + timestamp: Optional[str] = None """The timestamp of the last time any data was synchronized from the source table to the synced table. Only populated if the table is still synced and available for serving.""" - + def as_dict(self) -> dict: """Serializes the SyncedTableFailedStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the SyncedTableFailedStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedTableFailedStatus: """Deserializes the SyncedTableFailedStatus from a dictionary.""" - return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None)) - - + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=d.get("timestamp", None), + ) @dataclass class SyncedTablePipelineProgress: """Progress information of the Synced Table data synchronization pipeline.""" - + estimated_completion_time_seconds: Optional[float] = None """The estimated time remaining to complete this update in seconds.""" - + latest_version_currently_processing: Optional[int] = None """The source table Delta version that was last processed by the pipeline. The pipeline may not have completely processed this version yet.""" - + sync_progress_completion: Optional[float] = None """The completion ratio of this update. This is a number between 0 and 1.""" - + synced_row_count: Optional[int] = None """The number of rows that have been synced in this update.""" - + total_row_count: Optional[int] = None """The total number of rows that need to be synced in this update. 
This number may be an estimate.""" - + def as_dict(self) -> dict: """Serializes the SyncedTablePipelineProgress into a dictionary suitable for use as a JSON request body.""" body = {} - if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds - if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion - if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count - if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + if self.estimated_completion_time_seconds is not None: + body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count return body def as_shallow_dict(self) -> dict: """Serializes the SyncedTablePipelineProgress into a shallow dictionary of its immediate attributes.""" body = {} - if self.estimated_completion_time_seconds is not None: body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds - if self.latest_version_currently_processing is not None: body['latest_version_currently_processing'] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: body['sync_progress_completion'] = self.sync_progress_completion - if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count - if self.total_row_count is not None: body['total_row_count'] = self.total_row_count + if self.estimated_completion_time_seconds is not None: + body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds + if self.latest_version_currently_processing is not None: + body["latest_version_currently_processing"] = self.latest_version_currently_processing + if self.sync_progress_completion is not None: + body["sync_progress_completion"] = self.sync_progress_completion + if self.synced_row_count is not None: + body["synced_row_count"] = self.synced_row_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedTablePipelineProgress: """Deserializes the SyncedTablePipelineProgress from a dictionary.""" - return cls(estimated_completion_time_seconds=d.get('estimated_completion_time_seconds', None), latest_version_currently_processing=d.get('latest_version_currently_processing', None), sync_progress_completion=d.get('sync_progress_completion', None), synced_row_count=d.get('synced_row_count', None), total_row_count=d.get('total_row_count', None)) - - + return cls( + estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None), + latest_version_currently_processing=d.get("latest_version_currently_processing", None), + sync_progress_completion=d.get("sync_progress_completion", None), + synced_row_count=d.get("synced_row_count", None), + total_row_count=d.get("total_row_count", None), + ) @dataclass 
class SyncedTableProvisioningStatus: """Detailed status of a synced table. Shown if the synced table is in the PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - + initial_pipeline_sync_progress: Optional[SyncedTablePipelineProgress] = None """Details about initial data synchronization. Only populated when in the PROVISIONING_INITIAL_SNAPSHOT state.""" - + def as_dict(self) -> dict: """Serializes the SyncedTableProvisioningStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict() + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SyncedTableProvisioningStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.initial_pipeline_sync_progress: body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedTableProvisioningStatus: """Deserializes the SyncedTableProvisioningStatus from a dictionary.""" - return cls(initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', SyncedTablePipelineProgress)) - - + return cls( + initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", SyncedTablePipelineProgress) + ) class SyncedTableSchedulingPolicy(Enum): - - - CONTINUOUS = 'CONTINUOUS' - SNAPSHOT = 'SNAPSHOT' - TRIGGERED = 'TRIGGERED' + + CONTINUOUS = "CONTINUOUS" + SNAPSHOT = "SNAPSHOT" + TRIGGERED = "TRIGGERED" + @dataclass class SyncedTableSpec: """Specification of a synced database table.""" - + create_database_objects_if_missing: Optional[bool] = None """If true, the synced table's logical database and schema resources in PG will be created if they do not already exist.""" - + new_pipeline_spec: Optional[NewPipelineSpec] = None """Spec of new pipeline. Should be empty if pipeline_id is set""" - + pipeline_id: Optional[str] = None """ID of the associated pipeline. 
Should be empty if new_pipeline_spec is set""" - + primary_key_columns: Optional[List[str]] = None """Primary Key columns to be used for data insert/update in the destination.""" - + scheduling_policy: Optional[SyncedTableSchedulingPolicy] = None """Scheduling policy of the underlying pipeline.""" - + source_table_full_name: Optional[str] = None """Three-part (catalog, schema, table) name of the source Delta table.""" - + timeseries_key: Optional[str] = None """Time series key to deduplicate (tie-break) rows with the same primary key.""" - + def as_dict(self) -> dict: """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_database_objects_if_missing is not None: body['create_database_objects_if_missing'] = self.create_database_objects_if_missing - if self.new_pipeline_spec: body['new_pipeline_spec'] = self.new_pipeline_spec.as_dict() - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.primary_key_columns: body['primary_key_columns'] = [v for v in self.primary_key_columns] - if self.scheduling_policy is not None: body['scheduling_policy'] = self.scheduling_policy.value - if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name - if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key + if self.create_database_objects_if_missing is not None: + body["create_database_objects_if_missing"] = self.create_database_objects_if_missing + if self.new_pipeline_spec: + body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict() + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.primary_key_columns: + body["primary_key_columns"] = [v for v in self.primary_key_columns] + if self.scheduling_policy is not None: + body["scheduling_policy"] = self.scheduling_policy.value + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] = self.timeseries_key return body def as_shallow_dict(self) -> dict: """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_database_objects_if_missing is not None: body['create_database_objects_if_missing'] = self.create_database_objects_if_missing - if self.new_pipeline_spec: body['new_pipeline_spec'] = self.new_pipeline_spec - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns - if self.scheduling_policy is not None: body['scheduling_policy'] = self.scheduling_policy - if self.source_table_full_name is not None: body['source_table_full_name'] = self.source_table_full_name - if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key + if self.create_database_objects_if_missing is not None: + body["create_database_objects_if_missing"] = self.create_database_objects_if_missing + if self.new_pipeline_spec: + body["new_pipeline_spec"] = self.new_pipeline_spec + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.primary_key_columns: + body["primary_key_columns"] = self.primary_key_columns + if self.scheduling_policy is not None: + body["scheduling_policy"] = self.scheduling_policy + if self.source_table_full_name is not None: + body["source_table_full_name"] = self.source_table_full_name + if self.timeseries_key is not None: + body["timeseries_key"] 
= self.timeseries_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec: """Deserializes the SyncedTableSpec from a dictionary.""" - return cls(create_database_objects_if_missing=d.get('create_database_objects_if_missing', None), new_pipeline_spec=_from_dict(d, 'new_pipeline_spec', NewPipelineSpec), pipeline_id=d.get('pipeline_id', None), primary_key_columns=d.get('primary_key_columns', None), scheduling_policy=_enum(d, 'scheduling_policy', SyncedTableSchedulingPolicy), source_table_full_name=d.get('source_table_full_name', None), timeseries_key=d.get('timeseries_key', None)) - - + return cls( + create_database_objects_if_missing=d.get("create_database_objects_if_missing", None), + new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec), + pipeline_id=d.get("pipeline_id", None), + primary_key_columns=d.get("primary_key_columns", None), + scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy), + source_table_full_name=d.get("source_table_full_name", None), + timeseries_key=d.get("timeseries_key", None), + ) class SyncedTableState(Enum): """The state of a synced table.""" - - SYNCED_TABLED_OFFLINE = 'SYNCED_TABLED_OFFLINE' - SYNCED_TABLE_OFFLINE_FAILED = 'SYNCED_TABLE_OFFLINE_FAILED' - SYNCED_TABLE_ONLINE = 'SYNCED_TABLE_ONLINE' - SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE = 'SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE' - SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE = 'SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE' - SYNCED_TABLE_ONLINE_PIPELINE_FAILED = 'SYNCED_TABLE_ONLINE_PIPELINE_FAILED' - SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE = 'SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE' - SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES = 'SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES' - SYNCED_TABLE_PROVISIONING = 'SYNCED_TABLE_PROVISIONING' - SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT = 'SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT' - SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES = 'SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES' + + SYNCED_TABLED_OFFLINE = "SYNCED_TABLED_OFFLINE" + SYNCED_TABLE_OFFLINE_FAILED = "SYNCED_TABLE_OFFLINE_FAILED" + SYNCED_TABLE_ONLINE = "SYNCED_TABLE_ONLINE" + SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE = "SYNCED_TABLE_ONLINE_CONTINUOUS_UPDATE" + SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE = "SYNCED_TABLE_ONLINE_NO_PENDING_UPDATE" + SYNCED_TABLE_ONLINE_PIPELINE_FAILED = "SYNCED_TABLE_ONLINE_PIPELINE_FAILED" + SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE = "SYNCED_TABLE_ONLINE_TRIGGERED_UPDATE" + SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES = "SYNCED_TABLE_ONLINE_UPDATING_PIPELINE_RESOURCES" + SYNCED_TABLE_PROVISIONING = "SYNCED_TABLE_PROVISIONING" + SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT = "SYNCED_TABLE_PROVISIONING_INITIAL_SNAPSHOT" + SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES = "SYNCED_TABLE_PROVISIONING_PIPELINE_RESOURCES" + @dataclass class SyncedTableStatus: """Status of a synced table.""" - + continuous_update_status: Optional[SyncedTableContinuousUpdateStatus] = None """Detailed status of a synced table. Shown if the synced table is in the SYNCED_CONTINUOUS_UPDATE or the SYNCED_UPDATING_PIPELINE_RESOURCES state.""" - + detailed_state: Optional[SyncedTableState] = None """The state of the synced table.""" - + failed_status: Optional[SyncedTableFailedStatus] = None """Detailed status of a synced table. 
Shown if the synced table is in the OFFLINE_FAILED or the SYNCED_PIPELINE_FAILED state.""" - + message: Optional[str] = None """A text description of the current state of the synced table.""" - + provisioning_status: Optional[SyncedTableProvisioningStatus] = None """Detailed status of a synced table. Shown if the synced table is in the PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - + triggered_update_status: Optional[SyncedTableTriggeredUpdateStatus] = None """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE or the SYNCED_NO_PENDING_UPDATE state.""" - + def as_dict(self) -> dict: """Serializes the SyncedTableStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status.as_dict() - if self.detailed_state is not None: body['detailed_state'] = self.detailed_state.value - if self.failed_status: body['failed_status'] = self.failed_status.as_dict() - if self.message is not None: body['message'] = self.message - if self.provisioning_status: body['provisioning_status'] = self.provisioning_status.as_dict() - if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status.as_dict() + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status.as_dict() + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state.value + if self.failed_status: + body["failed_status"] = self.failed_status.as_dict() + if self.message is not None: + body["message"] = self.message + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status.as_dict() + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SyncedTableStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status - if self.detailed_state is not None: body['detailed_state'] = self.detailed_state - if self.failed_status: body['failed_status'] = self.failed_status - if self.message is not None: body['message'] = self.message - if self.provisioning_status: body['provisioning_status'] = self.provisioning_status - if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status + if self.continuous_update_status: + body["continuous_update_status"] = self.continuous_update_status + if self.detailed_state is not None: + body["detailed_state"] = self.detailed_state + if self.failed_status: + body["failed_status"] = self.failed_status + if self.message is not None: + body["message"] = self.message + if self.provisioning_status: + body["provisioning_status"] = self.provisioning_status + if self.triggered_update_status: + body["triggered_update_status"] = self.triggered_update_status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedTableStatus: """Deserializes the SyncedTableStatus from a dictionary.""" - return cls(continuous_update_status=_from_dict(d, 'continuous_update_status', SyncedTableContinuousUpdateStatus), detailed_state=_enum(d, 'detailed_state', SyncedTableState), failed_status=_from_dict(d, 'failed_status', SyncedTableFailedStatus), message=d.get('message', None), provisioning_status=_from_dict(d, 'provisioning_status', SyncedTableProvisioningStatus), 
triggered_update_status=_from_dict(d, 'triggered_update_status', SyncedTableTriggeredUpdateStatus)) - - + return cls( + continuous_update_status=_from_dict(d, "continuous_update_status", SyncedTableContinuousUpdateStatus), + detailed_state=_enum(d, "detailed_state", SyncedTableState), + failed_status=_from_dict(d, "failed_status", SyncedTableFailedStatus), + message=d.get("message", None), + provisioning_status=_from_dict(d, "provisioning_status", SyncedTableProvisioningStatus), + triggered_update_status=_from_dict(d, "triggered_update_status", SyncedTableTriggeredUpdateStatus), + ) @dataclass class SyncedTableTriggeredUpdateStatus: """Detailed status of a synced table. Shown if the synced table is in the SYNCED_TRIGGERED_UPDATE or the SYNCED_NO_PENDING_UPDATE state.""" - + last_processed_commit_version: Optional[int] = None """The last source table Delta version that was synced to the synced table. Note that this Delta version may not be completely synced to the synced table yet.""" - + timestamp: Optional[str] = None """The timestamp of the last time any data was synchronized from the source table to the synced table.""" - + triggered_update_progress: Optional[SyncedTablePipelineProgress] = None """Progress of the active data synchronization pipeline.""" - + def as_dict(self) -> dict: """Serializes the SyncedTableTriggeredUpdateStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress.as_dict() + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SyncedTableTriggeredUpdateStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_processed_commit_version is not None: body['last_processed_commit_version'] = self.last_processed_commit_version - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SyncedTableTriggeredUpdateStatus: """Deserializes the SyncedTableTriggeredUpdateStatus from a dictionary.""" - return cls(last_processed_commit_version=d.get('last_processed_commit_version', None), timestamp=d.get('timestamp', None), triggered_update_progress=_from_dict(d, 'triggered_update_progress', SyncedTablePipelineProgress)) - - - - - - - + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=d.get("timestamp", None), + triggered_update_progress=_from_dict(d, "triggered_update_progress", SyncedTablePipelineProgress), + ) class DatabaseAPI: """Database Instances provide access to a database via 
REST API or direct SQL.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_database_catalog(self - , catalog: DatabaseCatalog - ) -> DatabaseCatalog: + def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog: """Create a Database Catalog. - + :param catalog: :class:`DatabaseCatalog` - + :returns: :class:`DatabaseCatalog` """ body = catalog.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/database/catalogs', body=body - - , headers=headers - ) - return DatabaseCatalog.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/database/catalogs", body=body, headers=headers) + return DatabaseCatalog.from_dict(res) - def create_database_instance(self - , database_instance: DatabaseInstance - ) -> DatabaseInstance: + def create_database_instance(self, database_instance: DatabaseInstance) -> DatabaseInstance: """Create a Database Instance. - + :param database_instance: :class:`DatabaseInstance` A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. - + :returns: :class:`DatabaseInstance` """ body = database_instance.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/database/instances', body=body - - , headers=headers - ) - return DatabaseInstance.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers) + return DatabaseInstance.from_dict(res) - def create_database_table(self - , table: DatabaseTable - ) -> DatabaseTable: + def create_database_table(self, table: DatabaseTable) -> DatabaseTable: """Create a Database Table. - + :param table: :class:`DatabaseTable` Next field marker: 13 - + :returns: :class:`DatabaseTable` """ body = table.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/database/tables', body=body - - , headers=headers - ) - return DatabaseTable.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/database/tables", body=body, headers=headers) + return DatabaseTable.from_dict(res) - def create_synced_database_table(self - , synced_table: SyncedDatabaseTable - ) -> SyncedDatabaseTable: + def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable: """Create a Synced Database Table. - + :param synced_table: :class:`SyncedDatabaseTable` Next field marker: 12 - + :returns: :class:`SyncedDatabaseTable` """ body = synced_table.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/database/synced_tables', body=body - - , headers=headers - ) - return SyncedDatabaseTable.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/database/synced_tables", body=body, headers=headers) + return SyncedDatabaseTable.from_dict(res) - def delete_database_catalog(self - , name: str - ): + def delete_database_catalog(self, name: str): """Delete a Database Catalog. 
- + :param name: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/database/catalogs/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/database/catalogs/{name}", headers=headers) - def delete_database_instance(self - , name: str - , * - , force: Optional[bool] = None, purge: Optional[bool] = None): + def delete_database_instance(self, name: str, *, force: Optional[bool] = None, purge: Optional[bool] = None): """Delete a Database Instance. - + :param name: str Name of the instance to delete. :param force: bool (optional) @@ -1004,273 +1051,206 @@ def delete_database_instance(self deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by calling the undelete API for a limited time. If true, the database instance is hard deleted and cannot be undeleted. - - + + """ - + query = {} - if force is not None: query['force'] = force - if purge is not None: query['purge'] = purge - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/database/instances/{name}', query=query - - , headers=headers - ) - + if force is not None: + query["force"] = force + if purge is not None: + query["purge"] = purge + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers) - def delete_database_table(self - , name: str - ): + def delete_database_table(self, name: str): """Delete a Database Table. - + :param name: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/database/tables/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/database/tables/{name}", headers=headers) - def delete_synced_database_table(self - , name: str - ): + def delete_synced_database_table(self, name: str): """Delete a Synced Database Table. - + :param name: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/database/synced_tables/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers) - def find_database_instance_by_uid(self - - , * - , uid: Optional[str] = None) -> DatabaseInstance: + def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance: """Find a Database Instance by uid. - + :param uid: str (optional) UID of the cluster to get. - + :returns: :class:`DatabaseInstance` """ - + query = {} - if uid is not None: query['uid'] = uid - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/database/instances:findByUid', query=query - - , headers=headers - ) - return DatabaseInstance.from_dict(res) + if uid is not None: + query["uid"] = uid + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/database/instances:findByUid", query=query, headers=headers) + return DatabaseInstance.from_dict(res) - def generate_database_credential(self - - , * - , instance_names: Optional[List[str]] = None, request_id: Optional[str] = None) -> DatabaseCredential: + def generate_database_credential( + self, *, instance_names: Optional[List[str]] = None, request_id: Optional[str] = None + ) -> DatabaseCredential: """Generates a credential that can be used to access database instances. 
- + :param instance_names: List[str] (optional) Instances to which the token will be scoped. :param request_id: str (optional) - + :returns: :class:`DatabaseCredential` """ body = {} - if instance_names is not None: body['instance_names'] = [v for v in instance_names] - if request_id is not None: body['request_id'] = request_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/database/credentials', body=body - - , headers=headers - ) + if instance_names is not None: + body["instance_names"] = [v for v in instance_names] + if request_id is not None: + body["request_id"] = request_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/database/credentials", body=body, headers=headers) return DatabaseCredential.from_dict(res) - - - - - def get_database_catalog(self - , name: str - ) -> DatabaseCatalog: + def get_database_catalog(self, name: str) -> DatabaseCatalog: """Get a Database Catalog. - + :param name: str - + :returns: :class:`DatabaseCatalog` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/database/catalogs/{name}' - - , headers=headers - ) - return DatabaseCatalog.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/catalogs/{name}", headers=headers) + return DatabaseCatalog.from_dict(res) - def get_database_instance(self - , name: str - ) -> DatabaseInstance: + def get_database_instance(self, name: str) -> DatabaseInstance: """Get a Database Instance. - + :param name: str Name of the cluster to get. - + :returns: :class:`DatabaseInstance` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/database/instances/{name}' - - , headers=headers - ) - return DatabaseInstance.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers) + return DatabaseInstance.from_dict(res) - def get_database_table(self - , name: str - ) -> DatabaseTable: + def get_database_table(self, name: str) -> DatabaseTable: """Get a Database Table. - + :param name: str - + :returns: :class:`DatabaseTable` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/database/tables/{name}' - - , headers=headers - ) - return DatabaseTable.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/tables/{name}", headers=headers) + return DatabaseTable.from_dict(res) - def get_synced_database_table(self - , name: str - ) -> SyncedDatabaseTable: + def get_synced_database_table(self, name: str) -> SyncedDatabaseTable: """Get a Synced Database Table. 
- + :param name: str - + :returns: :class:`SyncedDatabaseTable` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/database/synced_tables/{name}' - - , headers=headers - ) - return SyncedDatabaseTable.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers) + return SyncedDatabaseTable.from_dict(res) - def list_database_instances(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[DatabaseInstance]: + def list_database_instances( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DatabaseInstance]: """List Database Instances. - + :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of Database Instances. Requests first page if absent. - + :returns: Iterator over :class:`DatabaseInstance` """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/database/instances', query=query - - , headers=headers - ) - if 'database_instances' in json: - for v in json['database_instances']: - yield DatabaseInstance.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update_database_instance(self - , name: str, database_instance: DatabaseInstance, update_mask: str - ) -> DatabaseInstance: + while True: + json = self._api.do("GET", "/api/2.0/database/instances", query=query, headers=headers) + if "database_instances" in json: + for v in json["database_instances"]: + yield DatabaseInstance.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_database_instance( + self, name: str, database_instance: DatabaseInstance, update_mask: str + ) -> DatabaseInstance: """Update a Database Instance. - + :param name: str The name of the instance. This is the unique identifier for the instance. :param database_instance: :class:`DatabaseInstance` A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. :param update_mask: str The list of fields to update. 
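# --- Editor's usage sketch (not part of the generated patch) -----------------
# Ties together the DatabaseAPI methods defined above: create, get, paginated
# list, partial update, and credential generation. Assumptions, not shown in
# this hunk: the service is exposed as `w.database` on WorkspaceClient, and
# DatabaseInstance accepts `name`/`stopped` fields; adjust if they differ.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.database import DatabaseInstance

w = WorkspaceClient()

# Create a logical Postgres instance, then read it back by name.
created = w.database.create_database_instance(DatabaseInstance(name="my-instance"))
fetched = w.database.get_database_instance(name="my-instance")

# list_database_instances() hides the next_page_token loop shown above, so
# callers simply iterate; page_size only bounds each underlying GET.
for inst in w.database.list_database_instances(page_size=50):
    print(inst.name)

# Partial update: update_mask names which fields of `database_instance` the
# PATCH should apply, mirroring the query-parameter handling above.
w.database.update_database_instance(
    name="my-instance",
    database_instance=DatabaseInstance(name="my-instance", stopped=True),
    update_mask="stopped",
)

# Short-lived credential scoped to a single instance.
cred = w.database.generate_database_credential(instance_names=["my-instance"])
# -----------------------------------------------------------------------------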
- + :returns: :class:`DatabaseInstance` """ body = database_instance.as_dict() query = {} - if update_mask is not None: query['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/database/instances/{name}', query=query, body=body - - , headers=headers - ) + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers) return DatabaseInstance.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index 51edd6751..a50e83a30 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -1,54 +1,49 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations -from dataclasses import dataclass -from datetime import timedelta -from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +import logging +from dataclasses import dataclass +from typing import Any, BinaryIO, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ._internal import _escape_multi_segment_path_parameter, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AddBlock: handle: int """The handle on an open stream.""" - + data: str """The base64-encoded data to append to the stream. 
This has a limit of 1 MB.""" - + def as_dict(self) -> dict: """Serializes the AddBlock into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data is not None: body['data'] = self.data - if self.handle is not None: body['handle'] = self.handle + if self.data is not None: + body["data"] = self.data + if self.handle is not None: + body["handle"] = self.handle return body def as_shallow_dict(self) -> dict: """Serializes the AddBlock into a shallow dictionary of its immediate attributes.""" body = {} - if self.data is not None: body['data'] = self.data - if self.handle is not None: body['handle'] = self.handle + if self.data is not None: + body["data"] = self.data + if self.handle is not None: + body["handle"] = self.handle return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AddBlock: """Deserializes the AddBlock from a dictionary.""" - return cls(data=d.get('data', None), handle=d.get('handle', None)) - - + return cls(data=d.get("data", None), handle=d.get("handle", None)) @dataclass @@ -67,33 +62,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AddBlockResponse: """Deserializes the AddBlockResponse from a dictionary.""" return cls() - - @dataclass class Close: handle: int """The handle on an open stream.""" - + def as_dict(self) -> dict: """Serializes the Close into a dictionary suitable for use as a JSON request body.""" body = {} - if self.handle is not None: body['handle'] = self.handle + if self.handle is not None: + body["handle"] = self.handle return body def as_shallow_dict(self) -> dict: """Serializes the Close into a shallow dictionary of its immediate attributes.""" body = {} - if self.handle is not None: body['handle'] = self.handle + if self.handle is not None: + body["handle"] = self.handle return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Close: """Deserializes the Close from a dictionary.""" - return cls(handle=d.get('handle', None)) - - + return cls(handle=d.get("handle", None)) @dataclass @@ -112,41 +105,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CloseResponse: """Deserializes the CloseResponse from a dictionary.""" return cls() - - @dataclass class Create: path: str """The path of the new file. 
The path should be the absolute DBFS path.""" - + overwrite: Optional[bool] = None """The flag that specifies whether to overwrite existing file/files.""" - + def as_dict(self) -> dict: """Serializes the Create into a dictionary suitable for use as a JSON request body.""" body = {} - if self.overwrite is not None: body['overwrite'] = self.overwrite - if self.path is not None: body['path'] = self.path + if self.overwrite is not None: + body["overwrite"] = self.overwrite + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the Create into a shallow dictionary of its immediate attributes.""" body = {} - if self.overwrite is not None: body['overwrite'] = self.overwrite - if self.path is not None: body['path'] = self.path + if self.overwrite is not None: + body["overwrite"] = self.overwrite + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Create: """Deserializes the Create from a dictionary.""" - return cls(overwrite=d.get('overwrite', None), path=d.get('path', None)) - - - - - + return cls(overwrite=d.get("overwrite", None), path=d.get("path", None)) @dataclass @@ -165,8 +155,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CreateDirectoryResponse: """Deserializes the CreateDirectoryResponse from a dictionary.""" return cls() - - @dataclass @@ -174,59 +162,58 @@ class CreateResponse: handle: Optional[int] = None """Handle which should subsequently be passed into the AddBlock and Close calls when writing to a file through a stream.""" - + def as_dict(self) -> dict: """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.handle is not None: body['handle'] = self.handle + if self.handle is not None: + body["handle"] = self.handle return body def as_shallow_dict(self) -> dict: """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.handle is not None: body['handle'] = self.handle + if self.handle is not None: + body["handle"] = self.handle return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" - return cls(handle=d.get('handle', None)) - - + return cls(handle=d.get("handle", None)) @dataclass class Delete: path: str """The path of the file or directory to delete. The path should be the absolute DBFS path.""" - + recursive: Optional[bool] = None """Whether or not to recursively delete the directory's contents. 
Deleting empty directories can be done without providing the recursive flag.""" - + def as_dict(self) -> dict: """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path - if self.recursive is not None: body['recursive'] = self.recursive + if self.path is not None: + body["path"] = self.path + if self.recursive is not None: + body["recursive"] = self.recursive return body def as_shallow_dict(self) -> dict: """Serializes the Delete into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path - if self.recursive is not None: body['recursive'] = self.recursive + if self.path is not None: + body["path"] = self.path + if self.recursive is not None: + body["recursive"] = self.recursive return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Delete: """Deserializes the Delete from a dictionary.""" - return cls(path=d.get('path', None), recursive=d.get('recursive', None)) - - - - - + return cls(path=d.get("path", None), recursive=d.get("recursive", None)) @dataclass @@ -245,11 +232,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteDirectoryResponse: """Deserializes the DeleteDirectoryResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -268,137 +250,165 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - @dataclass class DirectoryEntry: file_size: Optional[int] = None """The length of the file in bytes. This field is omitted for directories.""" - + is_directory: Optional[bool] = None """True if the path is a directory.""" - + last_modified: Optional[int] = None """Last modification time of given file in milliseconds since unix epoch.""" - + name: Optional[str] = None """The name of the file or directory. 
This is the last component of the path.""" - + path: Optional[str] = None """The absolute path of the file or directory.""" - + def as_dict(self) -> dict: """Serializes the DirectoryEntry into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_size is not None: body['file_size'] = self.file_size - if self.is_directory is not None: body['is_directory'] = self.is_directory - if self.last_modified is not None: body['last_modified'] = self.last_modified - if self.name is not None: body['name'] = self.name - if self.path is not None: body['path'] = self.path + if self.file_size is not None: + body["file_size"] = self.file_size + if self.is_directory is not None: + body["is_directory"] = self.is_directory + if self.last_modified is not None: + body["last_modified"] = self.last_modified + if self.name is not None: + body["name"] = self.name + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the DirectoryEntry into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_size is not None: body['file_size'] = self.file_size - if self.is_directory is not None: body['is_directory'] = self.is_directory - if self.last_modified is not None: body['last_modified'] = self.last_modified - if self.name is not None: body['name'] = self.name - if self.path is not None: body['path'] = self.path + if self.file_size is not None: + body["file_size"] = self.file_size + if self.is_directory is not None: + body["is_directory"] = self.is_directory + if self.last_modified is not None: + body["last_modified"] = self.last_modified + if self.name is not None: + body["name"] = self.name + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DirectoryEntry: """Deserializes the DirectoryEntry from a dictionary.""" - return cls(file_size=d.get('file_size', None), is_directory=d.get('is_directory', None), last_modified=d.get('last_modified', None), name=d.get('name', None), path=d.get('path', None)) - - - - - + return cls( + file_size=d.get("file_size", None), + is_directory=d.get("is_directory", None), + last_modified=d.get("last_modified", None), + name=d.get("name", None), + path=d.get("path", None), + ) @dataclass class DownloadResponse: content_length: Optional[int] = None """The length of the HTTP response body in bytes.""" - + content_type: Optional[str] = None - + contents: Optional[BinaryIO] = None - + last_modified: Optional[str] = None """The last modified time of the file in HTTP-date (RFC 7231) format.""" - + def as_dict(self) -> dict: """Serializes the DownloadResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content_length is not None: body['content-length'] = self.content_length - if self.content_type is not None: body['content-type'] = self.content_type - if self.contents: body['contents'] = self.contents - if self.last_modified is not None: body['last-modified'] = self.last_modified + if self.content_length is not None: + body["content-length"] = self.content_length + if self.content_type is not None: + body["content-type"] = self.content_type + if self.contents: + body["contents"] = self.contents + if self.last_modified is not None: + body["last-modified"] = self.last_modified return body def as_shallow_dict(self) -> dict: """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.content_length is not None: body['content-length'] = 
self.content_length - if self.content_type is not None: body['content-type'] = self.content_type - if self.contents: body['contents'] = self.contents - if self.last_modified is not None: body['last-modified'] = self.last_modified + if self.content_length is not None: + body["content-length"] = self.content_length + if self.content_type is not None: + body["content-type"] = self.content_type + if self.contents: + body["contents"] = self.contents + if self.last_modified is not None: + body["last-modified"] = self.last_modified return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DownloadResponse: """Deserializes the DownloadResponse from a dictionary.""" - return cls(content_length= int(d.get('content-length', None)), content_type=d.get('content-type', None), contents=d.get('contents', None), last_modified=d.get('last-modified', None)) - - + return cls( + content_length=int(d.get("content-length", None)), + content_type=d.get("content-type", None), + contents=d.get("contents", None), + last_modified=d.get("last-modified", None), + ) @dataclass class FileInfo: file_size: Optional[int] = None """The length of the file in bytes. This field is omitted for directories.""" - + is_dir: Optional[bool] = None """True if the path is a directory.""" - + modification_time: Optional[int] = None """Last modification time of given file in milliseconds since epoch.""" - + path: Optional[str] = None """The absolute path of the file or directory.""" - + def as_dict(self) -> dict: """Serializes the FileInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_size is not None: body['file_size'] = self.file_size - if self.is_dir is not None: body['is_dir'] = self.is_dir - if self.modification_time is not None: body['modification_time'] = self.modification_time - if self.path is not None: body['path'] = self.path + if self.file_size is not None: + body["file_size"] = self.file_size + if self.is_dir is not None: + body["is_dir"] = self.is_dir + if self.modification_time is not None: + body["modification_time"] = self.modification_time + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the FileInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_size is not None: body['file_size'] = self.file_size - if self.is_dir is not None: body['is_dir'] = self.is_dir - if self.modification_time is not None: body['modification_time'] = self.modification_time - if self.path is not None: body['path'] = self.path + if self.file_size is not None: + body["file_size"] = self.file_size + if self.is_dir is not None: + body["is_dir"] = self.is_dir + if self.modification_time is not None: + body["modification_time"] = self.modification_time + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileInfo: """Deserializes the FileInfo from a dictionary.""" - return cls(file_size=d.get('file_size', None), is_dir=d.get('is_dir', None), modification_time=d.get('modification_time', None), path=d.get('path', None)) - - - - - + return cls( + file_size=d.get("file_size", None), + is_dir=d.get("is_dir", None), + modification_time=d.get("modification_time", None), + path=d.get("path", None), + ) @dataclass @@ -417,134 +427,132 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> GetDirectoryMetadataResponse: """Deserializes the GetDirectoryMetadataResponse from a dictionary.""" return cls() - - - - - @dataclass 
class GetMetadataResponse: content_length: Optional[int] = None """The length of the HTTP response body in bytes.""" - + content_type: Optional[str] = None - + last_modified: Optional[str] = None """The last modified time of the file in HTTP-date (RFC 7231) format.""" - + def as_dict(self) -> dict: """Serializes the GetMetadataResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content_length is not None: body['content-length'] = self.content_length - if self.content_type is not None: body['content-type'] = self.content_type - if self.last_modified is not None: body['last-modified'] = self.last_modified + if self.content_length is not None: + body["content-length"] = self.content_length + if self.content_type is not None: + body["content-type"] = self.content_type + if self.last_modified is not None: + body["last-modified"] = self.last_modified return body def as_shallow_dict(self) -> dict: """Serializes the GetMetadataResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.content_length is not None: body['content-length'] = self.content_length - if self.content_type is not None: body['content-type'] = self.content_type - if self.last_modified is not None: body['last-modified'] = self.last_modified + if self.content_length is not None: + body["content-length"] = self.content_length + if self.content_type is not None: + body["content-type"] = self.content_type + if self.last_modified is not None: + body["last-modified"] = self.last_modified return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetMetadataResponse: """Deserializes the GetMetadataResponse from a dictionary.""" - return cls(content_length= int(d.get('content-length', None)), content_type=d.get('content-type', None), last_modified=d.get('last-modified', None)) - - - - - - - - - - - + return cls( + content_length=int(d.get("content-length", None)), + content_type=d.get("content-type", None), + last_modified=d.get("last-modified", None), + ) @dataclass class ListDirectoryResponse: contents: Optional[List[DirectoryEntry]] = None """Array of DirectoryEntry.""" - + next_page_token: Optional[str] = None """A token, which can be sent as `page_token` to retrieve the next page.""" - + def as_dict(self) -> dict: """Serializes the ListDirectoryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: body['contents'] = [v.as_dict() for v in self.contents] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.contents: + body["contents"] = [v.as_dict() for v in self.contents] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListDirectoryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: body['contents'] = self.contents - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.contents: + body["contents"] = self.contents + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListDirectoryResponse: """Deserializes the ListDirectoryResponse from a dictionary.""" - return cls(contents=_repeated_dict(d, 'contents', DirectoryEntry), next_page_token=d.get('next_page_token', None)) - - + return cls( + contents=_repeated_dict(d, "contents", DirectoryEntry), next_page_token=d.get("next_page_token", 
None) + ) @dataclass class ListStatusResponse: files: Optional[List[FileInfo]] = None """A list of FileInfo's that describe contents of directory or file. See example above.""" - + def as_dict(self) -> dict: """Serializes the ListStatusResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.files: body['files'] = [v.as_dict() for v in self.files] + if self.files: + body["files"] = [v.as_dict() for v in self.files] return body def as_shallow_dict(self) -> dict: """Serializes the ListStatusResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.files: body['files'] = self.files + if self.files: + body["files"] = self.files return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListStatusResponse: """Deserializes the ListStatusResponse from a dictionary.""" - return cls(files=_repeated_dict(d, 'files', FileInfo)) - - + return cls(files=_repeated_dict(d, "files", FileInfo)) @dataclass class MkDirs: path: str """The path of the new directory. The path should be the absolute DBFS path.""" - + def as_dict(self) -> dict: """Serializes the MkDirs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the MkDirs into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MkDirs: """Deserializes the MkDirs from a dictionary.""" - return cls(path=d.get('path', None)) - - + return cls(path=d.get("path", None)) @dataclass @@ -563,38 +571,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> MkDirsResponse: """Deserializes the MkDirsResponse from a dictionary.""" return cls() - - @dataclass class Move: source_path: str """The source path of the file or directory. The path should be the absolute DBFS path.""" - + destination_path: str """The destination path of the file or directory. 
The path should be the absolute DBFS path.""" - + def as_dict(self) -> dict: """Serializes the Move into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_path is not None: body['destination_path'] = self.destination_path - if self.source_path is not None: body['source_path'] = self.source_path + if self.destination_path is not None: + body["destination_path"] = self.destination_path + if self.source_path is not None: + body["source_path"] = self.source_path return body def as_shallow_dict(self) -> dict: """Serializes the Move into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_path is not None: body['destination_path'] = self.destination_path - if self.source_path is not None: body['source_path'] = self.source_path + if self.destination_path is not None: + body["destination_path"] = self.destination_path + if self.source_path is not None: + body["source_path"] = self.source_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Move: """Deserializes the Move from a dictionary.""" - return cls(destination_path=d.get('destination_path', None), source_path=d.get('source_path', None)) - - + return cls(destination_path=d.get("destination_path", None), source_path=d.get("source_path", None)) @dataclass @@ -613,43 +621,45 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> MoveResponse: """Deserializes the MoveResponse from a dictionary.""" return cls() - - @dataclass class Put: path: str """The path of the new file. The path should be the absolute DBFS path.""" - + contents: Optional[str] = None """This parameter might be absent, and instead a posted file will be used.""" - + overwrite: Optional[bool] = None """The flag that specifies whether to overwrite existing file/files.""" - + def as_dict(self) -> dict: """Serializes the Put into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents is not None: body['contents'] = self.contents - if self.overwrite is not None: body['overwrite'] = self.overwrite - if self.path is not None: body['path'] = self.path + if self.contents is not None: + body["contents"] = self.contents + if self.overwrite is not None: + body["overwrite"] = self.overwrite + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the Put into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents is not None: body['contents'] = self.contents - if self.overwrite is not None: body['overwrite'] = self.overwrite - if self.path is not None: body['path'] = self.path + if self.contents is not None: + body["contents"] = self.contents + if self.overwrite is not None: + body["overwrite"] = self.overwrite + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Put: """Deserializes the Put from a dictionary.""" - return cls(contents=d.get('contents', None), overwrite=d.get('overwrite', None), path=d.get('path', None)) - - + return cls(contents=d.get("contents", None), overwrite=d.get("overwrite", None), path=d.get("path", None)) @dataclass @@ -668,11 +678,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PutResponse: """Deserializes the PutResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -680,33 +685,32 @@ class ReadResponse: bytes_read: Optional[int] = None """The number of bytes read (could be less than ``length`` if we hit end of file). 
This refers to number of bytes read in unencoded version (response data is base64-encoded).""" - + data: Optional[str] = None """The base64-encoded contents of the file read.""" - + def as_dict(self) -> dict: """Serializes the ReadResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bytes_read is not None: body['bytes_read'] = self.bytes_read - if self.data is not None: body['data'] = self.data + if self.bytes_read is not None: + body["bytes_read"] = self.bytes_read + if self.data is not None: + body["data"] = self.data return body def as_shallow_dict(self) -> dict: """Serializes the ReadResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bytes_read is not None: body['bytes_read'] = self.bytes_read - if self.data is not None: body['data'] = self.data + if self.bytes_read is not None: + body["bytes_read"] = self.bytes_read + if self.data is not None: + body["data"] = self.data return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ReadResponse: """Deserializes the ReadResponse from a dictionary.""" - return cls(bytes_read=d.get('bytes_read', None), data=d.get('data', None)) - - - - - + return cls(bytes_read=d.get("bytes_read", None), data=d.get("data", None)) @dataclass @@ -725,349 +729,283 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UploadResponse: """Deserializes the UploadResponse from a dictionary.""" return cls() - - - - class DbfsAPI: """DBFS API makes it simple to interact with various data sources without having to include a users credentials every time to read a file.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def add_block(self - , handle: int, data: str - ): + def add_block(self, handle: int, data: str): """Append data block. - + Appends a block of data to the stream specified by the input handle. If the handle does not exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. - + If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``. - + :param handle: int The handle on an open stream. :param data: str The base64-encoded data to append to the stream. This has a limit of 1 MB. - - + + """ body = {} - if data is not None: body['data'] = data - if handle is not None: body['handle'] = handle - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/dbfs/add-block', body=body - - , headers=headers - ) - - - - - - - def close(self - , handle: int - ): + if data is not None: + body["data"] = data + if handle is not None: + body["handle"] = handle + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/dbfs/add-block", body=body, headers=headers) + + def close(self, handle: int): """Close the stream. - + Closes the stream specified by the input handle. If the handle does not exist, this call throws an exception with ``RESOURCE_DOES_NOT_EXIST``. - + :param handle: int The handle on an open stream. 
- - + + """ body = {} - if handle is not None: body['handle'] = handle - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/dbfs/close', body=body - - , headers=headers - ) - - - - - - - def create(self - , path: str - , * - , overwrite: Optional[bool] = None) -> CreateResponse: + if handle is not None: + body["handle"] = handle + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/dbfs/close", body=body, headers=headers) + + def create(self, path: str, *, overwrite: Optional[bool] = None) -> CreateResponse: """Open a stream. - + Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``. - + A typical workflow for file upload would be: - + 1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with the handle you have. 3. Issue a ``close`` call with the handle you have. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing file/files. - + :returns: :class:`CreateResponse` """ body = {} - if overwrite is not None: body['overwrite'] = overwrite - if path is not None: body['path'] = path - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/dbfs/create', body=body - - , headers=headers - ) + if overwrite is not None: + body["overwrite"] = overwrite + if path is not None: + body["path"] = path + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/dbfs/create", body=body, headers=headers) return CreateResponse.from_dict(res) - - - - - def delete(self - , path: str - , * - , recursive: Optional[bool] = None): + def delete(self, path: str, *, recursive: Optional[bool] = None): """Delete a file/directory. - + Delete the file or directory (optionally recursively delete all files in the directory). This call throws an exception with `IO_ERROR` if the path is a non-empty directory and `recursive` is set to `false` or on other similar errors. - + When you delete a large number of files, the delete operation is done in increments. The call returns a response after approximately 45 seconds with an error message (503 Service Unavailable) asking you to re-invoke the delete operation until the directory structure is fully deleted. - + For operations that delete more than 10K files, we discourage using the DBFS REST API, but advise you to perform such operations in the context of a cluster, using the [File system utility (dbutils.fs)](/dev-tools/databricks-utils.html#dbutils-fs). `dbutils.fs` covers the functional scope of the DBFS REST API, but from notebooks. Running such operations using notebooks provides better control and manageability, such as selective deletes, and the possibility to automate periodic delete jobs. - + :param path: str The path of the file or directory to delete. The path should be the absolute DBFS path. :param recursive: bool (optional) Whether or not to recursively delete the directory's contents. Deleting empty directories can be done without providing the recursive flag. 
- - + + """ body = {} - if path is not None: body['path'] = path - if recursive is not None: body['recursive'] = recursive - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/dbfs/delete', body=body - - , headers=headers - ) - - - - - - - def get_status(self - , path: str - ) -> FileInfo: + if path is not None: + body["path"] = path + if recursive is not None: + body["recursive"] = recursive + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/dbfs/delete", body=body, headers=headers) + + def get_status(self, path: str) -> FileInfo: """Get the information of a file or directory. - + Gets the file information for a file or directory. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The path of the file or directory. The path should be the absolute DBFS path. - + :returns: :class:`FileInfo` """ - + query = {} - if path is not None: query['path'] = path - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/dbfs/get-status', query=query - - , headers=headers - ) - return FileInfo.from_dict(res) + if path is not None: + query["path"] = path + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/dbfs/get-status", query=query, headers=headers) + return FileInfo.from_dict(res) - def list(self - , path: str - ) -> Iterator[FileInfo]: + def list(self, path: str) -> Iterator[FileInfo]: """List directory contents or file details. - + List the contents of a directory, or details of the file. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. - + When calling list on a large directory, the list operation will time out after approximately 60 seconds. We strongly recommend using list only on directories containing less than 10K files and discourage using the DBFS REST API for operations that list more than 10K files. Instead, we recommend that you perform such operations in the context of a cluster, using the [File system utility (dbutils.fs)](/dev-tools/databricks-utils.html#dbutils-fs), which provides the same functionality without timing out. - + :param path: str The path of the file or directory. The path should be the absolute DBFS path. - + :returns: Iterator over :class:`FileInfo` """ - + query = {} - if path is not None: query['path'] = path - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/dbfs/list', query=query - - , headers=headers - ) + if path is not None: + query["path"] = path + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/dbfs/list", query=query, headers=headers) parsed = ListStatusResponse.from_dict(json).files return parsed if parsed is not None else [] - - - - - - def mkdirs(self - , path: str - ): + def mkdirs(self, path: str): """Create a directory. - + Creates the given directory and necessary parent directories if they do not exist. If a file (not a directory) exists at any prefix of the input path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. **Note**: If this operation fails, it might have succeeded in creating some of the necessary parent directories. - + :param path: str The path of the new directory. The path should be the absolute DBFS path. 
- - + + """ body = {} - if path is not None: body['path'] = path - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/dbfs/mkdirs', body=body - - , headers=headers - ) - - - - - - - def move(self - , source_path: str, destination_path: str - ): + if path is not None: + body["path"] = path + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/dbfs/mkdirs", body=body, headers=headers) + + def move(self, source_path: str, destination_path: str): """Move a file. - + Moves a file from one location to another location within DBFS. If the source file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source path is a directory, this call always recursively moves all files. - + :param source_path: str The source path of the file or directory. The path should be the absolute DBFS path. :param destination_path: str The destination path of the file or directory. The path should be the absolute DBFS path. - - + + """ body = {} - if destination_path is not None: body['destination_path'] = destination_path - if source_path is not None: body['source_path'] = source_path - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/dbfs/move', body=body - - , headers=headers - ) - - - - - - - def put(self - , path: str - , * - , contents: Optional[str] = None, overwrite: Optional[bool] = None): + if destination_path is not None: + body["destination_path"] = destination_path + if source_path is not None: + body["source_path"] = source_path + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/dbfs/move", body=body, headers=headers) + + def put(self, path: str, *, contents: Optional[str] = None, overwrite: Optional[bool] = None): """Upload a file. - + Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but can also be used as a convenient single call for data upload. - + Alternatively you can pass contents as base64 string. - + The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded. - + If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create, :method:dbfs/addBlock, :method:dbfs/close. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param contents: str (optional) This parameter might be absent, and instead a posted file will be used. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing file/files. 
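The `put` docstring above describes the convenient single-call path for small payloads. A sketch under the same `w` assumption; note the inline `contents` value is base64 and capped at 1 MB, so larger files should use the create/add-block/close workflow shown earlier.

    import base64

    w.dbfs.put(
        "/tmp/small.txt",
        contents=base64.b64encode(b"small payload").decode(),
        overwrite=True,
    )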
- - + + """ body = {} - if contents is not None: body['contents'] = contents - if overwrite is not None: body['overwrite'] = overwrite - if path is not None: body['path'] = path - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/dbfs/put', body=body - - , headers=headers - ) - - - - - - - def read(self - , path: str - , * - , length: Optional[int] = None, offset: Optional[int] = None) -> ReadResponse: + if contents is not None: + body["contents"] = contents + if overwrite is not None: + body["overwrite"] = overwrite + if path is not None: + body["path"] = path + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/dbfs/put", body=body, headers=headers) + + def read(self, path: str, *, length: Optional[int] = None, offset: Optional[int] = None) -> ReadResponse: """Get the contents of a file. - + Returns the contents of a file. If the file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`. - + If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of file. - + :param path: str The path of the file to read. The path should be the absolute DBFS path. :param length: int (optional) @@ -1075,237 +1013,198 @@ def read(self of 0.5 MB. :param offset: int (optional) The offset to read from in bytes. - + :returns: :class:`ReadResponse` """ - + query = {} - if length is not None: query['length'] = length - if offset is not None: query['offset'] = offset - if path is not None: query['path'] = path - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/dbfs/read', query=query - - , headers=headers - ) + if length is not None: + query["length"] = length + if offset is not None: + query["offset"] = offset + if path is not None: + query["path"] = path + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/dbfs/read", query=query, headers=headers) return ReadResponse.from_dict(res) - - + class FilesAPI: """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI. The API makes working with file content as raw bytes easier and more efficient. - + The API supports [Unity Catalog volumes], where files and directories to operate on are specified using their volume URI path, which follows the format /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>. - + The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI path. The path is always absolute. - + Some Files API client features are currently experimental. To enable them, set `enable_experimental_files_api_client = True` in your configuration profile or use the environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - + Use of Files API may incur Databricks data transfer charges. 
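For the `read` endpoint shown in this hunk, a sketch of reading a file in chunks under the 1 MB per-call cap (assuming `w` as before, and that `ReadResponse.data` is base64-encoded, per this module's conventions):

    import base64

    offset = 0
    chunks = []
    while True:
        r = w.dbfs.read("/tmp/example.txt", offset=offset, length=512 * 1024)
        if not r.bytes_read:
            break  # reached end of file
        chunks.append(base64.b64decode(r.data))
        offset += r.bytes_read
    data = b"".join(chunks)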
- + [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_directory(self - , directory_path: str - ): + def create_directory(self, directory_path: str): """Create a directory. - + Creates an empty directory. If necessary, also creates any parent directories of the new, empty directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success response; this method is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + + """ - + headers = {} - - self._api.do('PUT',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}' - - , headers=headers - ) - - - - - - - def delete(self - , file_path: str - ): + + self._api.do( + "PUT", f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", headers=headers + ) + + def delete(self, file_path: str): """Delete a file. - + Deletes a file. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}' - - , headers=headers - ) - - - - - - - def delete_directory(self - , directory_path: str - ): + + self._api.do("DELETE", f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", headers=headers) + + def delete_directory(self, directory_path: str): """Delete a directory. - + Deletes an empty directory. - + To delete a non-empty directory, first delete all of its contents. This can be done by listing the directory contents and deleting each file and subdirectory recursively. - + :param directory_path: str The absolute path of a directory. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}' - - , headers=headers - ) - - - - - - - def download(self - , file_path: str - ) -> DownloadResponse: + + self._api.do( + "DELETE", f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", headers=headers + ) + + def download(self, file_path: str) -> DownloadResponse: """Download a file. - + Downloads a file. The file contents are the response body. This is a standard HTTP file download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. - + :param file_path: str The absolute path of the file. - + :returns: :class:`DownloadResponse` """ - - headers = {'Accept': 'application/octet-stream',} - response_headers = ['content-length','content-type','last-modified',] - res = self._api.do('GET',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}' - - , headers=headers - , response_headers=response_headers, raw=True) - return DownloadResponse.from_dict(res) - - - + headers = { + "Accept": "application/octet-stream", + } + response_headers = [ + "content-length", + "content-type", + "last-modified", + ] + res = self._api.do( + "GET", + f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", + headers=headers, + response_headers=response_headers, + raw=True, + ) + return DownloadResponse.from_dict(res) - def get_directory_metadata(self - , directory_path: str - ): + def get_directory_metadata(self, directory_path: str): """Get directory metadata. - + Get the metadata of a directory. The response HTTP headers contain the metadata. 
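To illustrate the `download` endpoint above: a minimal sketch assuming `w` and a hypothetical Unity Catalog volume path. `DownloadResponse.contents` is the raw binary stream of the HTTP response body.

    resp = w.files.download("/Volumes/main/default/my_volume/report.csv")
    with open("report.csv", "wb") as f:
        f.write(resp.contents.read())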
There is no response body. - + This method is useful to check if a directory exists and the caller has access to it. - + If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory if it does not exist, and is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + + """ - + headers = {} - - self._api.do('HEAD',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}' - - , headers=headers - ) - - - - - - - def get_metadata(self - , file_path: str - ) -> GetMetadataResponse: + + self._api.do( + "HEAD", f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", headers=headers + ) + + def get_metadata(self, file_path: str) -> GetMetadataResponse: """Get file metadata. - + Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body. - + :param file_path: str The absolute path of the file. - + :returns: :class:`GetMetadataResponse` """ - + headers = {} - response_headers = ['content-length','content-type','last-modified',] - res = self._api.do('HEAD',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}' - - , headers=headers - , response_headers=response_headers) + response_headers = [ + "content-length", + "content-type", + "last-modified", + ] + res = self._api.do( + "HEAD", + f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", + headers=headers, + response_headers=response_headers, + ) return GetMetadataResponse.from_dict(res) - - - - - def list_directory_contents(self - , directory_path: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[DirectoryEntry]: + def list_directory_contents( + self, directory_path: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[DirectoryEntry]: """List directory contents. - + Returns the contents of a directory. If there is no directory at the specified path, the API returns a HTTP 404 error. - + :param directory_path: str The absolute path of a directory. :param page_size: int (optional) The maximum number of directory entries to return. The response may contain fewer entries. If the response contains a `next_page_token`, there may be more entries, even if fewer than `page_size` entries are in the response. - + We recommend not to set this value unless you are intentionally listing less than the complete directory contents. - + If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values above 1000 will be coerced to 1000. :param page_token: str (optional) @@ -1315,64 +1214,62 @@ def list_directory_contents(self request. To list all of the entries in a directory, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete. 
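The paging contract described above is handled inside the generator itself (see the `while True` loop in the new code that follows), so callers never touch `next_page_token` directly. A sketch under the same assumptions:

    for entry in w.files.list_directory_contents("/Volumes/main/default/my_volume"):
        print(entry.path, entry.is_directory)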
- + :returns: Iterator over :class:`DirectoryEntry` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}', query=query - - , headers=headers - ) - if 'contents' in json: - for v in json['contents']: - yield DirectoryEntry.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def upload(self - , file_path: str, contents: BinaryIO - , * - , overwrite: Optional[bool] = None): + json = self._api.do( + "GET", + f"/api/2.0/fs/directories{_escape_multi_segment_path_parameter(directory_path)}", + query=query, + headers=headers, + ) + if "contents" in json: + for v in json["contents"]: + yield DirectoryEntry.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def upload(self, file_path: str, contents: BinaryIO, *, overwrite: Optional[bool] = None): """Upload a file. - + Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The contents of the resulting file will be exactly the bytes sent in the request body. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. :param contents: BinaryIO :param overwrite: bool (optional) If true or unspecified, an existing file will be overwritten. If false, an error will be returned if the path points to an existing file. - - + + """ - + query = {} - if overwrite is not None: query['overwrite'] = overwrite - headers = {'Content-Type': 'application/octet-stream',} - - self._api.do('PUT',f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}', query=query - - , headers=headers - , data=contents) - - - - \ No newline at end of file + if overwrite is not None: + query["overwrite"] = overwrite + headers = { + "Content-Type": "application/octet-stream", + } + + self._api.do( + "PUT", + f"/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}", + query=query, + headers=headers, + data=contents, + ) diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 637b3c578..3b43f74b0 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -1,243 +1,266 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[PermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the AccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccessControlRequest: """Deserializes the AccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", PermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class AccessControlResponse: all_permissions: Optional[List[Permission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or 
service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the AccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccessControlResponse: """Deserializes the AccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', Permission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", Permission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class Actor: """represents an identity trying to access a resource - user or a service principal group can be a principal of a permission set assignment but an actor is always a user or a service principal""" - + actor_id: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the Actor into a dictionary suitable for use as a JSON request body.""" body = {} - if self.actor_id is not None: body['actor_id'] = self.actor_id + if self.actor_id is not None: + body["actor_id"] = self.actor_id return body def as_shallow_dict(self) -> dict: """Serializes the Actor into a shallow dictionary of its immediate attributes.""" body = {} - if self.actor_id is not None: 
body['actor_id'] = self.actor_id + if self.actor_id is not None: + body["actor_id"] = self.actor_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Actor: """Deserializes the Actor from a dictionary.""" - return cls(actor_id=d.get('actor_id', None)) - - - - - + return cls(actor_id=d.get("actor_id", None)) @dataclass class CheckPolicyResponse: consistency_token: ConsistencyToken - + is_permitted: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the CheckPolicyResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.consistency_token: body['consistency_token'] = self.consistency_token.as_dict() - if self.is_permitted is not None: body['is_permitted'] = self.is_permitted + if self.consistency_token: + body["consistency_token"] = self.consistency_token.as_dict() + if self.is_permitted is not None: + body["is_permitted"] = self.is_permitted return body def as_shallow_dict(self) -> dict: """Serializes the CheckPolicyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.consistency_token: body['consistency_token'] = self.consistency_token - if self.is_permitted is not None: body['is_permitted'] = self.is_permitted + if self.consistency_token: + body["consistency_token"] = self.consistency_token + if self.is_permitted is not None: + body["is_permitted"] = self.is_permitted return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CheckPolicyResponse: """Deserializes the CheckPolicyResponse from a dictionary.""" - return cls(consistency_token=_from_dict(d, 'consistency_token', ConsistencyToken), is_permitted=d.get('is_permitted', None)) - - + return cls( + consistency_token=_from_dict(d, "consistency_token", ConsistencyToken), + is_permitted=d.get("is_permitted", None), + ) @dataclass class ComplexValue: display: Optional[str] = None - + primary: Optional[bool] = None - + ref: Optional[str] = None - + type: Optional[str] = None - + value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ComplexValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display is not None: body['display'] = self.display - if self.primary is not None: body['primary'] = self.primary - if self.ref is not None: body['$ref'] = self.ref - if self.type is not None: body['type'] = self.type - if self.value is not None: body['value'] = self.value + if self.display is not None: + body["display"] = self.display + if self.primary is not None: + body["primary"] = self.primary + if self.ref is not None: + body["$ref"] = self.ref + if self.type is not None: + body["type"] = self.type + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ComplexValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.display is not None: body['display'] = self.display - if self.primary is not None: body['primary'] = self.primary - if self.ref is not None: body['$ref'] = self.ref - if self.type is not None: body['type'] = self.type - if self.value is not None: body['value'] = self.value + if self.display is not None: + body["display"] = self.display + if self.primary is not None: + body["primary"] = self.primary + if self.ref is not None: + body["$ref"] = self.ref + if self.type is not None: + body["type"] = self.type + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComplexValue: """Deserializes the ComplexValue from a 
dictionary.""" - return cls(display=d.get('display', None), primary=d.get('primary', None), ref=d.get('$ref', None), type=d.get('type', None), value=d.get('value', None)) - - + return cls( + display=d.get("display", None), + primary=d.get("primary", None), + ref=d.get("$ref", None), + type=d.get("type", None), + value=d.get("value", None), + ) @dataclass class ConsistencyToken: value: str - + def as_dict(self) -> dict: """Serializes the ConsistencyToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: body['value'] = self.value + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ConsistencyToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.value is not None: body['value'] = self.value + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ConsistencyToken: """Deserializes the ConsistencyToken from a dictionary.""" - return cls(value=d.get('value', None)) - - - - - - - - - - - - - - + return cls(value=d.get("value", None)) @dataclass @@ -256,17 +279,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - - - - - - - @dataclass @@ -285,803 +297,898 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteWorkspacePermissionAssignmentResponse: """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary.""" return cls() - - - - - - - - - - - - - - @dataclass class GetAssignableRolesForResourceResponse: roles: Optional[List[Role]] = None - + def as_dict(self) -> dict: """Serializes the GetAssignableRolesForResourceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.roles: body['roles'] = [v.as_dict() for v in self.roles] + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] return body def as_shallow_dict(self) -> dict: """Serializes the GetAssignableRolesForResourceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.roles: body['roles'] = self.roles + if self.roles: + body["roles"] = self.roles return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetAssignableRolesForResourceResponse: """Deserializes the GetAssignableRolesForResourceResponse from a dictionary.""" - return cls(roles=_repeated_dict(d, 'roles', Role)) - - - - - + return cls(roles=_repeated_dict(d, "roles", Role)) @dataclass class GetPasswordPermissionLevelsResponse: permission_levels: Optional[List[PasswordPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetPasswordPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetPasswordPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
GetPasswordPermissionLevelsResponse: """Deserializes the GetPasswordPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', PasswordPermissionsDescription)) - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", PasswordPermissionsDescription)) @dataclass class GetPermissionLevelsResponse: permission_levels: Optional[List[PermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPermissionLevelsResponse: """Deserializes the GetPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', PermissionsDescription)) - - - - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", PermissionsDescription)) class GetSortOrder(Enum): - - - ASCENDING = 'ascending' - DESCENDING = 'descending' - - - - + ASCENDING = "ascending" + DESCENDING = "descending" @dataclass class GrantRule: role: str """Role that is assigned to the list of principals.""" - + principals: Optional[List[str]] = None """Principals this grant rule applies to. A principal can be a user (for end users), a service principal (for applications and compute workloads), or an account group. Each principal has its own identifier format: * users/ * groups/ * servicePrincipals/""" - + def as_dict(self) -> dict: """Serializes the GrantRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principals: body['principals'] = [v for v in self.principals] - if self.role is not None: body['role'] = self.role + if self.principals: + body["principals"] = [v for v in self.principals] + if self.role is not None: + body["role"] = self.role return body def as_shallow_dict(self) -> dict: """Serializes the GrantRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.principals: body['principals'] = self.principals - if self.role is not None: body['role'] = self.role + if self.principals: + body["principals"] = self.principals + if self.role is not None: + body["role"] = self.role return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GrantRule: """Deserializes the GrantRule from a dictionary.""" - return cls(principals=d.get('principals', None), role=d.get('role', None)) - - + return cls(principals=d.get("principals", None), role=d.get("role", None)) @dataclass class Group: display_name: Optional[str] = None """String that represents a human-readable group name""" - + entitlements: Optional[List[ComplexValue]] = None """Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. 
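The reformatted `as_dict`/`from_dict` pairs throughout this hunk are symmetric serializers over SCIM-style camelCase keys. A round-trip sketch, assuming the module imports as published:

    from databricks.sdk.service.iam import Group, GroupSchema

    g = Group(
        display_name="data-engineers",
        schemas=[GroupSchema.URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP],
    )
    body = g.as_dict()  # camelCase keys, e.g. {"displayName": ...}
    assert Group.from_dict(body).display_name == "data-engineers"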
[assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements""" - + external_id: Optional[str] = None - + groups: Optional[List[ComplexValue]] = None - + id: Optional[str] = None """Databricks group ID""" - + members: Optional[List[ComplexValue]] = None - + meta: Optional[ResourceMeta] = None """Container for the group identifier. Workspace local versus account.""" - + roles: Optional[List[ComplexValue]] = None """Corresponds to AWS instance profile/arn role.""" - + schemas: Optional[List[GroupSchema]] = None """The schema of the group.""" - + def as_dict(self) -> dict: """Serializes the Group into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: body['displayName'] = self.display_name - if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements] - if self.external_id is not None: body['externalId'] = self.external_id - if self.groups: body['groups'] = [v.as_dict() for v in self.groups] - if self.id is not None: body['id'] = self.id - if self.members: body['members'] = [v.as_dict() for v in self.members] - if self.meta: body['meta'] = self.meta.as_dict() - if self.roles: body['roles'] = [v.as_dict() for v in self.roles] - if self.schemas: body['schemas'] = [v.value for v in self.schemas] + if self.display_name is not None: + body["displayName"] = self.display_name + if self.entitlements: + body["entitlements"] = [v.as_dict() for v in self.entitlements] + if self.external_id is not None: + body["externalId"] = self.external_id + if self.groups: + body["groups"] = [v.as_dict() for v in self.groups] + if self.id is not None: + body["id"] = self.id + if self.members: + body["members"] = [v.as_dict() for v in self.members] + if self.meta: + body["meta"] = self.meta.as_dict() + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] return body def as_shallow_dict(self) -> dict: """Serializes the Group into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: body['displayName'] = self.display_name - if self.entitlements: body['entitlements'] = self.entitlements - if self.external_id is not None: body['externalId'] = self.external_id - if self.groups: body['groups'] = self.groups - if self.id is not None: body['id'] = self.id - if self.members: body['members'] = self.members - if self.meta: body['meta'] = self.meta - if self.roles: body['roles'] = self.roles - if self.schemas: body['schemas'] = self.schemas + if self.display_name is not None: + body["displayName"] = self.display_name + if self.entitlements: + body["entitlements"] = self.entitlements + if self.external_id is not None: + body["externalId"] = self.external_id + if self.groups: + body["groups"] = self.groups + if self.id is not None: + body["id"] = self.id + if self.members: + body["members"] = self.members + if self.meta: + body["meta"] = self.meta + if self.roles: + body["roles"] = self.roles + if self.schemas: + body["schemas"] = self.schemas return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Group: """Deserializes the Group from a dictionary.""" - return cls(display_name=d.get('displayName', None), entitlements=_repeated_dict(d, 'entitlements', ComplexValue), external_id=d.get('externalId', None), groups=_repeated_dict(d, 'groups', ComplexValue), id=d.get('id', None), members=_repeated_dict(d, 'members', ComplexValue), meta=_from_dict(d, 'meta', 
ResourceMeta), roles=_repeated_dict(d, 'roles', ComplexValue), schemas=_repeated_enum(d, 'schemas', GroupSchema)) - - + return cls( + display_name=d.get("displayName", None), + entitlements=_repeated_dict(d, "entitlements", ComplexValue), + external_id=d.get("externalId", None), + groups=_repeated_dict(d, "groups", ComplexValue), + id=d.get("id", None), + members=_repeated_dict(d, "members", ComplexValue), + meta=_from_dict(d, "meta", ResourceMeta), + roles=_repeated_dict(d, "roles", ComplexValue), + schemas=_repeated_enum(d, "schemas", GroupSchema), + ) class GroupSchema(Enum): - - - URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP = 'urn:ietf:params:scim:schemas:core:2.0:Group' - - - - - - - - - - + URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_GROUP = "urn:ietf:params:scim:schemas:core:2.0:Group" @dataclass class ListGroupsResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - + resources: Optional[List[Group]] = None """User objects returned in the response.""" - + schemas: Optional[List[ListResponseSchema]] = None """The schema of the service principal.""" - + start_index: Optional[int] = None """Starting index of all the results that matched the request filters. First item is number 1.""" - + total_results: Optional[int] = None """Total results that match the request filters.""" - + def as_dict(self) -> dict: """Serializes the ListGroupsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page - if self.resources: body['Resources'] = [v.as_dict() for v in self.resources] - if self.schemas: body['schemas'] = [v.value for v in self.schemas] - if self.start_index is not None: body['startIndex'] = self.start_index - if self.total_results is not None: body['totalResults'] = self.total_results + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results return body def as_shallow_dict(self) -> dict: """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page - if self.resources: body['Resources'] = self.resources - if self.schemas: body['schemas'] = self.schemas - if self.start_index is not None: body['startIndex'] = self.start_index - if self.total_results is not None: body['totalResults'] = self.total_results + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListGroupsResponse: """Deserializes the ListGroupsResponse from a dictionary.""" - return cls(items_per_page=d.get('itemsPerPage', None), resources=_repeated_dict(d, 'Resources', Group), schemas=_repeated_enum(d, 'schemas', ListResponseSchema), start_index=d.get('startIndex', None), total_results=d.get('totalResults', None)) - - + return cls( + items_per_page=d.get("itemsPerPage", None), + 
resources=_repeated_dict(d, "Resources", Group), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), + start_index=d.get("startIndex", None), + total_results=d.get("totalResults", None), + ) class ListResponseSchema(Enum): - - - URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = 'urn:ietf:params:scim:api:messages:2.0:ListResponse' + + URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_LIST_RESPONSE = "urn:ietf:params:scim:api:messages:2.0:ListResponse" + @dataclass class ListServicePrincipalResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - + resources: Optional[List[ServicePrincipal]] = None """User objects returned in the response.""" - + schemas: Optional[List[ListResponseSchema]] = None """The schema of the List response.""" - + start_index: Optional[int] = None """Starting index of all the results that matched the request filters. First item is number 1.""" - + total_results: Optional[int] = None """Total results that match the request filters.""" - + def as_dict(self) -> dict: """Serializes the ListServicePrincipalResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page - if self.resources: body['Resources'] = [v.as_dict() for v in self.resources] - if self.schemas: body['schemas'] = [v.value for v in self.schemas] - if self.start_index is not None: body['startIndex'] = self.start_index - if self.total_results is not None: body['totalResults'] = self.total_results + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results return body def as_shallow_dict(self) -> dict: """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page - if self.resources: body['Resources'] = self.resources - if self.schemas: body['schemas'] = self.schemas - if self.start_index is not None: body['startIndex'] = self.start_index - if self.total_results is not None: body['totalResults'] = self.total_results + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalResponse: """Deserializes the ListServicePrincipalResponse from a dictionary.""" - return cls(items_per_page=d.get('itemsPerPage', None), resources=_repeated_dict(d, 'Resources', ServicePrincipal), schemas=_repeated_enum(d, 'schemas', ListResponseSchema), start_index=d.get('startIndex', None), total_results=d.get('totalResults', None)) - - - - - + return cls( + items_per_page=d.get("itemsPerPage", None), + resources=_repeated_dict(d, "Resources", ServicePrincipal), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), + start_index=d.get("startIndex", None), + total_results=d.get("totalResults", None), + ) class ListSortOrder(Enum): - - - ASCENDING = 'ascending' - 
DESCENDING = 'descending' - + ASCENDING = "ascending" + DESCENDING = "descending" @dataclass class ListUsersResponse: items_per_page: Optional[int] = None """Total results returned in the response.""" - + resources: Optional[List[User]] = None """User objects returned in the response.""" - + schemas: Optional[List[ListResponseSchema]] = None """The schema of the List response.""" - + start_index: Optional[int] = None """Starting index of all the results that matched the request filters. First item is number 1.""" - + total_results: Optional[int] = None """Total results that match the request filters.""" - + def as_dict(self) -> dict: """Serializes the ListUsersResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page - if self.resources: body['Resources'] = [v.as_dict() for v in self.resources] - if self.schemas: body['schemas'] = [v.value for v in self.schemas] - if self.start_index is not None: body['startIndex'] = self.start_index - if self.total_results is not None: body['totalResults'] = self.total_results + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = [v.as_dict() for v in self.resources] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results return body def as_shallow_dict(self) -> dict: """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page - if self.resources: body['Resources'] = self.resources - if self.schemas: body['schemas'] = self.schemas - if self.start_index is not None: body['startIndex'] = self.start_index - if self.total_results is not None: body['totalResults'] = self.total_results + if self.items_per_page is not None: + body["itemsPerPage"] = self.items_per_page + if self.resources: + body["Resources"] = self.resources + if self.schemas: + body["schemas"] = self.schemas + if self.start_index is not None: + body["startIndex"] = self.start_index + if self.total_results is not None: + body["totalResults"] = self.total_results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: """Deserializes the ListUsersResponse from a dictionary.""" - return cls(items_per_page=d.get('itemsPerPage', None), resources=_repeated_dict(d, 'Resources', User), schemas=_repeated_enum(d, 'schemas', ListResponseSchema), start_index=d.get('startIndex', None), total_results=d.get('totalResults', None)) - - - - - + return cls( + items_per_page=d.get("itemsPerPage", None), + resources=_repeated_dict(d, "Resources", User), + schemas=_repeated_enum(d, "schemas", ListResponseSchema), + start_index=d.get("startIndex", None), + total_results=d.get("totalResults", None), + ) @dataclass class MigratePermissionsRequest: workspace_id: int """WorkspaceId of the associated workspace where the permission migration will occur.""" - + from_workspace_group_name: str """The name of the workspace group that permissions will be migrated from.""" - + to_account_group_name: str """The name of the account group that permissions will be migrated to.""" - + size: Optional[int] = None """The maximum number of permissions that will be migrated.""" - + def as_dict(self) -> dict: """Serializes the 
MigratePermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.from_workspace_group_name is not None: body['from_workspace_group_name'] = self.from_workspace_group_name - if self.size is not None: body['size'] = self.size - if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.from_workspace_group_name is not None: + body["from_workspace_group_name"] = self.from_workspace_group_name + if self.size is not None: + body["size"] = self.size + if self.to_account_group_name is not None: + body["to_account_group_name"] = self.to_account_group_name + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.from_workspace_group_name is not None: body['from_workspace_group_name'] = self.from_workspace_group_name - if self.size is not None: body['size'] = self.size - if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.from_workspace_group_name is not None: + body["from_workspace_group_name"] = self.from_workspace_group_name + if self.size is not None: + body["size"] = self.size + if self.to_account_group_name is not None: + body["to_account_group_name"] = self.to_account_group_name + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MigratePermissionsRequest: """Deserializes the MigratePermissionsRequest from a dictionary.""" - return cls(from_workspace_group_name=d.get('from_workspace_group_name', None), size=d.get('size', None), to_account_group_name=d.get('to_account_group_name', None), workspace_id=d.get('workspace_id', None)) - - + return cls( + from_workspace_group_name=d.get("from_workspace_group_name", None), + size=d.get("size", None), + to_account_group_name=d.get("to_account_group_name", None), + workspace_id=d.get("workspace_id", None), + ) @dataclass class MigratePermissionsResponse: permissions_migrated: Optional[int] = None """Number of permissions migrated.""" - + def as_dict(self) -> dict: """Serializes the MigratePermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated + if self.permissions_migrated is not None: + body["permissions_migrated"] = self.permissions_migrated return body def as_shallow_dict(self) -> dict: """Serializes the MigratePermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated + if self.permissions_migrated is not None: + body["permissions_migrated"] = self.permissions_migrated return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MigratePermissionsResponse: """Deserializes the MigratePermissionsResponse from a dictionary.""" - return cls(permissions_migrated=d.get('permissions_migrated', None)) - - + return cls(permissions_migrated=d.get("permissions_migrated", None)) @dataclass class Name: family_name: Optional[str] = None """Family name of the Databricks user.""" - + given_name: Optional[str] = 
None """Given name of the Databricks user.""" - + def as_dict(self) -> dict: """Serializes the Name into a dictionary suitable for use as a JSON request body.""" body = {} - if self.family_name is not None: body['familyName'] = self.family_name - if self.given_name is not None: body['givenName'] = self.given_name + if self.family_name is not None: + body["familyName"] = self.family_name + if self.given_name is not None: + body["givenName"] = self.given_name return body def as_shallow_dict(self) -> dict: """Serializes the Name into a shallow dictionary of its immediate attributes.""" body = {} - if self.family_name is not None: body['familyName'] = self.family_name - if self.given_name is not None: body['givenName'] = self.given_name + if self.family_name is not None: + body["familyName"] = self.family_name + if self.given_name is not None: + body["givenName"] = self.given_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Name: """Deserializes the Name from a dictionary.""" - return cls(family_name=d.get('familyName', None), given_name=d.get('givenName', None)) - - + return cls(family_name=d.get("familyName", None), given_name=d.get("givenName", None)) @dataclass class ObjectPermissions: access_control_list: Optional[List[AccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ObjectPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the ObjectPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ObjectPermissions: """Deserializes the ObjectPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", AccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class PartialUpdate: id: Optional[str] = None """Unique ID in the Databricks workspace.""" - + operations: Optional[List[Patch]] = None - + schemas: Optional[List[PatchSchema]] = None """The schema of the patch request. 
Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].""" - + def as_dict(self) -> dict: """Serializes the PartialUpdate into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.operations: body['Operations'] = [v.as_dict() for v in self.operations] - if self.schemas: body['schemas'] = [v.value for v in self.schemas] + if self.id is not None: + body["id"] = self.id + if self.operations: + body["Operations"] = [v.as_dict() for v in self.operations] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] return body def as_shallow_dict(self) -> dict: """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.operations: body['Operations'] = self.operations - if self.schemas: body['schemas'] = self.schemas + if self.id is not None: + body["id"] = self.id + if self.operations: + body["Operations"] = self.operations + if self.schemas: + body["schemas"] = self.schemas return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PartialUpdate: """Deserializes the PartialUpdate from a dictionary.""" - return cls(id=d.get('id', None), operations=_repeated_dict(d, 'Operations', Patch), schemas=_repeated_enum(d, 'schemas', PatchSchema)) - - + return cls( + id=d.get("id", None), + operations=_repeated_dict(d, "Operations", Patch), + schemas=_repeated_enum(d, "schemas", PatchSchema), + ) @dataclass class PasswordAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[PasswordPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the PasswordAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PasswordAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> PasswordAccessControlRequest: """Deserializes the PasswordAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PasswordPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", PasswordPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class PasswordAccessControlResponse: all_permissions: Optional[List[PasswordPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the PasswordAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PasswordAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PasswordAccessControlResponse: """Deserializes the PasswordAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', PasswordPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", PasswordPermission), + 
display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class PasswordPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[PasswordPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the PasswordPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PasswordPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PasswordPermission: """Deserializes the PasswordPermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', PasswordPermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", PasswordPermissionLevel), + ) class PasswordPermissionLevel(Enum): """Permission level""" - - CAN_USE = 'CAN_USE' + + CAN_USE = "CAN_USE" + @dataclass class PasswordPermissions: access_control_list: Optional[List[PasswordAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the PasswordPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the PasswordPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: 
body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissions: """Deserializes the PasswordPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', PasswordAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", PasswordAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class PasswordPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[PasswordPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the PasswordPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PasswordPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsDescription: """Deserializes the PasswordPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', PasswordPermissionLevel)) - - + return cls( + description=d.get("description", None), + permission_level=_enum(d, "permission_level", PasswordPermissionLevel), + ) @dataclass class PasswordPermissionsRequest: access_control_list: Optional[List[PasswordAccessControlRequest]] = None - + def as_dict(self) -> dict: """Serializes the PasswordPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] return body def as_shallow_dict(self) -> dict: """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list + if self.access_control_list: + body["access_control_list"] = self.access_control_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsRequest: """Deserializes the PasswordPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', PasswordAccessControlRequest)) - - + return cls(access_control_list=_repeated_dict(d, 
"access_control_list", PasswordAccessControlRequest)) @dataclass class Patch: op: Optional[PatchOp] = None """Type of patch operation.""" - + path: Optional[str] = None """Selection of patch operation""" - + value: Optional[Any] = None """Value to modify""" - + def as_dict(self) -> dict: """Serializes the Patch into a dictionary suitable for use as a JSON request body.""" body = {} - if self.op is not None: body['op'] = self.op.value - if self.path is not None: body['path'] = self.path - if self.value: body['value'] = self.value + if self.op is not None: + body["op"] = self.op.value + if self.path is not None: + body["path"] = self.path + if self.value: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the Patch into a shallow dictionary of its immediate attributes.""" body = {} - if self.op is not None: body['op'] = self.op - if self.path is not None: body['path'] = self.path - if self.value: body['value'] = self.value + if self.op is not None: + body["op"] = self.op + if self.path is not None: + body["path"] = self.path + if self.value: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Patch: """Deserializes the Patch from a dictionary.""" - return cls(op=_enum(d, 'op', PatchOp), path=d.get('path', None), value=d.get('value', None)) - - + return cls(op=_enum(d, "op", PatchOp), path=d.get("path", None), value=d.get("value", None)) class PatchOp(Enum): """Type of patch operation.""" - - ADD = 'add' - REMOVE = 'remove' - REPLACE = 'replace' + + ADD = "add" + REMOVE = "remove" + REPLACE = "replace" + @dataclass class PatchResponse: @@ -1099,279 +1206,325 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PatchResponse: """Deserializes the PatchResponse from a dictionary.""" return cls() - - class PatchSchema(Enum): - - - URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = 'urn:ietf:params:scim:api:messages:2.0:PatchOp' + + URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP = "urn:ietf:params:scim:api:messages:2.0:PatchOp" + @dataclass class Permission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[PermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the Permission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the Permission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] 
= self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Permission: """Deserializes the Permission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', PermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", PermissionLevel), + ) @dataclass class PermissionAssignment: """The output format for existing workspace PermissionAssignment records, which contains some info for user consumption.""" - + error: Optional[str] = None """Error response associated with a workspace permission assignment, if any.""" - + permissions: Optional[List[WorkspacePermission]] = None """The permissions level of the principal.""" - + principal: Optional[PrincipalOutput] = None """Information about the principal assigned to the workspace.""" - + def as_dict(self) -> dict: """Serializes the PermissionAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error is not None: body['error'] = self.error - if self.permissions: body['permissions'] = [v.value for v in self.permissions] - if self.principal: body['principal'] = self.principal.as_dict() + if self.error is not None: + body["error"] = self.error + if self.permissions: + body["permissions"] = [v.value for v in self.permissions] + if self.principal: + body["principal"] = self.principal.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PermissionAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.error is not None: body['error'] = self.error - if self.permissions: body['permissions'] = self.permissions - if self.principal: body['principal'] = self.principal + if self.error is not None: + body["error"] = self.error + if self.permissions: + body["permissions"] = self.permissions + if self.principal: + body["principal"] = self.principal return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionAssignment: """Deserializes the PermissionAssignment from a dictionary.""" - return cls(error=d.get('error', None), permissions=_repeated_enum(d, 'permissions', WorkspacePermission), principal=_from_dict(d, 'principal', PrincipalOutput)) - - + return cls( + error=d.get("error", None), + permissions=_repeated_enum(d, "permissions", WorkspacePermission), + principal=_from_dict(d, "principal", PrincipalOutput), + ) @dataclass class PermissionAssignments: permission_assignments: Optional[List[PermissionAssignment]] = None """Array of permissions assignments defined for a workspace.""" - + def as_dict(self) -> dict: """Serializes the PermissionAssignments into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_assignments: body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments] + if self.permission_assignments: + body["permission_assignments"] = [v.as_dict() for v in self.permission_assignments] return body def as_shallow_dict(self) -> dict: """Serializes the PermissionAssignments into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_assignments: body['permission_assignments'] = self.permission_assignments + if self.permission_assignments: + body["permission_assignments"] = self.permission_assignments return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionAssignments: 
"""Deserializes the PermissionAssignments from a dictionary.""" - return cls(permission_assignments=_repeated_dict(d, 'permission_assignments', PermissionAssignment)) - - + return cls(permission_assignments=_repeated_dict(d, "permission_assignments", PermissionAssignment)) class PermissionLevel(Enum): """Permission level""" - - CAN_ATTACH_TO = 'CAN_ATTACH_TO' - CAN_BIND = 'CAN_BIND' - CAN_CREATE = 'CAN_CREATE' - CAN_EDIT = 'CAN_EDIT' - CAN_EDIT_METADATA = 'CAN_EDIT_METADATA' - CAN_MANAGE = 'CAN_MANAGE' - CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' - CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' - CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' - CAN_MONITOR = 'CAN_MONITOR' - CAN_MONITOR_ONLY = 'CAN_MONITOR_ONLY' - CAN_QUERY = 'CAN_QUERY' - CAN_READ = 'CAN_READ' - CAN_RESTART = 'CAN_RESTART' - CAN_RUN = 'CAN_RUN' - CAN_USE = 'CAN_USE' - CAN_VIEW = 'CAN_VIEW' - CAN_VIEW_METADATA = 'CAN_VIEW_METADATA' - IS_OWNER = 'IS_OWNER' + + CAN_ATTACH_TO = "CAN_ATTACH_TO" + CAN_BIND = "CAN_BIND" + CAN_CREATE = "CAN_CREATE" + CAN_EDIT = "CAN_EDIT" + CAN_EDIT_METADATA = "CAN_EDIT_METADATA" + CAN_MANAGE = "CAN_MANAGE" + CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS" + CAN_MANAGE_RUN = "CAN_MANAGE_RUN" + CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS" + CAN_MONITOR = "CAN_MONITOR" + CAN_MONITOR_ONLY = "CAN_MONITOR_ONLY" + CAN_QUERY = "CAN_QUERY" + CAN_READ = "CAN_READ" + CAN_RESTART = "CAN_RESTART" + CAN_RUN = "CAN_RUN" + CAN_USE = "CAN_USE" + CAN_VIEW = "CAN_VIEW" + CAN_VIEW_METADATA = "CAN_VIEW_METADATA" + IS_OWNER = "IS_OWNER" + @dataclass class PermissionOutput: description: Optional[str] = None """The results of a permissions query.""" - + permission_level: Optional[WorkspacePermission] = None - + def as_dict(self) -> dict: """Serializes the PermissionOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PermissionOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionOutput: """Deserializes the PermissionOutput from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', WorkspacePermission)) - - + return cls( + description=d.get("description", None), permission_level=_enum(d, "permission_level", WorkspacePermission) + ) @dataclass class PermissionsDescription: description: Optional[str] = None - + permission_level: Optional[PermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the PermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = 
self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PermissionsDescription: """Deserializes the PermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', PermissionLevel)) - - + return cls( + description=d.get("description", None), permission_level=_enum(d, "permission_level", PermissionLevel) + ) @dataclass class PrincipalOutput: """Information about the principal assigned to the workspace.""" - + display_name: Optional[str] = None """The display name of the principal.""" - + group_name: Optional[str] = None """The group name of the group. Present only if the principal is a group.""" - + principal_id: Optional[int] = None """The unique, opaque id of the principal.""" - + service_principal_name: Optional[str] = None """The name of the service principal. Present only if the principal is a service principal.""" - + user_name: Optional[str] = None """The username of the user. Present only if the principal is a user.""" - + def as_dict(self) -> dict: """Serializes the PrincipalOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.principal_id is not None: body['principal_id'] = self.principal_id - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PrincipalOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.principal_id is not None: body['principal_id'] = self.principal_id - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + 
if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrincipalOutput: """Deserializes the PrincipalOutput from a dictionary.""" - return cls(display_name=d.get('display_name', None), group_name=d.get('group_name', None), principal_id=d.get('principal_id', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + principal_id=d.get("principal_id", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) class RequestAuthzIdentity(Enum): """Defines the identity to be used for authZ of the request on the server side. See one pager for more information: http://go/acl/service-identity""" - - REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = 'REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY' - REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = 'REQUEST_AUTHZ_IDENTITY_USER_CONTEXT' + + REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = "REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY" + REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = "REQUEST_AUTHZ_IDENTITY_USER_CONTEXT" + @dataclass class ResourceInfo: id: str """Id of the current resource.""" - + legacy_acl_path: Optional[str] = None """The legacy acl path of the current resource.""" - + parent_resource_info: Optional[ResourceInfo] = None """Parent resource info for the current resource. The parent may have another parent.""" - + def as_dict(self) -> dict: """Serializes the ResourceInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path - if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info.as_dict() + if self.id is not None: + body["id"] = self.id + if self.legacy_acl_path is not None: + body["legacy_acl_path"] = self.legacy_acl_path + if self.parent_resource_info: + body["parent_resource_info"] = self.parent_resource_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ResourceInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path - if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info + if self.id is not None: + body["id"] = self.id + if self.legacy_acl_path is not None: + body["legacy_acl_path"] = self.legacy_acl_path + if self.parent_resource_info: + body["parent_resource_info"] = self.parent_resource_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResourceInfo: """Deserializes the ResourceInfo from a dictionary.""" - return cls(id=d.get('id', None), legacy_acl_path=d.get('legacy_acl_path', None), parent_resource_info=_from_dict(d, 'parent_resource_info', ResourceInfo)) - - + return cls( + id=d.get("id", None), + legacy_acl_path=d.get("legacy_acl_path", None), + parent_resource_info=_from_dict(d, "parent_resource_info", ResourceInfo), + ) @dataclass @@ -1379,57 +1532,57 @@ class ResourceMeta: resource_type: Optional[str] = None """Identifier for group type. 
Can be local workspace group (`WorkspaceGroup`) or account group (`Group`).""" - + def as_dict(self) -> dict: """Serializes the ResourceMeta into a dictionary suitable for use as a JSON request body.""" body = {} - if self.resource_type is not None: body['resourceType'] = self.resource_type + if self.resource_type is not None: + body["resourceType"] = self.resource_type return body def as_shallow_dict(self) -> dict: """Serializes the ResourceMeta into a shallow dictionary of its immediate attributes.""" body = {} - if self.resource_type is not None: body['resourceType'] = self.resource_type + if self.resource_type is not None: + body["resourceType"] = self.resource_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResourceMeta: """Deserializes the ResourceMeta from a dictionary.""" - return cls(resource_type=d.get('resourceType', None)) - - + return cls(resource_type=d.get("resourceType", None)) @dataclass class Role: name: str """Role to assign to a principal or a list of principals on a resource.""" - + def as_dict(self) -> dict: """Serializes the Role into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the Role into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Role: """Deserializes the Role from a dictionary.""" - return cls(name=d.get('name', None)) - - + return cls(name=d.get("name", None)) @dataclass class RuleSetResponse: name: str """Name of the rule set.""" - + etag: str """Identifies the version of the rule set returned. Etag used for versioning. The response is at least as fresh as the eTag provided. 
Etag is used for optimistic concurrency control as a way to @@ -1438,38 +1591,44 @@ class RuleSetResponse: rule set updates in order to avoid race conditions; that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating.""" - + grant_rules: Optional[List[GrantRule]] = None - + def as_dict(self) -> dict: """Serializes the RuleSetResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules] - if self.name is not None: body['name'] = self.name + if self.etag is not None: + body["etag"] = self.etag + if self.grant_rules: + body["grant_rules"] = [v.as_dict() for v in self.grant_rules] + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the RuleSetResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.grant_rules: body['grant_rules'] = self.grant_rules - if self.name is not None: body['name'] = self.name + if self.etag is not None: + body["etag"] = self.etag + if self.grant_rules: + body["grant_rules"] = self.grant_rules + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RuleSetResponse: """Deserializes the RuleSetResponse from a dictionary.""" - return cls(etag=d.get('etag', None), grant_rules=_repeated_dict(d, 'grant_rules', GrantRule), name=d.get('name', None)) - - + return cls( + etag=d.get("etag", None), grant_rules=_repeated_dict(d, "grant_rules", GrantRule), name=d.get("name", None) + ) @dataclass class RuleSetUpdateRequest: name: str """Name of the rule set.""" - + etag: str """Identifies the version of the rule set returned. Etag used for versioning. The response is at least as fresh as the eTag provided. 
Etag is used for optimistic concurrency control as a way to @@ -1478,174 +1637,222 @@ class RuleSetUpdateRequest: rule set updates in order to avoid race conditions; that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating.""" - + grant_rules: Optional[List[GrantRule]] = None - + def as_dict(self) -> dict: """Serializes the RuleSetUpdateRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.grant_rules: body['grant_rules'] = [v.as_dict() for v in self.grant_rules] - if self.name is not None: body['name'] = self.name + if self.etag is not None: + body["etag"] = self.etag + if self.grant_rules: + body["grant_rules"] = [v.as_dict() for v in self.grant_rules] + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the RuleSetUpdateRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.grant_rules: body['grant_rules'] = self.grant_rules - if self.name is not None: body['name'] = self.name + if self.etag is not None: + body["etag"] = self.etag + if self.grant_rules: + body["grant_rules"] = self.grant_rules + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RuleSetUpdateRequest: """Deserializes the RuleSetUpdateRequest from a dictionary.""" - return cls(etag=d.get('etag', None), grant_rules=_repeated_dict(d, 'grant_rules', GrantRule), name=d.get('name', None)) - - + return cls( + etag=d.get("etag", None), grant_rules=_repeated_dict(d, "grant_rules", GrantRule), name=d.get("name", None) + ) @dataclass class ServicePrincipal: active: Optional[bool] = None """If this user is active""" - + application_id: Optional[str] = None """UUID relating to the service principal""" - + display_name: Optional[str] = None """String that represents a concatenation of given and family names.""" - + entitlements: Optional[List[ComplexValue]] = None """Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. 
[assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements""" - + external_id: Optional[str] = None - + groups: Optional[List[ComplexValue]] = None - + id: Optional[str] = None """Databricks service principal ID.""" - + roles: Optional[List[ComplexValue]] = None """Corresponds to AWS instance profile/arn role.""" - + schemas: Optional[List[ServicePrincipalSchema]] = None """The schema of the List response.""" - + def as_dict(self) -> dict: """Serializes the ServicePrincipal into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active is not None: body['active'] = self.active - if self.application_id is not None: body['applicationId'] = self.application_id - if self.display_name is not None: body['displayName'] = self.display_name - if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements] - if self.external_id is not None: body['externalId'] = self.external_id - if self.groups: body['groups'] = [v.as_dict() for v in self.groups] - if self.id is not None: body['id'] = self.id - if self.roles: body['roles'] = [v.as_dict() for v in self.roles] - if self.schemas: body['schemas'] = [v.value for v in self.schemas] + if self.active is not None: + body["active"] = self.active + if self.application_id is not None: + body["applicationId"] = self.application_id + if self.display_name is not None: + body["displayName"] = self.display_name + if self.entitlements: + body["entitlements"] = [v.as_dict() for v in self.entitlements] + if self.external_id is not None: + body["externalId"] = self.external_id + if self.groups: + body["groups"] = [v.as_dict() for v in self.groups] + if self.id is not None: + body["id"] = self.id + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] return body def as_shallow_dict(self) -> dict: """Serializes the ServicePrincipal into a shallow dictionary of its immediate attributes.""" body = {} - if self.active is not None: body['active'] = self.active - if self.application_id is not None: body['applicationId'] = self.application_id - if self.display_name is not None: body['displayName'] = self.display_name - if self.entitlements: body['entitlements'] = self.entitlements - if self.external_id is not None: body['externalId'] = self.external_id - if self.groups: body['groups'] = self.groups - if self.id is not None: body['id'] = self.id - if self.roles: body['roles'] = self.roles - if self.schemas: body['schemas'] = self.schemas + if self.active is not None: + body["active"] = self.active + if self.application_id is not None: + body["applicationId"] = self.application_id + if self.display_name is not None: + body["displayName"] = self.display_name + if self.entitlements: + body["entitlements"] = self.entitlements + if self.external_id is not None: + body["externalId"] = self.external_id + if self.groups: + body["groups"] = self.groups + if self.id is not None: + body["id"] = self.id + if self.roles: + body["roles"] = self.roles + if self.schemas: + body["schemas"] = self.schemas return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServicePrincipal: """Deserializes the ServicePrincipal from a dictionary.""" - return cls(active=d.get('active', None), application_id=d.get('applicationId', None), display_name=d.get('displayName', None), entitlements=_repeated_dict(d, 'entitlements', ComplexValue), external_id=d.get('externalId', None), groups=_repeated_dict(d, 
'groups', ComplexValue), id=d.get('id', None), roles=_repeated_dict(d, 'roles', ComplexValue), schemas=_repeated_enum(d, 'schemas', ServicePrincipalSchema)) - - + return cls( + active=d.get("active", None), + application_id=d.get("applicationId", None), + display_name=d.get("displayName", None), + entitlements=_repeated_dict(d, "entitlements", ComplexValue), + external_id=d.get("externalId", None), + groups=_repeated_dict(d, "groups", ComplexValue), + id=d.get("id", None), + roles=_repeated_dict(d, "roles", ComplexValue), + schemas=_repeated_enum(d, "schemas", ServicePrincipalSchema), + ) class ServicePrincipalSchema(Enum): - - - URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = 'urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal' + + URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = "urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal" + @dataclass class SetObjectPermissions: access_control_list: Optional[List[AccessControlRequest]] = None - + request_object_id: Optional[str] = None """The id of the request object.""" - + request_object_type: Optional[str] = None """The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - + def as_dict(self) -> dict: """Serializes the SetObjectPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: body['request_object_id'] = self.request_object_id - if self.request_object_type is not None: body['request_object_type'] = self.request_object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.request_object_id is not None: + body["request_object_id"] = self.request_object_id + if self.request_object_type is not None: + body["request_object_type"] = self.request_object_type return body def as_shallow_dict(self) -> dict: """Serializes the SetObjectPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.request_object_id is not None: body['request_object_id'] = self.request_object_id - if self.request_object_type is not None: body['request_object_type'] = self.request_object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.request_object_id is not None: + body["request_object_id"] = self.request_object_id + if self.request_object_type is not None: + body["request_object_type"] = self.request_object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetObjectPermissions: """Deserializes the SetObjectPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlRequest), request_object_id=d.get('request_object_id', None), request_object_type=d.get('request_object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), + request_object_id=d.get("request_object_id", None), + request_object_type=d.get("request_object_type", None), + ) @dataclass class UpdateObjectPermissions: access_control_list: Optional[List[AccessControlRequest]] = None - + 
request_object_id: Optional[str] = None """The id of the request object.""" - + request_object_type: Optional[str] = None """The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - + def as_dict(self) -> dict: """Serializes the UpdateObjectPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: body['request_object_id'] = self.request_object_id - if self.request_object_type is not None: body['request_object_type'] = self.request_object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.request_object_id is not None: + body["request_object_id"] = self.request_object_id + if self.request_object_type is not None: + body["request_object_type"] = self.request_object_type return body def as_shallow_dict(self) -> dict: """Serializes the UpdateObjectPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.request_object_id is not None: body['request_object_id'] = self.request_object_id - if self.request_object_type is not None: body['request_object_type'] = self.request_object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.request_object_id is not None: + body["request_object_id"] = self.request_object_id + if self.request_object_type is not None: + body["request_object_type"] = self.request_object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateObjectPermissions: """Deserializes the UpdateObjectPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControlRequest), request_object_id=d.get('request_object_id', None), request_object_type=d.get('request_object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), + request_object_id=d.get("request_object_id", None), + request_object_type=d.get("request_object_type", None), + ) @dataclass @@ -1664,37 +1871,37 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() - - @dataclass class UpdateRuleSetRequest: name: str """Name of the rule set.""" - + rule_set: RuleSetUpdateRequest - + def as_dict(self) -> dict: """Serializes the UpdateRuleSetRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.rule_set: body['rule_set'] = self.rule_set.as_dict() + if self.name is not None: + body["name"] = self.name + if self.rule_set: + body["rule_set"] = self.rule_set.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.rule_set: body['rule_set'] = self.rule_set + if self.name is not None: + body["name"] = self.name + if self.rule_set: + body["rule_set"] = self.rule_set return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> UpdateRuleSetRequest: """Deserializes the UpdateRuleSetRequest from a dictionary.""" - return cls(name=d.get('name', None), rule_set=_from_dict(d, 'rule_set', RuleSetUpdateRequest)) - - + return cls(name=d.get("name", None), rule_set=_from_dict(d, "rule_set", RuleSetUpdateRequest)) @dataclass @@ -1705,177 +1912,215 @@ class UpdateWorkspaceAssignments: values will be ignored. Note that excluding this field, or providing unsupported values, will have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal.""" - + principal_id: Optional[int] = None """The ID of the user, service principal, or group.""" - + workspace_id: Optional[int] = None """The workspace ID.""" - + def as_dict(self) -> dict: """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permissions: body['permissions'] = [v.value for v in self.permissions] - if self.principal_id is not None: body['principal_id'] = self.principal_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.permissions: + body["permissions"] = [v.value for v in self.permissions] + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes.""" body = {} - if self.permissions: body['permissions'] = self.permissions - if self.principal_id is not None: body['principal_id'] = self.principal_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.permissions: + body["permissions"] = self.permissions + if self.principal_id is not None: + body["principal_id"] = self.principal_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceAssignments: """Deserializes the UpdateWorkspaceAssignments from a dictionary.""" - return cls(permissions=_repeated_enum(d, 'permissions', WorkspacePermission), principal_id=d.get('principal_id', None), workspace_id=d.get('workspace_id', None)) - - + return cls( + permissions=_repeated_enum(d, "permissions", WorkspacePermission), + principal_id=d.get("principal_id", None), + workspace_id=d.get("workspace_id", None), + ) @dataclass class User: active: Optional[bool] = None """If this user is active""" - + display_name: Optional[str] = None """String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation""" - + emails: Optional[List[ComplexValue]] = None """All the emails associated with the Databricks user.""" - + entitlements: Optional[List[ComplexValue]] = None """Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements""" - + external_id: Optional[str] = None """External ID is not currently supported. 
It is reserved for future use.""" - + groups: Optional[List[ComplexValue]] = None - + id: Optional[str] = None """Databricks user ID.""" - + name: Optional[Name] = None - + roles: Optional[List[ComplexValue]] = None """Corresponds to AWS instance profile/arn role.""" - + schemas: Optional[List[UserSchema]] = None """The schema of the user.""" - + user_name: Optional[str] = None """Email address of the Databricks user.""" - + def as_dict(self) -> dict: """Serializes the User into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active is not None: body['active'] = self.active - if self.display_name is not None: body['displayName'] = self.display_name - if self.emails: body['emails'] = [v.as_dict() for v in self.emails] - if self.entitlements: body['entitlements'] = [v.as_dict() for v in self.entitlements] - if self.external_id is not None: body['externalId'] = self.external_id - if self.groups: body['groups'] = [v.as_dict() for v in self.groups] - if self.id is not None: body['id'] = self.id - if self.name: body['name'] = self.name.as_dict() - if self.roles: body['roles'] = [v.as_dict() for v in self.roles] - if self.schemas: body['schemas'] = [v.value for v in self.schemas] - if self.user_name is not None: body['userName'] = self.user_name + if self.active is not None: + body["active"] = self.active + if self.display_name is not None: + body["displayName"] = self.display_name + if self.emails: + body["emails"] = [v.as_dict() for v in self.emails] + if self.entitlements: + body["entitlements"] = [v.as_dict() for v in self.entitlements] + if self.external_id is not None: + body["externalId"] = self.external_id + if self.groups: + body["groups"] = [v.as_dict() for v in self.groups] + if self.id is not None: + body["id"] = self.id + if self.name: + body["name"] = self.name.as_dict() + if self.roles: + body["roles"] = [v.as_dict() for v in self.roles] + if self.schemas: + body["schemas"] = [v.value for v in self.schemas] + if self.user_name is not None: + body["userName"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the User into a shallow dictionary of its immediate attributes.""" body = {} - if self.active is not None: body['active'] = self.active - if self.display_name is not None: body['displayName'] = self.display_name - if self.emails: body['emails'] = self.emails - if self.entitlements: body['entitlements'] = self.entitlements - if self.external_id is not None: body['externalId'] = self.external_id - if self.groups: body['groups'] = self.groups - if self.id is not None: body['id'] = self.id - if self.name: body['name'] = self.name - if self.roles: body['roles'] = self.roles - if self.schemas: body['schemas'] = self.schemas - if self.user_name is not None: body['userName'] = self.user_name + if self.active is not None: + body["active"] = self.active + if self.display_name is not None: + body["displayName"] = self.display_name + if self.emails: + body["emails"] = self.emails + if self.entitlements: + body["entitlements"] = self.entitlements + if self.external_id is not None: + body["externalId"] = self.external_id + if self.groups: + body["groups"] = self.groups + if self.id is not None: + body["id"] = self.id + if self.name: + body["name"] = self.name + if self.roles: + body["roles"] = self.roles + if self.schemas: + body["schemas"] = self.schemas + if self.user_name is not None: + body["userName"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> User: """Deserializes the User from a 
dictionary.""" - return cls(active=d.get('active', None), display_name=d.get('displayName', None), emails=_repeated_dict(d, 'emails', ComplexValue), entitlements=_repeated_dict(d, 'entitlements', ComplexValue), external_id=d.get('externalId', None), groups=_repeated_dict(d, 'groups', ComplexValue), id=d.get('id', None), name=_from_dict(d, 'name', Name), roles=_repeated_dict(d, 'roles', ComplexValue), schemas=_repeated_enum(d, 'schemas', UserSchema), user_name=d.get('userName', None)) - - + return cls( + active=d.get("active", None), + display_name=d.get("displayName", None), + emails=_repeated_dict(d, "emails", ComplexValue), + entitlements=_repeated_dict(d, "entitlements", ComplexValue), + external_id=d.get("externalId", None), + groups=_repeated_dict(d, "groups", ComplexValue), + id=d.get("id", None), + name=_from_dict(d, "name", Name), + roles=_repeated_dict(d, "roles", ComplexValue), + schemas=_repeated_enum(d, "schemas", UserSchema), + user_name=d.get("userName", None), + ) class UserSchema(Enum): - - - URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER = 'urn:ietf:params:scim:schemas:core:2.0:User' - URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER = 'urn:ietf:params:scim:schemas:extension:workspace:2.0:User' + + URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_USER = "urn:ietf:params:scim:schemas:core:2.0:User" + URN_IETF_PARAMS_SCIM_SCHEMAS_EXTENSION_WORKSPACE_2_0_USER = ( + "urn:ietf:params:scim:schemas:extension:workspace:2.0:User" + ) + class WorkspacePermission(Enum): - - - ADMIN = 'ADMIN' - UNKNOWN = 'UNKNOWN' - USER = 'USER' + + ADMIN = "ADMIN" + UNKNOWN = "UNKNOWN" + USER = "USER" + @dataclass class WorkspacePermissions: permissions: Optional[List[PermissionOutput]] = None """Array of permissions defined for a workspace.""" - + def as_dict(self) -> dict: """Serializes the WorkspacePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions] + if self.permissions: + body["permissions"] = [v.as_dict() for v in self.permissions] return body def as_shallow_dict(self) -> dict: """Serializes the WorkspacePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.permissions: body['permissions'] = self.permissions + if self.permissions: + body["permissions"] = self.permissions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspacePermissions: """Deserializes the WorkspacePermissions from a dictionary.""" - return cls(permissions=_repeated_dict(d, 'permissions', PermissionOutput)) - - - - + return cls(permissions=_repeated_dict(d, "permissions", PermissionOutput)) class AccessControlAPI: """Rule based Access Control for Databricks Resources.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def check_policy(self - , actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity - , * - , resource_info: Optional[ResourceInfo] = None) -> CheckPolicyResponse: + def check_policy( + self, + actor: Actor, + permission: str, + resource: str, + consistency_token: ConsistencyToken, + authz_identity: RequestAuthzIdentity, + *, + resource_info: Optional[ResourceInfo] = None, + ) -> CheckPolicyResponse: """Check access policy to a resource. 
- + :param actor: :class:`Actor` :param permission: str :param resource: str @@ -1884,87 +2129,80 @@ def check_policy(self :param consistency_token: :class:`ConsistencyToken` :param authz_identity: :class:`RequestAuthzIdentity` :param resource_info: :class:`ResourceInfo` (optional) - + :returns: :class:`CheckPolicyResponse` """ - + query = {} - if actor is not None: query['actor'] = actor.as_dict() - if authz_identity is not None: query['authz_identity'] = authz_identity.value - if consistency_token is not None: query['consistency_token'] = consistency_token.as_dict() - if permission is not None: query['permission'] = permission - if resource is not None: query['resource'] = resource - if resource_info is not None: query['resource_info'] = resource_info.as_dict() - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/access-control/check-policy-v2', query=query - - , headers=headers - ) + if actor is not None: + query["actor"] = actor.as_dict() + if authz_identity is not None: + query["authz_identity"] = authz_identity.value + if consistency_token is not None: + query["consistency_token"] = consistency_token.as_dict() + if permission is not None: + query["permission"] = permission + if resource is not None: + query["resource"] = resource + if resource_info is not None: + query["resource_info"] = resource_info.as_dict() + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/access-control/check-policy-v2", query=query, headers=headers) return CheckPolicyResponse.from_dict(res) - - + class AccountAccessControlAPI: """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is called a rule set.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get_assignable_roles_for_resource(self - , resource: str - ) -> GetAssignableRolesForResourceResponse: + def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse: """Get assignable roles for a resource. - + Gets all the roles that can be granted on an account-level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse` """ - + query = {} - if resource is not None: query['resource'] = resource - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/assignable-roles', query=query - - , headers=headers - ) + if resource is not None: + query["resource"] = resource + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/assignable-roles", + query=query, + headers=headers, + ) return GetAssignableRolesForResourceResponse.from_dict(res) - - - - - def get_rule_set(self - , name: str, etag: str - ) -> RuleSetResponse: + def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: """Get a rule set. - + Get a rule set by its name. 
A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account. `name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -1977,116 +2215,108 @@ def get_rule_set(self modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoded a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - if name is not None: query['name'] = name - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + if name is not None: + query["name"] = name + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets", + query=query, + headers=headers, + ) return RuleSetResponse.from_dict(res) - - - - - def update_rule_set(self - , name: str, rule_set: RuleSetUpdateRequest - ) -> RuleSetResponse: + def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse: """Update a rule set. - + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set. :param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse` """ body = {} - if name is not None: body['name'] = name - if rule_set is not None: body['rule_set'] = rule_set.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets', body=body - - , headers=headers - ) + if name is not None: + body["name"] = name + if rule_set is not None: + body["rule_set"] = rule_set.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/preview/accounts/{self._api.account_id}/access-control/rule-sets", + body=body, + headers=headers, + ) return RuleSetResponse.from_dict(res) - - + class AccountAccessControlProxyAPI: """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is called a rule set. A workspace must belong to an account for these APIs to work""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get_assignable_roles_for_resource(self - , resource: str - ) -> GetAssignableRolesForResourceResponse: + def get_assignable_roles_for_resource(self, resource: str) -> GetAssignableRolesForResourceResponse: """Get assignable roles for a resource. 
- + Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse` """ - + query = {} - if resource is not None: query['resource'] = resource - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/preview/accounts/access-control/assignable-roles', query=query - - , headers=headers - ) + if resource is not None: + query["resource"] = resource + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/preview/accounts/access-control/assignable-roles", query=query, headers=headers + ) return GetAssignableRolesForResourceResponse.from_dict(res) - - - - - def get_rule_set(self - , name: str, etag: str - ) -> RuleSetResponse: + def get_rule_set(self, name: str, etag: str) -> RuleSetResponse: """Get a rule set. - + Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account. `name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -2099,89 +2329,86 @@ def get_rule_set(self modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoded a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - if name is not None: query['name'] = name - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/preview/accounts/access-control/rule-sets', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + if name is not None: + query["name"] = name + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/preview/accounts/access-control/rule-sets", query=query, headers=headers) return RuleSetResponse.from_dict(res) - - - - - def update_rule_set(self - , name: str, rule_set: RuleSetUpdateRequest - ) -> RuleSetResponse: + def update_rule_set(self, name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse: """Update a rule set. - + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set. 
:param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse` """ body = {} - if name is not None: body['name'] = name - if rule_set is not None: body['rule_set'] = rule_set.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT','/api/2.0/preview/accounts/access-control/rule-sets', body=body - - , headers=headers - ) + if name is not None: + body["name"] = name + if rule_set is not None: + body["rule_set"] = rule_set.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", "/api/2.0/preview/accounts/access-control/rule-sets", body=body, headers=headers) return RuleSetResponse.from_dict(res) - - + class AccountGroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks account identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None) -> Group: + def create( + self, + *, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ) -> Group: """Create a new group. - + Creates a group in the Databricks account with a unique name, using the supplied group details. - + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2194,89 +2421,86 @@ def create(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. 
- + :returns: :class:`Group` """ body = {} - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if id is not None: body['id'] = id - if members is not None: body['members'] = [v.as_dict() for v in members] - if meta is not None: body['meta'] = meta.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups', body=body - - , headers=headers - ) + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", body=body, headers=headers + ) return Group.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a group. - + Deletes a group from the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) - def get(self - , id: str - ) -> Group: + def get(self, id: str) -> Group: """Get group details. - + Gets the information for a specific group in the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - + :returns: :class:`Group` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}' - - , headers=headers - ) - return Group.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", headers=headers) + return Group.from_dict(res) - def list(self - - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[Group]: + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[Group]: """List group details. - + Gets all details of the groups associated with the Databricks account. 
- + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -2288,7 +2512,7 @@ def list(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -2296,85 +2520,92 @@ def list(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`Group` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['startIndex'] =1 - if "count" not in query: query['count'] = 10000 + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups', query=query - - , headers=headers - ) - if 'Resources' in json: - for v in json['Resources']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield Group.from_dict(v) - if 'Resources' not in json or not json['Resources']: - return - query['startIndex'] += len(json['Resources']) - - - - - - - def patch(self - , id: str - , * - , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups", query=query, headers=headers + ) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield Group.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): """Update group details. - + Partially updates the details of a group. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
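By way of illustration, a minimal sketch of how this group patch endpoint is typically driven from the SDK. This is an assumption-laden example, not part of the generated code: it presumes a configured AccountClient, and the group and user IDs are hypothetical.

from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()  # host, account_id and credentials resolved from the environment

# Add a member to a group with a SCIM PatchOp; only the supplied attributes change.
a.groups.patch(
    id="123",  # hypothetical group ID
    operations=[iam.Patch(op=iam.PatchOp.ADD, path="members", value=[{"value": "456"}])],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)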
- - - """ - body = {} - if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}', body=body - - , headers=headers - ) - - - - - def update(self - , id: str - , * - , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None): + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers + ) + + def update( + self, + id: str, + *, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ): """Replace a group. - + Updates the details of a group by replacing the entire group entity. - + :param id: str Databricks group ID :param display_name: str (optional) @@ -2382,7 +2613,7 @@ def update(self :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2393,54 +2624,60 @@ def update(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. 
- - + + """ body = {} - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if members is not None: body['members'] = [v.as_dict() for v in members] - if meta is not None: body['meta'] = meta.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}', body=body - - , headers=headers - ) - + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Groups/{id}", body=body, headers=headers) + - - class AccountServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None) -> ServicePrincipal: + def create( + self, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[ServicePrincipalSchema]] = None, + ) -> ServicePrincipal: """Create a service principal. - + Creates a new service principal in the Databricks account. - + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -2450,7 +2687,7 @@ def create(self :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. 
- + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2460,89 +2697,90 @@ def create(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - + :returns: :class:`ServicePrincipal` """ body = {} - if active is not None: body['active'] = active - if application_id is not None: body['applicationId'] = application_id - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if id is not None: body['id'] = id - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals', body=body - - , headers=headers - ) + if active is not None: + body["active"] = active + if application_id is not None: + body["applicationId"] = application_id + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", body=body, headers=headers + ) return ServicePrincipal.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a service principal. - + Delete a single service principal in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}' - - , headers=headers - ) - - - - + self._api.do( + "DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers + ) - def get(self - , id: str - ) -> ServicePrincipal: + def get(self, id: str) -> ServicePrincipal: """Get service principal details. - + Gets the details for a single service principal defined in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account.
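As a usage sketch for the create/get pair above, under stated assumptions (a configured AccountClient; the display name is illustrative):

from databricks.sdk import AccountClient

a = AccountClient()

# Create a service principal, then read it back by its SCIM ID.
sp = a.service_principals.create(display_name="ci-deploy")
fetched = a.service_principals.get(id=sp.id)
assert fetched.display_name == "ci-deploy"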
- + :returns: :class:`ServicePrincipal` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}' - - , headers=headers - ) - return ServicePrincipal.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", headers=headers + ) + return ServicePrincipal.from_dict(res) - def list(self - - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[ServicePrincipal]: + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[ServicePrincipal]: """List service principals. - + Gets the set of service principals associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -2554,7 +2792,7 @@ def list(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -2562,87 +2800,100 @@ def list(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. 
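The iterator returned by list() hides the SCIM paging visible in the implementation (the startIndex/count bookkeeping and de-duplication). A short sketch, again assuming a configured AccountClient and an illustrative filter:

from databricks.sdk import AccountClient

a = AccountClient()

# Pages are fetched lazily; startIndex/count are managed by the generator.
for sp in a.service_principals.list(filter='displayName co "ci-"', sort_by="displayName"):
    print(sp.id, sp.display_name)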
- + :returns: Iterator over :class:`ServicePrincipal` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['startIndex'] =1 - if "count" not in query: query['count'] = 10000 + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals', query=query - - , headers=headers - ) - if 'Resources' in json: - for v in json['Resources']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield ServicePrincipal.from_dict(v) - if 'Resources' not in json or not json['Resources']: - return - query['startIndex'] += len(json['Resources']) - - - - - - - def patch(self - , id: str - , * - , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals", + query=query, + headers=headers, + ) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield ServicePrincipal.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): """Update service principal details. - + Partially updates the details of a single service principal in the Databricks account. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
- - - """ - body = {} - if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}', body=body - - , headers=headers - ) - - - - - def update(self - , id: str - , * - , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None): + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", + body=body, + headers=headers, + ) + + def update( + self, + id: str, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[ServicePrincipalSchema]] = None, + ): """Replace service principal. - + Updates the details of a single service principal. - + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -2654,7 +2905,7 @@ def update(self :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -2662,31 +2913,41 @@ def update(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. 
- - + + """ body = {} - if active is not None: body['active'] = active - if application_id is not None: body['applicationId'] = application_id - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}', body=body - - , headers=headers - ) - + if active is not None: + body["active"] = active + if application_id is not None: + body["applicationId"] = application_id + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/scim/v2/ServicePrincipals/{id}", + body=body, + headers=headers, + ) + - - class AccountUsersAPI: """User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in Databricks account and give them the proper level of @@ -2694,40 +2955,43 @@ class AccountUsersAPI: terminate the user in your identity provider and that user’s account will also be removed from Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None) -> User: + def create( + self, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[UserSchema]] = None, + user_name: Optional[str] = None, + ) -> User: """Create a new user. - + Creates a new user in the Databricks account. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. For example `John Smith`. 
This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -2741,67 +3005,74 @@ def create(self The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` """ body = {} - if active is not None: body['active'] = active - if display_name is not None: body['displayName'] = display_name - if emails is not None: body['emails'] = [v.as_dict() for v in emails] - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if id is not None: body['id'] = id - if name is not None: body['name'] = name.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - if user_name is not None: body['userName'] = user_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users', body=body - - , headers=headers - ) + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + if user_name is not None: + body["userName"] = user_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", body=body, headers=headers + ) return User.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a user. - + Deletes a user. Deleting a user from a Databricks account also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks account. 
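A minimal onboarding/offboarding sketch against the user endpoints above (a configured AccountClient is assumed; the email address is hypothetical):

from databricks.sdk import AccountClient

a = AccountClient()

user = a.users.create(user_name="jane.doe@example.com", display_name="Jane Doe")
# Offboarding: deleting the user also removes objects associated with them.
a.users.delete(id=user.id)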
- - + + """ - - headers = {} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}' - - , headers=headers - ) - - - - + headers = {} - def get(self - , id: str - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[GetSortOrder] = None, start_index: Optional[int] = None) -> User: + self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", headers=headers) + + def get( + self, + id: str, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[GetSortOrder] = None, + start_index: Optional[int] = None, + ) -> User: """Get user details. - + Gets information for a specific user in Databricks account. - + :param id: str Unique ID for a user in the Databricks account. :param attributes: str (optional) @@ -2815,7 +3086,7 @@ def get(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -2824,38 +3095,49 @@ def get(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}', query=query - - , headers=headers - ) + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", query=query, headers=headers + ) return User.from_dict(res) - - - - - def list(self - - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[User]: + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: 
Optional[int] = None, + ) -> Iterator[User]: """List users. - + Gets details for all the users associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -2867,7 +3149,7 @@ def list(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -2876,85 +3158,94 @@ def list(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`User` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['startIndex'] =1 - if "count" not in query: query['count'] = 10000 + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users', query=query - - , headers=headers - ) - if 'Resources' in json: - for v in json['Resources']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield User.from_dict(v) - if 'Resources' not in json or not json['Resources']: - return - query['startIndex'] += len(json['Resources']) - - - - - - - def patch(self - , id: str - , * - , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users", query=query, headers=headers + ) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield User.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): """Update user details. - + Partially updates a user resource by applying the supplied operations on specific user attributes. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
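One common use of this patch endpoint is deactivating a user without deleting them. A sketch under the same assumptions (configured AccountClient; the user ID is hypothetical):

from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()

# Flip the SCIM `active` flag; other user attributes are left untouched.
a.users.patch(
    id="789",  # hypothetical user ID
    operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value=False)],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)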
- - - """ - body = {} - if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}', body=body - - , headers=headers - ) - - - - - def update(self - , id: str - , * - , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None): + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers + ) + + def update( + self, + id: str, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[UserSchema]] = None, + user_name: Optional[str] = None, + ): """Replace a user. - + Replaces a user's information with the data supplied in request. - + :param id: str Databricks user ID. :param active: bool (optional) @@ -2963,13 +3254,13 @@ def update(self String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -2981,94 +3272,93 @@ def update(self The schema of the user. :param user_name: str (optional) Email address of the Databricks user. 
- - + + """ body = {} - if active is not None: body['active'] = active - if display_name is not None: body['displayName'] = display_name - if emails is not None: body['emails'] = [v.as_dict() for v in emails] - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if name is not None: body['name'] = name.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - if user_name is not None: body['userName'] = user_name - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}', body=body - - , headers=headers - ) - + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + if user_name is not None: + body["userName"] = user_name + headers = { + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/accounts/{self._api.account_id}/scim/v2/Users/{id}", body=body, headers=headers) + - - class CurrentUserAPI: """This API allows retrieving information about the currently authenticated user or service principal.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def me(self) -> User: """Get current user info. - + Get details about the current method caller's identity. - + :returns: :class:`User` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/preview/scim/v2/Me' , headers=headers ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/preview/scim/v2/Me", headers=headers) return User.from_dict(res) - - + class GroupsAPI: """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually.
All Databricks workspace identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None) -> Group: + def create( + self, + *, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ) -> Group: """Create a new group. - + Creates a group in the Databricks workspace with a unique name, using the supplied group details. - + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -3081,89 +3371,84 @@ def create(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - + :returns: :class:`Group` """ body = {} - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if id is not None: body['id'] = id - if members is not None: body['members'] = [v.as_dict() for v in members] - if meta is not None: body['meta'] = meta.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/scim/v2/Groups', body=body - - , headers=headers - ) + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/scim/v2/Groups", body=body, headers=headers) return Group.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a group. 
- + Deletes a group from the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/preview/scim/v2/Groups/{id}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) - def get(self - , id: str - ) -> Group: + def get(self, id: str) -> Group: """Get group details. - + Gets the information for a specific group in the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. - + :returns: :class:`Group` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/scim/v2/Groups/{id}' - - , headers=headers - ) - return Group.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Groups/{id}", headers=headers) + return Group.from_dict(res) - def list(self - - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[Group]: + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[Group]: """List group details. - + Gets all details of the groups associated with the Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -3175,7 +3460,7 @@ def list(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -3183,85 +3468,88 @@ def list(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. 
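The workspace-level GroupsAPI mirrors the account-level one but is addressed through a WorkspaceClient, which is also where CurrentUserAPI.me() lives. A quick sketch (workspace credentials assumed; the filter value is illustrative):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

print(w.current_user.me().user_name)  # the identity making these calls

# SCIM filter operators such as sw (starts with) work here as documented above.
for g in w.groups.list(filter='displayName sw "data-"'):
    print(g.id, g.display_name)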
- + :returns: Iterator over :class:`Group` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['startIndex'] =1 - if "count" not in query: query['count'] = 10000 + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 while True: - json = self._api.do('GET','/api/2.0/preview/scim/v2/Groups', query=query - - , headers=headers - ) - if 'Resources' in json: - for v in json['Resources']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield Group.from_dict(v) - if 'Resources' not in json or not json['Resources']: - return - query['startIndex'] += len(json['Resources']) - - - - - - - def patch(self - , id: str - , * - , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + json = self._api.do("GET", "/api/2.0/preview/scim/v2/Groups", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield Group.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): """Update group details. - + Partially updates the details of a group. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
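# A create-and-patch sketch under the same assumptions (configured
# WorkspaceClient; the display name and member id are placeholders). The patch
# payload follows the SCIM PatchOp schema required by the docstring above.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
group = w.groups.create(display_name="data-engineers")
w.groups.patch(
    id=group.id,
    operations=[iam.Patch(op=iam.PatchOp.ADD, value={"members": [{"value": "12345"}]})],
    schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
)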
- - - """ - body = {} - if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/preview/scim/v2/Groups/{id}', body=body - - , headers=headers - ) - - - - - def update(self - , id: str - , * - , display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, members: Optional[List[ComplexValue]] = None, meta: Optional[ResourceMeta] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[GroupSchema]] = None): + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) + + def update( + self, + id: str, + *, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + members: Optional[List[ComplexValue]] = None, + meta: Optional[ResourceMeta] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[GroupSchema]] = None, + ): """Replace a group. - + Updates the details of a group by replacing the entire group entity. - + :param id: str Databricks group ID :param display_name: str (optional) @@ -3269,7 +3557,7 @@ def update(self :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -3280,48 +3568,49 @@ def update(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. 
- - + + """ body = {} - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if members is not None: body['members'] = [v.as_dict() for v in members] - if meta is not None: body['meta'] = meta.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/preview/scim/v2/Groups/{id}', body=body - - , headers=headers - ) - + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if members is not None: + body["members"] = [v.as_dict() for v in members] + if meta is not None: + body["meta"] = meta.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/preview/scim/v2/Groups/{id}", body=body, headers=headers) + - - class PermissionMigrationAPI: """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def migrate_permissions(self - , workspace_id: int, from_workspace_group_name: str, to_account_group_name: str - , * - , size: Optional[int] = None) -> MigratePermissionsResponse: + def migrate_permissions( + self, + workspace_id: int, + from_workspace_group_name: str, + to_account_group_name: str, + *, + size: Optional[int] = None, + ) -> MigratePermissionsResponse: """Migrate Permissions. - + :param workspace_id: int WorkspaceId of the associated workspace where the permission migration will occur. :param from_workspace_group_name: str @@ -3330,24 +3619,27 @@ def migrate_permissions(self The name of the account group that permissions will be migrated to. :param size: int (optional) The maximum number of permissions that will be migrated. 
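# A draining sketch for migrate_permissions, assuming a configured
# WorkspaceClient; the workspace id and group names are placeholders. Each
# call moves at most `size` ACLs and reports the count in
# `permissions_migrated`, so one way to migrate everything is to loop until a
# batch comes back empty.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
while True:
    resp = w.permission_migration.migrate_permissions(
        workspace_id=1234567890,
        from_workspace_group_name="engineers",
        to_account_group_name="account-engineers",
        size=1000,
    )
    if not resp.permissions_migrated:
        break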
- + :returns: :class:`MigratePermissionsResponse` """ body = {} - if from_workspace_group_name is not None: body['from_workspace_group_name'] = from_workspace_group_name - if size is not None: body['size'] = size - if to_account_group_name is not None: body['to_account_group_name'] = to_account_group_name - if workspace_id is not None: body['workspace_id'] = workspace_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/permissionmigration', body=body - - , headers=headers - ) + if from_workspace_group_name is not None: + body["from_workspace_group_name"] = from_workspace_group_name + if size is not None: + body["size"] = size + if to_account_group_name is not None: + body["to_account_group_name"] = to_account_group_name + if workspace_id is not None: + body["workspace_id"] = workspace_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/permissionmigration", body=body, headers=headers) return MigratePermissionsResponse.from_dict(res) - - + class PermissionsAPI: """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints. * **[Apps permissions](:service:apps)** — Manage which users can manage @@ -3369,88 +3661,71 @@ class PermissionsAPI: the required permissions for specific actions or abilities and other important information, see [Access Control]. Note that to manage access control on service principals, use **[Account Access Control Proxy](:service:accountaccesscontrolproxy)**. - + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - , request_object_type: str, request_object_id: str - ) -> ObjectPermissions: + def get(self, request_object_type: str, request_object_id: str) -> ObjectPermissions: """Get object permissions. - + Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. - + :returns: :class:`ObjectPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/{request_object_type}/{request_object_id}' - - , headers=headers - ) - return ObjectPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", headers=headers) + return ObjectPermissions.from_dict(res) - def get_permission_levels(self - , request_object_type: str, request_object_id: str - ) -> GetPermissionLevelsResponse: + def get_permission_levels(self, request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse: """Get object permission levels. - + Gets the permission levels that a user can have on an object. - + :param request_object_type: str The type of the request object. 
Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str - + :returns: :class:`GetPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels' - - , headers=headers - ) - return GetPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/permissions/{request_object_type}/{request_object_id}/permissionLevels", headers=headers + ) + return GetPermissionLevelsResponse.from_dict(res) - def set(self - , request_object_type: str, request_object_id: str - , * - , access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions: + def set( + self, + request_object_type: str, + request_object_id: str, + *, + access_control_list: Optional[List[AccessControlRequest]] = None, + ) -> ObjectPermissions: """Set object permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -3458,32 +3733,34 @@ def set(self :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/{request_object_type}/{request_object_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers + ) return ObjectPermissions.from_dict(res) - - - - - def update(self - , request_object_type: str, request_object_id: str - , * - , access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions: + def update( + self, + request_object_type: str, + request_object_id: str, + *, + access_control_list: Optional[List[AccessControlRequest]] = None, + ) -> ObjectPermissions: """Update object permissions. - + Updates the permissions on an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -3491,47 +3768,50 @@ def update(self :param request_object_id: str The id of the request object. 
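# A sketch of the generic permissions flow, assuming a configured
# WorkspaceClient and an existing notebook at a placeholder path. Per the
# docstrings above, set() replaces the whole ACL while update() merges into it.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
obj = w.workspace.get_status(path="/Users/someone@example.com/my-notebook")
levels = w.permissions.get_permission_levels("notebooks", str(obj.object_id))
w.permissions.update(
    request_object_type="notebooks",
    request_object_id=str(obj.object_id),
    access_control_list=[
        iam.AccessControlRequest(
            user_name="someone@example.com",
            permission_level=iam.PermissionLevel.CAN_RUN,
        )
    ],
)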
:param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/{request_object_type}/{request_object_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/permissions/{request_object_type}/{request_object_id}", body=body, headers=headers + ) return ObjectPermissions.from_dict(res) - - + class ServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None) -> ServicePrincipal: + def create( + self, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[ServicePrincipalSchema]] = None, + ) -> ServicePrincipal: """Create a service principal. - + Creates a new service principal in the Databricks workspace. - + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -3541,7 +3821,7 @@ def create(self :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -3551,89 +3831,84 @@ def create(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. 
- + :returns: :class:`ServicePrincipal` """ body = {} - if active is not None: body['active'] = active - if application_id is not None: body['applicationId'] = application_id - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if id is not None: body['id'] = id - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/scim/v2/ServicePrincipals', body=body - - , headers=headers - ) + if active is not None: + body["active"] = active + if application_id is not None: + body["applicationId"] = application_id + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/scim/v2/ServicePrincipals", body=body, headers=headers) return ServicePrincipal.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a service principal. - + Delete a single service principal in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers) - def get(self - , id: str - ) -> ServicePrincipal: + def get(self, id: str) -> ServicePrincipal: """Get service principal details. - + Gets the details for a single service principal defined in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. - + :returns: :class:`ServicePrincipal` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}' - - , headers=headers - ) - return ServicePrincipal.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", headers=headers) + return ServicePrincipal.from_dict(res) - def list(self - - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[ServicePrincipal]: + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[ServicePrincipal]: """List service principals.
- + Gets the set of service principals associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -3645,7 +3920,7 @@ def list(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -3653,87 +3928,90 @@ def list(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`ServicePrincipal` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['startIndex'] =1 - if "count" not in query: query['count'] = 10000 + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 while True: - json = self._api.do('GET','/api/2.0/preview/scim/v2/ServicePrincipals', query=query - - , headers=headers - ) - if 'Resources' in json: - for v in json['Resources']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield ServicePrincipal.from_dict(v) - if 'Resources' not in json or not json['Resources']: - return - query['startIndex'] += len(json['Resources']) - - - - - - - def patch(self - , id: str - , * - , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + json = self._api.do("GET", "/api/2.0/preview/scim/v2/ServicePrincipals", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield ServicePrincipal.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): """Update service principal details. - + Partially updates the details of a single service principal in the Databricks workspace. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
- - - """ - body = {} - if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', body=body - - , headers=headers - ) - - - - - def update(self - , id: str - , * - , active: Optional[bool] = None, application_id: Optional[str] = None, display_name: Optional[str] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[ServicePrincipalSchema]] = None): + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) + + def update( + self, + id: str, + *, + active: Optional[bool] = None, + application_id: Optional[str] = None, + display_name: Optional[str] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[ServicePrincipalSchema]] = None, + ): """Replace service principal. - + Updates the details of a single service principal. - + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -3745,7 +4023,7 @@ def update(self :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -3753,31 +4031,36 @@ def update(self Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. 
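# A lifecycle sketch for service principals, assuming a configured
# WorkspaceClient; the display name is a placeholder. Because update() is a
# full replacement (PUT), every field worth keeping must be re-sent.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
sp = w.service_principals.create(display_name="etl-runner", active=True)
w.service_principals.update(id=sp.id, display_name="etl-runner", active=False)
w.service_principals.delete(id=sp.id)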
- - + + """ body = {} - if active is not None: body['active'] = active - if application_id is not None: body['applicationId'] = application_id - if display_name is not None: body['displayName'] = display_name - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/preview/scim/v2/ServicePrincipals/{id}', body=body - - , headers=headers - ) - + if active is not None: + body["active"] = active + if application_id is not None: + body["applicationId"] = application_id + if display_name is not None: + body["displayName"] = display_name + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/preview/scim/v2/ServicePrincipals/{id}", body=body, headers=headers) + - - class UsersAPI: """User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in Databricks workspace and give them the proper level of @@ -3785,40 +4068,43 @@ class UsersAPI: terminate the user in your identity provider and that user’s account will also be removed from Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, id: Optional[str] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None) -> User: + def create( + self, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + id: Optional[str] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[UserSchema]] = None, + user_name: Optional[str] = None, + ) -> User: """Create a new user. - + Creates a new user in the Databricks workspace. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. For example `John Smith`. 
This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -3832,67 +4118,72 @@ def create(self The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` """ body = {} - if active is not None: body['active'] = active - if display_name is not None: body['displayName'] = display_name - if emails is not None: body['emails'] = [v.as_dict() for v in emails] - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if id is not None: body['id'] = id - if name is not None: body['name'] = name.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - if user_name is not None: body['userName'] = user_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/scim/v2/Users', body=body - - , headers=headers - ) + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if id is not None: + body["id"] = id + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + if user_name is not None: + body["userName"] = user_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/scim/v2/Users", body=body, headers=headers) return User.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a user. - + Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks workspace. 
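# A deletion sketch combining list() and delete(), assuming a configured
# WorkspaceClient; the email is a placeholder. The id is resolved first, since
# deletion also removes objects associated with the user.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
matches = list(w.users.list(filter='userName eq "leaver@example.com"', attributes="id,userName"))
if len(matches) == 1:
    w.users.delete(id=matches[0].id)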
- - + + """ - - headers = {} - - self._api.do('DELETE',f'/api/2.0/preview/scim/v2/Users/{id}' - - , headers=headers - ) - - - - + headers = {} - def get(self - , id: str - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[GetSortOrder] = None, start_index: Optional[int] = None) -> User: + self._api.do("DELETE", f"/api/2.0/preview/scim/v2/Users/{id}", headers=headers) + + def get( + self, + id: str, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[GetSortOrder] = None, + start_index: Optional[int] = None, + ) -> User: """Get user details. - + Gets information for a specific user in Databricks workspace. - + :param id: str Unique ID for a user in the Databricks workspace. :param attributes: str (optional) @@ -3906,7 +4197,7 @@ def get(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -3915,76 +4206,77 @@ def get(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/scim/v2/Users/{id}', query=query - - , headers=headers - ) + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/scim/v2/Users/{id}", query=query, headers=headers) return User.from_dict(res) - - - - def get_permission_levels(self) -> GetPasswordPermissionLevelsResponse: """Get password permission levels. - + Gets the permission levels that a user can have on an object. 
- + :returns: :class:`GetPasswordPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/permissions/authorization/passwords/permissionLevels' - , headers=headers - ) - return GetPasswordPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords/permissionLevels", headers=headers) + return GetPasswordPermissionLevelsResponse.from_dict(res) def get_permissions(self) -> PasswordPermissions: """Get password permissions. - + Gets the permissions of all passwords. Passwords can inherit permissions from their root object. - + :returns: :class:`PasswordPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/permissions/authorization/passwords' - , headers=headers - ) - return PasswordPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/permissions/authorization/passwords", headers=headers) + return PasswordPermissions.from_dict(res) - def list(self - - , * - , attributes: Optional[str] = None, count: Optional[int] = None, excluded_attributes: Optional[str] = None, filter: Optional[str] = None, sort_by: Optional[str] = None, sort_order: Optional[ListSortOrder] = None, start_index: Optional[int] = None) -> Iterator[User]: + def list( + self, + *, + attributes: Optional[str] = None, + count: Optional[int] = None, + excluded_attributes: Optional[str] = None, + filter: Optional[str] = None, + sort_by: Optional[str] = None, + sort_order: Optional[ListSortOrder] = None, + start_index: Optional[int] = None, + ) -> Iterator[User]: """List users. - + Gets details for all the users associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -3996,7 +4288,7 @@ def list(self contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -4005,112 +4297,113 @@ def list(self The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. 
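# A listing sketch for the SCIM query parameters above, assuming a configured
# WorkspaceClient. Requesting only the needed attributes keeps large
# workspaces fast; sorting uses the enum rather than a raw string.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
for u in w.users.list(
    attributes="id,userName,active",
    sort_by="userName",
    sort_order=iam.ListSortOrder.DESCENDING,
):
    print(u.id, u.user_name, u.active)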
- + :returns: Iterator over :class:`User` """ - + query = {} - if attributes is not None: query['attributes'] = attributes - if count is not None: query['count'] = count - if excluded_attributes is not None: query['excludedAttributes'] = excluded_attributes - if filter is not None: query['filter'] = filter - if sort_by is not None: query['sortBy'] = sort_by - if sort_order is not None: query['sortOrder'] = sort_order.value - if start_index is not None: query['startIndex'] = start_index - headers = {'Accept': 'application/json',} - - + if attributes is not None: + query["attributes"] = attributes + if count is not None: + query["count"] = count + if excluded_attributes is not None: + query["excludedAttributes"] = excluded_attributes + if filter is not None: + query["filter"] = filter + if sort_by is not None: + query["sortBy"] = sort_by + if sort_order is not None: + query["sortOrder"] = sort_order.value + if start_index is not None: + query["startIndex"] = start_index + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['startIndex'] =1 - if "count" not in query: query['count'] = 10000 + query["startIndex"] = 1 + if "count" not in query: + query["count"] = 10000 while True: - json = self._api.do('GET','/api/2.0/preview/scim/v2/Users', query=query - - , headers=headers - ) - if 'Resources' in json: - for v in json['Resources']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield User.from_dict(v) - if 'Resources' not in json or not json['Resources']: - return - query['startIndex'] += len(json['Resources']) - - - - - - - def patch(self - , id: str - , * - , operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): + json = self._api.do("GET", "/api/2.0/preview/scim/v2/Users", query=query, headers=headers) + if "Resources" in json: + for v in json["Resources"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield User.from_dict(v) + if "Resources" not in json or not json["Resources"]: + return + query["startIndex"] += len(json["Resources"]) + + def patch(self, id: str, *, operations: Optional[List[Patch]] = None, schemas: Optional[List[PatchSchema]] = None): """Update user details. - + Partially updates a user resource by applying the supplied operations on specific user attributes. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. 
- - - """ - body = {} - if operations is not None: body['Operations'] = [v.as_dict() for v in operations] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/preview/scim/v2/Users/{id}', body=body - - , headers=headers - ) - - - - - def set_permissions(self - - , * - , access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions: + """ + body = {} + if operations is not None: + body["Operations"] = [v.as_dict() for v in operations] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + headers = { + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) + + def set_permissions( + self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None + ) -> PasswordPermissions: """Set password permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT','/api/2.0/permissions/authorization/passwords', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) return PasswordPermissions.from_dict(res) - - - - - def update(self - , id: str - , * - , active: Optional[bool] = None, display_name: Optional[str] = None, emails: Optional[List[ComplexValue]] = None, entitlements: Optional[List[ComplexValue]] = None, external_id: Optional[str] = None, groups: Optional[List[ComplexValue]] = None, name: Optional[Name] = None, roles: Optional[List[ComplexValue]] = None, schemas: Optional[List[UserSchema]] = None, user_name: Optional[str] = None): + def update( + self, + id: str, + *, + active: Optional[bool] = None, + display_name: Optional[str] = None, + emails: Optional[List[ComplexValue]] = None, + entitlements: Optional[List[ComplexValue]] = None, + external_id: Optional[str] = None, + groups: Optional[List[ComplexValue]] = None, + name: Optional[Name] = None, + roles: Optional[List[ComplexValue]] = None, + schemas: Optional[List[UserSchema]] = None, + user_name: Optional[str] = None, + ): """Replace a user. - + Replaces a user's information with the data supplied in request. - + :param id: str Databricks user ID. :param active: bool (optional) @@ -4119,13 +4412,13 @@ def update(self String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. 
:param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -4137,160 +4430,143 @@ def update(self The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - - - """ - body = {} - if active is not None: body['active'] = active - if display_name is not None: body['displayName'] = display_name - if emails is not None: body['emails'] = [v.as_dict() for v in emails] - if entitlements is not None: body['entitlements'] = [v.as_dict() for v in entitlements] - if external_id is not None: body['externalId'] = external_id - if groups is not None: body['groups'] = [v.as_dict() for v in groups] - if name is not None: body['name'] = name.as_dict() - if roles is not None: body['roles'] = [v.as_dict() for v in roles] - if schemas is not None: body['schemas'] = [v.value for v in schemas] - if user_name is not None: body['userName'] = user_name - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/preview/scim/v2/Users/{id}', body=body - - , headers=headers - ) - - - - - def update_permissions(self - - , * - , access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions: + """ + body = {} + if active is not None: + body["active"] = active + if display_name is not None: + body["displayName"] = display_name + if emails is not None: + body["emails"] = [v.as_dict() for v in emails] + if entitlements is not None: + body["entitlements"] = [v.as_dict() for v in entitlements] + if external_id is not None: + body["externalId"] = external_id + if groups is not None: + body["groups"] = [v.as_dict() for v in groups] + if name is not None: + body["name"] = name.as_dict() + if roles is not None: + body["roles"] = [v.as_dict() for v in roles] + if schemas is not None: + body["schemas"] = [v.value for v in schemas] + if user_name is not None: + body["userName"] = user_name + headers = { + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/preview/scim/v2/Users/{id}", body=body, headers=headers) + + def update_permissions( + self, *, access_control_list: Optional[List[PasswordAccessControlRequest]] = None + ) -> PasswordPermissions: """Update password permissions. - + Updates the permissions on all passwords. Passwords can inherit permissions from their root object. 
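# A sketch for the password-permission endpoints above, assuming a configured
# WorkspaceClient and an existing "admins" group. update_permissions() merges
# into the current ACL; set_permissions() would replace it outright.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
w.users.update_permissions(
    access_control_list=[
        iam.PasswordAccessControlRequest(
            group_name="admins",
            permission_level=iam.PasswordPermissionLevel.CAN_USE,
        )
    ]
)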
- + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/permissions/authorization/passwords', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", "/api/2.0/permissions/authorization/passwords", body=body, headers=headers) return PasswordPermissions.from_dict(res) - - + class WorkspaceAssignmentAPI: """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - , workspace_id: int, principal_id: int - ): + def delete(self, workspace_id: int, principal_id: int): """Delete permissions assignment. - + Deletes the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", + headers=headers, + ) - def get(self - , workspace_id: int - ) -> WorkspacePermissions: + def get(self, workspace_id: int) -> WorkspacePermissions: """List workspace permissions. - + Get an array of workspace permissions for the specified account and workspace. - + :param workspace_id: int The workspace ID. - + :returns: :class:`WorkspacePermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions' - - , headers=headers - ) - return WorkspacePermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/permissions", + headers=headers, + ) + return WorkspacePermissions.from_dict(res) - def list(self - , workspace_id: int - ) -> Iterator[PermissionAssignment]: + def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: """Get permission assignments. - + Get the permission assignments for the specified Databricks account and Databricks workspace. - + :param workspace_id: int The workspace ID for the account. 
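# An account-level sketch for the assignment endpoints above. This API hangs
# off AccountClient rather than WorkspaceClient; the workspace and principal
# ids are placeholders.
from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()
a.workspace_assignment.update(
    workspace_id=1234567890,
    principal_id=987654321,
    permissions=[iam.WorkspacePermission.USER],
)
for assignment in a.workspace_assignment.list(workspace_id=1234567890):
    print(assignment.principal, assignment.permissions)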
- + :returns: Iterator over :class:`PermissionAssignment` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments", + headers=headers, + ) parsed = PermissionAssignments.from_dict(json).permission_assignments return parsed if parsed is not None else [] - - - - - - def update(self - , workspace_id: int, principal_id: int - , * - , permissions: Optional[List[WorkspacePermission]] = None) -> PermissionAssignment: + def update( + self, workspace_id: int, principal_id: int, *, permissions: Optional[List[WorkspacePermission]] = None + ) -> PermissionAssignment: """Create or update permissions assignment. - + Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID. :param principal_id: int @@ -4301,18 +4577,21 @@ def update(self will be ignored. Note that excluding this field, or providing unsupported values, will have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal. - + :returns: :class:`PermissionAssignment` """ body = {} - if permissions is not None: body['permissions'] = [v.value for v in permissions] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}', body=body - - , headers=headers - ) + if permissions is not None: + body["permissions"] = [v.value for v in permissions] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/permissionassignments/principals/{principal_id}", + body=body, + headers=headers, + ) return PermissionAssignment.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 75aac8f51..3d3635e2a 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -1,95 +1,112 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, Callable, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict -_LOG = logging.getLogger('databricks.sdk') +_LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import compute -from databricks.sdk.service import compute from databricks.sdk.service import compute # all definitions in this file are in alphabetical order + class AuthenticationMethod(Enum): - - - OAUTH = 'OAUTH' - PAT = 'PAT' + + OAUTH = "OAUTH" + PAT = "PAT" + @dataclass class BaseJob: created_time: Optional[int] = None """The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC).""" - + creator_user_name: Optional[str] = None """The creator user name. This field won’t be included in the response if the user has already been deleted.""" - + effective_budget_policy_id: Optional[str] = None """The id of the budget policy used by this job for cost attribution purposes. This may be set through (in order of precedence): 1. Budget admins through the account or workspace console 2. Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" - + has_more: Optional[bool] = None """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list requests with `expand_tasks=true`.""" - + job_id: Optional[int] = None """The canonical identifier for this job.""" - + settings: Optional[JobSettings] = None """Settings for this job and all of its runs. 
These settings can be updated using the `resetJob` method.""" - + trigger_state: Optional[TriggerStateProto] = None """State of the trigger associated with the job.""" - + def as_dict(self) -> dict: """Serializes the BaseJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_time is not None: body['created_time'] = self.created_time - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.has_more is not None: body['has_more'] = self.has_more - if self.job_id is not None: body['job_id'] = self.job_id - if self.settings: body['settings'] = self.settings.as_dict() - if self.trigger_state: body['trigger_state'] = self.trigger_state.as_dict() + if self.created_time is not None: + body["created_time"] = self.created_time + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.has_more is not None: + body["has_more"] = self.has_more + if self.job_id is not None: + body["job_id"] = self.job_id + if self.settings: + body["settings"] = self.settings.as_dict() + if self.trigger_state: + body["trigger_state"] = self.trigger_state.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the BaseJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_time is not None: body['created_time'] = self.created_time - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.has_more is not None: body['has_more'] = self.has_more - if self.job_id is not None: body['job_id'] = self.job_id - if self.settings: body['settings'] = self.settings - if self.trigger_state: body['trigger_state'] = self.trigger_state + if self.created_time is not None: + body["created_time"] = self.created_time + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.has_more is not None: + body["has_more"] = self.has_more + if self.job_id is not None: + body["job_id"] = self.job_id + if self.settings: + body["settings"] = self.settings + if self.trigger_state: + body["trigger_state"] = self.trigger_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BaseJob: """Deserializes the BaseJob from a dictionary.""" - return cls(created_time=d.get('created_time', None), creator_user_name=d.get('creator_user_name', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), has_more=d.get('has_more', None), job_id=d.get('job_id', None), settings=_from_dict(d, 'settings', JobSettings), trigger_state=_from_dict(d, 'trigger_state', TriggerStateProto)) - - + return cls( + created_time=d.get("created_time", None), + creator_user_name=d.get("creator_user_name", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + has_more=d.get("has_more", None), + job_id=d.get("job_id", None), + settings=_from_dict(d, "settings", JobSettings), + trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), + ) @dataclass @@ -100,27 +117,27 @@ class BaseRun: 
(`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" - + cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + cluster_instance: Optional[ClusterInstance] = None """The cluster used for this run. If the run is specified to use a new cluster, this field is set once the Jobs service has requested a cluster for the run.""" - + cluster_spec: Optional[ClusterSpec] = None """A snapshot of the job’s cluster specification when this run was created.""" - + creator_user_name: Optional[str] = None """The creator user name. This field won’t be included in the response if the user has already been deleted.""" - + description: Optional[str] = None """Description of the run""" - + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -129,18 +146,18 @@ class BaseRun: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" - + execution_duration: Optional[int] = None """The time in milliseconds it took to execute the commands in the JAR or notebook until they completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -150,91 +167,91 @@ class BaseRun: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + has_more: Optional[bool] = None """Indicates if the run has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 :method:jobs/listruns requests with `expand_tasks=true`.""" - + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. 
If more than 100 job clusters are available, you can paginate through them using :method:jobs/getrun.""" - + job_id: Optional[int] = None """The canonical identifier of the job that contains this run.""" - + job_parameters: Optional[List[JobParameter]] = None """Job-level parameters used in the run""" - + job_run_id: Optional[int] = None """ID of the job run that this run belongs to. For legacy and single-task job runs the field is populated with the job run ID. For task runs, the field is populated with the ID of the job run that the task run belongs to.""" - + number_in_job: Optional[int] = None """A unique identifier for this job run. This is set to the same value as `run_id`.""" - + original_attempt_run_id: Optional[int] = None """If this run is a retry of a prior run attempt, this field contains the run_id of the original attempt; otherwise, it is the same as the run_id.""" - + overriding_parameters: Optional[RunParameters] = None """The parameters used for this run.""" - + queue_duration: Optional[int] = None """The time in milliseconds that the run has spent in the queue.""" - + repair_history: Optional[List[RepairHistoryItem]] = None """The repair history of the run.""" - + run_duration: Optional[int] = None """The time in milliseconds it took the job run and all of its repairs to finish.""" - + run_id: Optional[int] = None """The canonical identifier of the run. This ID is unique across all runs of all jobs.""" - + run_name: Optional[str] = None """An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding.""" - + run_page_url: Optional[str] = None """The URL to the detail page of the run.""" - + run_type: Optional[RunType] = None """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with :method:jobs/submit. [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow""" - + schedule: Optional[CronSchedule] = None """The cron schedule that triggered this run if it was triggered by the periodic scheduler.""" - + setup_duration: Optional[int] = None """The time in milliseconds it took to set up the cluster. For runs that run on new clusters this is the cluster creation time, for runs that run on existing clusters this time should be very short. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + start_time: Optional[int] = None """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC). This may not be the time when the job task starts executing, for example, if the job is scheduled to run on a new cluster, this is the time the cluster creation call is issued.""" - + state: Optional[RunState] = None """Deprecated. Please use the `status` field instead.""" - + status: Optional[RunStatus] = None """The current status of the run""" - + tasks: Optional[List[RunTask]] = None """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can paginate through them using :method:jobs/getrun.
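# Illustrative arithmetic for the duration fields documented above (all values
# made up, in milliseconds): for a single-task run the total is the sum of the
# three phases; for a multitask job run those phases are 0 and `run_duration`
# carries the total instead.
setup_duration, execution_duration, cleanup_duration = 20_000, 300_000, 5_000
task_total_ms = setup_duration + execution_duration + cleanup_duration  # 325_000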
Use the `next_page_token` field at the object root to determine if more results are available.""" - + trigger: Optional[TriggerType] = None """The type of trigger that fired this run. @@ -246,92 +263,190 @@ class BaseRun: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by user to manually restart a continuous job run.""" - + trigger_info: Optional[TriggerInfo] = None """Additional details about what triggered the run""" - + def as_dict(self) -> dict: """Serializes the BaseRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attempt_number is not None: body['attempt_number'] = self.attempt_number - if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration - if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() - if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.description is not None: body['description'] = self.description - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value - if self.end_time is not None: body['end_time'] = self.end_time - if self.execution_duration is not None: body['execution_duration'] = self.execution_duration - if self.git_source: body['git_source'] = self.git_source.as_dict() - if self.has_more is not None: body['has_more'] = self.has_more - if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] - if self.job_run_id is not None: body['job_run_id'] = self.job_run_id - if self.number_in_job is not None: body['number_in_job'] = self.number_in_job - if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id - if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict() - if self.queue_duration is not None: body['queue_duration'] = self.queue_duration - if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history] - if self.run_duration is not None: body['run_duration'] = self.run_duration - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_page_url is not None: body['run_page_url'] = self.run_page_url - if self.run_type is not None: body['run_type'] = self.run_type.value - if self.schedule: body['schedule'] = self.schedule.as_dict() - if self.setup_duration is not None: body['setup_duration'] = self.setup_duration - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state.as_dict() - if self.status: body['status'] = self.status.as_dict() - if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] - if self.trigger is not None: body['trigger'] = self.trigger.value - if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict() + if self.attempt_number is not None: + body["attempt_number"] = self.attempt_number + if self.cleanup_duration is not None: + body["cleanup_duration"] = self.cleanup_duration + if self.cluster_instance: + body["cluster_instance"] = self.cluster_instance.as_dict() + if self.cluster_spec: + 
body["cluster_spec"] = self.cluster_spec.as_dict() + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.description is not None: + body["description"] = self.description + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target.value + if self.end_time is not None: + body["end_time"] = self.end_time + if self.execution_duration is not None: + body["execution_duration"] = self.execution_duration + if self.git_source: + body["git_source"] = self.git_source.as_dict() + if self.has_more is not None: + body["has_more"] = self.has_more + if self.job_clusters: + body["job_clusters"] = [v.as_dict() for v in self.job_clusters] + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + body["job_parameters"] = [v.as_dict() for v in self.job_parameters] + if self.job_run_id is not None: + body["job_run_id"] = self.job_run_id + if self.number_in_job is not None: + body["number_in_job"] = self.number_in_job + if self.original_attempt_run_id is not None: + body["original_attempt_run_id"] = self.original_attempt_run_id + if self.overriding_parameters: + body["overriding_parameters"] = self.overriding_parameters.as_dict() + if self.queue_duration is not None: + body["queue_duration"] = self.queue_duration + if self.repair_history: + body["repair_history"] = [v.as_dict() for v in self.repair_history] + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_page_url is not None: + body["run_page_url"] = self.run_page_url + if self.run_type is not None: + body["run_type"] = self.run_type.value + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.setup_duration is not None: + body["setup_duration"] = self.setup_duration + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state.as_dict() + if self.status: + body["status"] = self.status.as_dict() + if self.tasks: + body["tasks"] = [v.as_dict() for v in self.tasks] + if self.trigger is not None: + body["trigger"] = self.trigger.value + if self.trigger_info: + body["trigger_info"] = self.trigger_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the BaseRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.attempt_number is not None: body['attempt_number'] = self.attempt_number - if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration - if self.cluster_instance: body['cluster_instance'] = self.cluster_instance - if self.cluster_spec: body['cluster_spec'] = self.cluster_spec - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.description is not None: body['description'] = self.description - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target - if self.end_time is not None: body['end_time'] = self.end_time - if self.execution_duration is not None: body['execution_duration'] = self.execution_duration - if self.git_source: body['git_source'] = self.git_source - if self.has_more is not None: body['has_more'] = self.has_more - if self.job_clusters: body['job_clusters'] = self.job_clusters - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: 
body['job_parameters'] = self.job_parameters - if self.job_run_id is not None: body['job_run_id'] = self.job_run_id - if self.number_in_job is not None: body['number_in_job'] = self.number_in_job - if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id - if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters - if self.queue_duration is not None: body['queue_duration'] = self.queue_duration - if self.repair_history: body['repair_history'] = self.repair_history - if self.run_duration is not None: body['run_duration'] = self.run_duration - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_page_url is not None: body['run_page_url'] = self.run_page_url - if self.run_type is not None: body['run_type'] = self.run_type - if self.schedule: body['schedule'] = self.schedule - if self.setup_duration is not None: body['setup_duration'] = self.setup_duration - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state - if self.status: body['status'] = self.status - if self.tasks: body['tasks'] = self.tasks - if self.trigger is not None: body['trigger'] = self.trigger - if self.trigger_info: body['trigger_info'] = self.trigger_info + if self.attempt_number is not None: + body["attempt_number"] = self.attempt_number + if self.cleanup_duration is not None: + body["cleanup_duration"] = self.cleanup_duration + if self.cluster_instance: + body["cluster_instance"] = self.cluster_instance + if self.cluster_spec: + body["cluster_spec"] = self.cluster_spec + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.description is not None: + body["description"] = self.description + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target + if self.end_time is not None: + body["end_time"] = self.end_time + if self.execution_duration is not None: + body["execution_duration"] = self.execution_duration + if self.git_source: + body["git_source"] = self.git_source + if self.has_more is not None: + body["has_more"] = self.has_more + if self.job_clusters: + body["job_clusters"] = self.job_clusters + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.job_run_id is not None: + body["job_run_id"] = self.job_run_id + if self.number_in_job is not None: + body["number_in_job"] = self.number_in_job + if self.original_attempt_run_id is not None: + body["original_attempt_run_id"] = self.original_attempt_run_id + if self.overriding_parameters: + body["overriding_parameters"] = self.overriding_parameters + if self.queue_duration is not None: + body["queue_duration"] = self.queue_duration + if self.repair_history: + body["repair_history"] = self.repair_history + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_page_url is not None: + body["run_page_url"] = self.run_page_url + if self.run_type is not None: + body["run_type"] = self.run_type + if self.schedule: + body["schedule"] = self.schedule + if self.setup_duration is not None: + body["setup_duration"] = self.setup_duration + if self.start_time is not None: + body["start_time"] = 
self.start_time + if self.state: + body["state"] = self.state + if self.status: + body["status"] = self.status + if self.tasks: + body["tasks"] = self.tasks + if self.trigger is not None: + body["trigger"] = self.trigger + if self.trigger_info: + body["trigger_info"] = self.trigger_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BaseRun: """Deserializes the BaseRun from a dictionary.""" - return cls(attempt_number=d.get('attempt_number', None), cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), creator_user_name=d.get('creator_user_name', None), description=d.get('description', None), effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), has_more=d.get('has_more', None), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), job_run_id=d.get('job_run_id', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), queue_duration=d.get('queue_duration', None), repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem), run_duration=d.get('run_duration', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_page_url=d.get('run_page_url', None), run_type=_enum(d, 'run_type', RunType), schedule=_from_dict(d, 'schedule', CronSchedule), setup_duration=d.get('setup_duration', None), start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), status=_from_dict(d, 'status', RunStatus), tasks=_repeated_dict(d, 'tasks', RunTask), trigger=_enum(d, 'trigger', TriggerType), trigger_info=_from_dict(d, 'trigger_info', TriggerInfo)) - - + return cls( + attempt_number=d.get("attempt_number", None), + cleanup_duration=d.get("cleanup_duration", None), + cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance), + cluster_spec=_from_dict(d, "cluster_spec", ClusterSpec), + creator_user_name=d.get("creator_user_name", None), + description=d.get("description", None), + effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + end_time=d.get("end_time", None), + execution_duration=d.get("execution_duration", None), + git_source=_from_dict(d, "git_source", GitSource), + has_more=d.get("has_more", None), + job_clusters=_repeated_dict(d, "job_clusters", JobCluster), + job_id=d.get("job_id", None), + job_parameters=_repeated_dict(d, "job_parameters", JobParameter), + job_run_id=d.get("job_run_id", None), + number_in_job=d.get("number_in_job", None), + original_attempt_run_id=d.get("original_attempt_run_id", None), + overriding_parameters=_from_dict(d, "overriding_parameters", RunParameters), + queue_duration=d.get("queue_duration", None), + repair_history=_repeated_dict(d, "repair_history", RepairHistoryItem), + run_duration=d.get("run_duration", None), + run_id=d.get("run_id", None), + run_name=d.get("run_name", None), + run_page_url=d.get("run_page_url", None), + run_type=_enum(d, "run_type", RunType), + schedule=_from_dict(d, "schedule", CronSchedule), + setup_duration=d.get("setup_duration", None), + start_time=d.get("start_time", None), + state=_from_dict(d, "state", 
RunState), + status=_from_dict(d, "status", RunStatus), + tasks=_repeated_dict(d, "tasks", RunTask), + trigger=_enum(d, "trigger", TriggerType), + trigger_info=_from_dict(d, "trigger_info", TriggerInfo), + ) @dataclass @@ -339,30 +454,32 @@ class CancelAllRuns: all_queued_runs: Optional[bool] = None """Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in the workspace are canceled.""" - + job_id: Optional[int] = None """The canonical identifier of the job to cancel all runs of.""" - + def as_dict(self) -> dict: """Serializes the CancelAllRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs - if self.job_id is not None: body['job_id'] = self.job_id + if self.all_queued_runs is not None: + body["all_queued_runs"] = self.all_queued_runs + if self.job_id is not None: + body["job_id"] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs - if self.job_id is not None: body['job_id'] = self.job_id + if self.all_queued_runs is not None: + body["all_queued_runs"] = self.all_queued_runs + if self.job_id is not None: + body["job_id"] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CancelAllRuns: """Deserializes the CancelAllRuns from a dictionary.""" - return cls(all_queued_runs=d.get('all_queued_runs', None), job_id=d.get('job_id', None)) - - + return cls(all_queued_runs=d.get("all_queued_runs", None), job_id=d.get("job_id", None)) @dataclass @@ -381,33 +498,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse: """Deserializes the CancelAllRunsResponse from a dictionary.""" return cls() - - @dataclass class CancelRun: run_id: int """This field is required.""" - + def as_dict(self) -> dict: """Serializes the CancelRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the CancelRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CancelRun: """Deserializes the CancelRun from a dictionary.""" - return cls(run_id=d.get('run_id', None)) - - + return cls(run_id=d.get("run_id", None)) @dataclass @@ -426,151 +541,175 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelRunResponse: """Deserializes the CancelRunResponse from a dictionary.""" return cls() - - class CleanRoomTaskRunLifeCycleState(Enum): """Copied from elastic-spark-common/api/messages/runs.proto. 
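# A short, illustrative round-trip for the request dataclasses just shown: the
# serialized form is the JSON body for :method:jobs/cancelallruns, and
# `from_dict` reconstructs an equal instance. Values are placeholders.
req = CancelAllRuns(all_queued_runs=True, job_id=123)
assert req.as_dict() == {"all_queued_runs": True, "job_id": 123}
assert CancelAllRuns.from_dict(req.as_dict()) == req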
Using the original definition to remove coupling with jobs API definition""" - - BLOCKED = 'BLOCKED' - INTERNAL_ERROR = 'INTERNAL_ERROR' - PENDING = 'PENDING' - QUEUED = 'QUEUED' - RUNNING = 'RUNNING' - RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED' - SKIPPED = 'SKIPPED' - TERMINATED = 'TERMINATED' - TERMINATING = 'TERMINATING' - WAITING_FOR_RETRY = 'WAITING_FOR_RETRY' + + BLOCKED = "BLOCKED" + INTERNAL_ERROR = "INTERNAL_ERROR" + PENDING = "PENDING" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + RUN_LIFE_CYCLE_STATE_UNSPECIFIED = "RUN_LIFE_CYCLE_STATE_UNSPECIFIED" + SKIPPED = "SKIPPED" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + WAITING_FOR_RETRY = "WAITING_FOR_RETRY" + class CleanRoomTaskRunResultState(Enum): """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid cyclic dependency.""" - - CANCELED = 'CANCELED' - DISABLED = 'DISABLED' - EVICTED = 'EVICTED' - EXCLUDED = 'EXCLUDED' - FAILED = 'FAILED' - MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED' - RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED' - SUCCESS = 'SUCCESS' - SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES' - TIMEDOUT = 'TIMEDOUT' - UPSTREAM_CANCELED = 'UPSTREAM_CANCELED' - UPSTREAM_EVICTED = 'UPSTREAM_EVICTED' - UPSTREAM_FAILED = 'UPSTREAM_FAILED' + + CANCELED = "CANCELED" + DISABLED = "DISABLED" + EVICTED = "EVICTED" + EXCLUDED = "EXCLUDED" + FAILED = "FAILED" + MAXIMUM_CONCURRENT_RUNS_REACHED = "MAXIMUM_CONCURRENT_RUNS_REACHED" + RUN_RESULT_STATE_UNSPECIFIED = "RUN_RESULT_STATE_UNSPECIFIED" + SUCCESS = "SUCCESS" + SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES" + TIMEDOUT = "TIMEDOUT" + UPSTREAM_CANCELED = "UPSTREAM_CANCELED" + UPSTREAM_EVICTED = "UPSTREAM_EVICTED" + UPSTREAM_FAILED = "UPSTREAM_FAILED" + @dataclass class CleanRoomTaskRunState: """Stores the run state of the clean rooms notebook task.""" - + life_cycle_state: Optional[CleanRoomTaskRunLifeCycleState] = None """A value indicating the run's current lifecycle state. This field is always available in the response. Note: Additional states might be introduced in future releases.""" - + result_state: Optional[CleanRoomTaskRunResultState] = None """A value indicating the run's result. This field is only available for terminal lifecycle states. 
Note: Additional states might be introduced in future releases.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomTaskRunState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value - if self.result_state is not None: body['result_state'] = self.result_state.value + if self.life_cycle_state is not None: + body["life_cycle_state"] = self.life_cycle_state.value + if self.result_state is not None: + body["result_state"] = self.result_state.value return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomTaskRunState into a shallow dictionary of its immediate attributes.""" body = {} - if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state - if self.result_state is not None: body['result_state'] = self.result_state + if self.life_cycle_state is not None: + body["life_cycle_state"] = self.life_cycle_state + if self.result_state is not None: + body["result_state"] = self.result_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomTaskRunState: """Deserializes the CleanRoomTaskRunState from a dictionary.""" - return cls(life_cycle_state=_enum(d, 'life_cycle_state', CleanRoomTaskRunLifeCycleState), result_state=_enum(d, 'result_state', CleanRoomTaskRunResultState)) - - + return cls( + life_cycle_state=_enum(d, "life_cycle_state", CleanRoomTaskRunLifeCycleState), + result_state=_enum(d, "result_state", CleanRoomTaskRunResultState), + ) @dataclass class CleanRoomsNotebookTask: clean_room_name: str """The clean room that the notebook belongs to.""" - + notebook_name: str """Name of the notebook being run.""" - + etag: Optional[str] = None """Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the latest version). 
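# Sketch of the difference between the two serializers generated for every
# class in this file, using CleanRoomTaskRunState as an example: `as_dict`
# converts enums to their `.value` (and recurses into nested dataclasses),
# while `as_shallow_dict` keeps the original objects.
state = CleanRoomTaskRunState(life_cycle_state=CleanRoomTaskRunLifeCycleState.RUNNING)
assert state.as_dict() == {"life_cycle_state": "RUNNING"}
assert state.as_shallow_dict()["life_cycle_state"] is CleanRoomTaskRunLifeCycleState.RUNNING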
It can be fetched by calling the :method:cleanroomassets/get API.""" - - notebook_base_parameters: Optional[Dict[str,str]] = None + + notebook_base_parameters: Optional[Dict[str, str]] = None """Base parameters to be used for the clean room notebook job.""" - + def as_dict(self) -> dict: """Serializes the CleanRoomsNotebookTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name - if self.etag is not None: body['etag'] = self.etag - if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters - if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + if self.clean_room_name is not None: + body["clean_room_name"] = self.clean_room_name + if self.etag is not None: + body["etag"] = self.etag + if self.notebook_base_parameters: + body["notebook_base_parameters"] = self.notebook_base_parameters + if self.notebook_name is not None: + body["notebook_name"] = self.notebook_name return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomsNotebookTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name - if self.etag is not None: body['etag'] = self.etag - if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters - if self.notebook_name is not None: body['notebook_name'] = self.notebook_name + if self.clean_room_name is not None: + body["clean_room_name"] = self.clean_room_name + if self.etag is not None: + body["etag"] = self.etag + if self.notebook_base_parameters: + body["notebook_base_parameters"] = self.notebook_base_parameters + if self.notebook_name is not None: + body["notebook_name"] = self.notebook_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomsNotebookTask: """Deserializes the CleanRoomsNotebookTask from a dictionary.""" - return cls(clean_room_name=d.get('clean_room_name', None), etag=d.get('etag', None), notebook_base_parameters=d.get('notebook_base_parameters', None), notebook_name=d.get('notebook_name', None)) - - + return cls( + clean_room_name=d.get("clean_room_name", None), + etag=d.get("etag", None), + notebook_base_parameters=d.get("notebook_base_parameters", None), + notebook_name=d.get("notebook_name", None), + ) @dataclass class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput: clean_room_job_run_state: Optional[CleanRoomTaskRunState] = None """The run state of the clean rooms notebook task.""" - + notebook_output: Optional[NotebookOutput] = None """The notebook output for the clean room run""" - + output_schema_info: Optional[OutputSchemaInfo] = None """Information on how to access the output schema for the clean room run""" - + def as_dict(self) -> dict: """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict() - if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict() - if self.output_schema_info: body['output_schema_info'] = self.output_schema_info.as_dict() + if self.clean_room_job_run_state: + body["clean_room_job_run_state"] = self.clean_room_job_run_state.as_dict() + if self.notebook_output: + body["notebook_output"] = self.notebook_output.as_dict() + if self.output_schema_info: + 
body["output_schema_info"] = self.output_schema_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state - if self.notebook_output: body['notebook_output'] = self.notebook_output - if self.output_schema_info: body['output_schema_info'] = self.output_schema_info + if self.clean_room_job_run_state: + body["clean_room_job_run_state"] = self.clean_room_job_run_state + if self.notebook_output: + body["notebook_output"] = self.notebook_output + if self.output_schema_info: + body["output_schema_info"] = self.output_schema_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput: """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary.""" - return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState), notebook_output=_from_dict(d, 'notebook_output', NotebookOutput), output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo)) - - + return cls( + clean_room_job_run_state=_from_dict(d, "clean_room_job_run_state", CleanRoomTaskRunState), + notebook_output=_from_dict(d, "notebook_output", NotebookOutput), + output_schema_info=_from_dict(d, "output_schema_info", OutputSchemaInfo), + ) @dataclass @@ -583,7 +722,7 @@ class ClusterInstance: completes. The response won’t include this field if the identifier is not available yet.""" - + spark_context_id: Optional[str] = None """The canonical identifier for the Spark context used by a run. This field is filled in once the run begins execution. This value can be used to view the Spark UI by browsing to @@ -591,27 +730,29 @@ class ClusterInstance: the run has completed. The response won’t include this field if the identifier is not available yet.""" - + def as_dict(self) -> dict: """Serializes the ClusterInstance into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.spark_context_id is not None: + body["spark_context_id"] = self.spark_context_id return body def as_shallow_dict(self) -> dict: """Serializes the ClusterInstance into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.spark_context_id is not None: + body["spark_context_id"] = self.spark_context_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterInstance: """Deserializes the ClusterInstance from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), spark_context_id=d.get('spark_context_id', None)) - - + return cls(cluster_id=d.get("cluster_id", None), spark_context_id=d.get("spark_context_id", None)) @dataclass @@ -620,84 +761,103 @@ class ClusterSpec: """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops responding. 
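# Illustrative: per the field docstrings below, a task's ClusterSpec normally
# sets exactly one compute source (existing_cluster_id, job_cluster_key, or
# new_cluster). Here a shared job cluster is reused by key (the key is a
# placeholder); unset fields are omitted from the serialized body.
spec = ClusterSpec(job_cluster_key="main_cluster")
assert spec.as_dict() == {"job_cluster_key": "main_cluster"}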
We suggest running jobs and tasks on new clusters for greater reliability.""" - + job_cluster_key: Optional[str] = None """If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.""" - + libraries: Optional[List[compute.Library]] = None """An optional list of libraries to be installed on the cluster. The default value is an empty list.""" - + new_cluster: Optional[compute.ClusterSpec] = None """If new_cluster, a description of a new cluster that is created for each run.""" - + def as_dict(self) -> dict: """Serializes the ClusterSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() + if self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.new_cluster: + body["new_cluster"] = self.new_cluster.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.libraries: body['libraries'] = self.libraries - if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.libraries: + body["libraries"] = self.libraries + if self.new_cluster: + body["new_cluster"] = self.new_cluster return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterSpec: """Deserializes the ClusterSpec from a dictionary.""" - return cls(existing_cluster_id=d.get('existing_cluster_id', None), job_cluster_key=d.get('job_cluster_key', None), libraries=_repeated_dict(d, 'libraries', compute.Library), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec)) - - + return cls( + existing_cluster_id=d.get("existing_cluster_id", None), + job_cluster_key=d.get("job_cluster_key", None), + libraries=_repeated_dict(d, "libraries", compute.Library), + new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), + ) @dataclass class ComputeConfig: num_gpus: int """Number of GPUs.""" - + gpu_node_pool_id: Optional[str] = None """ID of the GPU pool to use.""" - + gpu_type: Optional[str] = None """GPU type.""" - + def as_dict(self) -> dict: """Serializes the ComputeConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gpu_node_pool_id is not None: body['gpu_node_pool_id'] = self.gpu_node_pool_id - if self.gpu_type is not None: body['gpu_type'] = self.gpu_type - if self.num_gpus is not None: body['num_gpus'] = self.num_gpus + if self.gpu_node_pool_id is not None: + body["gpu_node_pool_id"] = self.gpu_node_pool_id + if self.gpu_type is not None: + body["gpu_type"] = self.gpu_type + if self.num_gpus is not None: + body["num_gpus"] = self.num_gpus return body def as_shallow_dict(self) -> dict: """Serializes
the ComputeConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.gpu_node_pool_id is not None: body['gpu_node_pool_id'] = self.gpu_node_pool_id - if self.gpu_type is not None: body['gpu_type'] = self.gpu_type - if self.num_gpus is not None: body['num_gpus'] = self.num_gpus + if self.gpu_node_pool_id is not None: + body["gpu_node_pool_id"] = self.gpu_node_pool_id + if self.gpu_type is not None: + body["gpu_type"] = self.gpu_type + if self.num_gpus is not None: + body["num_gpus"] = self.num_gpus return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComputeConfig: """Deserializes the ComputeConfig from a dictionary.""" - return cls(gpu_node_pool_id=d.get('gpu_node_pool_id', None), gpu_type=d.get('gpu_type', None), num_gpus=d.get('num_gpus', None)) - - + return cls( + gpu_node_pool_id=d.get("gpu_node_pool_id", None), + gpu_type=d.get("gpu_type", None), + num_gpus=d.get("num_gpus", None), + ) class Condition(Enum): - - - ALL_UPDATED = 'ALL_UPDATED' - ANY_UPDATED = 'ANY_UPDATED' + + ALL_UPDATED = "ALL_UPDATED" + ANY_UPDATED = "ANY_UPDATED" + @dataclass class ConditionTask: @@ -711,37 +871,41 @@ class ConditionTask: The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.""" - + left: str """The left operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + right: str """The right operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + def as_dict(self) -> dict: """Serializes the ConditionTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.left is not None: body['left'] = self.left - if self.op is not None: body['op'] = self.op.value - if self.right is not None: body['right'] = self.right + if self.left is not None: + body["left"] = self.left + if self.op is not None: + body["op"] = self.op.value + if self.right is not None: + body["right"] = self.right return body def as_shallow_dict(self) -> dict: """Serializes the ConditionTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.left is not None: body['left'] = self.left - if self.op is not None: body['op'] = self.op - if self.right is not None: body['right'] = self.right + if self.left is not None: + body["left"] = self.left + if self.op is not None: + body["op"] = self.op + if self.right is not None: + body["right"] = self.right return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ConditionTask: """Deserializes the ConditionTask from a dictionary.""" - return cls(left=d.get('left', None), op=_enum(d, 'op', ConditionTaskOp), right=d.get('right', None)) - - + return cls(left=d.get("left", None), op=_enum(d, "op", ConditionTaskOp), right=d.get("right", None)) class ConditionTaskOp(Enum): @@ -750,84 +914,85 @@ class ConditionTaskOp(Enum): `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to `false`. - + The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. 
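# Illustrative ConditionTask matching the operator semantics described above:
# GREATER_THAN_OR_EQUAL compares its operands numerically, so this condition
# evaluates to true at run time ("12.0" >= "12").
cond = ConditionTask(op=ConditionTaskOp.GREATER_THAN_OR_EQUAL, left="12.0", right="12")
assert cond.as_dict() == {"left": "12.0", "op": "GREATER_THAN_OR_EQUAL", "right": "12"}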
If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.""" - - EQUAL_TO = 'EQUAL_TO' - GREATER_THAN = 'GREATER_THAN' - GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' - LESS_THAN = 'LESS_THAN' - LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' - NOT_EQUAL = 'NOT_EQUAL' + + EQUAL_TO = "EQUAL_TO" + GREATER_THAN = "GREATER_THAN" + GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL" + LESS_THAN = "LESS_THAN" + LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL" + NOT_EQUAL = "NOT_EQUAL" + @dataclass class Continuous: pause_status: Optional[PauseStatus] = None """Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.""" - + def as_dict(self) -> dict: """Serializes the Continuous into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: body['pause_status'] = self.pause_status.value + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value return body def as_shallow_dict(self) -> dict: """Serializes the Continuous into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: body['pause_status'] = self.pause_status + if self.pause_status is not None: + body["pause_status"] = self.pause_status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Continuous: """Deserializes the Continuous from a dictionary.""" - return cls(pause_status=_enum(d, 'pause_status', PauseStatus)) - - + return cls(pause_status=_enum(d, "pause_status", PauseStatus)) @dataclass class CreateJob: access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" - + budget_policy_id: Optional[str] = None """The id of the user specified budget policy to use for this job. If not specified, a default budget policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the budget policy used by this workload.""" - + continuous: Optional[Continuous] = None """An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.""" - + deployment: Optional[JobDeployment] = None """Deployment information for jobs managed by external sources.""" - + description: Optional[str] = None """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" - + edit_mode: Optional[JobEditMode] = None """Edit mode of the job. * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.""" - + environments: Optional[List[JobEnvironment]] = None """A list of task execution environment specifications that can be referenced by serverless tasks of this job. An environment is required to be present for serverless tasks. For serverless notebook tasks, the environment is accessible in the notebook environment panel. For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.""" - + format: Optional[Format] = None """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. 
When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -837,15 +1002,15 @@ class CreateJob: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.""" - + max_concurrent_runs: Optional[int] = None """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be able to execute multiple runs of the same job concurrently. This is useful for example if you @@ -855,17 +1020,17 @@ class CreateJob: concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.""" - + name: Optional[str] = None """An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.""" - + notification_settings: Optional[JobNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.""" - + parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" - + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. @@ -873,136 +1038,210 @@ class CreateJob: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + queue: Optional[QueueSettings] = None """The queue settings of the job.""" - + run_as: Optional[JobRunAs] = None """Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" - + schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - - tags: Optional[Dict[str,str]] = None + + tags: Optional[Dict[str, str]] = None """A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.""" - + tasks: Optional[List[Task]] = None """A list of task specifications to be executed by this job. It supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit). Read endpoints return only 100 tasks. 
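# A minimal, illustrative CreateJob; `as_dict` yields the JSON body for
# :method:jobs/create. All names and values are placeholders, and a real job
# would normally also carry `tasks` (omitted here since Task is defined
# elsewhere in this module).
new_job = CreateJob(
    name="nightly-etl",
    max_concurrent_runs=1,
    timeout_seconds=3600,
    tags={"team": "data-eng"},
)
request_body = new_job.as_dict()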
If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - + trigger: Optional[TriggerSettings] = None """A configuration to trigger a run when certain conditions are met. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this job begin or complete.""" - + def as_dict(self) -> dict: """Serializes the CreateJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.continuous: body['continuous'] = self.continuous.as_dict() - if self.deployment: body['deployment'] = self.deployment.as_dict() - if self.description is not None: body['description'] = self.description - if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value - if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() - if self.environments: body['environments'] = [v.as_dict() for v in self.environments] - if self.format is not None: body['format'] = self.format.value - if self.git_source: body['git_source'] = self.git_source.as_dict() - if self.health: body['health'] = self.health.as_dict() - if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] - if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs - if self.name is not None: body['name'] = self.name - if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() - if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] - if self.performance_target is not None: body['performance_target'] = self.performance_target.value - if self.queue: body['queue'] = self.queue.as_dict() - if self.run_as: body['run_as'] = self.run_as.as_dict() - if self.schedule: body['schedule'] = self.schedule.as_dict() - if self.tags: body['tags'] = self.tags - if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.trigger: body['trigger'] = self.trigger.as_dict() - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.continuous: + body["continuous"] = self.continuous.as_dict() + if self.deployment: + body["deployment"] = self.deployment.as_dict() + if self.description is not None: + body["description"] = self.description + if self.edit_mode is not None: + body["edit_mode"] = self.edit_mode.value + if self.email_notifications: + body["email_notifications"] = self.email_notifications.as_dict() + if self.environments: + body["environments"] = [v.as_dict() for v in self.environments] + if self.format is not None: + body["format"] = self.format.value + if 
self.git_source: + body["git_source"] = self.git_source.as_dict() + if self.health: + body["health"] = self.health.as_dict() + if self.job_clusters: + body["job_clusters"] = [v.as_dict() for v in self.job_clusters] + if self.max_concurrent_runs is not None: + body["max_concurrent_runs"] = self.max_concurrent_runs + if self.name is not None: + body["name"] = self.name + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] + if self.performance_target is not None: + body["performance_target"] = self.performance_target.value + if self.queue: + body["queue"] = self.queue.as_dict() + if self.run_as: + body["run_as"] = self.run_as.as_dict() + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.tags: + body["tags"] = self.tags + if self.tasks: + body["tasks"] = [v.as_dict() for v in self.tasks] + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.trigger: + body["trigger"] = self.trigger.as_dict() + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.continuous: body['continuous'] = self.continuous - if self.deployment: body['deployment'] = self.deployment - if self.description is not None: body['description'] = self.description - if self.edit_mode is not None: body['edit_mode'] = self.edit_mode - if self.email_notifications: body['email_notifications'] = self.email_notifications - if self.environments: body['environments'] = self.environments - if self.format is not None: body['format'] = self.format - if self.git_source: body['git_source'] = self.git_source - if self.health: body['health'] = self.health - if self.job_clusters: body['job_clusters'] = self.job_clusters - if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs - if self.name is not None: body['name'] = self.name - if self.notification_settings: body['notification_settings'] = self.notification_settings - if self.parameters: body['parameters'] = self.parameters - if self.performance_target is not None: body['performance_target'] = self.performance_target - if self.queue: body['queue'] = self.queue - if self.run_as: body['run_as'] = self.run_as - if self.schedule: body['schedule'] = self.schedule - if self.tags: body['tags'] = self.tags - if self.tasks: body['tasks'] = self.tasks - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.trigger: body['trigger'] = self.trigger - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.continuous: + body["continuous"] = self.continuous + if self.deployment: + body["deployment"] = self.deployment + if self.description is not None: + body["description"] = self.description + if self.edit_mode is not None: + body["edit_mode"] = self.edit_mode + if self.email_notifications: + body["email_notifications"] = self.email_notifications 
+ if self.environments: + body["environments"] = self.environments + if self.format is not None: + body["format"] = self.format + if self.git_source: + body["git_source"] = self.git_source + if self.health: + body["health"] = self.health + if self.job_clusters: + body["job_clusters"] = self.job_clusters + if self.max_concurrent_runs is not None: + body["max_concurrent_runs"] = self.max_concurrent_runs + if self.name is not None: + body["name"] = self.name + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.parameters: + body["parameters"] = self.parameters + if self.performance_target is not None: + body["performance_target"] = self.performance_target + if self.queue: + body["queue"] = self.queue + if self.run_as: + body["run_as"] = self.run_as + if self.schedule: + body["schedule"] = self.schedule + if self.tags: + body["tags"] = self.tags + if self.tasks: + body["tasks"] = self.tasks + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.trigger: + body["trigger"] = self.trigger + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateJob: """Deserializes the CreateJob from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), budget_policy_id=d.get('budget_policy_id', None), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), description=d.get('description', None), edit_mode=_enum(d, 'edit_mode', JobEditMode), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), format=_enum(d, 'format', Format), git_source=_from_dict(d, 'git_source', GitSource), health=_from_dict(d, 'health', JobsHealthRules), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), max_concurrent_runs=d.get('max_concurrent_runs', None), name=d.get('name', None), notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings), parameters=_repeated_dict(d, 'parameters', JobParameterDefinition), performance_target=_enum(d, 'performance_target', PerformanceTarget), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), schedule=_from_dict(d, 'schedule', CronSchedule), tags=d.get('tags', None), tasks=_repeated_dict(d, 'tasks', Task), timeout_seconds=d.get('timeout_seconds', None), trigger=_from_dict(d, 'trigger', TriggerSettings), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), + budget_policy_id=d.get("budget_policy_id", None), + continuous=_from_dict(d, "continuous", Continuous), + deployment=_from_dict(d, "deployment", JobDeployment), + description=d.get("description", None), + edit_mode=_enum(d, "edit_mode", JobEditMode), + email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), + environments=_repeated_dict(d, "environments", JobEnvironment), + format=_enum(d, "format", Format), + git_source=_from_dict(d, "git_source", GitSource), + health=_from_dict(d, "health", JobsHealthRules), + job_clusters=_repeated_dict(d, "job_clusters", JobCluster), + max_concurrent_runs=d.get("max_concurrent_runs", None), + name=d.get("name", None), + notification_settings=_from_dict(d, "notification_settings", 
JobNotificationSettings), + parameters=_repeated_dict(d, "parameters", JobParameterDefinition), + performance_target=_enum(d, "performance_target", PerformanceTarget), + queue=_from_dict(d, "queue", QueueSettings), + run_as=_from_dict(d, "run_as", JobRunAs), + schedule=_from_dict(d, "schedule", CronSchedule), + tags=d.get("tags", None), + tasks=_repeated_dict(d, "tasks", Task), + timeout_seconds=d.get("timeout_seconds", None), + trigger=_from_dict(d, "trigger", TriggerSettings), + webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), + ) @dataclass class CreateResponse: """Job was created successfully""" - + job_id: Optional[int] = None """The canonical identifier for the newly created job.""" - + def as_dict(self) -> dict: """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id + if self.job_id is not None: + body["job_id"] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id + if self.job_id is not None: + body["job_id"] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: """Deserializes the CreateResponse from a dictionary.""" - return cls(job_id=d.get('job_id', None)) - - + return cls(job_id=d.get("job_id", None)) @dataclass @@ -1012,277 +1251,325 @@ class CronSchedule: for details. This field is required. [Cron Trigger]: http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - + timezone_id: str """A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone] for details. This field is required. 
     [Java TimeZone]: https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html"""
-
+
     pause_status: Optional[PauseStatus] = None
     """Indicate whether this schedule is paused or not."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
-        if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status.value
+        if self.quartz_cron_expression is not None:
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CronSchedule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.pause_status is not None: body['pause_status'] = self.pause_status
-        if self.quartz_cron_expression is not None: body['quartz_cron_expression'] = self.quartz_cron_expression
-        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status
+        if self.quartz_cron_expression is not None:
+            body["quartz_cron_expression"] = self.quartz_cron_expression
+        if self.timezone_id is not None:
+            body["timezone_id"] = self.timezone_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CronSchedule:
         """Deserializes the CronSchedule from a dictionary."""
-        return cls(pause_status=_enum(d, 'pause_status', PauseStatus), quartz_cron_expression=d.get('quartz_cron_expression', None), timezone_id=d.get('timezone_id', None))
-
-
+        return cls(
+            pause_status=_enum(d, "pause_status", PauseStatus),
+            quartz_cron_expression=d.get("quartz_cron_expression", None),
+            timezone_id=d.get("timezone_id", None),
+        )
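# Illustrative sketch (editorial aside, hypothetical values — not part of the generated
# patch): a minimal use of this module's CreateJob/CronSchedule/Task/NotebookTask types.
# CronSchedule pairs a Quartz cron expression with a Java timezone ID, and CreateJob
# nests it next to the task list; both round-trip through as_dict()/from_dict().
schedule = CronSchedule(quartz_cron_expression="0 0 6 * * ?", timezone_id="UTC")
job = CreateJob(
    name="nightly-refresh",  # hypothetical job name
    schedule=schedule,
    tasks=[Task(task_key="main", notebook_task=NotebookTask(notebook_path="/Jobs/refresh"))],
)
assert CreateJob.from_dict(job.as_dict()).schedule.timezone_id == "UTC"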
page_display_name=d.get("page_display_name", None), + widget_error_details=_repeated_dict(d, "widget_error_details", WidgetErrorDetail), + ) @dataclass class DashboardTask: """Configures the Lakeview Dashboard job task type.""" - + dashboard_id: Optional[str] = None """The identifier of the dashboard to refresh.""" - + subscription: Optional[Subscription] = None """Optional: subscription configuration for sending the dashboard snapshot.""" - + warehouse_id: Optional[str] = None """Optional: The warehouse id to execute the dashboard with for the schedule. If not specified, the default warehouse of the dashboard will be used.""" - + def as_dict(self) -> dict: """Serializes the DashboardTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.subscription: body['subscription'] = self.subscription.as_dict() - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.subscription: + body["subscription"] = self.subscription.as_dict() + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the DashboardTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.subscription: body['subscription'] = self.subscription - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.subscription: + body["subscription"] = self.subscription + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardTask: """Deserializes the DashboardTask from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), subscription=_from_dict(d, 'subscription', Subscription), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + dashboard_id=d.get("dashboard_id", None), + subscription=_from_dict(d, "subscription", Subscription), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class DashboardTaskOutput: page_snapshots: Optional[List[DashboardPageSnapshot]] = None """Should only be populated for manual PDF download jobs.""" - + def as_dict(self) -> dict: """Serializes the DashboardTaskOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.page_snapshots: body['page_snapshots'] = [v.as_dict() for v in self.page_snapshots] + if self.page_snapshots: + body["page_snapshots"] = [v.as_dict() for v in self.page_snapshots] return body def as_shallow_dict(self) -> dict: """Serializes the DashboardTaskOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.page_snapshots: body['page_snapshots'] = self.page_snapshots + if self.page_snapshots: + body["page_snapshots"] = self.page_snapshots return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardTaskOutput: """Deserializes the DashboardTaskOutput from a dictionary.""" - return cls(page_snapshots=_repeated_dict(d, 'page_snapshots', DashboardPageSnapshot)) - - + return cls(page_snapshots=_repeated_dict(d, "page_snapshots", DashboardPageSnapshot)) @dataclass class DbtCloudJobRunStep: """Format of response retrieved from dbt Cloud, for inclusion in output""" - + index: Optional[int] = None 
"""Orders the steps in the job""" - + logs: Optional[str] = None """Output of the step""" - + name: Optional[str] = None """Name of the step in the job""" - + status: Optional[DbtCloudRunStatus] = None """State of the step""" - + def as_dict(self) -> dict: """Serializes the DbtCloudJobRunStep into a dictionary suitable for use as a JSON request body.""" body = {} - if self.index is not None: body['index'] = self.index - if self.logs is not None: body['logs'] = self.logs - if self.name is not None: body['name'] = self.name - if self.status is not None: body['status'] = self.status.value + if self.index is not None: + body["index"] = self.index + if self.logs is not None: + body["logs"] = self.logs + if self.name is not None: + body["name"] = self.name + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the DbtCloudJobRunStep into a shallow dictionary of its immediate attributes.""" body = {} - if self.index is not None: body['index'] = self.index - if self.logs is not None: body['logs'] = self.logs - if self.name is not None: body['name'] = self.name - if self.status is not None: body['status'] = self.status + if self.index is not None: + body["index"] = self.index + if self.logs is not None: + body["logs"] = self.logs + if self.name is not None: + body["name"] = self.name + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DbtCloudJobRunStep: """Deserializes the DbtCloudJobRunStep from a dictionary.""" - return cls(index=d.get('index', None), logs=d.get('logs', None), name=d.get('name', None), status=_enum(d, 'status', DbtCloudRunStatus)) - - + return cls( + index=d.get("index", None), + logs=d.get("logs", None), + name=d.get("name", None), + status=_enum(d, "status", DbtCloudRunStatus), + ) class DbtCloudRunStatus(Enum): """Response enumeration from calling the dbt Cloud API, for inclusion in output""" - - CANCELLED = 'CANCELLED' - ERROR = 'ERROR' - QUEUED = 'QUEUED' - RUNNING = 'RUNNING' - STARTING = 'STARTING' - SUCCESS = 'SUCCESS' + + CANCELLED = "CANCELLED" + ERROR = "ERROR" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + STARTING = "STARTING" + SUCCESS = "SUCCESS" + @dataclass class DbtCloudTask: connection_resource_name: Optional[str] = None """The resource name of the UC connection that authenticates the dbt Cloud for this task""" - + dbt_cloud_job_id: Optional[int] = None """Id of the dbt Cloud job to be triggered""" - + def as_dict(self) -> dict: """Serializes the DbtCloudTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name - if self.dbt_cloud_job_id is not None: body['dbt_cloud_job_id'] = self.dbt_cloud_job_id + if self.connection_resource_name is not None: + body["connection_resource_name"] = self.connection_resource_name + if self.dbt_cloud_job_id is not None: + body["dbt_cloud_job_id"] = self.dbt_cloud_job_id return body def as_shallow_dict(self) -> dict: """Serializes the DbtCloudTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name - if self.dbt_cloud_job_id is not None: body['dbt_cloud_job_id'] = self.dbt_cloud_job_id + if self.connection_resource_name is not None: + body["connection_resource_name"] = self.connection_resource_name + if self.dbt_cloud_job_id is not 
None: + body["dbt_cloud_job_id"] = self.dbt_cloud_job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DbtCloudTask: """Deserializes the DbtCloudTask from a dictionary.""" - return cls(connection_resource_name=d.get('connection_resource_name', None), dbt_cloud_job_id=d.get('dbt_cloud_job_id', None)) - - + return cls( + connection_resource_name=d.get("connection_resource_name", None), + dbt_cloud_job_id=d.get("dbt_cloud_job_id", None), + ) @dataclass class DbtCloudTaskOutput: dbt_cloud_job_run_id: Optional[int] = None """Id of the job run in dbt Cloud""" - + dbt_cloud_job_run_output: Optional[List[DbtCloudJobRunStep]] = None """Steps of the job run as received from dbt Cloud""" - + dbt_cloud_job_run_url: Optional[str] = None """Url where full run details can be viewed""" - + def as_dict(self) -> dict: """Serializes the DbtCloudTaskOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_cloud_job_run_id is not None: body['dbt_cloud_job_run_id'] = self.dbt_cloud_job_run_id - if self.dbt_cloud_job_run_output: body['dbt_cloud_job_run_output'] = [v.as_dict() for v in self.dbt_cloud_job_run_output] - if self.dbt_cloud_job_run_url is not None: body['dbt_cloud_job_run_url'] = self.dbt_cloud_job_run_url + if self.dbt_cloud_job_run_id is not None: + body["dbt_cloud_job_run_id"] = self.dbt_cloud_job_run_id + if self.dbt_cloud_job_run_output: + body["dbt_cloud_job_run_output"] = [v.as_dict() for v in self.dbt_cloud_job_run_output] + if self.dbt_cloud_job_run_url is not None: + body["dbt_cloud_job_run_url"] = self.dbt_cloud_job_run_url return body def as_shallow_dict(self) -> dict: """Serializes the DbtCloudTaskOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_cloud_job_run_id is not None: body['dbt_cloud_job_run_id'] = self.dbt_cloud_job_run_id - if self.dbt_cloud_job_run_output: body['dbt_cloud_job_run_output'] = self.dbt_cloud_job_run_output - if self.dbt_cloud_job_run_url is not None: body['dbt_cloud_job_run_url'] = self.dbt_cloud_job_run_url + if self.dbt_cloud_job_run_id is not None: + body["dbt_cloud_job_run_id"] = self.dbt_cloud_job_run_id + if self.dbt_cloud_job_run_output: + body["dbt_cloud_job_run_output"] = self.dbt_cloud_job_run_output + if self.dbt_cloud_job_run_url is not None: + body["dbt_cloud_job_run_url"] = self.dbt_cloud_job_run_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DbtCloudTaskOutput: """Deserializes the DbtCloudTaskOutput from a dictionary.""" - return cls(dbt_cloud_job_run_id=d.get('dbt_cloud_job_run_id', None), dbt_cloud_job_run_output=_repeated_dict(d, 'dbt_cloud_job_run_output', DbtCloudJobRunStep), dbt_cloud_job_run_url=d.get('dbt_cloud_job_run_url', None)) - - + return cls( + dbt_cloud_job_run_id=d.get("dbt_cloud_job_run_id", None), + dbt_cloud_job_run_output=_repeated_dict(d, "dbt_cloud_job_run_output", DbtCloudJobRunStep), + dbt_cloud_job_run_url=d.get("dbt_cloud_job_run_url", None), + ) @dataclass class DbtOutput: - artifacts_headers: Optional[Dict[str,str]] = None + artifacts_headers: Optional[Dict[str, str]] = None """An optional map of headers to send when retrieving the artifact from the `artifacts_link`.""" - + artifacts_link: Optional[str] = None """A pre-signed URL to download the (compressed) dbt artifacts. This link is valid for a limited time (30 minutes). 
This information is only available after the run has finished.""" - + def as_dict(self) -> dict: """Serializes the DbtOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers - if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link + if self.artifacts_headers: + body["artifacts_headers"] = self.artifacts_headers + if self.artifacts_link is not None: + body["artifacts_link"] = self.artifacts_link return body def as_shallow_dict(self) -> dict: """Serializes the DbtOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers - if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link + if self.artifacts_headers: + body["artifacts_headers"] = self.artifacts_headers + if self.artifacts_link is not None: + body["artifacts_link"] = self.artifacts_link return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DbtOutput: """Deserializes the DbtOutput from a dictionary.""" - return cls(artifacts_headers=d.get('artifacts_headers', None), artifacts_link=d.get('artifacts_link', None)) - - + return cls(artifacts_headers=d.get("artifacts_headers", None), artifacts_link=d.get("artifacts_link", None)) @dataclass @@ -1290,24 +1577,24 @@ class DbtTask: commands: List[str] """A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.""" - + catalog: Optional[str] = None """Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1.""" - + profiles_directory: Optional[str] = None """Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.""" - + project_directory: Optional[str] = None """Path to the project directory. Optional for Git sourced tasks, in which case if no value is provided, the root of the Git repository is used.""" - + schema: Optional[str] = None """Optional schema to write to. This parameter is only used when a warehouse_id is also provided. If not provided, the `default` schema is used.""" - + source: Optional[Source] = None """Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved from the local Databricks workspace. When set to `GIT`, the project will be retrieved @@ -1316,67 +1603,87 @@ class DbtTask: * `WORKSPACE`: Project is located in Databricks workspace. * `GIT`: Project is located in cloud Git provider.""" - + warehouse_id: Optional[str] = None """ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. 
     It can be overridden on a per-command basis by using the `--profiles-dir` command line argument."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DbtTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.commands: body['commands'] = [v for v in self.commands]
-        if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory
-        if self.project_directory is not None: body['project_directory'] = self.project_directory
-        if self.schema is not None: body['schema'] = self.schema
-        if self.source is not None: body['source'] = self.source.value
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.commands:
+            body["commands"] = [v for v in self.commands]
+        if self.profiles_directory is not None:
+            body["profiles_directory"] = self.profiles_directory
+        if self.project_directory is not None:
+            body["project_directory"] = self.project_directory
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.source is not None:
+            body["source"] = self.source.value
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DbtTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.commands: body['commands'] = self.commands
-        if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory
-        if self.project_directory is not None: body['project_directory'] = self.project_directory
-        if self.schema is not None: body['schema'] = self.schema
-        if self.source is not None: body['source'] = self.source
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.commands:
+            body["commands"] = self.commands
+        if self.profiles_directory is not None:
+            body["profiles_directory"] = self.profiles_directory
+        if self.project_directory is not None:
+            body["project_directory"] = self.project_directory
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.source is not None:
+            body["source"] = self.source
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DbtTask:
         """Deserializes the DbtTask from a dictionary."""
-        return cls(catalog=d.get('catalog', None), commands=d.get('commands', None), profiles_directory=d.get('profiles_directory', None), project_directory=d.get('project_directory', None), schema=d.get('schema', None), source=_enum(d, 'source', Source), warehouse_id=d.get('warehouse_id', None))
-
-
+        return cls(
+            catalog=d.get("catalog", None),
+            commands=d.get("commands", None),
+            profiles_directory=d.get("profiles_directory", None),
+            project_directory=d.get("project_directory", None),
+            schema=d.get("schema", None),
+            source=_enum(d, "source", Source),
+            warehouse_id=d.get("warehouse_id", None),
+        )
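# Illustrative sketch (editorial aside, hypothetical values): a DbtTask as serialized
# by the as_dict() above. Every command must start with `dbt`; when a warehouse_id is
# given, the profile is generated automatically and `schema`/`catalog` pick the target.
dbt = DbtTask(
    commands=["dbt deps", "dbt run"],
    warehouse_id="abc123",  # hypothetical SQL warehouse ID
    schema="analytics",  # hypothetical target schema
)
assert DbtTask.from_dict(dbt.as_dict()).commands == ["dbt deps", "dbt run"]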
This field is required.""" - + def as_dict(self) -> dict: """Serializes the DeleteJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id + if self.job_id is not None: + body["job_id"] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id + if self.job_id is not None: + body["job_id"] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteJob: """Deserializes the DeleteJob from a dictionary.""" - return cls(job_id=d.get('job_id', None)) - - + return cls(job_id=d.get("job_id", None)) @dataclass @@ -1395,33 +1702,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - @dataclass class DeleteRun: run_id: int """ID of the run to delete.""" - + def as_dict(self) -> dict: """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRun: """Deserializes the DeleteRun from a dictionary.""" - return cls(run_id=d.get('run_id', None)) - - + return cls(run_id=d.get("run_id", None)) @dataclass @@ -1440,80 +1745,86 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: """Deserializes the DeleteRunResponse from a dictionary.""" return cls() - - @dataclass class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: """Represents a change to the job cluster's settings that would be required for the job clusters to become compliant with their policies.""" - + field: Optional[str] = None """The field where this change would be made, prepended with the job cluster key.""" - + new_value: Optional[str] = None """The new value of this field after enforcing policy compliance (either a number, a boolean, or a string) converted to a string. This is intended to be read by a human. The typed new value of this field can be retrieved by reading the settings field in the API response.""" - + previous_value: Optional[str] = None """The previous value of this field before enforcing policy compliance (either a number, a boolean, or a string) converted to a string. This is intended to be read by a human. 
     The type of the field can be retrieved by reading the settings field in the API response."""
-
+
     def as_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.field is not None: body['field'] = self.field
-        if self.new_value is not None: body['new_value'] = self.new_value
-        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        if self.field is not None:
+            body["field"] = self.field
+        if self.new_value is not None:
+            body["new_value"] = self.new_value
+        if self.previous_value is not None:
+            body["previous_value"] = self.previous_value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
         """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary."""
-        return cls(field=d.get('field', None), new_value=d.get('new_value', None), previous_value=d.get('previous_value', None))
-
-
+        return cls(
+            field=d.get("field", None), new_value=d.get("new_value", None), previous_value=d.get("previous_value", None)
+        )


 @dataclass
 class EnforcePolicyComplianceRequest:
     job_id: int
     """The ID of the job you want to enforce policy compliance on."""
-
+
     validate_only: Optional[bool] = None
     """If set, previews changes made to the job to comply with its policy, but does not update the
     job."""
-
+
     def as_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceRequest:
         """Deserializes the EnforcePolicyComplianceRequest from a dictionary."""
-        return cls(job_id=d.get('job_id', None), validate_only=d.get('validate_only', None))
-
-
+        return cls(job_id=d.get("job_id", None), validate_only=d.get("validate_only", None))
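# Illustrative sketch (editorial aside, hypothetical job ID): with validate_only=True
# the request previews the cluster-settings changes policy enforcement would make,
# without editing the job; the body serializes exactly as the guards above imply.
req = EnforcePolicyComplianceRequest(job_id=123, validate_only=True)
assert req.as_dict() == {"job_id": 123, "validate_only": True}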


 @dataclass
@@ -1521,72 +1832,79 @@ class EnforcePolicyComplianceResponse:
     has_changes: Optional[bool] = None
     """Whether any changes have been made to the job cluster settings for the job to become compliant
     with its policies."""
-
+
     job_cluster_changes: Optional[List[EnforcePolicyComplianceForJobResponseJobClusterSettingsChange]] = None
     """A list of job cluster changes that have been made to the job’s cluster settings in order for
     all job clusters to become compliant with their policies."""
-
+
     settings: Optional[JobSettings] = None
     """Updated job settings after policy enforcement. Policy enforcement only applies to job clusters
     that are created when running the job (which are specified in new_cluster) and does not apply
     to existing all-purpose clusters. Updated job settings are derived by applying policy default
     values to the existing job clusters in order to satisfy policy requirements."""
-
+
     def as_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.has_changes is not None: body['has_changes'] = self.has_changes
-        if self.job_cluster_changes: body['job_cluster_changes'] = [v.as_dict() for v in self.job_cluster_changes]
-        if self.settings: body['settings'] = self.settings.as_dict()
+        if self.has_changes is not None:
+            body["has_changes"] = self.has_changes
+        if self.job_cluster_changes:
+            body["job_cluster_changes"] = [v.as_dict() for v in self.job_cluster_changes]
+        if self.settings:
+            body["settings"] = self.settings.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the EnforcePolicyComplianceResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.has_changes is not None: body['has_changes'] = self.has_changes
-        if self.job_cluster_changes: body['job_cluster_changes'] = self.job_cluster_changes
-        if self.settings: body['settings'] = self.settings
+        if self.has_changes is not None:
+            body["has_changes"] = self.has_changes
+        if self.job_cluster_changes:
+            body["job_cluster_changes"] = self.job_cluster_changes
+        if self.settings:
+            body["settings"] = self.settings
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceResponse:
         """Deserializes the EnforcePolicyComplianceResponse from a dictionary."""
-        return cls(has_changes=d.get('has_changes', None), job_cluster_changes=_repeated_dict(d, 'job_cluster_changes', EnforcePolicyComplianceForJobResponseJobClusterSettingsChange), settings=_from_dict(d, 'settings', JobSettings))
-
-
+        return cls(
+            has_changes=d.get("has_changes", None),
+            job_cluster_changes=_repeated_dict(
+                d, "job_cluster_changes", EnforcePolicyComplianceForJobResponseJobClusterSettingsChange
+            ),
+            settings=_from_dict(d, "settings", JobSettings),
+        )


 @dataclass
 class ExportRunOutput:
     """Run was exported successfully."""
-
+
     views: Optional[List[ViewItem]] = None
     """The exported content in HTML format (one for every view item). To extract the HTML notebook
     from the JSON response, download and run this [Python script].
     [Python script]: https://docs.databricks.com/en/_static/examples/extract.py"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ExportRunOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.views: body['views'] = [v.as_dict() for v in self.views]
+        if self.views:
+            body["views"] = [v.as_dict() for v in self.views]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ExportRunOutput into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.views: body['views'] = self.views
+        if self.views:
+            body["views"] = self.views
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ExportRunOutput:
         """Deserializes the ExportRunOutput from a dictionary."""
-        return cls(views=_repeated_dict(d, 'views', ViewItem))
-
-
-
-
-
+        return cls(views=_repeated_dict(d, "views", ViewItem))


 @dataclass
@@ -1594,325 +1912,380 @@ class FileArrivalTriggerConfiguration:
     url: str
     """URL to be monitored for file arrivals. The path must point to the root or a subpath of the
     external location."""
-
+
     min_time_between_triggers_seconds: Optional[int] = None
     """If set, the trigger starts a run only after the specified amount of time passed since the last
     time the trigger fired. The minimum allowed value is 60 seconds"""
-
+
     wait_after_last_change_seconds: Optional[int] = None
     """If set, the trigger starts a run only after no file activity has occurred for the specified
     amount of time. This makes it possible to wait for a batch of incoming files to arrive before
     triggering a run. The minimum allowed value is 60 seconds."""
-
+
     def as_dict(self) -> dict:
         """Serializes the FileArrivalTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.url is not None: body['url'] = self.url
-        if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        if self.min_time_between_triggers_seconds is not None:
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.url is not None:
+            body["url"] = self.url
+        if self.wait_after_last_change_seconds is not None:
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FileArrivalTriggerConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.url is not None: body['url'] = self.url
-        if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        if self.min_time_between_triggers_seconds is not None:
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.url is not None:
+            body["url"] = self.url
+        if self.wait_after_last_change_seconds is not None:
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FileArrivalTriggerConfiguration:
         """Deserializes the FileArrivalTriggerConfiguration from a dictionary."""
-        return cls(min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None), url=d.get('url', None), wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None))
-
-
+        return cls(
+            min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None),
+            url=d.get("url", None),
+            wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None),
+        )
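# Illustrative sketch (editorial aside, hypothetical path): a file-arrival trigger.
# Both throttle fields are floors in seconds (minimum 60): one spaces runs apart, the
# other waits for a quiet period so a whole batch of files lands before a run starts.
trigger_conf = FileArrivalTriggerConfiguration(
    url="s3://bucket/landing/",  # hypothetical external-location subpath
    min_time_between_triggers_seconds=300,
    wait_after_last_change_seconds=120,
)
assert FileArrivalTriggerConfiguration.from_dict(trigger_conf.as_dict()).url == "s3://bucket/landing/"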


 @dataclass
 class FileArrivalTriggerState:
     using_file_events: Optional[bool] = None
     """Indicates whether the trigger leverages file events to detect file arrivals."""
-
+
     def as_dict(self) -> dict:
         """Serializes the FileArrivalTriggerState into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.using_file_events is not None: body['using_file_events'] = self.using_file_events
+        if self.using_file_events is not None:
+            body["using_file_events"] = self.using_file_events
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FileArrivalTriggerState into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.using_file_events is not None: body['using_file_events'] = self.using_file_events
+        if self.using_file_events is not None:
+            body["using_file_events"] = self.using_file_events
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FileArrivalTriggerState:
         """Deserializes the FileArrivalTriggerState from a dictionary."""
-        return cls(using_file_events=d.get('using_file_events', None))
-
-
+        return cls(using_file_events=d.get("using_file_events", None))


 @dataclass
 class ForEachStats:
     error_message_stats: Optional[List[ForEachTaskErrorMessageStats]] = None
     """Sample of the 3 most common error messages that occurred during the iteration."""
-
+
     task_run_stats: Optional[ForEachTaskTaskRunStats] = None
     """Describes stats of the iteration. Only latest retries are considered."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ForEachStats into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.error_message_stats: body['error_message_stats'] = [v.as_dict() for v in self.error_message_stats]
-        if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict()
+        if self.error_message_stats:
+            body["error_message_stats"] = [v.as_dict() for v in self.error_message_stats]
+        if self.task_run_stats:
+            body["task_run_stats"] = self.task_run_stats.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ForEachStats into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error_message_stats: body['error_message_stats'] = self.error_message_stats
-        if self.task_run_stats: body['task_run_stats'] = self.task_run_stats
+        if self.error_message_stats:
+            body["error_message_stats"] = self.error_message_stats
+        if self.task_run_stats:
+            body["task_run_stats"] = self.task_run_stats
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ForEachStats:
         """Deserializes the ForEachStats from a dictionary."""
-        return cls(error_message_stats=_repeated_dict(d, 'error_message_stats', ForEachTaskErrorMessageStats), task_run_stats=_from_dict(d, 'task_run_stats', ForEachTaskTaskRunStats))
-
-
+        return cls(
+            error_message_stats=_repeated_dict(d, "error_message_stats", ForEachTaskErrorMessageStats),
+            task_run_stats=_from_dict(d, "task_run_stats", ForEachTaskTaskRunStats),
+        )


 @dataclass
 class ForEachTask:
     inputs: str
     """Array for task to iterate on. This can be a JSON string or a reference to an array
     parameter."""
-
+
     task: Task
     """Configuration for the task that will be run for each element in the array"""
-
+
     concurrency: Optional[int] = None
     """An optional maximum allowed number of concurrent runs of the task. Set this value if you want
     to be able to execute multiple runs of the task concurrently."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.concurrency is not None: body['concurrency'] = self.concurrency
-        if self.inputs is not None: body['inputs'] = self.inputs
-        if self.task: body['task'] = self.task.as_dict()
+        if self.concurrency is not None:
+            body["concurrency"] = self.concurrency
+        if self.inputs is not None:
+            body["inputs"] = self.inputs
+        if self.task:
+            body["task"] = self.task.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ForEachTask into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.concurrency is not None: body['concurrency'] = self.concurrency
-        if self.inputs is not None: body['inputs'] = self.inputs
-        if self.task: body['task'] = self.task
+        if self.concurrency is not None:
+            body["concurrency"] = self.concurrency
+        if self.inputs is not None:
+            body["inputs"] = self.inputs
+        if self.task:
+            body["task"] = self.task
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ForEachTask:
         """Deserializes the ForEachTask from a dictionary."""
-        return cls(concurrency=d.get('concurrency', None), inputs=d.get('inputs', None), task=_from_dict(d, 'task', Task))
-
-
+        return cls(
+            concurrency=d.get("concurrency", None), inputs=d.get("inputs", None), task=_from_dict(d, "task", Task)
+        )
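# Illustrative sketch (editorial aside, hypothetical values): a for-each fan-out.
# `inputs` is a JSON string (or a reference to an array parameter); `task` runs once
# per element, with at most `concurrency` iterations in flight at a time.
fan_out = ForEachTask(
    inputs='["us", "eu", "apac"]',  # hypothetical iteration values
    task=Task(task_key="per_region", notebook_task=NotebookTask(notebook_path="/Jobs/region")),
    concurrency=2,
)
assert ForEachTask.from_dict(fan_out.as_dict()).concurrency == 2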
dictionary.""" - return cls(count=d.get('count', None), error_message=d.get('error_message', None), termination_category=d.get('termination_category', None)) - - + return cls( + count=d.get("count", None), + error_message=d.get("error_message", None), + termination_category=d.get("termination_category", None), + ) @dataclass class ForEachTaskTaskRunStats: active_iterations: Optional[int] = None """Describes the iteration runs having an active lifecycle state or an active run sub state.""" - + completed_iterations: Optional[int] = None """Describes the number of failed and succeeded iteration runs.""" - + failed_iterations: Optional[int] = None """Describes the number of failed iteration runs.""" - + scheduled_iterations: Optional[int] = None """Describes the number of iteration runs that have been scheduled.""" - + succeeded_iterations: Optional[int] = None """Describes the number of succeeded iteration runs.""" - + total_iterations: Optional[int] = None """Describes the length of the list of items to iterate over.""" - + def as_dict(self) -> dict: """Serializes the ForEachTaskTaskRunStats into a dictionary suitable for use as a JSON request body.""" body = {} - if self.active_iterations is not None: body['active_iterations'] = self.active_iterations - if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations - if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations - if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations - if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations - if self.total_iterations is not None: body['total_iterations'] = self.total_iterations + if self.active_iterations is not None: + body["active_iterations"] = self.active_iterations + if self.completed_iterations is not None: + body["completed_iterations"] = self.completed_iterations + if self.failed_iterations is not None: + body["failed_iterations"] = self.failed_iterations + if self.scheduled_iterations is not None: + body["scheduled_iterations"] = self.scheduled_iterations + if self.succeeded_iterations is not None: + body["succeeded_iterations"] = self.succeeded_iterations + if self.total_iterations is not None: + body["total_iterations"] = self.total_iterations return body def as_shallow_dict(self) -> dict: """Serializes the ForEachTaskTaskRunStats into a shallow dictionary of its immediate attributes.""" body = {} - if self.active_iterations is not None: body['active_iterations'] = self.active_iterations - if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations - if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations - if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations - if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations - if self.total_iterations is not None: body['total_iterations'] = self.total_iterations + if self.active_iterations is not None: + body["active_iterations"] = self.active_iterations + if self.completed_iterations is not None: + body["completed_iterations"] = self.completed_iterations + if self.failed_iterations is not None: + body["failed_iterations"] = self.failed_iterations + if self.scheduled_iterations is not None: + body["scheduled_iterations"] = self.scheduled_iterations + if self.succeeded_iterations is not None: + body["succeeded_iterations"] = 
self.succeeded_iterations + if self.total_iterations is not None: + body["total_iterations"] = self.total_iterations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForEachTaskTaskRunStats: """Deserializes the ForEachTaskTaskRunStats from a dictionary.""" - return cls(active_iterations=d.get('active_iterations', None), completed_iterations=d.get('completed_iterations', None), failed_iterations=d.get('failed_iterations', None), scheduled_iterations=d.get('scheduled_iterations', None), succeeded_iterations=d.get('succeeded_iterations', None), total_iterations=d.get('total_iterations', None)) - - + return cls( + active_iterations=d.get("active_iterations", None), + completed_iterations=d.get("completed_iterations", None), + failed_iterations=d.get("failed_iterations", None), + scheduled_iterations=d.get("scheduled_iterations", None), + succeeded_iterations=d.get("succeeded_iterations", None), + total_iterations=d.get("total_iterations", None), + ) class Format(Enum): - - - MULTI_TASK = 'MULTI_TASK' - SINGLE_TASK = 'SINGLE_TASK' + + MULTI_TASK = "MULTI_TASK" + SINGLE_TASK = "SINGLE_TASK" + @dataclass class GenAiComputeTask: dl_runtime_image: str """Runtime image""" - + command: Optional[str] = None """Command launcher to run the actual script, e.g. bash, python etc.""" - + compute: Optional[ComputeConfig] = None - + mlflow_experiment_name: Optional[str] = None """Optional string containing the name of the MLflow experiment to log the run to. If name is not found, backend will create the mlflow experiment using the name.""" - + source: Optional[Source] = None """Optional location type of the training script. When set to `WORKSPACE`, the script will be retrieved from the local Databricks workspace. When set to `GIT`, the script will be retrieved from a Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Script is located in Databricks workspace. * `GIT`: Script is located in cloud Git provider.""" - + training_script_path: Optional[str] = None """The training script file path to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.""" - + yaml_parameters: Optional[str] = None """Optional string containing model parameters passed to the training script in yaml format. 
If present, then the content in yaml_parameters_file_path will be ignored.""" - + yaml_parameters_file_path: Optional[str] = None """Optional path to a YAML file containing model parameters passed to the training script.""" - + def as_dict(self) -> dict: """Serializes the GenAiComputeTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.command is not None: body['command'] = self.command - if self.compute: body['compute'] = self.compute.as_dict() - if self.dl_runtime_image is not None: body['dl_runtime_image'] = self.dl_runtime_image - if self.mlflow_experiment_name is not None: body['mlflow_experiment_name'] = self.mlflow_experiment_name - if self.source is not None: body['source'] = self.source.value - if self.training_script_path is not None: body['training_script_path'] = self.training_script_path - if self.yaml_parameters is not None: body['yaml_parameters'] = self.yaml_parameters - if self.yaml_parameters_file_path is not None: body['yaml_parameters_file_path'] = self.yaml_parameters_file_path + if self.command is not None: + body["command"] = self.command + if self.compute: + body["compute"] = self.compute.as_dict() + if self.dl_runtime_image is not None: + body["dl_runtime_image"] = self.dl_runtime_image + if self.mlflow_experiment_name is not None: + body["mlflow_experiment_name"] = self.mlflow_experiment_name + if self.source is not None: + body["source"] = self.source.value + if self.training_script_path is not None: + body["training_script_path"] = self.training_script_path + if self.yaml_parameters is not None: + body["yaml_parameters"] = self.yaml_parameters + if self.yaml_parameters_file_path is not None: + body["yaml_parameters_file_path"] = self.yaml_parameters_file_path return body def as_shallow_dict(self) -> dict: """Serializes the GenAiComputeTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.command is not None: body['command'] = self.command - if self.compute: body['compute'] = self.compute - if self.dl_runtime_image is not None: body['dl_runtime_image'] = self.dl_runtime_image - if self.mlflow_experiment_name is not None: body['mlflow_experiment_name'] = self.mlflow_experiment_name - if self.source is not None: body['source'] = self.source - if self.training_script_path is not None: body['training_script_path'] = self.training_script_path - if self.yaml_parameters is not None: body['yaml_parameters'] = self.yaml_parameters - if self.yaml_parameters_file_path is not None: body['yaml_parameters_file_path'] = self.yaml_parameters_file_path + if self.command is not None: + body["command"] = self.command + if self.compute: + body["compute"] = self.compute + if self.dl_runtime_image is not None: + body["dl_runtime_image"] = self.dl_runtime_image + if self.mlflow_experiment_name is not None: + body["mlflow_experiment_name"] = self.mlflow_experiment_name + if self.source is not None: + body["source"] = self.source + if self.training_script_path is not None: + body["training_script_path"] = self.training_script_path + if self.yaml_parameters is not None: + body["yaml_parameters"] = self.yaml_parameters + if self.yaml_parameters_file_path is not None: + body["yaml_parameters_file_path"] = self.yaml_parameters_file_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenAiComputeTask: """Deserializes the GenAiComputeTask from a dictionary.""" - return cls(command=d.get('command', None), compute=_from_dict(d, 'compute', ComputeConfig), dl_runtime_image=d.get('dl_runtime_image', None), 
mlflow_experiment_name=d.get('mlflow_experiment_name', None), source=_enum(d, 'source', Source), training_script_path=d.get('training_script_path', None), yaml_parameters=d.get('yaml_parameters', None), yaml_parameters_file_path=d.get('yaml_parameters_file_path', None)) - - - - - + return cls( + command=d.get("command", None), + compute=_from_dict(d, "compute", ComputeConfig), + dl_runtime_image=d.get("dl_runtime_image", None), + mlflow_experiment_name=d.get("mlflow_experiment_name", None), + source=_enum(d, "source", Source), + training_script_path=d.get("training_script_path", None), + yaml_parameters=d.get("yaml_parameters", None), + yaml_parameters_file_path=d.get("yaml_parameters_file_path", None), + ) @dataclass class GetJobPermissionLevelsResponse: permission_levels: Optional[List[JobPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetJobPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetJobPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetJobPermissionLevelsResponse: """Deserializes the GetJobPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', JobPermissionsDescription)) - - - - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", JobPermissionsDescription)) @dataclass @@ -1921,179 +2294,195 @@ class GetPolicyComplianceResponse: """Whether the job is compliant with its policies or not. Jobs could be out of compliance if a policy they are using was updated after the job was last edited and some of its job clusters no longer comply with their updated policies.""" - - violations: Optional[Dict[str,str]] = None + + violations: Optional[Dict[str, str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. An identifier for the job cluster is prepended to the path. 
The values indicate an error message describing the policy validation error.""" - + def as_dict(self) -> dict: """Serializes the GetPolicyComplianceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_compliant is not None: body['is_compliant'] = self.is_compliant - if self.violations: body['violations'] = self.violations + if self.is_compliant is not None: + body["is_compliant"] = self.is_compliant + if self.violations: + body["violations"] = self.violations return body def as_shallow_dict(self) -> dict: """Serializes the GetPolicyComplianceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_compliant is not None: body['is_compliant'] = self.is_compliant - if self.violations: body['violations'] = self.violations + if self.is_compliant is not None: + body["is_compliant"] = self.is_compliant + if self.violations: + body["violations"] = self.violations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPolicyComplianceResponse: """Deserializes the GetPolicyComplianceResponse from a dictionary.""" - return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None)) - - - - - - + return cls(is_compliant=d.get("is_compliant", None), violations=d.get("violations", None)) +class GitProvider(Enum): + AWS_CODE_COMMIT = "awsCodeCommit" + AZURE_DEV_OPS_SERVICES = "azureDevOpsServices" + BITBUCKET_CLOUD = "bitbucketCloud" + BITBUCKET_SERVER = "bitbucketServer" + GIT_HUB = "gitHub" + GIT_HUB_ENTERPRISE = "gitHubEnterprise" + GIT_LAB = "gitLab" + GIT_LAB_ENTERPRISE_EDITION = "gitLabEnterpriseEdition" -class GitProvider(Enum): - - - AWS_CODE_COMMIT = 'awsCodeCommit' - AZURE_DEV_OPS_SERVICES = 'azureDevOpsServices' - BITBUCKET_CLOUD = 'bitbucketCloud' - BITBUCKET_SERVER = 'bitbucketServer' - GIT_HUB = 'gitHub' - GIT_HUB_ENTERPRISE = 'gitHubEnterprise' - GIT_LAB = 'gitLab' - GIT_LAB_ENTERPRISE_EDITION = 'gitLabEnterpriseEdition' @dataclass class GitSnapshot: """Read-only state of the remote repository at the time the job was run. This field is only included on job runs.""" - + used_commit: Optional[str] = None """Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.""" - + def as_dict(self) -> dict: """Serializes the GitSnapshot into a dictionary suitable for use as a JSON request body.""" body = {} - if self.used_commit is not None: body['used_commit'] = self.used_commit + if self.used_commit is not None: + body["used_commit"] = self.used_commit return body def as_shallow_dict(self) -> dict: """Serializes the GitSnapshot into a shallow dictionary of its immediate attributes.""" body = {} - if self.used_commit is not None: body['used_commit'] = self.used_commit + if self.used_commit is not None: + body["used_commit"] = self.used_commit return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GitSnapshot: """Deserializes the GitSnapshot from a dictionary.""" - return cls(used_commit=d.get('used_commit', None)) - - + return cls(used_commit=d.get("used_commit", None)) @dataclass class GitSource: """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. 
However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + git_url: str """URL of the repository to be cloned by this job.""" - + git_provider: GitProvider """Unique identifier of the service used to host the Git repository. The value is case insensitive.""" - + git_branch: Optional[str] = None """Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.""" - + git_commit: Optional[str] = None """Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag.""" - + git_snapshot: Optional[GitSnapshot] = None """Read-only state of the remote repository at the time the job was run. This field is only included on job runs.""" - + git_tag: Optional[str] = None """Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.""" - + job_source: Optional[JobSource] = None """The source of the job specification in the remote repository when the job is source controlled.""" - + def as_dict(self) -> dict: """Serializes the GitSource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.git_branch is not None: body['git_branch'] = self.git_branch - if self.git_commit is not None: body['git_commit'] = self.git_commit - if self.git_provider is not None: body['git_provider'] = self.git_provider.value - if self.git_snapshot: body['git_snapshot'] = self.git_snapshot.as_dict() - if self.git_tag is not None: body['git_tag'] = self.git_tag - if self.git_url is not None: body['git_url'] = self.git_url - if self.job_source: body['job_source'] = self.job_source.as_dict() + if self.git_branch is not None: + body["git_branch"] = self.git_branch + if self.git_commit is not None: + body["git_commit"] = self.git_commit + if self.git_provider is not None: + body["git_provider"] = self.git_provider.value + if self.git_snapshot: + body["git_snapshot"] = self.git_snapshot.as_dict() + if self.git_tag is not None: + body["git_tag"] = self.git_tag + if self.git_url is not None: + body["git_url"] = self.git_url + if self.job_source: + body["job_source"] = self.job_source.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GitSource into a shallow dictionary of its immediate attributes.""" body = {} - if self.git_branch is not None: body['git_branch'] = self.git_branch - if self.git_commit is not None: body['git_commit'] = self.git_commit - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_snapshot: body['git_snapshot'] = self.git_snapshot - if self.git_tag is not None: body['git_tag'] = self.git_tag - if self.git_url is not None: body['git_url'] = self.git_url - if self.job_source: body['job_source'] = self.job_source + if self.git_branch is not None: + body["git_branch"] = self.git_branch + if self.git_commit is not None: + body["git_commit"] = self.git_commit + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_snapshot: + body["git_snapshot"] = self.git_snapshot + if self.git_tag is not None: + body["git_tag"] = self.git_tag + if self.git_url is not None: + body["git_url"] = self.git_url + if self.job_source: + body["job_source"] = self.job_source return body @classmethod def from_dict(cls, d: Dict[str, Any]) 
-> GitSource: """Deserializes the GitSource from a dictionary.""" - return cls(git_branch=d.get('git_branch', None), git_commit=d.get('git_commit', None), git_provider=_enum(d, 'git_provider', GitProvider), git_snapshot=_from_dict(d, 'git_snapshot', GitSnapshot), git_tag=d.get('git_tag', None), git_url=d.get('git_url', None), job_source=_from_dict(d, 'job_source', JobSource)) - - + return cls( + git_branch=d.get("git_branch", None), + git_commit=d.get("git_commit", None), + git_provider=_enum(d, "git_provider", GitProvider), + git_snapshot=_from_dict(d, "git_snapshot", GitSnapshot), + git_tag=d.get("git_tag", None), + git_url=d.get("git_url", None), + job_source=_from_dict(d, "job_source", JobSource), + ) @dataclass class Job: """Job was retrieved successfully.""" - + created_time: Optional[int] = None """The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC).""" - + creator_user_name: Optional[str] = None """The creator user name. This field won’t be included in the response if the user has already been deleted.""" - + effective_budget_policy_id: Optional[str] = None """The id of the budget policy used by this job for cost attribution purposes. This may be set through (in order of precedence): 1. Budget admins through the account or workspace console 2. Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based on accessible budget policies of the run_as identity on job creation or modification.""" - + has_more: Optional[bool] = None """Indicates if the job has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list requests with `expand_tasks=true`.""" - + job_id: Optional[int] = None """The canonical identifier for this job.""" - + next_page_token: Optional[str] = None """A token that can be used to list the next page of array properties.""" - + run_as_user_name: Optional[str] = None """The email of an active workspace user or the application ID of a service principal that the job runs as. This value can be changed by setting the `run_as` field when creating or updating a @@ -2102,133 +2491,184 @@ class Job: By default, `run_as_user_name` is based on the current job settings and is set to the creator of the job if job access control is disabled or to the user with the `is_owner` permission if job access control is enabled.""" - + settings: Optional[JobSettings] = None """Settings for this job and all of its runs. 
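A hedged sketch of constructing the `GitSource` defined above; the repository URL and branch are placeholders. Per the field docstrings, exactly one of `git_branch`, `git_tag`, or `git_commit` should be set:

from databricks.sdk.service.jobs import GitProvider, GitSource

source = GitSource(
    git_url="https://github.com/my-org/etl-jobs",  # hypothetical repository
    git_provider=GitProvider.GIT_HUB,  # matched case-insensitively by the API
    git_branch="main",  # mutually exclusive with git_tag and git_commit
)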
These settings can be updated using the `resetJob` method.""" - + trigger_state: Optional[TriggerStateProto] = None """State of the trigger associated with the job.""" - + def as_dict(self) -> dict: """Serializes the Job into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_time is not None: body['created_time'] = self.created_time - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.has_more is not None: body['has_more'] = self.has_more - if self.job_id is not None: body['job_id'] = self.job_id - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name - if self.settings: body['settings'] = self.settings.as_dict() - if self.trigger_state: body['trigger_state'] = self.trigger_state.as_dict() + if self.created_time is not None: + body["created_time"] = self.created_time + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.has_more is not None: + body["has_more"] = self.has_more + if self.job_id is not None: + body["job_id"] = self.job_id + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.settings: + body["settings"] = self.settings.as_dict() + if self.trigger_state: + body["trigger_state"] = self.trigger_state.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Job into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_time is not None: body['created_time'] = self.created_time - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.has_more is not None: body['has_more'] = self.has_more - if self.job_id is not None: body['job_id'] = self.job_id - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name - if self.settings: body['settings'] = self.settings - if self.trigger_state: body['trigger_state'] = self.trigger_state + if self.created_time is not None: + body["created_time"] = self.created_time + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.has_more is not None: + body["has_more"] = self.has_more + if self.job_id is not None: + body["job_id"] = self.job_id + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.settings: + body["settings"] = self.settings + if self.trigger_state: + body["trigger_state"] = self.trigger_state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Job: """Deserializes the Job from a dictionary.""" - return cls(created_time=d.get('created_time', None), creator_user_name=d.get('creator_user_name', 
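The `has_more`/`next_page_token` fields exist because read endpoints truncate the `tasks` and `job_clusters` arrays. A sketch of draining the full task list, assuming a configured `WorkspaceClient` and that `jobs.get` accepts a `page_token` argument in this SDK version (the job ID is made up):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()  # credentials resolved from the environment
job = w.jobs.get(job_id=1234)  # hypothetical job ID
tasks = list(job.settings.tasks or [])
while job.next_page_token:  # more array properties remain on the server
    job = w.jobs.get(job_id=1234, page_token=job.next_page_token)
    tasks.extend(job.settings.tasks or [])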
None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), has_more=d.get('has_more', None), job_id=d.get('job_id', None), next_page_token=d.get('next_page_token', None), run_as_user_name=d.get('run_as_user_name', None), settings=_from_dict(d, 'settings', JobSettings), trigger_state=_from_dict(d, 'trigger_state', TriggerStateProto)) - - + return cls( + created_time=d.get("created_time", None), + creator_user_name=d.get("creator_user_name", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + has_more=d.get("has_more", None), + job_id=d.get("job_id", None), + next_page_token=d.get("next_page_token", None), + run_as_user_name=d.get("run_as_user_name", None), + settings=_from_dict(d, "settings", JobSettings), + trigger_state=_from_dict(d, "trigger_state", TriggerStateProto), + ) @dataclass class JobAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[JobPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the JobAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the JobAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobAccessControlRequest: """Deserializes the JobAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', JobPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", JobPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class JobAccessControlResponse: all_permissions: Optional[List[JobPermission]] = None """All 
permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the JobAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the JobAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobAccessControlResponse: """Deserializes the JobAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', JobPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", JobPermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass @@ -2237,68 +2677,79 @@ class JobCluster: """A unique name for the job cluster. This field is required and must be unique within the job. 
`JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution.""" - + new_cluster: compute.ClusterSpec """If new_cluster, a description of a cluster that is created for each task.""" - + def as_dict(self) -> dict: """Serializes the JobCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.new_cluster: + body["new_cluster"] = self.new_cluster.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the JobCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.new_cluster: body['new_cluster'] = self.new_cluster + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.new_cluster: + body["new_cluster"] = self.new_cluster return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobCluster: """Deserializes the JobCluster from a dictionary.""" - return cls(job_cluster_key=d.get('job_cluster_key', None), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec)) - - + return cls( + job_cluster_key=d.get("job_cluster_key", None), + new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), + ) @dataclass class JobCompliance: job_id: int """Canonical unique identifier for a job.""" - + is_compliant: Optional[bool] = None """Whether this job is in compliance with the latest version of its policy.""" - - violations: Optional[Dict[str,str]] = None + + violations: Optional[Dict[str, str]] = None """An object containing key-value mappings representing the first 200 policy validation errors. The keys indicate the path where the policy validation error is occurring. An identifier for the job cluster is prepended to the path. 
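A sketch of a shared job cluster, assuming placeholder runtime and node-type values; tasks opt into it by referencing the `job_cluster_key` rather than declaring their own `new_cluster`:

from databricks.sdk.service import compute
from databricks.sdk.service.jobs import JobCluster

shared_cluster = JobCluster(
    job_cluster_key="etl_cluster",  # tasks reference this key
    new_cluster=compute.ClusterSpec(
        spark_version="15.4.x-scala2.12",  # hypothetical runtime version
        node_type_id="i3.xlarge",  # hypothetical node type
        num_workers=2,
    ),
)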
The values indicate an error message describing the policy validation error.""" - + def as_dict(self) -> dict: """Serializes the JobCompliance into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_compliant is not None: body['is_compliant'] = self.is_compliant - if self.job_id is not None: body['job_id'] = self.job_id - if self.violations: body['violations'] = self.violations + if self.is_compliant is not None: + body["is_compliant"] = self.is_compliant + if self.job_id is not None: + body["job_id"] = self.job_id + if self.violations: + body["violations"] = self.violations return body def as_shallow_dict(self) -> dict: """Serializes the JobCompliance into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_compliant is not None: body['is_compliant'] = self.is_compliant - if self.job_id is not None: body['job_id'] = self.job_id - if self.violations: body['violations'] = self.violations + if self.is_compliant is not None: + body["is_compliant"] = self.is_compliant + if self.job_id is not None: + body["job_id"] = self.job_id + if self.violations: + body["violations"] = self.violations return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobCompliance: """Deserializes the JobCompliance from a dictionary.""" - return cls(is_compliant=d.get('is_compliant', None), job_id=d.get('job_id', None), violations=d.get('violations', None)) - - + return cls( + is_compliant=d.get("is_compliant", None), job_id=d.get("job_id", None), violations=d.get("violations", None) + ) @dataclass @@ -2307,141 +2758,164 @@ class JobDeployment: """The kind of deployment that manages the job. * `BUNDLE`: The job is managed by Databricks Asset Bundle.""" - + metadata_file_path: Optional[str] = None """Path of the file that contains deployment metadata.""" - + def as_dict(self) -> dict: """Serializes the JobDeployment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.kind is not None: body['kind'] = self.kind.value - if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path + if self.kind is not None: + body["kind"] = self.kind.value + if self.metadata_file_path is not None: + body["metadata_file_path"] = self.metadata_file_path return body def as_shallow_dict(self) -> dict: """Serializes the JobDeployment into a shallow dictionary of its immediate attributes.""" body = {} - if self.kind is not None: body['kind'] = self.kind - if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path + if self.kind is not None: + body["kind"] = self.kind + if self.metadata_file_path is not None: + body["metadata_file_path"] = self.metadata_file_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobDeployment: """Deserializes the JobDeployment from a dictionary.""" - return cls(kind=_enum(d, 'kind', JobDeploymentKind), metadata_file_path=d.get('metadata_file_path', None)) - - + return cls(kind=_enum(d, "kind", JobDeploymentKind), metadata_file_path=d.get("metadata_file_path", None)) class JobDeploymentKind(Enum): """* `BUNDLE`: The job is managed by Databricks Asset Bundle.""" - - BUNDLE = 'BUNDLE' + + BUNDLE = "BUNDLE" + class JobEditMode(Enum): """Edit mode of the job. - + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. 
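A sketch of reading a compliance payload like the one `JobCompliance.from_dict` deserializes above; the violation path and message are invented, but the shape (job-cluster key prepended to the policy path) follows the docstring:

from databricks.sdk.service.jobs import JobCompliance

compliance = JobCompliance.from_dict(
    {
        "job_id": 1234,  # hypothetical job
        "is_compliant": False,
        "violations": {"etl_cluster.spark_version": "Value must match the cluster policy"},
    }
)
if not compliance.is_compliant:
    for path, message in (compliance.violations or {}).items():
        print(f"{path}: {message}")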
* `EDITABLE`: The job is in an editable state and can be modified.""" - - EDITABLE = 'EDITABLE' - UI_LOCKED = 'UI_LOCKED' + + EDITABLE = "EDITABLE" + UI_LOCKED = "UI_LOCKED" + @dataclass class JobEmailNotifications: no_alert_for_skipped_runs: Optional[bool] = None """If true, do not send email to recipients specified in `on_failure` if the run is skipped. This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.""" - + on_duration_warning_threshold_exceeded: Optional[List[str]] = None """A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.""" - + on_failure: Optional[List[str]] = None """A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED` or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + on_start: Optional[List[str]] = None """A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + on_streaming_backlog_exceeded: Optional[List[str]] = None """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream. Streaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.""" - + on_success: Optional[List[str]] = None """A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state.
If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.""" - + def as_dict(self) -> dict: """Serializes the JobEmailNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs - if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = [v for v in self.on_duration_warning_threshold_exceeded] - if self.on_failure: body['on_failure'] = [v for v in self.on_failure] - if self.on_start: body['on_start'] = [v for v in self.on_start] - if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded] - if self.on_success: body['on_success'] = [v for v in self.on_success] + if self.no_alert_for_skipped_runs is not None: + body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs + if self.on_duration_warning_threshold_exceeded: + body["on_duration_warning_threshold_exceeded"] = [v for v in self.on_duration_warning_threshold_exceeded] + if self.on_failure: + body["on_failure"] = [v for v in self.on_failure] + if self.on_start: + body["on_start"] = [v for v in self.on_start] + if self.on_streaming_backlog_exceeded: + body["on_streaming_backlog_exceeded"] = [v for v in self.on_streaming_backlog_exceeded] + if self.on_success: + body["on_success"] = [v for v in self.on_success] return body def as_shallow_dict(self) -> dict: """Serializes the JobEmailNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs - if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded - if self.on_failure: body['on_failure'] = self.on_failure - if self.on_start: body['on_start'] = self.on_start - if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded - if self.on_success: body['on_success'] = self.on_success + if self.no_alert_for_skipped_runs is not None: + body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs + if self.on_duration_warning_threshold_exceeded: + body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded + if self.on_failure: + body["on_failure"] = self.on_failure + if self.on_start: + body["on_start"] = self.on_start + if self.on_streaming_backlog_exceeded: + body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded + if self.on_success: + body["on_success"] = self.on_success return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobEmailNotifications: """Deserializes the JobEmailNotifications from a dictionary.""" - return cls(no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None), on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded', None), on_failure=d.get('on_failure', None), on_start=d.get('on_start', None), on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None), on_success=d.get('on_success', None)) - - + return cls( + no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None), + on_duration_warning_threshold_exceeded=d.get("on_duration_warning_threshold_exceeded", None), + on_failure=d.get("on_failure", None), + on_start=d.get("on_start", None), + 
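A sketch of the email-notification settings described above, with placeholder addresses. As the docstrings note, duration warnings fire only if a `RUN_DURATION_SECONDS` rule is also present in the job's `health` field:

from databricks.sdk.service.jobs import JobEmailNotifications

notifications = JobEmailNotifications(
    on_failure=["oncall@example.com"],  # hypothetical recipient
    on_duration_warning_threshold_exceeded=["oncall@example.com"],
    no_alert_for_skipped_runs=True,  # suppress mail for skipped runs
)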
on_streaming_backlog_exceeded=d.get("on_streaming_backlog_exceeded", None), + on_success=d.get("on_success", None), + ) @dataclass class JobEnvironment: environment_key: str """The key of an environment. It has to be unique within a job.""" - + spec: Optional[compute.Environment] = None """The environment entity used to preserve the serverless environment side panel, jobs' environment for non-notebook tasks, and DLT's environment for classic and serverless pipelines. In this minimal environment spec, only pip dependencies are supported.""" - + def as_dict(self) -> dict: """Serializes the JobEnvironment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.spec: body['spec'] = self.spec.as_dict() + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.spec: + body["spec"] = self.spec.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the JobEnvironment into a shallow dictionary of its immediate attributes.""" body = {} - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.spec: body['spec'] = self.spec + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.spec: + body["spec"] = self.spec return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobEnvironment: """Deserializes the JobEnvironment from a dictionary.""" - return cls(environment_key=d.get('environment_key', None), spec=_from_dict(d, 'spec', compute.Environment)) - - + return cls(environment_key=d.get("environment_key", None), spec=_from_dict(d, "spec", compute.Environment)) @dataclass @@ -2449,264 +2923,303 @@ class JobNotificationSettings: no_alert_for_canceled_runs: Optional[bool] = None """If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.""" - + no_alert_for_skipped_runs: Optional[bool] = None """If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.""" - + def as_dict(self) -> dict: """Serializes the JobNotificationSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs - if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if self.no_alert_for_canceled_runs is not None: + body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: + body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs return body def as_shallow_dict(self) -> dict: """Serializes the JobNotificationSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs - if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs + if self.no_alert_for_canceled_runs is not None: + body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs + if self.no_alert_for_skipped_runs is not None: + body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobNotificationSettings: """Deserializes the JobNotificationSettings from a dictionary.""" - return
cls(no_alert_for_canceled_runs=d.get('no_alert_for_canceled_runs', None), no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None)) - - + return cls( + no_alert_for_canceled_runs=d.get("no_alert_for_canceled_runs", None), + no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None), + ) @dataclass class JobParameter: default: Optional[str] = None """The optional default value of the parameter""" - + name: Optional[str] = None """The name of the parameter""" - + value: Optional[str] = None """The value used in the run""" - + def as_dict(self) -> dict: """Serializes the JobParameter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default is not None: body['default'] = self.default - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value + if self.default is not None: + body["default"] = self.default + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the JobParameter into a shallow dictionary of its immediate attributes.""" body = {} - if self.default is not None: body['default'] = self.default - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value + if self.default is not None: + body["default"] = self.default + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobParameter: """Deserializes the JobParameter from a dictionary.""" - return cls(default=d.get('default', None), name=d.get('name', None), value=d.get('value', None)) - - + return cls(default=d.get("default", None), name=d.get("name", None), value=d.get("value", None)) @dataclass class JobParameterDefinition: name: str """The name of the defined parameter. 
May only contain alphanumeric characters, `_`, `-`, and `.`""" - + default: str """Default value of the parameter.""" - + def as_dict(self) -> dict: """Serializes the JobParameterDefinition into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default is not None: body['default'] = self.default - if self.name is not None: body['name'] = self.name + if self.default is not None: + body["default"] = self.default + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the JobParameterDefinition into a shallow dictionary of its immediate attributes.""" body = {} - if self.default is not None: body['default'] = self.default - if self.name is not None: body['name'] = self.name + if self.default is not None: + body["default"] = self.default + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobParameterDefinition: """Deserializes the JobParameterDefinition from a dictionary.""" - return cls(default=d.get('default', None), name=d.get('name', None)) - - + return cls(default=d.get("default", None), name=d.get("name", None)) @dataclass class JobPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[JobPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the JobPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the JobPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermission: """Deserializes the JobPermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', JobPermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", JobPermissionLevel), + ) class JobPermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = 'CAN_MANAGE' - CAN_MANAGE_RUN = 'CAN_MANAGE_RUN' - CAN_VIEW = 'CAN_VIEW' - IS_OWNER = 'IS_OWNER' + + CAN_MANAGE = "CAN_MANAGE" + CAN_MANAGE_RUN = "CAN_MANAGE_RUN" + CAN_VIEW = "CAN_VIEW" + IS_OWNER = "IS_OWNER" + @dataclass class JobPermissions: 
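A sketch of job-level parameter definitions; the names and defaults are placeholders, and names must stick to alphanumerics, `_`, `-`, and `.`:

from databricks.sdk.service.jobs import JobParameterDefinition

parameters = [
    JobParameterDefinition(name="run_date", default="2024-01-01"),
    JobParameterDefinition(name="environment", default="staging"),
]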
access_control_list: Optional[List[JobAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the JobPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the JobPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermissions: """Deserializes the JobPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class JobPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[JobPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the JobPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the JobPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsDescription: """Deserializes the JobPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', JobPermissionLevel)) - - + return cls( + description=d.get("description", None), permission_level=_enum(d, "permission_level", JobPermissionLevel) + ) @dataclass class JobPermissionsRequest: access_control_list: Optional[List[JobAccessControlRequest]] 
= None - + job_id: Optional[str] = None """The job for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the JobPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.job_id is not None: body['job_id'] = self.job_id + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.job_id is not None: + body["job_id"] = self.job_id return body def as_shallow_dict(self) -> dict: """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.job_id is not None: body['job_id'] = self.job_id + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.job_id is not None: + body["job_id"] = self.job_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsRequest: """Deserializes the JobPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), job_id=d.get('job_id', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), + job_id=d.get("job_id", None), + ) @dataclass class JobRunAs: """Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. - + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" - + service_principal_name: Optional[str] = None """Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.""" - + user_name: Optional[str] = None """The email of an active workspace user. Non-admin users can only set this field to their own email.""" - + def as_dict(self) -> dict: """Serializes the JobRunAs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the JobRunAs into a shallow dictionary of its immediate attributes.""" body = {} - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobRunAs: """Deserializes the JobRunAs from a dictionary.""" - return cls(service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls(service_principal_name=d.get("service_principal_name", None), user_name=d.get("user_name", None)) @dataclass @@ -2715,38 +3228,38 @@ class JobSettings: """The id of the user specified budget policy to use for this job. 
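A sketch of the write-only `run_as` setting defined above; the application ID is invented, and exactly one of the two fields may be set:

from databricks.sdk.service.jobs import JobRunAs

run_as = JobRunAs(
    service_principal_name="9f0621ee-b52b-11ea-b3de-0242ac130004"  # hypothetical application ID
)
# Alternatively: JobRunAs(user_name="someone@example.com"), but never both.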
If not specified, a default budget policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the budget policy used by this workload.""" - + continuous: Optional[Continuous] = None """An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.""" - + deployment: Optional[JobDeployment] = None """Deployment information for jobs managed by external sources.""" - + description: Optional[str] = None """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" - + edit_mode: Optional[JobEditMode] = None """Edit mode of the job. * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.""" - + environments: Optional[List[JobEnvironment]] = None """A list of task execution environment specifications that can be referenced by serverless tasks of this job. An environment is required to be present for serverless tasks. For serverless notebook tasks, the environment is accessible in the notebook environment panel. For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.""" - + format: Optional[Format] = None """Used to indicate the format of the job. This field is ignored in Create/Update/Reset calls. When using the Jobs API 2.1, this value is always set to `"MULTI_TASK"`.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -2756,15 +3269,15 @@ class JobSettings: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.""" - + max_concurrent_runs: Optional[int] = None """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be able to execute multiple runs of the same job concurrently. This is useful, for example, if you @@ -2774,17 +3287,17 @@ class JobSettings: concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.""" - + name: Optional[str] = None """An optional name for the job.
The maximum length is 4096 bytes in UTF-8 encoding.""" - + notification_settings: Optional[JobNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.""" - + parameters: Optional[List[JobParameterDefinition]] = None """Job-level parameter definitions""" - + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. @@ -2792,119 +3305,190 @@ class JobSettings: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + queue: Optional[QueueSettings] = None """The queue settings of the job.""" - + run_as: Optional[JobRunAs] = None """Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.""" - + schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - - tags: Optional[Dict[str,str]] = None + + tags: Optional[Dict[str, str]] = None """A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.""" - + tasks: Optional[List[Task]] = None """A list of task specifications to be executed by this job. It supports up to 1000 elements in write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, :method:jobs/submit). Read endpoints return only 100 tasks. If more than 100 tasks are available, you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root to determine if more results are available.""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - + trigger: Optional[TriggerSettings] = None """A configuration to trigger a run when certain conditions are met. 
The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when runs of this job begin or complete.""" - + def as_dict(self) -> dict: """Serializes the JobSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.continuous: body['continuous'] = self.continuous.as_dict() - if self.deployment: body['deployment'] = self.deployment.as_dict() - if self.description is not None: body['description'] = self.description - if self.edit_mode is not None: body['edit_mode'] = self.edit_mode.value - if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() - if self.environments: body['environments'] = [v.as_dict() for v in self.environments] - if self.format is not None: body['format'] = self.format.value - if self.git_source: body['git_source'] = self.git_source.as_dict() - if self.health: body['health'] = self.health.as_dict() - if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] - if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs - if self.name is not None: body['name'] = self.name - if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() - if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] - if self.performance_target is not None: body['performance_target'] = self.performance_target.value - if self.queue: body['queue'] = self.queue.as_dict() - if self.run_as: body['run_as'] = self.run_as.as_dict() - if self.schedule: body['schedule'] = self.schedule.as_dict() - if self.tags: body['tags'] = self.tags - if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.trigger: body['trigger'] = self.trigger.as_dict() - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.continuous: + body["continuous"] = self.continuous.as_dict() + if self.deployment: + body["deployment"] = self.deployment.as_dict() + if self.description is not None: + body["description"] = self.description + if self.edit_mode is not None: + body["edit_mode"] = self.edit_mode.value + if self.email_notifications: + body["email_notifications"] = self.email_notifications.as_dict() + if self.environments: + body["environments"] = [v.as_dict() for v in self.environments] + if self.format is not None: + body["format"] = self.format.value + if self.git_source: + body["git_source"] = self.git_source.as_dict() + if self.health: + body["health"] = self.health.as_dict() + if self.job_clusters: + body["job_clusters"] = [v.as_dict() for v in self.job_clusters] + if self.max_concurrent_runs is not None: + body["max_concurrent_runs"] = self.max_concurrent_runs + if self.name is not None: + body["name"] = self.name + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] + if self.performance_target is not None: + body["performance_target"] = self.performance_target.value + if 
self.queue: + body["queue"] = self.queue.as_dict() + if self.run_as: + body["run_as"] = self.run_as.as_dict() + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.tags: + body["tags"] = self.tags + if self.tasks: + body["tasks"] = [v.as_dict() for v in self.tasks] + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.trigger: + body["trigger"] = self.trigger.as_dict() + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the JobSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.continuous: body['continuous'] = self.continuous - if self.deployment: body['deployment'] = self.deployment - if self.description is not None: body['description'] = self.description - if self.edit_mode is not None: body['edit_mode'] = self.edit_mode - if self.email_notifications: body['email_notifications'] = self.email_notifications - if self.environments: body['environments'] = self.environments - if self.format is not None: body['format'] = self.format - if self.git_source: body['git_source'] = self.git_source - if self.health: body['health'] = self.health - if self.job_clusters: body['job_clusters'] = self.job_clusters - if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs - if self.name is not None: body['name'] = self.name - if self.notification_settings: body['notification_settings'] = self.notification_settings - if self.parameters: body['parameters'] = self.parameters - if self.performance_target is not None: body['performance_target'] = self.performance_target - if self.queue: body['queue'] = self.queue - if self.run_as: body['run_as'] = self.run_as - if self.schedule: body['schedule'] = self.schedule - if self.tags: body['tags'] = self.tags - if self.tasks: body['tasks'] = self.tasks - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.trigger: body['trigger'] = self.trigger - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.continuous: + body["continuous"] = self.continuous + if self.deployment: + body["deployment"] = self.deployment + if self.description is not None: + body["description"] = self.description + if self.edit_mode is not None: + body["edit_mode"] = self.edit_mode + if self.email_notifications: + body["email_notifications"] = self.email_notifications + if self.environments: + body["environments"] = self.environments + if self.format is not None: + body["format"] = self.format + if self.git_source: + body["git_source"] = self.git_source + if self.health: + body["health"] = self.health + if self.job_clusters: + body["job_clusters"] = self.job_clusters + if self.max_concurrent_runs is not None: + body["max_concurrent_runs"] = self.max_concurrent_runs + if self.name is not None: + body["name"] = self.name + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.parameters: + body["parameters"] = self.parameters + if self.performance_target is not None: + body["performance_target"] = self.performance_target + if self.queue: + body["queue"] = self.queue + if self.run_as: + body["run_as"] = self.run_as + if self.schedule: + 
body["schedule"] = self.schedule + if self.tags: + body["tags"] = self.tags + if self.tasks: + body["tasks"] = self.tasks + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.trigger: + body["trigger"] = self.trigger + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSettings: """Deserializes the JobSettings from a dictionary.""" - return cls(budget_policy_id=d.get('budget_policy_id', None), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), description=d.get('description', None), edit_mode=_enum(d, 'edit_mode', JobEditMode), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), format=_enum(d, 'format', Format), git_source=_from_dict(d, 'git_source', GitSource), health=_from_dict(d, 'health', JobsHealthRules), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), max_concurrent_runs=d.get('max_concurrent_runs', None), name=d.get('name', None), notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings), parameters=_repeated_dict(d, 'parameters', JobParameterDefinition), performance_target=_enum(d, 'performance_target', PerformanceTarget), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), schedule=_from_dict(d, 'schedule', CronSchedule), tags=d.get('tags', None), tasks=_repeated_dict(d, 'tasks', Task), timeout_seconds=d.get('timeout_seconds', None), trigger=_from_dict(d, 'trigger', TriggerSettings), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) - - + return cls( + budget_policy_id=d.get("budget_policy_id", None), + continuous=_from_dict(d, "continuous", Continuous), + deployment=_from_dict(d, "deployment", JobDeployment), + description=d.get("description", None), + edit_mode=_enum(d, "edit_mode", JobEditMode), + email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), + environments=_repeated_dict(d, "environments", JobEnvironment), + format=_enum(d, "format", Format), + git_source=_from_dict(d, "git_source", GitSource), + health=_from_dict(d, "health", JobsHealthRules), + job_clusters=_repeated_dict(d, "job_clusters", JobCluster), + max_concurrent_runs=d.get("max_concurrent_runs", None), + name=d.get("name", None), + notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), + parameters=_repeated_dict(d, "parameters", JobParameterDefinition), + performance_target=_enum(d, "performance_target", PerformanceTarget), + queue=_from_dict(d, "queue", QueueSettings), + run_as=_from_dict(d, "run_as", JobRunAs), + schedule=_from_dict(d, "schedule", CronSchedule), + tags=d.get("tags", None), + tasks=_repeated_dict(d, "tasks", Task), + timeout_seconds=d.get("timeout_seconds", None), + trigger=_from_dict(d, "trigger", TriggerSettings), + webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), + ) @dataclass class JobSource: """The source of the job specification in the remote repository when the job is source controlled.""" - + job_config_path: str """Path of the job YAML file that contains the job specification.""" - + import_from_git_branch: str """Name of the branch which the job is imported from.""" - + dirty_state: Optional[JobSourceDirtyState] = None """Dirty state indicates the job is not fully synced with the 
job specification in the remote repository. @@ -2914,47 +3498,56 @@ class JobSource: `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced.""" - + def as_dict(self) -> dict: """Serializes the JobSource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dirty_state is not None: body['dirty_state'] = self.dirty_state.value - if self.import_from_git_branch is not None: body['import_from_git_branch'] = self.import_from_git_branch - if self.job_config_path is not None: body['job_config_path'] = self.job_config_path + if self.dirty_state is not None: + body["dirty_state"] = self.dirty_state.value + if self.import_from_git_branch is not None: + body["import_from_git_branch"] = self.import_from_git_branch + if self.job_config_path is not None: + body["job_config_path"] = self.job_config_path return body def as_shallow_dict(self) -> dict: """Serializes the JobSource into a shallow dictionary of its immediate attributes.""" body = {} - if self.dirty_state is not None: body['dirty_state'] = self.dirty_state - if self.import_from_git_branch is not None: body['import_from_git_branch'] = self.import_from_git_branch - if self.job_config_path is not None: body['job_config_path'] = self.job_config_path + if self.dirty_state is not None: + body["dirty_state"] = self.dirty_state + if self.import_from_git_branch is not None: + body["import_from_git_branch"] = self.import_from_git_branch + if self.job_config_path is not None: + body["job_config_path"] = self.job_config_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSource: """Deserializes the JobSource from a dictionary.""" - return cls(dirty_state=_enum(d, 'dirty_state', JobSourceDirtyState), import_from_git_branch=d.get('import_from_git_branch', None), job_config_path=d.get('job_config_path', None)) - - + return cls( + dirty_state=_enum(d, "dirty_state", JobSourceDirtyState), + import_from_git_branch=d.get("import_from_git_branch", None), + job_config_path=d.get("job_config_path", None), + ) class JobSourceDirtyState(Enum): """Dirty state indicates the job is not fully synced with the job specification in the remote repository. - + Possible values are: * `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced. * `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced.""" - - DISCONNECTED = 'DISCONNECTED' - NOT_SYNCED = 'NOT_SYNCED' + + DISCONNECTED = "DISCONNECTED" + NOT_SYNCED = "NOT_SYNCED" + class JobsHealthMetric(Enum): """Specifies the health metric that is being evaluated for a particular health rule. - + * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag @@ -2962,17 +3555,19 @@ class JobsHealthMetric(Enum): of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. 
This metric is in Public Preview.""" - - RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS' - STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES' - STREAMING_BACKLOG_FILES = 'STREAMING_BACKLOG_FILES' - STREAMING_BACKLOG_RECORDS = 'STREAMING_BACKLOG_RECORDS' - STREAMING_BACKLOG_SECONDS = 'STREAMING_BACKLOG_SECONDS' + + RUN_DURATION_SECONDS = "RUN_DURATION_SECONDS" + STREAMING_BACKLOG_BYTES = "STREAMING_BACKLOG_BYTES" + STREAMING_BACKLOG_FILES = "STREAMING_BACKLOG_FILES" + STREAMING_BACKLOG_RECORDS = "STREAMING_BACKLOG_RECORDS" + STREAMING_BACKLOG_SECONDS = "STREAMING_BACKLOG_SECONDS" + class JobsHealthOperator(Enum): """Specifies the operator used to compare the health metric value with the specified threshold.""" - - GREATER_THAN = 'GREATER_THAN' + + GREATER_THAN = "GREATER_THAN" + @dataclass class JobsHealthRule: @@ -2986,192 +3581,221 @@ class JobsHealthRule: of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Public Preview.""" - + op: JobsHealthOperator """Specifies the operator used to compare the health metric value with the specified threshold.""" - + value: int """Specifies the threshold value that the health metric should obey to satisfy the health rule.""" - + def as_dict(self) -> dict: """Serializes the JobsHealthRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metric is not None: body['metric'] = self.metric.value - if self.op is not None: body['op'] = self.op.value - if self.value is not None: body['value'] = self.value + if self.metric is not None: + body["metric"] = self.metric.value + if self.op is not None: + body["op"] = self.op.value + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the JobsHealthRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.metric is not None: body['metric'] = self.metric - if self.op is not None: body['op'] = self.op - if self.value is not None: body['value'] = self.value + if self.metric is not None: + body["metric"] = self.metric + if self.op is not None: + body["op"] = self.op + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobsHealthRule: """Deserializes the JobsHealthRule from a dictionary.""" - return cls(metric=_enum(d, 'metric', JobsHealthMetric), op=_enum(d, 'op', JobsHealthOperator), value=d.get('value', None)) - - + return cls( + metric=_enum(d, "metric", JobsHealthMetric), + op=_enum(d, "op", JobsHealthOperator), + value=d.get("value", None), + ) @dataclass class JobsHealthRules: """An optional set of health rules that can be defined for this job.""" - + rules: Optional[List[JobsHealthRule]] = None - + def as_dict(self) -> dict: """Serializes the JobsHealthRules into a dictionary suitable for use as a JSON request body.""" body = {} - if self.rules: body['rules'] = [v.as_dict() for v in self.rules] + if self.rules: + body["rules"] = [v.as_dict() for v in self.rules] return body def as_shallow_dict(self) -> dict: """Serializes the JobsHealthRules into a shallow dictionary of its immediate attributes.""" body = {} - if self.rules: body['rules'] = self.rules + if self.rules: + body["rules"] = self.rules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobsHealthRules: """Deserializes the JobsHealthRules from a dictionary.""" - return cls(rules=_repeated_dict(d, 
'rules', JobsHealthRule)) - - + return cls(rules=_repeated_dict(d, "rules", JobsHealthRule)) @dataclass class ListJobComplianceForPolicyResponse: jobs: Optional[List[JobCompliance]] = None """A list of jobs and their policy compliance statuses.""" - + next_page_token: Optional[str] = None """This field represents the pagination token to retrieve the next page of results. If this field is not in the response, it means no further results for the request.""" - + prev_page_token: Optional[str] = None """This field represents the pagination token to retrieve the previous page of results. If this field is not in the response, it means no further results for the request.""" - + def as_dict(self) -> dict: """Serializes the ListJobComplianceForPolicyResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.jobs: + body["jobs"] = [v.as_dict() for v in self.jobs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListJobComplianceForPolicyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.jobs: body['jobs'] = self.jobs - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.jobs: + body["jobs"] = self.jobs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListJobComplianceForPolicyResponse: """Deserializes the ListJobComplianceForPolicyResponse from a dictionary.""" - return cls(jobs=_repeated_dict(d, 'jobs', JobCompliance), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) - - - - - - - - + return cls( + jobs=_repeated_dict(d, "jobs", JobCompliance), + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + ) @dataclass class ListJobsResponse: """List of jobs was retrieved successfully.""" - + has_more: Optional[bool] = None """If true, additional jobs matching the provided filter are available for listing.""" - + jobs: Optional[List[BaseJob]] = None """The list of jobs. 
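# --- illustrative sketch, not part of the generated diff --------------------------------
# Token-based pagination over ListJobsResponse, as described by the has_more /
# next_page_token fields above. fetch_page is a hypothetical caller-supplied callable
# returning one deserialized ListJobsResponse per token; the generated client normally
# drives this loop itself.
from typing import Callable, Iterator, Optional

from databricks.sdk.service.jobs import BaseJob, ListJobsResponse

def iterate_jobs(fetch_page: Callable[[Optional[str]], ListJobsResponse]) -> Iterator[BaseJob]:
    token: Optional[str] = None
    while True:
        page = fetch_page(token)
        yield from page.jobs or []  # `jobs` is omitted when there is nothing to list
        token = page.next_page_token
        if not token:  # an absent token signals the last page
            return
# -----------------------------------------------------------------------------------------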
Only included in the response if there are jobs to list.""" - + next_page_token: Optional[str] = None """A token that can be used to list the next page of jobs (if applicable).""" - + prev_page_token: Optional[str] = None """A token that can be used to list the previous page of jobs (if applicable).""" - + def as_dict(self) -> dict: """Serializes the ListJobsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.has_more is not None: body['has_more'] = self.has_more - if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.has_more is not None: + body["has_more"] = self.has_more + if self.jobs: + body["jobs"] = [v.as_dict() for v in self.jobs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListJobsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.has_more is not None: body['has_more'] = self.has_more - if self.jobs: body['jobs'] = self.jobs - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.has_more is not None: + body["has_more"] = self.has_more + if self.jobs: + body["jobs"] = self.jobs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListJobsResponse: """Deserializes the ListJobsResponse from a dictionary.""" - return cls(has_more=d.get('has_more', None), jobs=_repeated_dict(d, 'jobs', BaseJob), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) - - - - - + return cls( + has_more=d.get("has_more", None), + jobs=_repeated_dict(d, "jobs", BaseJob), + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + ) @dataclass class ListRunsResponse: """List of runs was retrieved successfully.""" - + has_more: Optional[bool] = None """If true, additional runs matching the provided filter are available for listing.""" - + next_page_token: Optional[str] = None """A token that can be used to list the next page of runs (if applicable).""" - + prev_page_token: Optional[str] = None """A token that can be used to list the previous page of runs (if applicable).""" - + runs: Optional[List[BaseRun]] = None """A list of runs, from most recently started to least. 
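# --- illustrative sketch, not part of the generated diff --------------------------------
# The serialization contract shared by every dataclass in this module: as_dict() recurses
# into nested dataclasses, while as_shallow_dict() keeps them as live objects. The
# JobsHealthRules / JobsHealthRule pair defined above makes the contrast visible.
from databricks.sdk.service.jobs import (
    JobsHealthMetric,
    JobsHealthOperator,
    JobsHealthRule,
    JobsHealthRules,
)

health = JobsHealthRules(
    rules=[
        JobsHealthRule(
            metric=JobsHealthMetric.RUN_DURATION_SECONDS,
            op=JobsHealthOperator.GREATER_THAN,
            value=3600,  # flag runs longer than one hour
        )
    ]
)
assert health.as_dict() == {
    "rules": [{"metric": "RUN_DURATION_SECONDS", "op": "GREATER_THAN", "value": 3600}]
}
assert isinstance(health.as_shallow_dict()["rules"][0], JobsHealthRule)  # nested object preserved
assert JobsHealthRules.from_dict(health.as_dict()) == health  # lossless round trip
# -----------------------------------------------------------------------------------------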
Only included in the response if there are runs to list.""" - + def as_dict(self) -> dict: """Serializes the ListRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.has_more is not None: body['has_more'] = self.has_more - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token - if self.runs: body['runs'] = [v.as_dict() for v in self.runs] + if self.has_more is not None: + body["has_more"] = self.has_more + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token + if self.runs: + body["runs"] = [v.as_dict() for v in self.runs] return body def as_shallow_dict(self) -> dict: """Serializes the ListRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.has_more is not None: body['has_more'] = self.has_more - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token - if self.runs: body['runs'] = self.runs + if self.has_more is not None: + body["has_more"] = self.has_more + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token + if self.runs: + body["runs"] = self.runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListRunsResponse: """Deserializes the ListRunsResponse from a dictionary.""" - return cls(has_more=d.get('has_more', None), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None), runs=_repeated_dict(d, 'runs', BaseRun)) - - + return cls( + has_more=d.get("has_more", None), + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + runs=_repeated_dict(d, "runs", BaseRun), + ) @dataclass @@ -3182,30 +3806,32 @@ class NotebookOutput: Databricks restricts this API to return the first 5 MB of the value. For a larger result, your job can store the results in a cloud storage service. 
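# --- illustrative sketch, not part of the generated diff --------------------------------
# Consuming the NotebookOutput documented here: `result` carries whatever the notebook
# passed to dbutils.notebook.exit() (first 5 MB only), and `truncated` marks a clipped
# value. The payload below is invented for the example.
import json

from databricks.sdk.service.jobs import NotebookOutput

out = NotebookOutput.from_dict({"result": json.dumps({"rows": 1234}), "truncated": False})
if out.result is None:
    print("dbutils.notebook.exit() was never called")
elif out.truncated:
    print("result was clipped at 5 MB; persist large outputs to cloud storage instead")
else:
    rows = json.loads(out.result)["rows"]
# -----------------------------------------------------------------------------------------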
This field is absent if `dbutils.notebook.exit()` was never called.""" - + truncated: Optional[bool] = None """Whether or not the result was truncated.""" - + def as_dict(self) -> dict: """Serializes the NotebookOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.result is not None: body['result'] = self.result - if self.truncated is not None: body['truncated'] = self.truncated + if self.result is not None: + body["result"] = self.result + if self.truncated is not None: + body["truncated"] = self.truncated return body def as_shallow_dict(self) -> dict: """Serializes the NotebookOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.result is not None: body['result'] = self.result - if self.truncated is not None: body['truncated'] = self.truncated + if self.result is not None: + body["result"] = self.result + if self.truncated is not None: + body["truncated"] = self.truncated return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NotebookOutput: """Deserializes the NotebookOutput from a dictionary.""" - return cls(result=d.get('result', None), truncated=d.get('truncated', None)) - - + return cls(result=d.get("result", None), truncated=d.get("truncated", None)) @dataclass @@ -3214,8 +3840,8 @@ class NotebookTask: """The path of the notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.""" - - base_parameters: Optional[Dict[str,str]] = None + + base_parameters: Optional[Dict[str, str]] = None """Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/run Now with parameters specified, the two parameters maps are merged. If the same key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used. Use @@ -3230,360 +3856,434 @@ class NotebookTask: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets""" - + source: Optional[Source] = None """Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Notebook is located in Databricks workspace. * `GIT`: Notebook is located in cloud Git provider.""" - + warehouse_id: Optional[str] = None """Optional `warehouse_id` to run the notebook on a SQL warehouse. Classic SQL warehouses are NOT supported, please use serverless or pro SQL warehouses. 
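# --- illustrative sketch, not part of the generated diff --------------------------------
# A NotebookTask built from the fields documented above. base_parameters act as per-key
# defaults that a run-now call can override; Source.WORKSPACE pins the notebook to the
# workspace instead of a git_source. The path and parameter names are invented.
from databricks.sdk.service.jobs import NotebookTask, Source

task = NotebookTask(
    notebook_path="/Users/someone@example.com/etl",  # absolute path for workspace notebooks
    base_parameters={"name": "default", "age": "0"},  # run-now notebook_params win per key
    source=Source.WORKSPACE,
)
body = task.as_dict()  # {"notebook_path": ..., "base_parameters": {...}, "source": "WORKSPACE"}
# -----------------------------------------------------------------------------------------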
Note that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.""" - + def as_dict(self) -> dict: """Serializes the NotebookTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.base_parameters: body['base_parameters'] = self.base_parameters - if self.notebook_path is not None: body['notebook_path'] = self.notebook_path - if self.source is not None: body['source'] = self.source.value - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.base_parameters: + body["base_parameters"] = self.base_parameters + if self.notebook_path is not None: + body["notebook_path"] = self.notebook_path + if self.source is not None: + body["source"] = self.source.value + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the NotebookTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.base_parameters: body['base_parameters'] = self.base_parameters - if self.notebook_path is not None: body['notebook_path'] = self.notebook_path - if self.source is not None: body['source'] = self.source - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.base_parameters: + body["base_parameters"] = self.base_parameters + if self.notebook_path is not None: + body["notebook_path"] = self.notebook_path + if self.source is not None: + body["source"] = self.source + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NotebookTask: """Deserializes the NotebookTask from a dictionary.""" - return cls(base_parameters=d.get('base_parameters', None), notebook_path=d.get('notebook_path', None), source=_enum(d, 'source', Source), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + base_parameters=d.get("base_parameters", None), + notebook_path=d.get("notebook_path", None), + source=_enum(d, "source", Source), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class OutputSchemaInfo: """Stores the catalog name, schema name, and the output schema expiration time for the clean room run.""" - + catalog_name: Optional[str] = None - + expiration_time: Optional[int] = None """The expiration time for the output schema as a Unix timestamp in milliseconds.""" - + schema_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body def as_shallow_dict(self) -> dict: """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.schema_name is not None: body['schema_name'] = self.schema_name + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.expiration_time is 
not None: + body["expiration_time"] = self.expiration_time + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OutputSchemaInfo: """Deserializes the OutputSchemaInfo from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), expiration_time=d.get('expiration_time', None), schema_name=d.get('schema_name', None)) - - + return cls( + catalog_name=d.get("catalog_name", None), + expiration_time=d.get("expiration_time", None), + schema_name=d.get("schema_name", None), + ) class PauseStatus(Enum): - - - PAUSED = 'PAUSED' - UNPAUSED = 'UNPAUSED' + + PAUSED = "PAUSED" + UNPAUSED = "UNPAUSED" + class PerformanceTarget(Enum): """PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run on serverless compute should be. The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget).""" - - PERFORMANCE_OPTIMIZED = 'PERFORMANCE_OPTIMIZED' - STANDARD = 'STANDARD' + + PERFORMANCE_OPTIMIZED = "PERFORMANCE_OPTIMIZED" + STANDARD = "STANDARD" + @dataclass class PeriodicTriggerConfiguration: interval: int """The interval at which the trigger should run.""" - + unit: PeriodicTriggerConfigurationTimeUnit """The unit of time for the interval.""" - + def as_dict(self) -> dict: """Serializes the PeriodicTriggerConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.interval is not None: body['interval'] = self.interval - if self.unit is not None: body['unit'] = self.unit.value + if self.interval is not None: + body["interval"] = self.interval + if self.unit is not None: + body["unit"] = self.unit.value return body def as_shallow_dict(self) -> dict: """Serializes the PeriodicTriggerConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.interval is not None: body['interval'] = self.interval - if self.unit is not None: body['unit'] = self.unit + if self.interval is not None: + body["interval"] = self.interval + if self.unit is not None: + body["unit"] = self.unit return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PeriodicTriggerConfiguration: """Deserializes the PeriodicTriggerConfiguration from a dictionary.""" - return cls(interval=d.get('interval', None), unit=_enum(d, 'unit', PeriodicTriggerConfigurationTimeUnit)) - - + return cls(interval=d.get("interval", None), unit=_enum(d, "unit", PeriodicTriggerConfigurationTimeUnit)) class PeriodicTriggerConfigurationTimeUnit(Enum): - - - DAYS = 'DAYS' - HOURS = 'HOURS' - WEEKS = 'WEEKS' + + DAYS = "DAYS" + HOURS = "HOURS" + WEEKS = "WEEKS" + @dataclass class PipelineParams: full_refresh: Optional[bool] = None """If true, triggers a full refresh on the delta live table.""" - + def as_dict(self) -> dict: """Serializes the PipelineParams into a dictionary suitable for use as a JSON request body.""" body = {} - if self.full_refresh is not None: body['full_refresh'] = self.full_refresh + if self.full_refresh is not None: + body["full_refresh"] = self.full_refresh return body def as_shallow_dict(self) -> dict: """Serializes the PipelineParams into a shallow dictionary of its immediate attributes.""" body = {} - if self.full_refresh is not None: body['full_refresh'] = self.full_refresh + if self.full_refresh is not None: + body["full_refresh"] = self.full_refresh return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineParams: """Deserializes 
the PipelineParams from a dictionary.""" - return cls(full_refresh=d.get('full_refresh', None)) - - + return cls(full_refresh=d.get("full_refresh", None)) @dataclass class PipelineTask: pipeline_id: str """The full name of the pipeline task to execute.""" - + full_refresh: Optional[bool] = None """If true, triggers a full refresh on the delta live table.""" - + def as_dict(self) -> dict: """Serializes the PipelineTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.full_refresh is not None: body['full_refresh'] = self.full_refresh - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.full_refresh is not None: + body["full_refresh"] = self.full_refresh + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id return body def as_shallow_dict(self) -> dict: """Serializes the PipelineTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.full_refresh is not None: body['full_refresh'] = self.full_refresh - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.full_refresh is not None: + body["full_refresh"] = self.full_refresh + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineTask: """Deserializes the PipelineTask from a dictionary.""" - return cls(full_refresh=d.get('full_refresh', None), pipeline_id=d.get('pipeline_id', None)) - - + return cls(full_refresh=d.get("full_refresh", None), pipeline_id=d.get("pipeline_id", None)) @dataclass class PowerBiModel: authentication_method: Optional[AuthenticationMethod] = None """How the published Power BI model authenticates to Databricks""" - + model_name: Optional[str] = None """The name of the Power BI model""" - + overwrite_existing: Optional[bool] = None """Whether to overwrite existing Power BI models""" - + storage_mode: Optional[StorageMode] = None """The default storage mode of the Power BI model""" - + workspace_name: Optional[str] = None """The name of the Power BI workspace of the model""" - + def as_dict(self) -> dict: """Serializes the PowerBiModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authentication_method is not None: body['authentication_method'] = self.authentication_method.value - if self.model_name is not None: body['model_name'] = self.model_name - if self.overwrite_existing is not None: body['overwrite_existing'] = self.overwrite_existing - if self.storage_mode is not None: body['storage_mode'] = self.storage_mode.value - if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + if self.authentication_method is not None: + body["authentication_method"] = self.authentication_method.value + if self.model_name is not None: + body["model_name"] = self.model_name + if self.overwrite_existing is not None: + body["overwrite_existing"] = self.overwrite_existing + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode.value + if self.workspace_name is not None: + body["workspace_name"] = self.workspace_name return body def as_shallow_dict(self) -> dict: """Serializes the PowerBiModel into a shallow dictionary of its immediate attributes.""" body = {} - if self.authentication_method is not None: body['authentication_method'] = self.authentication_method - if self.model_name is not None: body['model_name'] = self.model_name - if self.overwrite_existing is not None: body['overwrite_existing'] = self.overwrite_existing - 
if self.storage_mode is not None: body['storage_mode'] = self.storage_mode - if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + if self.authentication_method is not None: + body["authentication_method"] = self.authentication_method + if self.model_name is not None: + body["model_name"] = self.model_name + if self.overwrite_existing is not None: + body["overwrite_existing"] = self.overwrite_existing + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode + if self.workspace_name is not None: + body["workspace_name"] = self.workspace_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PowerBiModel: """Deserializes the PowerBiModel from a dictionary.""" - return cls(authentication_method=_enum(d, 'authentication_method', AuthenticationMethod), model_name=d.get('model_name', None), overwrite_existing=d.get('overwrite_existing', None), storage_mode=_enum(d, 'storage_mode', StorageMode), workspace_name=d.get('workspace_name', None)) - - + return cls( + authentication_method=_enum(d, "authentication_method", AuthenticationMethod), + model_name=d.get("model_name", None), + overwrite_existing=d.get("overwrite_existing", None), + storage_mode=_enum(d, "storage_mode", StorageMode), + workspace_name=d.get("workspace_name", None), + ) @dataclass class PowerBiTable: catalog: Optional[str] = None """The catalog name in Databricks""" - + name: Optional[str] = None """The table name in Databricks""" - + schema: Optional[str] = None """The schema name in Databricks""" - + storage_mode: Optional[StorageMode] = None """The Power BI storage mode of the table""" - + def as_dict(self) -> dict: """Serializes the PowerBiTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog is not None: body['catalog'] = self.catalog - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema - if self.storage_mode is not None: body['storage_mode'] = self.storage_mode.value + if self.catalog is not None: + body["catalog"] = self.catalog + if self.name is not None: + body["name"] = self.name + if self.schema is not None: + body["schema"] = self.schema + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode.value return body def as_shallow_dict(self) -> dict: """Serializes the PowerBiTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog is not None: body['catalog'] = self.catalog - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema - if self.storage_mode is not None: body['storage_mode'] = self.storage_mode + if self.catalog is not None: + body["catalog"] = self.catalog + if self.name is not None: + body["name"] = self.name + if self.schema is not None: + body["schema"] = self.schema + if self.storage_mode is not None: + body["storage_mode"] = self.storage_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PowerBiTable: """Deserializes the PowerBiTable from a dictionary.""" - return cls(catalog=d.get('catalog', None), name=d.get('name', None), schema=d.get('schema', None), storage_mode=_enum(d, 'storage_mode', StorageMode)) - - + return cls( + catalog=d.get("catalog", None), + name=d.get("name", None), + schema=d.get("schema", None), + storage_mode=_enum(d, "storage_mode", StorageMode), + ) @dataclass class PowerBiTask: connection_resource_name: Optional[str] = None """The resource name of the UC connection to authenticate from 
Databricks to Power BI""" - + power_bi_model: Optional[PowerBiModel] = None """The semantic model to update""" - + refresh_after_update: Optional[bool] = None """Whether the model should be refreshed after the update""" - + tables: Optional[List[PowerBiTable]] = None """The tables to be exported to Power BI""" - + warehouse_id: Optional[str] = None """The SQL warehouse ID to use as the Power BI data source""" - + def as_dict(self) -> dict: """Serializes the PowerBiTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name - if self.power_bi_model: body['power_bi_model'] = self.power_bi_model.as_dict() - if self.refresh_after_update is not None: body['refresh_after_update'] = self.refresh_after_update - if self.tables: body['tables'] = [v.as_dict() for v in self.tables] - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.connection_resource_name is not None: + body["connection_resource_name"] = self.connection_resource_name + if self.power_bi_model: + body["power_bi_model"] = self.power_bi_model.as_dict() + if self.refresh_after_update is not None: + body["refresh_after_update"] = self.refresh_after_update + if self.tables: + body["tables"] = [v.as_dict() for v in self.tables] + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the PowerBiTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_resource_name is not None: body['connection_resource_name'] = self.connection_resource_name - if self.power_bi_model: body['power_bi_model'] = self.power_bi_model - if self.refresh_after_update is not None: body['refresh_after_update'] = self.refresh_after_update - if self.tables: body['tables'] = self.tables - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.connection_resource_name is not None: + body["connection_resource_name"] = self.connection_resource_name + if self.power_bi_model: + body["power_bi_model"] = self.power_bi_model + if self.refresh_after_update is not None: + body["refresh_after_update"] = self.refresh_after_update + if self.tables: + body["tables"] = self.tables + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PowerBiTask: """Deserializes the PowerBiTask from a dictionary.""" - return cls(connection_resource_name=d.get('connection_resource_name', None), power_bi_model=_from_dict(d, 'power_bi_model', PowerBiModel), refresh_after_update=d.get('refresh_after_update', None), tables=_repeated_dict(d, 'tables', PowerBiTable), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + connection_resource_name=d.get("connection_resource_name", None), + power_bi_model=_from_dict(d, "power_bi_model", PowerBiModel), + refresh_after_update=d.get("refresh_after_update", None), + tables=_repeated_dict(d, "tables", PowerBiTable), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class PythonWheelTask: package_name: str """Name of the package to execute""" - + entry_point: str """Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`""" - - named_parameters: Optional[Dict[str,str]] = None + + named_parameters: Optional[Dict[str, str]] = None """Command-line 
parameters passed to Python wheel task in the form of `["--name=task", "--data=dbfs:/path/to/data.json"]`. Leave it empty if `parameters` is not null.""" - + parameters: Optional[List[str]] = None """Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is not null.""" - + def as_dict(self) -> dict: """Serializes the PythonWheelTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entry_point is not None: body['entry_point'] = self.entry_point - if self.named_parameters: body['named_parameters'] = self.named_parameters - if self.package_name is not None: body['package_name'] = self.package_name - if self.parameters: body['parameters'] = [v for v in self.parameters] + if self.entry_point is not None: + body["entry_point"] = self.entry_point + if self.named_parameters: + body["named_parameters"] = self.named_parameters + if self.package_name is not None: + body["package_name"] = self.package_name + if self.parameters: + body["parameters"] = [v for v in self.parameters] return body def as_shallow_dict(self) -> dict: """Serializes the PythonWheelTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.entry_point is not None: body['entry_point'] = self.entry_point - if self.named_parameters: body['named_parameters'] = self.named_parameters - if self.package_name is not None: body['package_name'] = self.package_name - if self.parameters: body['parameters'] = self.parameters + if self.entry_point is not None: + body["entry_point"] = self.entry_point + if self.named_parameters: + body["named_parameters"] = self.named_parameters + if self.package_name is not None: + body["package_name"] = self.package_name + if self.parameters: + body["parameters"] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PythonWheelTask: """Deserializes the PythonWheelTask from a dictionary.""" - return cls(entry_point=d.get('entry_point', None), named_parameters=d.get('named_parameters', None), package_name=d.get('package_name', None), parameters=d.get('parameters', None)) - - + return cls( + entry_point=d.get("entry_point", None), + named_parameters=d.get("named_parameters", None), + package_name=d.get("package_name", None), + parameters=d.get("parameters", None), + ) @dataclass @@ -3594,31 +4294,33 @@ class QueueDetails: queued due to reaching the per-job limit of concurrent job runs. * `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of active run job tasks.""" - + message: Optional[str] = None """A descriptive message with the queuing details. 
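# --- illustrative sketch, not part of the generated diff --------------------------------
# The PythonWheelTask contract documented above: populate either positional `parameters`
# or keyword-style `named_parameters` (a Dict[str, str] per the field type), never both.
# Package, entry point, and parameter values are invented.
from databricks.sdk.service.jobs import PythonWheelTask

wheel = PythonWheelTask(
    package_name="my_pkg",
    entry_point="main",  # resolved as my_pkg.main() if absent from the package metadata
    named_parameters={"name": "task", "data": "dbfs:/path/to/data.json"},
)
assert not wheel.parameters  # leave positional parameters empty when named ones are set
request_body = wheel.as_dict()
# -----------------------------------------------------------------------------------------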
This field is unstructured, and its exact format is subject to change.""" - + def as_dict(self) -> dict: """Serializes the QueueDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.code is not None: body['code'] = self.code.value - if self.message is not None: body['message'] = self.message + if self.code is not None: + body["code"] = self.code.value + if self.message is not None: + body["message"] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the QueueDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.code is not None: body['code'] = self.code - if self.message is not None: body['message'] = self.message + if self.code is not None: + body["code"] = self.code + if self.message is not None: + body["message"] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueueDetails: """Deserializes the QueueDetails from a dictionary.""" - return cls(code=_enum(d, 'code', QueueDetailsCodeCode), message=d.get('message', None)) - - + return cls(code=_enum(d, "code", QueueDetailsCodeCode), message=d.get("message", None)) class QueueDetailsCodeCode(Enum): @@ -3627,34 +4329,35 @@ class QueueDetailsCodeCode(Enum): queued due to reaching the per-job limit of concurrent job runs. * `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of active run job tasks.""" - - ACTIVE_RUNS_LIMIT_REACHED = 'ACTIVE_RUNS_LIMIT_REACHED' - ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = 'ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED' - MAX_CONCURRENT_RUNS_REACHED = 'MAX_CONCURRENT_RUNS_REACHED' + + ACTIVE_RUNS_LIMIT_REACHED = "ACTIVE_RUNS_LIMIT_REACHED" + ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = "ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED" + MAX_CONCURRENT_RUNS_REACHED = "MAX_CONCURRENT_RUNS_REACHED" + @dataclass class QueueSettings: enabled: bool """If true, enable queueing for the job. This is a required field.""" - + def as_dict(self) -> dict: """Serializes the QueueSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body def as_shallow_dict(self) -> dict: """Serializes the QueueSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueueSettings: """Deserializes the QueueSettings from a dictionary.""" - return cls(enabled=d.get('enabled', None)) - - + return cls(enabled=d.get("enabled", None)) @dataclass @@ -3667,77 +4370,101 @@ class RepairHistoryItem: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + end_time: Optional[int] = None """The end time of the (repaired) run.""" - + id: Optional[int] = None """The ID of the repair. Only returned for the items that represent a repair in `repair_history`.""" - + start_time: Optional[int] = None """The start time of the (repaired) run.""" - + state: Optional[RunState] = None """Deprecated. 
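# --- illustrative sketch, not part of the generated diff --------------------------------
# Honoring the deprecation noted here: prefer the structured `status` field over the
# legacy `state` when reading a repair-history entry.
from typing import Union

from databricks.sdk.service.jobs import RepairHistoryItem, RunState, RunStatus

def effective_status(item: RepairHistoryItem) -> Union[RunStatus, RunState, None]:
    return item.status if item.status is not None else item.state  # `state` is deprecated
# -----------------------------------------------------------------------------------------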
Please use the `status` field instead.""" - + status: Optional[RunStatus] = None """The current status of the run""" - + task_run_ids: Optional[List[int]] = None """The run IDs of the task runs that ran as part of this repair history item.""" - + type: Optional[RepairHistoryItemType] = None """The repair history item type. Indicates whether a run is the original run or a repair run.""" - + def as_dict(self) -> dict: """Serializes the RepairHistoryItem into a dictionary suitable for use as a JSON request body.""" body = {} - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value - if self.end_time is not None: body['end_time'] = self.end_time - if self.id is not None: body['id'] = self.id - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state.as_dict() - if self.status: body['status'] = self.status.as_dict() - if self.task_run_ids: body['task_run_ids'] = [v for v in self.task_run_ids] - if self.type is not None: body['type'] = self.type.value + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target.value + if self.end_time is not None: + body["end_time"] = self.end_time + if self.id is not None: + body["id"] = self.id + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state.as_dict() + if self.status: + body["status"] = self.status.as_dict() + if self.task_run_ids: + body["task_run_ids"] = [v for v in self.task_run_ids] + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the RepairHistoryItem into a shallow dictionary of its immediate attributes.""" body = {} - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target - if self.end_time is not None: body['end_time'] = self.end_time - if self.id is not None: body['id'] = self.id - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state - if self.status: body['status'] = self.status - if self.task_run_ids: body['task_run_ids'] = self.task_run_ids - if self.type is not None: body['type'] = self.type + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target + if self.end_time is not None: + body["end_time"] = self.end_time + if self.id is not None: + body["id"] = self.id + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state + if self.status: + body["status"] = self.status + if self.task_run_ids: + body["task_run_ids"] = self.task_run_ids + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem: """Deserializes the RepairHistoryItem from a dictionary.""" - return cls(effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), id=d.get('id', None), start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), status=_from_dict(d, 'status', RunStatus), task_run_ids=d.get('task_run_ids', None), type=_enum(d, 'type', RepairHistoryItemType)) - - + return cls( + effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + end_time=d.get("end_time", None), + id=d.get("id", None), + 
start_time=d.get("start_time", None), + state=_from_dict(d, "state", RunState), + status=_from_dict(d, "status", RunStatus), + task_run_ids=d.get("task_run_ids", None), + type=_enum(d, "type", RepairHistoryItemType), + ) class RepairHistoryItemType(Enum): """The repair history item type. Indicates whether a run is the original run or a repair run.""" - - ORIGINAL = 'ORIGINAL' - REPAIR = 'REPAIR' + + ORIGINAL = "ORIGINAL" + REPAIR = "REPAIR" + @dataclass class RepairRun: run_id: int """The job run ID of the run to repair. The run must not be in progress.""" - + dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - + jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -3748,15 +4475,15 @@ class RepairRun: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str,str]] = None + + job_parameters: Optional[Dict[str, str]] = None """Job-level parameters used in the run. for example `"param": "overriding_val"`""" - + latest_repair_id: Optional[int] = None """The ID of the latest repair. This parameter is not required when repairing a run for the first time, but must be provided on subsequent requests to repair the same run.""" - - notebook_params: Optional[Dict[str,str]] = None + + notebook_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. @@ -3772,7 +4499,7 @@ class RepairRun: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined @@ -3781,12 +4508,12 @@ class RepairRun: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str,str]] = None - + + python_named_params: Optional[Dict[str, str]] = None + python_params: Optional[List[str]] = None """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon @@ -3802,18 +4529,18 @@ class RepairRun: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + rerun_all_failed_tasks: Optional[bool] = None """If true, repair all failed tasks. 
Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.""" - + rerun_dependent_tasks: Optional[bool] = None """If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were previously successful. Can be also used in combination with `rerun_all_failed_tasks`.""" - + rerun_tasks: Optional[List[str]] = None """The task keys of the task runs to repair.""" - + spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -3830,118 +4557,164 @@ class RepairRun: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str,str]] = None + + sql_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - + def as_dict(self) -> dict: """Serializes the RepairRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands] - if self.jar_params: body['jar_params'] = [v for v in self.jar_params] - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.performance_target is not None: body['performance_target'] = self.performance_target.value - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = [v for v in self.python_params] - if self.rerun_all_failed_tasks is not None: body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks - if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks - if self.rerun_tasks: body['rerun_tasks'] = [v for v in self.rerun_tasks] - if self.run_id is not None: body['run_id'] = self.run_id - if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params] - if self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = [v for v in self.dbt_commands] + if self.jar_params: + body["jar_params"] = [v for v in self.jar_params] + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.latest_repair_id is not None: + body["latest_repair_id"] = self.latest_repair_id + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.performance_target is not None: + body["performance_target"] = self.performance_target.value + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params.as_dict() + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = [v for v in self.python_params] + if self.rerun_all_failed_tasks is not None: + body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks + if self.rerun_dependent_tasks is not None: + body["rerun_dependent_tasks"] = self.rerun_dependent_tasks + if self.rerun_tasks: + body["rerun_tasks"] = [v for v in self.rerun_tasks] + if self.run_id is not None: + body["run_id"] = self.run_id + if self.spark_submit_params: + 
body["spark_submit_params"] = [v for v in self.spark_submit_params] + if self.sql_params: + body["sql_params"] = self.sql_params return body def as_shallow_dict(self) -> dict: """Serializes the RepairRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_commands: body['dbt_commands'] = self.dbt_commands - if self.jar_params: body['jar_params'] = self.jar_params - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.performance_target is not None: body['performance_target'] = self.performance_target - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = self.python_params - if self.rerun_all_failed_tasks is not None: body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks - if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks - if self.rerun_tasks: body['rerun_tasks'] = self.rerun_tasks - if self.run_id is not None: body['run_id'] = self.run_id - if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params - if self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = self.dbt_commands + if self.jar_params: + body["jar_params"] = self.jar_params + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.latest_repair_id is not None: + body["latest_repair_id"] = self.latest_repair_id + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.performance_target is not None: + body["performance_target"] = self.performance_target + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = self.python_params + if self.rerun_all_failed_tasks is not None: + body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks + if self.rerun_dependent_tasks is not None: + body["rerun_dependent_tasks"] = self.rerun_dependent_tasks + if self.rerun_tasks: + body["rerun_tasks"] = self.rerun_tasks + if self.run_id is not None: + body["run_id"] = self.run_id + if self.spark_submit_params: + body["spark_submit_params"] = self.spark_submit_params + if self.sql_params: + body["sql_params"] = self.sql_params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepairRun: """Deserializes the RepairRun from a dictionary.""" - return cls(dbt_commands=d.get('dbt_commands', None), jar_params=d.get('jar_params', None), job_parameters=d.get('job_parameters', None), latest_repair_id=d.get('latest_repair_id', None), notebook_params=d.get('notebook_params', None), performance_target=_enum(d, 'performance_target', PerformanceTarget), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), rerun_all_failed_tasks=d.get('rerun_all_failed_tasks', None), rerun_dependent_tasks=d.get('rerun_dependent_tasks', None), rerun_tasks=d.get('rerun_tasks', None), run_id=d.get('run_id', None), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None)) - - + return cls( + 
dbt_commands=d.get("dbt_commands", None), + jar_params=d.get("jar_params", None), + job_parameters=d.get("job_parameters", None), + latest_repair_id=d.get("latest_repair_id", None), + notebook_params=d.get("notebook_params", None), + performance_target=_enum(d, "performance_target", PerformanceTarget), + pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), + python_named_params=d.get("python_named_params", None), + python_params=d.get("python_params", None), + rerun_all_failed_tasks=d.get("rerun_all_failed_tasks", None), + rerun_dependent_tasks=d.get("rerun_dependent_tasks", None), + rerun_tasks=d.get("rerun_tasks", None), + run_id=d.get("run_id", None), + spark_submit_params=d.get("spark_submit_params", None), + sql_params=d.get("sql_params", None), + ) @dataclass class RepairRunResponse: """Run repair was initiated.""" - + repair_id: Optional[int] = None """The ID of the repair. Must be provided in subsequent repairs using the `latest_repair_id` field to ensure sequential repairs.""" - + def as_dict(self) -> dict: """Serializes the RepairRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.repair_id is not None: body['repair_id'] = self.repair_id + if self.repair_id is not None: + body["repair_id"] = self.repair_id return body def as_shallow_dict(self) -> dict: """Serializes the RepairRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.repair_id is not None: body['repair_id'] = self.repair_id + if self.repair_id is not None: + body["repair_id"] = self.repair_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepairRunResponse: """Deserializes the RepairRunResponse from a dictionary.""" - return cls(repair_id=d.get('repair_id', None)) - - + return cls(repair_id=d.get("repair_id", None)) @dataclass class ResetJob: job_id: int """The canonical identifier of the job to reset. This field is required.""" - + new_settings: JobSettings """The new settings of the job. These settings completely replace the old settings. Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. 
Changes to other fields are applied to future runs only.""" - + def as_dict(self) -> dict: """Serializes the ResetJob into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id - if self.new_settings: body['new_settings'] = self.new_settings.as_dict() + if self.job_id is not None: + body["job_id"] = self.job_id + if self.new_settings: + body["new_settings"] = self.new_settings.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ResetJob into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id - if self.new_settings: body['new_settings'] = self.new_settings + if self.job_id is not None: + body["job_id"] = self.job_id + if self.new_settings: + body["new_settings"] = self.new_settings return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResetJob: """Deserializes the ResetJob from a dictionary.""" - return cls(job_id=d.get('job_id', None), new_settings=_from_dict(d, 'new_settings', JobSettings)) - - + return cls(job_id=d.get("job_id", None), new_settings=_from_dict(d, "new_settings", JobSettings)) @dataclass @@ -3960,281 +4733,314 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ResetResponse: """Deserializes the ResetResponse from a dictionary.""" return cls() - - @dataclass class ResolvedConditionTaskValues: left: Optional[str] = None - + right: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ResolvedConditionTaskValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.left is not None: body['left'] = self.left - if self.right is not None: body['right'] = self.right + if self.left is not None: + body["left"] = self.left + if self.right is not None: + body["right"] = self.right return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedConditionTaskValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.left is not None: body['left'] = self.left - if self.right is not None: body['right'] = self.right + if self.left is not None: + body["left"] = self.left + if self.right is not None: + body["right"] = self.right return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedConditionTaskValues: """Deserializes the ResolvedConditionTaskValues from a dictionary.""" - return cls(left=d.get('left', None), right=d.get('right', None)) - - + return cls(left=d.get("left", None), right=d.get("right", None)) @dataclass class ResolvedDbtTaskValues: commands: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the ResolvedDbtTaskValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.commands: body['commands'] = [v for v in self.commands] + if self.commands: + body["commands"] = [v for v in self.commands] return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedDbtTaskValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.commands: body['commands'] = self.commands + if self.commands: + body["commands"] = self.commands return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedDbtTaskValues: """Deserializes the ResolvedDbtTaskValues from a dictionary.""" - return cls(commands=d.get('commands', None)) - - + return cls(commands=d.get("commands", None)) @dataclass class ResolvedNotebookTaskValues: - base_parameters: Optional[Dict[str,str]] = None - + base_parameters: Optional[Dict[str, 
str]] = None + def as_dict(self) -> dict: """Serializes the ResolvedNotebookTaskValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.base_parameters: body['base_parameters'] = self.base_parameters + if self.base_parameters: + body["base_parameters"] = self.base_parameters return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedNotebookTaskValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.base_parameters: body['base_parameters'] = self.base_parameters + if self.base_parameters: + body["base_parameters"] = self.base_parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedNotebookTaskValues: """Deserializes the ResolvedNotebookTaskValues from a dictionary.""" - return cls(base_parameters=d.get('base_parameters', None)) - - + return cls(base_parameters=d.get("base_parameters", None)) @dataclass class ResolvedParamPairValues: - parameters: Optional[Dict[str,str]] = None - + parameters: Optional[Dict[str, str]] = None + def as_dict(self) -> dict: """Serializes the ResolvedParamPairValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: body['parameters'] = self.parameters + if self.parameters: + body["parameters"] = self.parameters return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedParamPairValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: body['parameters'] = self.parameters + if self.parameters: + body["parameters"] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedParamPairValues: """Deserializes the ResolvedParamPairValues from a dictionary.""" - return cls(parameters=d.get('parameters', None)) - - + return cls(parameters=d.get("parameters", None)) @dataclass class ResolvedPythonWheelTaskValues: - named_parameters: Optional[Dict[str,str]] = None - + named_parameters: Optional[Dict[str, str]] = None + parameters: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the ResolvedPythonWheelTaskValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.named_parameters: body['named_parameters'] = self.named_parameters - if self.parameters: body['parameters'] = [v for v in self.parameters] + if self.named_parameters: + body["named_parameters"] = self.named_parameters + if self.parameters: + body["parameters"] = [v for v in self.parameters] return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedPythonWheelTaskValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.named_parameters: body['named_parameters'] = self.named_parameters - if self.parameters: body['parameters'] = self.parameters + if self.named_parameters: + body["named_parameters"] = self.named_parameters + if self.parameters: + body["parameters"] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedPythonWheelTaskValues: """Deserializes the ResolvedPythonWheelTaskValues from a dictionary.""" - return cls(named_parameters=d.get('named_parameters', None), parameters=d.get('parameters', None)) - - + return cls(named_parameters=d.get("named_parameters", None), parameters=d.get("parameters", None)) @dataclass class ResolvedRunJobTaskValues: - job_parameters: Optional[Dict[str,str]] = None - - parameters: Optional[Dict[str,str]] = None - + job_parameters: Optional[Dict[str, str]] = None + + parameters: Optional[Dict[str, str]] = None + def 
as_dict(self) -> dict: """Serializes the ResolvedRunJobTaskValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.parameters: body['parameters'] = self.parameters + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.parameters: + body["parameters"] = self.parameters return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedRunJobTaskValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.parameters: body['parameters'] = self.parameters + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.parameters: + body["parameters"] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedRunJobTaskValues: """Deserializes the ResolvedRunJobTaskValues from a dictionary.""" - return cls(job_parameters=d.get('job_parameters', None), parameters=d.get('parameters', None)) - - + return cls(job_parameters=d.get("job_parameters", None), parameters=d.get("parameters", None)) @dataclass class ResolvedStringParamsValues: parameters: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the ResolvedStringParamsValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: body['parameters'] = [v for v in self.parameters] + if self.parameters: + body["parameters"] = [v for v in self.parameters] return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedStringParamsValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: body['parameters'] = self.parameters + if self.parameters: + body["parameters"] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedStringParamsValues: """Deserializes the ResolvedStringParamsValues from a dictionary.""" - return cls(parameters=d.get('parameters', None)) - - + return cls(parameters=d.get("parameters", None)) @dataclass class ResolvedValues: condition_task: Optional[ResolvedConditionTaskValues] = None - + dbt_task: Optional[ResolvedDbtTaskValues] = None - + notebook_task: Optional[ResolvedNotebookTaskValues] = None - + python_wheel_task: Optional[ResolvedPythonWheelTaskValues] = None - + run_job_task: Optional[ResolvedRunJobTaskValues] = None - + simulation_task: Optional[ResolvedParamPairValues] = None - + spark_jar_task: Optional[ResolvedStringParamsValues] = None - + spark_python_task: Optional[ResolvedStringParamsValues] = None - + spark_submit_task: Optional[ResolvedStringParamsValues] = None - + sql_task: Optional[ResolvedParamPairValues] = None - + def as_dict(self) -> dict: """Serializes the ResolvedValues into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition_task: body['condition_task'] = self.condition_task.as_dict() - if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() - if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict() - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() - if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() - if self.simulation_task: body['simulation_task'] = self.simulation_task.as_dict() - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() - if 
self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() - if self.sql_task: body['sql_task'] = self.sql_task.as_dict() + if self.condition_task: + body["condition_task"] = self.condition_task.as_dict() + if self.dbt_task: + body["dbt_task"] = self.dbt_task.as_dict() + if self.notebook_task: + body["notebook_task"] = self.notebook_task.as_dict() + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task.as_dict() + if self.run_job_task: + body["run_job_task"] = self.run_job_task.as_dict() + if self.simulation_task: + body["simulation_task"] = self.simulation_task.as_dict() + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task.as_dict() + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task.as_dict() + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task.as_dict() + if self.sql_task: + body["sql_task"] = self.sql_task.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ResolvedValues into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition_task: body['condition_task'] = self.condition_task - if self.dbt_task: body['dbt_task'] = self.dbt_task - if self.notebook_task: body['notebook_task'] = self.notebook_task - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task - if self.run_job_task: body['run_job_task'] = self.run_job_task - if self.simulation_task: body['simulation_task'] = self.simulation_task - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task - if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task - if self.sql_task: body['sql_task'] = self.sql_task + if self.condition_task: + body["condition_task"] = self.condition_task + if self.dbt_task: + body["dbt_task"] = self.dbt_task + if self.notebook_task: + body["notebook_task"] = self.notebook_task + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task + if self.run_job_task: + body["run_job_task"] = self.run_job_task + if self.simulation_task: + body["simulation_task"] = self.simulation_task + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task + if self.sql_task: + body["sql_task"] = self.sql_task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResolvedValues: """Deserializes the ResolvedValues from a dictionary.""" - return cls(condition_task=_from_dict(d, 'condition_task', ResolvedConditionTaskValues), dbt_task=_from_dict(d, 'dbt_task', ResolvedDbtTaskValues), notebook_task=_from_dict(d, 'notebook_task', ResolvedNotebookTaskValues), python_wheel_task=_from_dict(d, 'python_wheel_task', ResolvedPythonWheelTaskValues), run_job_task=_from_dict(d, 'run_job_task', ResolvedRunJobTaskValues), simulation_task=_from_dict(d, 'simulation_task', ResolvedParamPairValues), spark_jar_task=_from_dict(d, 'spark_jar_task', ResolvedStringParamsValues), spark_python_task=_from_dict(d, 'spark_python_task', ResolvedStringParamsValues), spark_submit_task=_from_dict(d, 'spark_submit_task', ResolvedStringParamsValues), sql_task=_from_dict(d, 'sql_task', ResolvedParamPairValues)) - - + return cls( + condition_task=_from_dict(d, "condition_task", ResolvedConditionTaskValues), + dbt_task=_from_dict(d, "dbt_task", 
ResolvedDbtTaskValues), + notebook_task=_from_dict(d, "notebook_task", ResolvedNotebookTaskValues), + python_wheel_task=_from_dict(d, "python_wheel_task", ResolvedPythonWheelTaskValues), + run_job_task=_from_dict(d, "run_job_task", ResolvedRunJobTaskValues), + simulation_task=_from_dict(d, "simulation_task", ResolvedParamPairValues), + spark_jar_task=_from_dict(d, "spark_jar_task", ResolvedStringParamsValues), + spark_python_task=_from_dict(d, "spark_python_task", ResolvedStringParamsValues), + spark_submit_task=_from_dict(d, "spark_submit_task", ResolvedStringParamsValues), + sql_task=_from_dict(d, "sql_task", ResolvedParamPairValues), + ) @dataclass class Run: """Run was retrieved successfully""" - + attempt_number: Optional[int] = None """The sequence number of this run attempt for a triggered job run. The initial attempt of a run has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" - + cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + cluster_instance: Optional[ClusterInstance] = None """The cluster used for this run. If the run is specified to use a new cluster, this field is set once the Jobs service has requested a cluster for the run.""" - + cluster_spec: Optional[ClusterSpec] = None """A snapshot of the job’s cluster specification when this run was created.""" - + creator_user_name: Optional[str] = None """The creator user name. This field won’t be included in the response if the user has already been deleted.""" - + description: Optional[str] = None """Description of the run""" - + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -4243,18 +5049,18 @@ class Run: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" - + execution_duration: Optional[int] = None """The time in milliseconds it took to execute the commands in the JAR or notebook until they completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. 
Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -4264,97 +5070,97 @@ class Run: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + has_more: Optional[bool] = None """Indicates if the run has more array properties (`tasks`, `job_clusters`) that are not shown. They can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2 :method:jobs/listruns requests with `expand_tasks=true`.""" - + iterations: Optional[List[RunTask]] = None """Only populated for for-each iterations. The parent for-each task is located in the tasks array.""" - + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings. If more than 100 job clusters are available, you can paginate through them using :method:jobs/getrun.""" - + job_id: Optional[int] = None """The canonical identifier of the job that contains this run.""" - + job_parameters: Optional[List[JobParameter]] = None """Job-level parameters used in the run""" - + job_run_id: Optional[int] = None """ID of the job run that this run belongs to. For legacy and single-task job runs the field is populated with the job run ID. For task runs, the field is populated with the ID of the job run that the task run belongs to.""" - + next_page_token: Optional[str] = None """A token that can be used to list the next page of array properties.""" - + number_in_job: Optional[int] = None """A unique identifier for this job run. This is set to the same value as `run_id`.""" - + original_attempt_run_id: Optional[int] = None """If this run is a retry of a prior run attempt, this field contains the run_id of the original attempt; otherwise, it is the same as the run_id.""" - + overriding_parameters: Optional[RunParameters] = None """The parameters used for this run.""" - + queue_duration: Optional[int] = None """The time in milliseconds that the run has spent in the queue.""" - + repair_history: Optional[List[RepairHistoryItem]] = None """The repair history of the run.""" - + run_duration: Optional[int] = None """The time in milliseconds it took the job run and all of its repairs to finish.""" - + run_id: Optional[int] = None """The canonical identifier of the run. This ID is unique across all runs of all jobs.""" - + run_name: Optional[str] = None """An optional name for the run. The maximum length is 4096 bytes in UTF-8 encoding.""" - + run_page_url: Optional[str] = None """The URL to the detail page of the run.""" - + run_type: Optional[RunType] = None """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with :method:jobs/submit. [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow""" - + schedule: Optional[CronSchedule] = None """The cron schedule that triggered this run if it was triggered by the periodic scheduler.""" - + setup_duration: Optional[int] = None """The time in milliseconds it took to set up the cluster. For runs that run on new clusters this is the cluster creation time, for runs that run on existing clusters this time should be very short.
The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + start_time: Optional[int] = None """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC). This may not be the time when the job task starts executing, for example, if the job is scheduled to run on a new cluster, this is the time the cluster creation call is issued.""" - + state: Optional[RunState] = None """Deprecated. Please use the `status` field instead.""" - + status: Optional[RunStatus] = None """The current status of the run""" - + tasks: Optional[List[RunTask]] = None """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object root to determine if more results are available.""" - + trigger: Optional[TriggerType] = None """The type of trigger that fired this run. @@ -4366,96 +5172,200 @@ class Run: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by a user to manually restart a continuous job run.""" - + trigger_info: Optional[TriggerInfo] = None """Additional details about what triggered the run""" - + def as_dict(self) -> dict: """Serializes the Run into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attempt_number is not None: body['attempt_number'] = self.attempt_number - if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration - if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() - if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict() - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.description is not None: body['description'] = self.description - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value - if self.end_time is not None: body['end_time'] = self.end_time - if self.execution_duration is not None: body['execution_duration'] = self.execution_duration - if self.git_source: body['git_source'] = self.git_source.as_dict() - if self.has_more is not None: body['has_more'] = self.has_more - if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations] - if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] - if self.job_run_id is not None: body['job_run_id'] = self.job_run_id - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.number_in_job is not None: body['number_in_job'] = self.number_in_job - if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id - if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict() - if self.queue_duration is not None: body['queue_duration'] = self.queue_duration - if self.repair_history: body['repair_history'] = [v.as_dict() for v in
self.repair_history] - if self.run_duration is not None: body['run_duration'] = self.run_duration - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_page_url is not None: body['run_page_url'] = self.run_page_url - if self.run_type is not None: body['run_type'] = self.run_type.value - if self.schedule: body['schedule'] = self.schedule.as_dict() - if self.setup_duration is not None: body['setup_duration'] = self.setup_duration - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state.as_dict() - if self.status: body['status'] = self.status.as_dict() - if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] - if self.trigger is not None: body['trigger'] = self.trigger.value - if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict() + if self.attempt_number is not None: + body["attempt_number"] = self.attempt_number + if self.cleanup_duration is not None: + body["cleanup_duration"] = self.cleanup_duration + if self.cluster_instance: + body["cluster_instance"] = self.cluster_instance.as_dict() + if self.cluster_spec: + body["cluster_spec"] = self.cluster_spec.as_dict() + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.description is not None: + body["description"] = self.description + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target.value + if self.end_time is not None: + body["end_time"] = self.end_time + if self.execution_duration is not None: + body["execution_duration"] = self.execution_duration + if self.git_source: + body["git_source"] = self.git_source.as_dict() + if self.has_more is not None: + body["has_more"] = self.has_more + if self.iterations: + body["iterations"] = [v.as_dict() for v in self.iterations] + if self.job_clusters: + body["job_clusters"] = [v.as_dict() for v in self.job_clusters] + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + body["job_parameters"] = [v.as_dict() for v in self.job_parameters] + if self.job_run_id is not None: + body["job_run_id"] = self.job_run_id + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.number_in_job is not None: + body["number_in_job"] = self.number_in_job + if self.original_attempt_run_id is not None: + body["original_attempt_run_id"] = self.original_attempt_run_id + if self.overriding_parameters: + body["overriding_parameters"] = self.overriding_parameters.as_dict() + if self.queue_duration is not None: + body["queue_duration"] = self.queue_duration + if self.repair_history: + body["repair_history"] = [v.as_dict() for v in self.repair_history] + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_page_url is not None: + body["run_page_url"] = self.run_page_url + if self.run_type is not None: + body["run_type"] = self.run_type.value + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.setup_duration is not None: + body["setup_duration"] = self.setup_duration + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state.as_dict() + if self.status: + body["status"] = self.status.as_dict() + if self.tasks: + 
body["tasks"] = [v.as_dict() for v in self.tasks] + if self.trigger is not None: + body["trigger"] = self.trigger.value + if self.trigger_info: + body["trigger_info"] = self.trigger_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Run into a shallow dictionary of its immediate attributes.""" body = {} - if self.attempt_number is not None: body['attempt_number'] = self.attempt_number - if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration - if self.cluster_instance: body['cluster_instance'] = self.cluster_instance - if self.cluster_spec: body['cluster_spec'] = self.cluster_spec - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.description is not None: body['description'] = self.description - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target - if self.end_time is not None: body['end_time'] = self.end_time - if self.execution_duration is not None: body['execution_duration'] = self.execution_duration - if self.git_source: body['git_source'] = self.git_source - if self.has_more is not None: body['has_more'] = self.has_more - if self.iterations: body['iterations'] = self.iterations - if self.job_clusters: body['job_clusters'] = self.job_clusters - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.job_run_id is not None: body['job_run_id'] = self.job_run_id - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.number_in_job is not None: body['number_in_job'] = self.number_in_job - if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id - if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters - if self.queue_duration is not None: body['queue_duration'] = self.queue_duration - if self.repair_history: body['repair_history'] = self.repair_history - if self.run_duration is not None: body['run_duration'] = self.run_duration - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_page_url is not None: body['run_page_url'] = self.run_page_url - if self.run_type is not None: body['run_type'] = self.run_type - if self.schedule: body['schedule'] = self.schedule - if self.setup_duration is not None: body['setup_duration'] = self.setup_duration - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state - if self.status: body['status'] = self.status - if self.tasks: body['tasks'] = self.tasks - if self.trigger is not None: body['trigger'] = self.trigger - if self.trigger_info: body['trigger_info'] = self.trigger_info + if self.attempt_number is not None: + body["attempt_number"] = self.attempt_number + if self.cleanup_duration is not None: + body["cleanup_duration"] = self.cleanup_duration + if self.cluster_instance: + body["cluster_instance"] = self.cluster_instance + if self.cluster_spec: + body["cluster_spec"] = self.cluster_spec + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.description is not None: + body["description"] = self.description + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target + if self.end_time is not None: + body["end_time"] = 
self.end_time + if self.execution_duration is not None: + body["execution_duration"] = self.execution_duration + if self.git_source: + body["git_source"] = self.git_source + if self.has_more is not None: + body["has_more"] = self.has_more + if self.iterations: + body["iterations"] = self.iterations + if self.job_clusters: + body["job_clusters"] = self.job_clusters + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.job_run_id is not None: + body["job_run_id"] = self.job_run_id + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.number_in_job is not None: + body["number_in_job"] = self.number_in_job + if self.original_attempt_run_id is not None: + body["original_attempt_run_id"] = self.original_attempt_run_id + if self.overriding_parameters: + body["overriding_parameters"] = self.overriding_parameters + if self.queue_duration is not None: + body["queue_duration"] = self.queue_duration + if self.repair_history: + body["repair_history"] = self.repair_history + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_page_url is not None: + body["run_page_url"] = self.run_page_url + if self.run_type is not None: + body["run_type"] = self.run_type + if self.schedule: + body["schedule"] = self.schedule + if self.setup_duration is not None: + body["setup_duration"] = self.setup_duration + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state + if self.status: + body["status"] = self.status + if self.tasks: + body["tasks"] = self.tasks + if self.trigger is not None: + body["trigger"] = self.trigger + if self.trigger_info: + body["trigger_info"] = self.trigger_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Run: """Deserializes the Run from a dictionary.""" - return cls(attempt_number=d.get('attempt_number', None), cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec), creator_user_name=d.get('creator_user_name', None), description=d.get('description', None), effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), has_more=d.get('has_more', None), iterations=_repeated_dict(d, 'iterations', RunTask), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), job_run_id=d.get('job_run_id', None), next_page_token=d.get('next_page_token', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), queue_duration=d.get('queue_duration', None), repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem), run_duration=d.get('run_duration', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_page_url=d.get('run_page_url', None), run_type=_enum(d, 'run_type', RunType), schedule=_from_dict(d, 'schedule', CronSchedule), setup_duration=d.get('setup_duration', None), 
start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), status=_from_dict(d, 'status', RunStatus), tasks=_repeated_dict(d, 'tasks', RunTask), trigger=_enum(d, 'trigger', TriggerType), trigger_info=_from_dict(d, 'trigger_info', TriggerInfo)) - - + return cls( + attempt_number=d.get("attempt_number", None), + cleanup_duration=d.get("cleanup_duration", None), + cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance), + cluster_spec=_from_dict(d, "cluster_spec", ClusterSpec), + creator_user_name=d.get("creator_user_name", None), + description=d.get("description", None), + effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + end_time=d.get("end_time", None), + execution_duration=d.get("execution_duration", None), + git_source=_from_dict(d, "git_source", GitSource), + has_more=d.get("has_more", None), + iterations=_repeated_dict(d, "iterations", RunTask), + job_clusters=_repeated_dict(d, "job_clusters", JobCluster), + job_id=d.get("job_id", None), + job_parameters=_repeated_dict(d, "job_parameters", JobParameter), + job_run_id=d.get("job_run_id", None), + next_page_token=d.get("next_page_token", None), + number_in_job=d.get("number_in_job", None), + original_attempt_run_id=d.get("original_attempt_run_id", None), + overriding_parameters=_from_dict(d, "overriding_parameters", RunParameters), + queue_duration=d.get("queue_duration", None), + repair_history=_repeated_dict(d, "repair_history", RepairHistoryItem), + run_duration=d.get("run_duration", None), + run_id=d.get("run_id", None), + run_name=d.get("run_name", None), + run_page_url=d.get("run_page_url", None), + run_type=_enum(d, "run_type", RunType), + schedule=_from_dict(d, "schedule", CronSchedule), + setup_duration=d.get("setup_duration", None), + start_time=d.get("start_time", None), + state=_from_dict(d, "state", RunState), + status=_from_dict(d, "status", RunStatus), + tasks=_repeated_dict(d, "tasks", RunTask), + trigger=_enum(d, "trigger", TriggerType), + trigger_info=_from_dict(d, "trigger_info", TriggerInfo), + ) @dataclass @@ -4470,138 +5380,161 @@ class RunConditionTask: The boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.""" - + left: str """The left operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + right: str """The right operand of the condition task. Can be either a string value or a job state or parameter reference.""" - + outcome: Optional[str] = None """The condition expression evaluation result. Filled in if the task was successfully completed. 
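# Illustrative sketch (values invented) of the duration arithmetic described in the Run
# field docstrings: for a single-task run the total is the sum of the three phases;
# multitask job runs set each phase to 0 and report the total in `run_duration` instead.
run = Run.from_dict({"run_id": 42, "setup_duration": 4000, "execution_duration": 60000, "cleanup_duration": 1000})
total_ms = (run.setup_duration or 0) + (run.execution_duration or 0) + (run.cleanup_duration or 0)  # 65000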
Can be `"true"` or `"false"`.""" - + def as_dict(self) -> dict: """Serializes the RunConditionTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.left is not None: body['left'] = self.left - if self.op is not None: body['op'] = self.op.value - if self.outcome is not None: body['outcome'] = self.outcome - if self.right is not None: body['right'] = self.right + if self.left is not None: + body["left"] = self.left + if self.op is not None: + body["op"] = self.op.value + if self.outcome is not None: + body["outcome"] = self.outcome + if self.right is not None: + body["right"] = self.right return body def as_shallow_dict(self) -> dict: """Serializes the RunConditionTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.left is not None: body['left'] = self.left - if self.op is not None: body['op'] = self.op - if self.outcome is not None: body['outcome'] = self.outcome - if self.right is not None: body['right'] = self.right + if self.left is not None: + body["left"] = self.left + if self.op is not None: + body["op"] = self.op + if self.outcome is not None: + body["outcome"] = self.outcome + if self.right is not None: + body["right"] = self.right return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunConditionTask: """Deserializes the RunConditionTask from a dictionary.""" - return cls(left=d.get('left', None), op=_enum(d, 'op', ConditionTaskOp), outcome=d.get('outcome', None), right=d.get('right', None)) - - + return cls( + left=d.get("left", None), + op=_enum(d, "op", ConditionTaskOp), + outcome=d.get("outcome", None), + right=d.get("right", None), + ) @dataclass class RunForEachTask: inputs: str """Array for the task to iterate on. This can be a JSON string or a reference to an array parameter.""" - + task: Task """Configuration for the task that will be run for each element in the array""" - + concurrency: Optional[int] = None """An optional maximum allowed number of concurrent runs of the task. Set this value if you want to be able to execute multiple runs of the task concurrently.""" - + stats: Optional[ForEachStats] = None """Read only field.
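# Illustrative sketch (payload invented): a completed condition task as it might be
# deserialized from a GetRun response. `op` becomes a ConditionTaskOp member and
# `outcome` holds the serialized boolean comparison result described above.
cond = RunConditionTask.from_dict({"left": "true", "op": "EQUAL_TO", "right": "true", "outcome": "true"})
assert cond.op is ConditionTaskOp.EQUAL_TO and cond.outcome == "true"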
Populated for GetRun and ListRuns RPC calls and stores the execution stats of a for-each task""" - + def as_dict(self) -> dict: """Serializes the RunForEachTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.concurrency is not None: body['concurrency'] = self.concurrency - if self.inputs is not None: body['inputs'] = self.inputs - if self.stats: body['stats'] = self.stats.as_dict() - if self.task: body['task'] = self.task.as_dict() + if self.concurrency is not None: + body["concurrency"] = self.concurrency + if self.inputs is not None: + body["inputs"] = self.inputs + if self.stats: + body["stats"] = self.stats.as_dict() + if self.task: + body["task"] = self.task.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunForEachTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.concurrency is not None: body['concurrency'] = self.concurrency - if self.inputs is not None: body['inputs'] = self.inputs - if self.stats: body['stats'] = self.stats - if self.task: body['task'] = self.task + if self.concurrency is not None: + body["concurrency"] = self.concurrency + if self.inputs is not None: + body["inputs"] = self.inputs + if self.stats: + body["stats"] = self.stats + if self.task: + body["task"] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunForEachTask: """Deserializes the RunForEachTask from a dictionary.""" - return cls(concurrency=d.get('concurrency', None), inputs=d.get('inputs', None), stats=_from_dict(d, 'stats', ForEachStats), task=_from_dict(d, 'task', Task)) - - + return cls( + concurrency=d.get("concurrency", None), + inputs=d.get("inputs", None), + stats=_from_dict(d, "stats", ForEachStats), + task=_from_dict(d, "task", Task), + ) class RunIf(Enum): """An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`.
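# Illustrative sketch (shape invented; a real nested Task also needs a task payload such
# as `notebook_task`): a for-each task fanning out over a JSON array input with at most
# two concurrent iterations. `stats` stays unset on requests; GetRun fills it in.
fan_out = RunForEachTask(inputs='["us-east-1", "eu-west-1"]', task=Task(task_key="per_region"), concurrency=2)
request_body = fan_out.as_dict()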
- + Possible values are: * `ALL_SUCCESS`: All dependencies have executed and succeeded * `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: All dependencies have failed""" - - ALL_DONE = 'ALL_DONE' - ALL_FAILED = 'ALL_FAILED' - ALL_SUCCESS = 'ALL_SUCCESS' - AT_LEAST_ONE_FAILED = 'AT_LEAST_ONE_FAILED' - AT_LEAST_ONE_SUCCESS = 'AT_LEAST_ONE_SUCCESS' - NONE_FAILED = 'NONE_FAILED' + + ALL_DONE = "ALL_DONE" + ALL_FAILED = "ALL_FAILED" + ALL_SUCCESS = "ALL_SUCCESS" + AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED" + AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS" + NONE_FAILED = "NONE_FAILED" + @dataclass class RunJobOutput: run_id: Optional[int] = None """The run id of the triggered job run""" - + def as_dict(self) -> dict: """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput: """Deserializes the RunJobOutput from a dictionary.""" - return cls(run_id=d.get('run_id', None)) - - + return cls(run_id=d.get("run_id", None)) @dataclass class RunJobTask: job_id: int """ID of the job to trigger.""" - + dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - + jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -4612,11 +5545,11 @@ class RunJobTask: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str,str]] = None + + job_parameters: Optional[Dict[str, str]] = None """Job-level parameters used to trigger the job.""" - - notebook_params: Optional[Dict[str,str]] = None + + notebook_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. @@ -4632,12 +5565,12 @@ class RunJobTask: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str,str]] = None - + + python_named_params: Optional[Dict[str, str]] = None + python_params: Optional[List[str]] = None """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to the Python file as command-line parameters.
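# Illustrative sketch: RunIf members round-trip as their string values, which is how they
# appear in request and response bodies, and RunJobOutput carries the triggered run's ID
# (value invented).
assert RunIf("ALL_DONE") is RunIf.ALL_DONE
triggered = RunJobOutput.from_dict({"run_id": 987})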
If specified upon @@ -4653,7 +5586,7 @@ class RunJobTask: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -4670,47 +5603,76 @@ class RunJobTask: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str,str]] = None + + sql_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - + def as_dict(self) -> dict: """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands] - if self.jar_params: body['jar_params'] = [v for v in self.jar_params] - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = [v for v in self.python_params] - if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params] - if self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = [v for v in self.dbt_commands] + if self.jar_params: + body["jar_params"] = [v for v in self.jar_params] + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params.as_dict() + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = [v for v in self.python_params] + if self.spark_submit_params: + body["spark_submit_params"] = [v for v in self.spark_submit_params] + if self.sql_params: + body["sql_params"] = self.sql_params return body def as_shallow_dict(self) -> dict: """Serializes the RunJobTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_commands: body['dbt_commands'] = self.dbt_commands - if self.jar_params: body['jar_params'] = self.jar_params - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = self.python_params - if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params - if self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = self.dbt_commands + if self.jar_params: + body["jar_params"] = self.jar_params + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + 
body["job_parameters"] = self.job_parameters + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = self.python_params + if self.spark_submit_params: + body["spark_submit_params"] = self.spark_submit_params + if self.sql_params: + body["sql_params"] = self.sql_params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunJobTask: """Deserializes the RunJobTask from a dictionary.""" - return cls(dbt_commands=d.get('dbt_commands', None), jar_params=d.get('jar_params', None), job_id=d.get('job_id', None), job_parameters=d.get('job_parameters', None), notebook_params=d.get('notebook_params', None), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None)) - - + return cls( + dbt_commands=d.get("dbt_commands", None), + jar_params=d.get("jar_params", None), + job_id=d.get("job_id", None), + job_parameters=d.get("job_parameters", None), + notebook_params=d.get("notebook_params", None), + pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), + python_named_params=d.get("python_named_params", None), + python_params=d.get("python_params", None), + spark_submit_params=d.get("spark_submit_params", None), + sql_params=d.get("sql_params", None), + ) class RunLifeCycleState(Enum): @@ -4725,37 +5687,39 @@ class RunLifeCycleState(Enum): long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry.""" - - BLOCKED = 'BLOCKED' - INTERNAL_ERROR = 'INTERNAL_ERROR' - PENDING = 'PENDING' - QUEUED = 'QUEUED' - RUNNING = 'RUNNING' - SKIPPED = 'SKIPPED' - TERMINATED = 'TERMINATED' - TERMINATING = 'TERMINATING' - WAITING_FOR_RETRY = 'WAITING_FOR_RETRY' + + BLOCKED = "BLOCKED" + INTERNAL_ERROR = "INTERNAL_ERROR" + PENDING = "PENDING" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + SKIPPED = "SKIPPED" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + WAITING_FOR_RETRY = "WAITING_FOR_RETRY" + class RunLifecycleStateV2State(Enum): """The current state of the run.""" - - BLOCKED = 'BLOCKED' - PENDING = 'PENDING' - QUEUED = 'QUEUED' - RUNNING = 'RUNNING' - TERMINATED = 'TERMINATED' - TERMINATING = 'TERMINATING' - WAITING = 'WAITING' + + BLOCKED = "BLOCKED" + PENDING = "PENDING" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + WAITING = "WAITING" + @dataclass class RunNow: job_id: int """The ID of the job to be executed""" - + dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - + idempotency_token: Optional[str] = None """An optional token to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing @@ -4769,7 +5733,7 @@ class RunNow: For more information, see [How to ensure idempotency for jobs]. 
[How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - + jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -4780,11 +5744,11 @@ class RunNow: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str,str]] = None + + job_parameters: Optional[Dict[str, str]] = None """Job-level parameters used in the run, for example `"param": "overriding_val"`""" - - notebook_params: Optional[Dict[str,str]] = None + + notebook_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. @@ -4800,11 +5764,11 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - + only: Optional[List[str]] = None """A list of task keys to run inside of the job. If this field is not provided, all tasks in the job will be run.""" - + performance_target: Optional[PerformanceTarget] = None """The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined @@ -4813,12 +5777,12 @@ class RunNow: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str,str]] = None - + + python_named_params: Optional[Dict[str, str]] = None + python_params: Optional[List[str]] = None """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to the Python file as command-line parameters. If specified upon @@ -4834,10 +5798,10 @@ class RunNow: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + queue: Optional[QueueSettings] = None """The queue settings of the run.""" - + spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -4854,113 +5818,156 @@ class RunNow: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str,str]] = None + + sql_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`.
The SQL alert task does not support custom parameters.""" - + def as_dict(self) -> dict: """Serializes the RunNow into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands] - if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token - if self.jar_params: body['jar_params'] = [v for v in self.jar_params] - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.only: body['only'] = [v for v in self.only] - if self.performance_target is not None: body['performance_target'] = self.performance_target.value - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = [v for v in self.python_params] - if self.queue: body['queue'] = self.queue.as_dict() - if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params] - if self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = [v for v in self.dbt_commands] + if self.idempotency_token is not None: + body["idempotency_token"] = self.idempotency_token + if self.jar_params: + body["jar_params"] = [v for v in self.jar_params] + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.only: + body["only"] = [v for v in self.only] + if self.performance_target is not None: + body["performance_target"] = self.performance_target.value + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params.as_dict() + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = [v for v in self.python_params] + if self.queue: + body["queue"] = self.queue.as_dict() + if self.spark_submit_params: + body["spark_submit_params"] = [v for v in self.spark_submit_params] + if self.sql_params: + body["sql_params"] = self.sql_params return body def as_shallow_dict(self) -> dict: """Serializes the RunNow into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_commands: body['dbt_commands'] = self.dbt_commands - if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token - if self.jar_params: body['jar_params'] = self.jar_params - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_parameters: body['job_parameters'] = self.job_parameters - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.only: body['only'] = self.only - if self.performance_target is not None: body['performance_target'] = self.performance_target - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = self.python_params - if self.queue: body['queue'] = self.queue - if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params - if self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = self.dbt_commands + if self.idempotency_token is 
not None: + body["idempotency_token"] = self.idempotency_token + if self.jar_params: + body["jar_params"] = self.jar_params + if self.job_id is not None: + body["job_id"] = self.job_id + if self.job_parameters: + body["job_parameters"] = self.job_parameters + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.only: + body["only"] = self.only + if self.performance_target is not None: + body["performance_target"] = self.performance_target + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = self.python_params + if self.queue: + body["queue"] = self.queue + if self.spark_submit_params: + body["spark_submit_params"] = self.spark_submit_params + if self.sql_params: + body["sql_params"] = self.sql_params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunNow: """Deserializes the RunNow from a dictionary.""" - return cls(dbt_commands=d.get('dbt_commands', None), idempotency_token=d.get('idempotency_token', None), jar_params=d.get('jar_params', None), job_id=d.get('job_id', None), job_parameters=d.get('job_parameters', None), notebook_params=d.get('notebook_params', None), only=d.get('only', None), performance_target=_enum(d, 'performance_target', PerformanceTarget), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), queue=_from_dict(d, 'queue', QueueSettings), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None)) - - + return cls( + dbt_commands=d.get("dbt_commands", None), + idempotency_token=d.get("idempotency_token", None), + jar_params=d.get("jar_params", None), + job_id=d.get("job_id", None), + job_parameters=d.get("job_parameters", None), + notebook_params=d.get("notebook_params", None), + only=d.get("only", None), + performance_target=_enum(d, "performance_target", PerformanceTarget), + pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), + python_named_params=d.get("python_named_params", None), + python_params=d.get("python_params", None), + queue=_from_dict(d, "queue", QueueSettings), + spark_submit_params=d.get("spark_submit_params", None), + sql_params=d.get("sql_params", None), + ) @dataclass class RunNowResponse: """Run was started successfully.""" - + number_in_job: Optional[int] = None """A unique identifier for this job run. 
This is set to the same value as `run_id`.""" - + run_id: Optional[int] = None """The globally unique ID of the newly triggered run.""" - + def as_dict(self) -> dict: """Serializes the RunNowResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.number_in_job is not None: body['number_in_job'] = self.number_in_job - if self.run_id is not None: body['run_id'] = self.run_id + if self.number_in_job is not None: + body["number_in_job"] = self.number_in_job + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the RunNowResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.number_in_job is not None: body['number_in_job'] = self.number_in_job - if self.run_id is not None: body['run_id'] = self.run_id + if self.number_in_job is not None: + body["number_in_job"] = self.number_in_job + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunNowResponse: """Deserializes the RunNowResponse from a dictionary.""" - return cls(number_in_job=d.get('number_in_job', None), run_id=d.get('run_id', None)) - - + return cls(number_in_job=d.get("number_in_job", None), run_id=d.get("run_id", None)) @dataclass class RunOutput: """Run output was retrieved successfully.""" - + clean_rooms_notebook_output: Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None """The output of a clean rooms notebook task, if available""" - + dashboard_output: Optional[DashboardTaskOutput] = None """The output of a dashboard task, if available""" - + dbt_cloud_output: Optional[DbtCloudTaskOutput] = None - + dbt_output: Optional[DbtOutput] = None """The output of a dbt task, if available.""" - + error: Optional[str] = None """An error message indicating why a task failed or why output is not available. The message is unstructured, and its exact format is subject to change.""" - + error_trace: Optional[str] = None """If there was an error executing the run, this field contains any available stack traces.""" - + info: Optional[str] = None - + logs: Optional[str] = None """The output from tasks that write to standard streams (stdout/stderr) such as spark_jar_task, spark_python_task, python_wheel_task. @@ -4968,13 +5975,13 @@ class RunOutput: It's not supported for the notebook_task, pipeline_task or spark_submit_task. Databricks restricts this API to return the last 5 MB of these logs.""" - + logs_truncated: Optional[bool] = None """Whether the logs are truncated.""" - + metadata: Optional[Run] = None """All details of the run except for its output.""" - + notebook_output: Optional[NotebookOutput] = None """The output of a notebook task, if available. A notebook task that terminates (either successfully or with a failure) without calling `dbutils.notebook.exit()` is considered to have @@ -4983,55 +5990,95 @@ class RunOutput: field to configure log storage for the job cluster. 
[ClusterLogConf]: https://docs.databricks.com/dev-tools/api/latest/clusters.html#clusterlogconf""" - + run_job_output: Optional[RunJobOutput] = None """The output of a run job task, if available""" - + sql_output: Optional[SqlOutput] = None """The output of a SQL task, if available.""" - + def as_dict(self) -> dict: """Serializes the RunOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms_notebook_output: body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict() - if self.dashboard_output: body['dashboard_output'] = self.dashboard_output.as_dict() - if self.dbt_cloud_output: body['dbt_cloud_output'] = self.dbt_cloud_output.as_dict() - if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict() - if self.error is not None: body['error'] = self.error - if self.error_trace is not None: body['error_trace'] = self.error_trace - if self.info is not None: body['info'] = self.info - if self.logs is not None: body['logs'] = self.logs - if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated - if self.metadata: body['metadata'] = self.metadata.as_dict() - if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict() - if self.run_job_output: body['run_job_output'] = self.run_job_output.as_dict() - if self.sql_output: body['sql_output'] = self.sql_output.as_dict() + if self.clean_rooms_notebook_output: + body["clean_rooms_notebook_output"] = self.clean_rooms_notebook_output.as_dict() + if self.dashboard_output: + body["dashboard_output"] = self.dashboard_output.as_dict() + if self.dbt_cloud_output: + body["dbt_cloud_output"] = self.dbt_cloud_output.as_dict() + if self.dbt_output: + body["dbt_output"] = self.dbt_output.as_dict() + if self.error is not None: + body["error"] = self.error + if self.error_trace is not None: + body["error_trace"] = self.error_trace + if self.info is not None: + body["info"] = self.info + if self.logs is not None: + body["logs"] = self.logs + if self.logs_truncated is not None: + body["logs_truncated"] = self.logs_truncated + if self.metadata: + body["metadata"] = self.metadata.as_dict() + if self.notebook_output: + body["notebook_output"] = self.notebook_output.as_dict() + if self.run_job_output: + body["run_job_output"] = self.run_job_output.as_dict() + if self.sql_output: + body["sql_output"] = self.sql_output.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms_notebook_output: body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output - if self.dashboard_output: body['dashboard_output'] = self.dashboard_output - if self.dbt_cloud_output: body['dbt_cloud_output'] = self.dbt_cloud_output - if self.dbt_output: body['dbt_output'] = self.dbt_output - if self.error is not None: body['error'] = self.error - if self.error_trace is not None: body['error_trace'] = self.error_trace - if self.info is not None: body['info'] = self.info - if self.logs is not None: body['logs'] = self.logs - if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated - if self.metadata: body['metadata'] = self.metadata - if self.notebook_output: body['notebook_output'] = self.notebook_output - if self.run_job_output: body['run_job_output'] = self.run_job_output - if self.sql_output: body['sql_output'] = self.sql_output + if self.clean_rooms_notebook_output: + body["clean_rooms_notebook_output"] = 
self.clean_rooms_notebook_output + if self.dashboard_output: + body["dashboard_output"] = self.dashboard_output + if self.dbt_cloud_output: + body["dbt_cloud_output"] = self.dbt_cloud_output + if self.dbt_output: + body["dbt_output"] = self.dbt_output + if self.error is not None: + body["error"] = self.error + if self.error_trace is not None: + body["error_trace"] = self.error_trace + if self.info is not None: + body["info"] = self.info + if self.logs is not None: + body["logs"] = self.logs + if self.logs_truncated is not None: + body["logs_truncated"] = self.logs_truncated + if self.metadata: + body["metadata"] = self.metadata + if self.notebook_output: + body["notebook_output"] = self.notebook_output + if self.run_job_output: + body["run_job_output"] = self.run_job_output + if self.sql_output: + body["sql_output"] = self.sql_output return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunOutput: """Deserializes the RunOutput from a dictionary.""" - return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output', CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput), dashboard_output=_from_dict(d, 'dashboard_output', DashboardTaskOutput), dbt_cloud_output=_from_dict(d, 'dbt_cloud_output', DbtCloudTaskOutput), dbt_output=_from_dict(d, 'dbt_output', DbtOutput), error=d.get('error', None), error_trace=d.get('error_trace', None), info=d.get('info', None), logs=d.get('logs', None), logs_truncated=d.get('logs_truncated', None), metadata=_from_dict(d, 'metadata', Run), notebook_output=_from_dict(d, 'notebook_output', NotebookOutput), run_job_output=_from_dict(d, 'run_job_output', RunJobOutput), sql_output=_from_dict(d, 'sql_output', SqlOutput)) - - + return cls( + clean_rooms_notebook_output=_from_dict( + d, "clean_rooms_notebook_output", CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput + ), + dashboard_output=_from_dict(d, "dashboard_output", DashboardTaskOutput), + dbt_cloud_output=_from_dict(d, "dbt_cloud_output", DbtCloudTaskOutput), + dbt_output=_from_dict(d, "dbt_output", DbtOutput), + error=d.get("error", None), + error_trace=d.get("error_trace", None), + info=d.get("info", None), + logs=d.get("logs", None), + logs_truncated=d.get("logs_truncated", None), + metadata=_from_dict(d, "metadata", Run), + notebook_output=_from_dict(d, "notebook_output", NotebookOutput), + run_job_output=_from_dict(d, "run_job_output", RunJobOutput), + sql_output=_from_dict(d, "sql_output", SqlOutput), + ) @dataclass @@ -5039,7 +6086,7 @@ class RunParameters: dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - + jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -5050,8 +6097,8 @@ class RunParameters: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - notebook_params: Optional[Dict[str,str]] = None + + notebook_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. 
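A minimal usage sketch for the run-parameter surface above (an illustration under stated assumptions, not part of the generated code: it presumes a workspace with a configured authentication profile and an existing job, here the placeholder ID 1234; the names used come from this SDK's `databricks.sdk` and `databricks.sdk.service.jobs` modules):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Trigger a run, overriding notebook widgets. run_now() returns a waiter;
    # .result() blocks until the run reaches a terminal lifecycle state.
    run = w.jobs.run_now(
        job_id=1234,  # placeholder: an existing job in the workspace
        notebook_params={"name": "john doe", "age": "35"},
    ).result()

    # Output is retrieved per task run; a multitask run has one entry per task.
    for task in run.tasks or []:
        output = w.jobs.get_run_output(run_id=task.run_id)
        print(task.task_key, output.logs)

    # The generated dataclasses serialize symmetrically through the
    # as_dict()/from_dict() helpers defined above.
    request = jobs.RunNow(job_id=1234, notebook_params={"name": "john doe"})
    assert jobs.RunNow.from_dict(request.as_dict()).job_id == 1234
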
@@ -5067,12 +6114,12 @@ class RunParameters: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - + pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str,str]] = None - + + python_named_params: Optional[Dict[str, str]] = None + python_params: Optional[List[str]] = None """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon @@ -5088,7 +6135,7 @@ class RunParameters: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -5105,43 +6152,66 @@ class RunParameters: emojis. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str,str]] = None + + sql_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - + def as_dict(self) -> dict: """Serializes the RunParameters into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbt_commands: body['dbt_commands'] = [v for v in self.dbt_commands] - if self.jar_params: body['jar_params'] = [v for v in self.jar_params] - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict() - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = [v for v in self.python_params] - if self.spark_submit_params: body['spark_submit_params'] = [v for v in self.spark_submit_params] - if self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = [v for v in self.dbt_commands] + if self.jar_params: + body["jar_params"] = [v for v in self.jar_params] + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params.as_dict() + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = [v for v in self.python_params] + if self.spark_submit_params: + body["spark_submit_params"] = [v for v in self.spark_submit_params] + if self.sql_params: + body["sql_params"] = self.sql_params return body def as_shallow_dict(self) -> dict: """Serializes the RunParameters into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbt_commands: body['dbt_commands'] = self.dbt_commands - if self.jar_params: body['jar_params'] = self.jar_params - if self.notebook_params: body['notebook_params'] = self.notebook_params - if self.pipeline_params: body['pipeline_params'] = self.pipeline_params - if self.python_named_params: body['python_named_params'] = self.python_named_params - if self.python_params: body['python_params'] = self.python_params - if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params - if 
self.sql_params: body['sql_params'] = self.sql_params + if self.dbt_commands: + body["dbt_commands"] = self.dbt_commands + if self.jar_params: + body["jar_params"] = self.jar_params + if self.notebook_params: + body["notebook_params"] = self.notebook_params + if self.pipeline_params: + body["pipeline_params"] = self.pipeline_params + if self.python_named_params: + body["python_named_params"] = self.python_named_params + if self.python_params: + body["python_params"] = self.python_params + if self.spark_submit_params: + body["spark_submit_params"] = self.spark_submit_params + if self.sql_params: + body["sql_params"] = self.sql_params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunParameters: """Deserializes the RunParameters from a dictionary.""" - return cls(dbt_commands=d.get('dbt_commands', None), jar_params=d.get('jar_params', None), notebook_params=d.get('notebook_params', None), pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams), python_named_params=d.get('python_named_params', None), python_params=d.get('python_params', None), spark_submit_params=d.get('spark_submit_params', None), sql_params=d.get('sql_params', None)) - - + return cls( + dbt_commands=d.get("dbt_commands", None), + jar_params=d.get("jar_params", None), + notebook_params=d.get("notebook_params", None), + pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), + python_named_params=d.get("python_named_params", None), + python_params=d.get("python_params", None), + spark_submit_params=d.get("spark_submit_params", None), + sql_params=d.get("sql_params", None), + ) class RunResultState(Enum): @@ -5154,164 +6224,187 @@ class RunResultState(Enum): successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`: The run was skipped because it was disabled explicitly by the user.""" - - CANCELED = 'CANCELED' - DISABLED = 'DISABLED' - EXCLUDED = 'EXCLUDED' - FAILED = 'FAILED' - MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED' - SUCCESS = 'SUCCESS' - SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES' - TIMEDOUT = 'TIMEDOUT' - UPSTREAM_CANCELED = 'UPSTREAM_CANCELED' - UPSTREAM_FAILED = 'UPSTREAM_FAILED' + + CANCELED = "CANCELED" + DISABLED = "DISABLED" + EXCLUDED = "EXCLUDED" + FAILED = "FAILED" + MAXIMUM_CONCURRENT_RUNS_REACHED = "MAXIMUM_CONCURRENT_RUNS_REACHED" + SUCCESS = "SUCCESS" + SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES" + TIMEDOUT = "TIMEDOUT" + UPSTREAM_CANCELED = "UPSTREAM_CANCELED" + UPSTREAM_FAILED = "UPSTREAM_FAILED" + @dataclass class RunState: """The current state of the run.""" - + life_cycle_state: Optional[RunLifeCycleState] = None """A value indicating the run's current lifecycle state. This field is always available in the response. Note: Additional states might be introduced in future releases.""" - + queue_reason: Optional[str] = None """The reason indicating why the run was queued.""" - + result_state: Optional[RunResultState] = None """A value indicating the run's result. This field is only available for terminal lifecycle states. Note: Additional states might be introduced in future releases.""" - + state_message: Optional[str] = None """A descriptive message for the current state. 
This field is unstructured, and its exact format is subject to change.""" - + user_cancelled_or_timedout: Optional[bool] = None """A value indicating whether a run was canceled manually by a user or by the scheduler because the run timed out.""" - + def as_dict(self) -> dict: """Serializes the RunState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value - if self.queue_reason is not None: body['queue_reason'] = self.queue_reason - if self.result_state is not None: body['result_state'] = self.result_state.value - if self.state_message is not None: body['state_message'] = self.state_message - if self.user_cancelled_or_timedout is not None: body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout + if self.life_cycle_state is not None: + body["life_cycle_state"] = self.life_cycle_state.value + if self.queue_reason is not None: + body["queue_reason"] = self.queue_reason + if self.result_state is not None: + body["result_state"] = self.result_state.value + if self.state_message is not None: + body["state_message"] = self.state_message + if self.user_cancelled_or_timedout is not None: + body["user_cancelled_or_timedout"] = self.user_cancelled_or_timedout return body def as_shallow_dict(self) -> dict: """Serializes the RunState into a shallow dictionary of its immediate attributes.""" body = {} - if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state - if self.queue_reason is not None: body['queue_reason'] = self.queue_reason - if self.result_state is not None: body['result_state'] = self.result_state - if self.state_message is not None: body['state_message'] = self.state_message - if self.user_cancelled_or_timedout is not None: body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout + if self.life_cycle_state is not None: + body["life_cycle_state"] = self.life_cycle_state + if self.queue_reason is not None: + body["queue_reason"] = self.queue_reason + if self.result_state is not None: + body["result_state"] = self.result_state + if self.state_message is not None: + body["state_message"] = self.state_message + if self.user_cancelled_or_timedout is not None: + body["user_cancelled_or_timedout"] = self.user_cancelled_or_timedout return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunState: """Deserializes the RunState from a dictionary.""" - return cls(life_cycle_state=_enum(d, 'life_cycle_state', RunLifeCycleState), queue_reason=d.get('queue_reason', None), result_state=_enum(d, 'result_state', RunResultState), state_message=d.get('state_message', None), user_cancelled_or_timedout=d.get('user_cancelled_or_timedout', None)) - - + return cls( + life_cycle_state=_enum(d, "life_cycle_state", RunLifeCycleState), + queue_reason=d.get("queue_reason", None), + result_state=_enum(d, "result_state", RunResultState), + state_message=d.get("state_message", None), + user_cancelled_or_timedout=d.get("user_cancelled_or_timedout", None), + ) @dataclass class RunStatus: """The current status of the run""" - + queue_details: Optional[QueueDetails] = None """If the run was queued, details about the reason for queuing the run.""" - + state: Optional[RunLifecycleStateV2State] = None """The current state of the run.""" - + termination_details: Optional[TerminationDetails] = None """If the run is in a TERMINATING or TERMINATED state, details about the reason for terminating the run.""" - + def as_dict(self) -> dict: """Serializes the 
RunStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.queue_details: body['queue_details'] = self.queue_details.as_dict() - if self.state is not None: body['state'] = self.state.value - if self.termination_details: body['termination_details'] = self.termination_details.as_dict() + if self.queue_details: + body["queue_details"] = self.queue_details.as_dict() + if self.state is not None: + body["state"] = self.state.value + if self.termination_details: + body["termination_details"] = self.termination_details.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.queue_details: body['queue_details'] = self.queue_details - if self.state is not None: body['state'] = self.state - if self.termination_details: body['termination_details'] = self.termination_details + if self.queue_details: + body["queue_details"] = self.queue_details + if self.state is not None: + body["state"] = self.state + if self.termination_details: + body["termination_details"] = self.termination_details return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunStatus: """Deserializes the RunStatus from a dictionary.""" - return cls(queue_details=_from_dict(d, 'queue_details', QueueDetails), state=_enum(d, 'state', RunLifecycleStateV2State), termination_details=_from_dict(d, 'termination_details', TerminationDetails)) - - + return cls( + queue_details=_from_dict(d, "queue_details", QueueDetails), + state=_enum(d, "state", RunLifecycleStateV2State), + termination_details=_from_dict(d, "termination_details", TerminationDetails), + ) @dataclass class RunTask: """Used when outputting a child run, in GetRun or ListRuns.""" - + task_key: str """A unique name for the task. This field is used to refer to this task from other tasks. This field is required and must be unique within its parent job. On Update or Reset, this field is used to reference the tasks to be updated or reset.""" - + attempt_number: Optional[int] = None """The sequence number of this run attempt for a triggered job run. The initial attempt of a run has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" - + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" - + cleanup_duration: Optional[int] = None """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `cleanup_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + cluster_instance: Optional[ClusterInstance] = None """The cluster used for this run. 
If the run is specified to use a new cluster, this field is set once the Jobs service has requested a cluster for the run.""" - + condition_task: Optional[RunConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present. The condition task does not require a cluster to execute and does not support retries or notifications.""" - + dashboard_task: Optional[DashboardTask] = None """The task refreshes a dashboard and sends a snapshot to subscribers.""" - + dbt_cloud_task: Optional[DbtCloudTask] = None """Task type for dbt cloud""" - + dbt_task: Optional[DbtTask] = None """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" - + depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete successfully before executing this task. The key is `task_key`, and the value is the name assigned to the dependent task.""" - + description: Optional[str] = None """An optional description for this task.""" - + disabled: Optional[bool] = None """Deprecated, field was never used in production.""" - + effective_performance_target: Optional[PerformanceTarget] = None """The actual performance target used by the serverless run during execution. This can differ from the client-set performance target on the request depending on whether the performance mode is @@ -5320,37 +6413,37 @@ class RunTask: * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. The default behavior is to not send any emails.""" - + end_time: Optional[int] = None """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This field is set to 0 if the job is still running.""" - + environment_key: Optional[str] = None """The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.""" - + execution_duration: Optional[int] = None """The time in milliseconds it took to execute the commands in the JAR or notebook until they completed, failed, timed out, were cancelled, or encountered an unexpected error. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `execution_duration` field is set to 0 for multitask job runs. The total duration of a multitask job run is the value of the `run_duration` field.""" - + existing_cluster_id: Optional[str] = None """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops responding. 
We suggest running jobs and tasks on new clusters for greater reliability""" - + for_each_task: Optional[RunForEachTask] = None """The task executes a nested task for every input provided when the `for_each_task` field is present.""" - + gen_ai_compute_task: Optional[GenAiComputeTask] = None - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -5358,70 +6451,70 @@ However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + job_cluster_key: Optional[str] = None """If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.""" - + libraries: Optional[List[compute.Library]] = None """An optional list of libraries to be installed on the cluster. The default value is an empty list.""" - + new_cluster: Optional[compute.ClusterSpec] = None """If new_cluster, a description of a new cluster that is created for each run.""" - + notebook_task: Optional[NotebookTask] = None """The task runs a notebook when the `notebook_task` field is present.""" - + notification_settings: Optional[TaskNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task run.""" - + pipeline_task: Optional[PipelineTask] = None """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.""" - + power_bi_task: Optional[PowerBiTask] = None """The task triggers a Power BI semantic model update when the `power_bi_task` field is present.""" - + python_wheel_task: Optional[PythonWheelTask] = None """The task runs a Python wheel when the `python_wheel_task` field is present.""" - + queue_duration: Optional[int] = None """The time in milliseconds that the run has spent in the queue.""" - + resolved_values: Optional[ResolvedValues] = None """Parameter values including resolved references""" - + run_duration: Optional[int] = None """The time in milliseconds it took the job run and all of its repairs to finish.""" - + run_id: Optional[int] = None """The ID of the task run.""" - + run_if: Optional[RunIf] = None """An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. See :method:jobs/create for a list of possible values.""" - + run_job_task: Optional[RunJobTask] = None """The task triggers another job when the `run_job_task` field is present.""" - + run_page_url: Optional[str] = None - + setup_duration: Optional[int] = None """The time in milliseconds it took to set up the cluster. For runs that run on new clusters this is the cluster creation time, for runs that run on existing clusters this time should be very short. The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the `cleanup_duration`. The `setup_duration` field is set to 0 for multitask job runs.
The total duration of a multitask job run is the value of the `run_duration` field.""" - + spark_jar_task: Optional[SparkJarTask] = None """The task runs a JAR when the `spark_jar_task` field is present.""" - + spark_python_task: Optional[SparkPythonTask] = None """The task runs a Python file when the `spark_python_task` field is present.""" - + spark_submit_task: Optional[SparkSubmitTask] = None """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute. @@ -5438,150 +6531,288 @@ class RunTask: to leave some room for off-heap usage. The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" - + sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.""" - + start_time: Optional[int] = None """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC). This may not be the time when the job task starts executing, for example, if the job is scheduled to run on a new cluster, this is the time the cluster creation call is issued.""" - + state: Optional[RunState] = None """Deprecated. Please use the `status` field instead.""" - + status: Optional[RunStatus] = None """The current status of the run""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. A value of `0` means no timeout.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when the run begins or completes. The default behavior is to not send any system notifications. Task webhooks respect the task notification settings.""" - + def as_dict(self) -> dict: """Serializes the RunTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.attempt_number is not None: body['attempt_number'] = self.attempt_number - if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() - if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration - if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict() - if self.condition_task: body['condition_task'] = self.condition_task.as_dict() - if self.dashboard_task: body['dashboard_task'] = self.dashboard_task.as_dict() - if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task.as_dict() - if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() - if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] - if self.description is not None: body['description'] = self.description - if self.disabled is not None: body['disabled'] = self.disabled - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target.value - if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() - if self.end_time is not None: body['end_time'] = self.end_time - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.execution_duration is not None: body['execution_duration'] = self.execution_duration - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict() - if self.gen_ai_compute_task: body['gen_ai_compute_task'] = 
self.gen_ai_compute_task.as_dict() - if self.git_source: body['git_source'] = self.git_source.as_dict() - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() - if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict() - if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() - if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict() - if self.power_bi_task: body['power_bi_task'] = self.power_bi_task.as_dict() - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() - if self.queue_duration is not None: body['queue_duration'] = self.queue_duration - if self.resolved_values: body['resolved_values'] = self.resolved_values.as_dict() - if self.run_duration is not None: body['run_duration'] = self.run_duration - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_if is not None: body['run_if'] = self.run_if.value - if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() - if self.run_page_url is not None: body['run_page_url'] = self.run_page_url - if self.setup_duration is not None: body['setup_duration'] = self.setup_duration - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() - if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() - if self.sql_task: body['sql_task'] = self.sql_task.as_dict() - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state.as_dict() - if self.status: body['status'] = self.status.as_dict() - if self.task_key is not None: body['task_key'] = self.task_key - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() + if self.attempt_number is not None: + body["attempt_number"] = self.attempt_number + if self.clean_rooms_notebook_task: + body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict() + if self.cleanup_duration is not None: + body["cleanup_duration"] = self.cleanup_duration + if self.cluster_instance: + body["cluster_instance"] = self.cluster_instance.as_dict() + if self.condition_task: + body["condition_task"] = self.condition_task.as_dict() + if self.dashboard_task: + body["dashboard_task"] = self.dashboard_task.as_dict() + if self.dbt_cloud_task: + body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict() + if self.dbt_task: + body["dbt_task"] = self.dbt_task.as_dict() + if self.depends_on: + body["depends_on"] = [v.as_dict() for v in self.depends_on] + if self.description is not None: + body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target.value + if self.email_notifications: + body["email_notifications"] = self.email_notifications.as_dict() + if self.end_time is not None: + body["end_time"] = self.end_time + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.execution_duration is not None: + body["execution_duration"] = self.execution_duration + if 
self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.for_each_task: + body["for_each_task"] = self.for_each_task.as_dict() + if self.gen_ai_compute_task: + body["gen_ai_compute_task"] = self.gen_ai_compute_task.as_dict() + if self.git_source: + body["git_source"] = self.git_source.as_dict() + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.new_cluster: + body["new_cluster"] = self.new_cluster.as_dict() + if self.notebook_task: + body["notebook_task"] = self.notebook_task.as_dict() + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.pipeline_task: + body["pipeline_task"] = self.pipeline_task.as_dict() + if self.power_bi_task: + body["power_bi_task"] = self.power_bi_task.as_dict() + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task.as_dict() + if self.queue_duration is not None: + body["queue_duration"] = self.queue_duration + if self.resolved_values: + body["resolved_values"] = self.resolved_values.as_dict() + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_if is not None: + body["run_if"] = self.run_if.value + if self.run_job_task: + body["run_job_task"] = self.run_job_task.as_dict() + if self.run_page_url is not None: + body["run_page_url"] = self.run_page_url + if self.setup_duration is not None: + body["setup_duration"] = self.setup_duration + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task.as_dict() + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task.as_dict() + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task.as_dict() + if self.sql_task: + body["sql_task"] = self.sql_task.as_dict() + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state.as_dict() + if self.status: + body["status"] = self.status.as_dict() + if self.task_key is not None: + body["task_key"] = self.task_key + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RunTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.attempt_number is not None: body['attempt_number'] = self.attempt_number - if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task - if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration - if self.cluster_instance: body['cluster_instance'] = self.cluster_instance - if self.condition_task: body['condition_task'] = self.condition_task - if self.dashboard_task: body['dashboard_task'] = self.dashboard_task - if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task - if self.dbt_task: body['dbt_task'] = self.dbt_task - if self.depends_on: body['depends_on'] = self.depends_on - if self.description is not None: body['description'] = self.description - if self.disabled is not None: body['disabled'] = self.disabled - if self.effective_performance_target is not None: body['effective_performance_target'] = self.effective_performance_target - if self.email_notifications: 
body['email_notifications'] = self.email_notifications - if self.end_time is not None: body['end_time'] = self.end_time - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.execution_duration is not None: body['execution_duration'] = self.execution_duration - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.for_each_task: body['for_each_task'] = self.for_each_task - if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task - if self.git_source: body['git_source'] = self.git_source - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.libraries: body['libraries'] = self.libraries - if self.new_cluster: body['new_cluster'] = self.new_cluster - if self.notebook_task: body['notebook_task'] = self.notebook_task - if self.notification_settings: body['notification_settings'] = self.notification_settings - if self.pipeline_task: body['pipeline_task'] = self.pipeline_task - if self.power_bi_task: body['power_bi_task'] = self.power_bi_task - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task - if self.queue_duration is not None: body['queue_duration'] = self.queue_duration - if self.resolved_values: body['resolved_values'] = self.resolved_values - if self.run_duration is not None: body['run_duration'] = self.run_duration - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_if is not None: body['run_if'] = self.run_if - if self.run_job_task: body['run_job_task'] = self.run_job_task - if self.run_page_url is not None: body['run_page_url'] = self.run_page_url - if self.setup_duration is not None: body['setup_duration'] = self.setup_duration - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task - if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task - if self.sql_task: body['sql_task'] = self.sql_task - if self.start_time is not None: body['start_time'] = self.start_time - if self.state: body['state'] = self.state - if self.status: body['status'] = self.status - if self.task_key is not None: body['task_key'] = self.task_key - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + if self.attempt_number is not None: + body["attempt_number"] = self.attempt_number + if self.clean_rooms_notebook_task: + body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task + if self.cleanup_duration is not None: + body["cleanup_duration"] = self.cleanup_duration + if self.cluster_instance: + body["cluster_instance"] = self.cluster_instance + if self.condition_task: + body["condition_task"] = self.condition_task + if self.dashboard_task: + body["dashboard_task"] = self.dashboard_task + if self.dbt_cloud_task: + body["dbt_cloud_task"] = self.dbt_cloud_task + if self.dbt_task: + body["dbt_task"] = self.dbt_task + if self.depends_on: + body["depends_on"] = self.depends_on + if self.description is not None: + body["description"] = self.description + if self.disabled is not None: + body["disabled"] = self.disabled + if self.effective_performance_target is not None: + body["effective_performance_target"] = self.effective_performance_target + if self.email_notifications: + body["email_notifications"] = self.email_notifications + if self.end_time is not None: + 
body["end_time"] = self.end_time + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.execution_duration is not None: + body["execution_duration"] = self.execution_duration + if self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.for_each_task: + body["for_each_task"] = self.for_each_task + if self.gen_ai_compute_task: + body["gen_ai_compute_task"] = self.gen_ai_compute_task + if self.git_source: + body["git_source"] = self.git_source + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.libraries: + body["libraries"] = self.libraries + if self.new_cluster: + body["new_cluster"] = self.new_cluster + if self.notebook_task: + body["notebook_task"] = self.notebook_task + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.pipeline_task: + body["pipeline_task"] = self.pipeline_task + if self.power_bi_task: + body["power_bi_task"] = self.power_bi_task + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task + if self.queue_duration is not None: + body["queue_duration"] = self.queue_duration + if self.resolved_values: + body["resolved_values"] = self.resolved_values + if self.run_duration is not None: + body["run_duration"] = self.run_duration + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_if is not None: + body["run_if"] = self.run_if + if self.run_job_task: + body["run_job_task"] = self.run_job_task + if self.run_page_url is not None: + body["run_page_url"] = self.run_page_url + if self.setup_duration is not None: + body["setup_duration"] = self.setup_duration + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task + if self.sql_task: + body["sql_task"] = self.sql_task + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state + if self.status: + body["status"] = self.status + if self.task_key is not None: + body["task_key"] = self.task_key + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunTask: """Deserializes the RunTask from a dictionary.""" - return cls(attempt_number=d.get('attempt_number', None), clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', CleanRoomsNotebookTask), cleanup_duration=d.get('cleanup_duration', None), cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance), condition_task=_from_dict(d, 'condition_task', RunConditionTask), dashboard_task=_from_dict(d, 'dashboard_task', DashboardTask), dbt_cloud_task=_from_dict(d, 'dbt_cloud_task', DbtCloudTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), disabled=d.get('disabled', None), effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), end_time=d.get('end_time', None), environment_key=d.get('environment_key', None), execution_duration=d.get('execution_duration', None), 
existing_cluster_id=d.get('existing_cluster_id', None), for_each_task=_from_dict(d, 'for_each_task', RunForEachTask), gen_ai_compute_task=_from_dict(d, 'gen_ai_compute_task', GenAiComputeTask), git_source=_from_dict(d, 'git_source', GitSource), job_cluster_key=d.get('job_cluster_key', None), libraries=_repeated_dict(d, 'libraries', compute.Library), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec), notebook_task=_from_dict(d, 'notebook_task', NotebookTask), notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings), pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask), power_bi_task=_from_dict(d, 'power_bi_task', PowerBiTask), python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask), queue_duration=d.get('queue_duration', None), resolved_values=_from_dict(d, 'resolved_values', ResolvedValues), run_duration=d.get('run_duration', None), run_id=d.get('run_id', None), run_if=_enum(d, 'run_if', RunIf), run_job_task=_from_dict(d, 'run_job_task', RunJobTask), run_page_url=d.get('run_page_url', None), setup_duration=d.get('setup_duration', None), spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask), spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask), spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask), sql_task=_from_dict(d, 'sql_task', SqlTask), start_time=d.get('start_time', None), state=_from_dict(d, 'state', RunState), status=_from_dict(d, 'status', RunStatus), task_key=d.get('task_key', None), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) - - + return cls( + attempt_number=d.get("attempt_number", None), + clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask), + cleanup_duration=d.get("cleanup_duration", None), + cluster_instance=_from_dict(d, "cluster_instance", ClusterInstance), + condition_task=_from_dict(d, "condition_task", RunConditionTask), + dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), + dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask), + dbt_task=_from_dict(d, "dbt_task", DbtTask), + depends_on=_repeated_dict(d, "depends_on", TaskDependency), + description=d.get("description", None), + disabled=d.get("disabled", None), + effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), + end_time=d.get("end_time", None), + environment_key=d.get("environment_key", None), + execution_duration=d.get("execution_duration", None), + existing_cluster_id=d.get("existing_cluster_id", None), + for_each_task=_from_dict(d, "for_each_task", RunForEachTask), + gen_ai_compute_task=_from_dict(d, "gen_ai_compute_task", GenAiComputeTask), + git_source=_from_dict(d, "git_source", GitSource), + job_cluster_key=d.get("job_cluster_key", None), + libraries=_repeated_dict(d, "libraries", compute.Library), + new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), + notebook_task=_from_dict(d, "notebook_task", NotebookTask), + notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings), + pipeline_task=_from_dict(d, "pipeline_task", PipelineTask), + power_bi_task=_from_dict(d, "power_bi_task", PowerBiTask), + python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask), + queue_duration=d.get("queue_duration", None), + resolved_values=_from_dict(d, "resolved_values", ResolvedValues), + 
run_duration=d.get("run_duration", None), + run_id=d.get("run_id", None), + run_if=_enum(d, "run_if", RunIf), + run_job_task=_from_dict(d, "run_job_task", RunJobTask), + run_page_url=d.get("run_page_url", None), + setup_duration=d.get("setup_duration", None), + spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask), + spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask), + spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask), + sql_task=_from_dict(d, "sql_task", SqlTask), + start_time=d.get("start_time", None), + state=_from_dict(d, "state", RunState), + status=_from_dict(d, "status", RunStatus), + task_key=d.get("task_key", None), + timeout_seconds=d.get("timeout_seconds", None), + webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), + ) class RunType(Enum): """The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with :method:jobs/submit. - + [dbutils.notebook.run]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow""" - - JOB_RUN = 'JOB_RUN' - SUBMIT_RUN = 'SUBMIT_RUN' - WORKFLOW_RUN = 'WORKFLOW_RUN' + + JOB_RUN = "JOB_RUN" + SUBMIT_RUN = "SUBMIT_RUN" + WORKFLOW_RUN = "WORKFLOW_RUN" + class Source(Enum): """Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\ @@ -5591,57 +6822,69 @@ class Source(Enum): * `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in cloud Git provider.""" - - GIT = 'GIT' - WORKSPACE = 'WORKSPACE' + + GIT = "GIT" + WORKSPACE = "WORKSPACE" + @dataclass class SparkJarTask: jar_uri: Optional[str] = None """Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.""" - + main_class_name: Optional[str] = None """The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.""" - + parameters: Optional[List[str]] = None """Parameters passed to the main method. Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + run_as_repl: Optional[bool] = None """Deprecated. 
A value of `false` is no longer supported.""" - + def as_dict(self) -> dict: """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.jar_uri is not None: body['jar_uri'] = self.jar_uri - if self.main_class_name is not None: body['main_class_name'] = self.main_class_name - if self.parameters: body['parameters'] = [v for v in self.parameters] - if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl + if self.jar_uri is not None: + body["jar_uri"] = self.jar_uri + if self.main_class_name is not None: + body["main_class_name"] = self.main_class_name + if self.parameters: + body["parameters"] = [v for v in self.parameters] + if self.run_as_repl is not None: + body["run_as_repl"] = self.run_as_repl return body def as_shallow_dict(self) -> dict: """Serializes the SparkJarTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.jar_uri is not None: body['jar_uri'] = self.jar_uri - if self.main_class_name is not None: body['main_class_name'] = self.main_class_name - if self.parameters: body['parameters'] = self.parameters - if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl + if self.jar_uri is not None: + body["jar_uri"] = self.jar_uri + if self.main_class_name is not None: + body["main_class_name"] = self.main_class_name + if self.parameters: + body["parameters"] = self.parameters + if self.run_as_repl is not None: + body["run_as_repl"] = self.run_as_repl return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkJarTask: """Deserializes the SparkJarTask from a dictionary.""" - return cls(jar_uri=d.get('jar_uri', None), main_class_name=d.get('main_class_name', None), parameters=d.get('parameters', None), run_as_repl=d.get('run_as_repl', None)) - - + return cls( + jar_uri=d.get("jar_uri", None), + main_class_name=d.get("main_class_name", None), + parameters=d.get("parameters", None), + run_as_repl=d.get("run_as_repl", None), + ) @dataclass @@ -5651,14 +6894,14 @@ class SparkPythonTask: workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.""" - + parameters: Optional[List[str]] = None """Command line parameters passed to the Python file. Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + source: Optional[Source] = None """Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved from the local Databricks workspace or cloud location (if the `python_file` @@ -5667,29 +6910,37 @@ class SparkPythonTask: * `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem URI. 
* `GIT`: The Python file is located in a remote Git repository.""" - + def as_dict(self) -> dict: """Serializes the SparkPythonTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: body['parameters'] = [v for v in self.parameters] - if self.python_file is not None: body['python_file'] = self.python_file - if self.source is not None: body['source'] = self.source.value + if self.parameters: + body["parameters"] = [v for v in self.parameters] + if self.python_file is not None: + body["python_file"] = self.python_file + if self.source is not None: + body["source"] = self.source.value return body def as_shallow_dict(self) -> dict: """Serializes the SparkPythonTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: body['parameters'] = self.parameters - if self.python_file is not None: body['python_file'] = self.python_file - if self.source is not None: body['source'] = self.source + if self.parameters: + body["parameters"] = self.parameters + if self.python_file is not None: + body["python_file"] = self.python_file + if self.source is not None: + body["source"] = self.source return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkPythonTask: """Deserializes the SparkPythonTask from a dictionary.""" - return cls(parameters=d.get('parameters', None), python_file=d.get('python_file', None), source=_enum(d, 'source', Source)) - - + return cls( + parameters=d.get("parameters", None), + python_file=d.get("python_file", None), + source=_enum(d, "source", Source), + ) @dataclass @@ -5700,25 +6951,25 @@ class SparkSubmitTask: Use [Task parameter variables] to set parameters containing information about job runs. [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - + def as_dict(self) -> dict: """Serializes the SparkSubmitTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: body['parameters'] = [v for v in self.parameters] + if self.parameters: + body["parameters"] = [v for v in self.parameters] return body def as_shallow_dict(self) -> dict: """Serializes the SparkSubmitTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: body['parameters'] = self.parameters + if self.parameters: + body["parameters"] = self.parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SparkSubmitTask: """Deserializes the SparkSubmitTask from a dictionary.""" - return cls(parameters=d.get('parameters', None)) - - + return cls(parameters=d.get("parameters", None)) @dataclass @@ -5728,279 +6979,340 @@ class SqlAlertOutput: * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions""" - + output_link: Optional[str] = None """The link to find the output results.""" - + query_text: Optional[str] = None """The text of the SQL query. 
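As a quick illustration of the serialization contract just defined, here is a minimal sketch (the file path and parameter values are invented) showing that `SparkPythonTask.as_dict` flattens the `source` enum to its string value and `from_dict` restores it via `_enum`:

from databricks.sdk.service.jobs import Source, SparkPythonTask

task = SparkPythonTask(
    python_file="/Workspace/Users/someone@example.com/etl.py",  # hypothetical path
    parameters=["--date", "2025-06-05"],  # hypothetical parameters
    source=Source.WORKSPACE,
)

body = task.as_dict()
assert body["source"] == "WORKSPACE"  # enums serialize via .value

restored = SparkPythonTask.from_dict(body)
assert restored.source is Source.WORKSPACE  # _enum maps the string back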
Can Run permission of the SQL query associated with the SQL alert is required to view this field.""" - + sql_statements: Optional[List[SqlStatementOutput]] = None """Information about SQL statements executed in the run.""" - + warehouse_id: Optional[str] = None """The canonical identifier of the SQL warehouse.""" - + def as_dict(self) -> dict: """Serializes the SqlAlertOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_state is not None: body['alert_state'] = self.alert_state.value - if self.output_link is not None: body['output_link'] = self.output_link - if self.query_text is not None: body['query_text'] = self.query_text - if self.sql_statements: body['sql_statements'] = [v.as_dict() for v in self.sql_statements] - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.alert_state is not None: + body["alert_state"] = self.alert_state.value + if self.output_link is not None: + body["output_link"] = self.output_link + if self.query_text is not None: + body["query_text"] = self.query_text + if self.sql_statements: + body["sql_statements"] = [v.as_dict() for v in self.sql_statements] + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlAlertOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_state is not None: body['alert_state'] = self.alert_state - if self.output_link is not None: body['output_link'] = self.output_link - if self.query_text is not None: body['query_text'] = self.query_text - if self.sql_statements: body['sql_statements'] = self.sql_statements - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.alert_state is not None: + body["alert_state"] = self.alert_state + if self.output_link is not None: + body["output_link"] = self.output_link + if self.query_text is not None: + body["query_text"] = self.query_text + if self.sql_statements: + body["sql_statements"] = self.sql_statements + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlAlertOutput: """Deserializes the SqlAlertOutput from a dictionary.""" - return cls(alert_state=_enum(d, 'alert_state', SqlAlertState), output_link=d.get('output_link', None), query_text=d.get('query_text', None), sql_statements=_repeated_dict(d, 'sql_statements', SqlStatementOutput), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + alert_state=_enum(d, "alert_state", SqlAlertState), + output_link=d.get("output_link", None), + query_text=d.get("query_text", None), + sql_statements=_repeated_dict(d, "sql_statements", SqlStatementOutput), + warehouse_id=d.get("warehouse_id", None), + ) class SqlAlertState(Enum): """The state of the SQL alert. - + * UNKNOWN: alert yet to be evaluated * OK: alert evaluated and did not fulfill trigger conditions * TRIGGERED: alert evaluated and fulfilled trigger conditions""" - - OK = 'OK' - TRIGGERED = 'TRIGGERED' - UNKNOWN = 'UNKNOWN' + + OK = "OK" + TRIGGERED = "TRIGGERED" + UNKNOWN = "UNKNOWN" + @dataclass class SqlDashboardOutput: warehouse_id: Optional[str] = None """The canonical identifier of the SQL warehouse.""" - + widgets: Optional[List[SqlDashboardWidgetOutput]] = None """Widgets executed in the run. 
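The `as_dict`/`as_shallow_dict` pair defined here follows the same contract as every other dataclass in this module: `as_dict` recursively serializes nested objects and enums into JSON-ready primitives, while `as_shallow_dict` keeps the immediate attribute values as live objects. A small sketch (all field values invented):

from databricks.sdk.service.jobs import SqlAlertOutput, SqlAlertState, SqlStatementOutput

out = SqlAlertOutput(
    alert_state=SqlAlertState.TRIGGERED,
    sql_statements=[SqlStatementOutput(lookup_key="abc123")],  # hypothetical lookup key
)

deep = out.as_dict()
assert deep["alert_state"] == "TRIGGERED"  # enum -> string
assert deep["sql_statements"] == [{"lookup_key": "abc123"}]  # nested dataclass -> dict

shallow = out.as_shallow_dict()
assert shallow["alert_state"] is SqlAlertState.TRIGGERED  # enum kept as-is
assert isinstance(shallow["sql_statements"][0], SqlStatementOutput)  # object kept as-is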
Only SQL query based widgets are listed.""" - + def as_dict(self) -> dict: """Serializes the SqlDashboardOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id - if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets] + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + if self.widgets: + body["widgets"] = [v.as_dict() for v in self.widgets] return body def as_shallow_dict(self) -> dict: """Serializes the SqlDashboardOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id - if self.widgets: body['widgets'] = self.widgets + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id + if self.widgets: + body["widgets"] = self.widgets return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlDashboardOutput: """Deserializes the SqlDashboardOutput from a dictionary.""" - return cls(warehouse_id=d.get('warehouse_id', None), widgets=_repeated_dict(d, 'widgets', SqlDashboardWidgetOutput)) - - + return cls( + warehouse_id=d.get("warehouse_id", None), widgets=_repeated_dict(d, "widgets", SqlDashboardWidgetOutput) + ) @dataclass class SqlDashboardWidgetOutput: end_time: Optional[int] = None """Time (in epoch milliseconds) when execution of the SQL widget ends.""" - + error: Optional[SqlOutputError] = None """The information about the error when execution fails.""" - + output_link: Optional[str] = None """The link to find the output results.""" - + start_time: Optional[int] = None """Time (in epoch milliseconds) when execution of the SQL widget starts.""" - + status: Optional[SqlDashboardWidgetOutputStatus] = None """The execution status of the SQL widget.""" - + widget_id: Optional[str] = None """The canonical identifier of the SQL widget.""" - + widget_title: Optional[str] = None """The title of the SQL widget.""" - + def as_dict(self) -> dict: """Serializes the SqlDashboardWidgetOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time is not None: body['end_time'] = self.end_time - if self.error: body['error'] = self.error.as_dict() - if self.output_link is not None: body['output_link'] = self.output_link - if self.start_time is not None: body['start_time'] = self.start_time - if self.status is not None: body['status'] = self.status.value - if self.widget_id is not None: body['widget_id'] = self.widget_id - if self.widget_title is not None: body['widget_title'] = self.widget_title + if self.end_time is not None: + body["end_time"] = self.end_time + if self.error: + body["error"] = self.error.as_dict() + if self.output_link is not None: + body["output_link"] = self.output_link + if self.start_time is not None: + body["start_time"] = self.start_time + if self.status is not None: + body["status"] = self.status.value + if self.widget_id is not None: + body["widget_id"] = self.widget_id + if self.widget_title is not None: + body["widget_title"] = self.widget_title return body def as_shallow_dict(self) -> dict: """Serializes the SqlDashboardWidgetOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time is not None: body['end_time'] = self.end_time - if self.error: body['error'] = self.error - if self.output_link is not None: body['output_link'] = self.output_link - if self.start_time is not None: body['start_time'] = self.start_time - if self.status is not None: 
body['status'] = self.status - if self.widget_id is not None: body['widget_id'] = self.widget_id - if self.widget_title is not None: body['widget_title'] = self.widget_title + if self.end_time is not None: + body["end_time"] = self.end_time + if self.error: + body["error"] = self.error + if self.output_link is not None: + body["output_link"] = self.output_link + if self.start_time is not None: + body["start_time"] = self.start_time + if self.status is not None: + body["status"] = self.status + if self.widget_id is not None: + body["widget_id"] = self.widget_id + if self.widget_title is not None: + body["widget_title"] = self.widget_title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlDashboardWidgetOutput: """Deserializes the SqlDashboardWidgetOutput from a dictionary.""" - return cls(end_time=d.get('end_time', None), error=_from_dict(d, 'error', SqlOutputError), output_link=d.get('output_link', None), start_time=d.get('start_time', None), status=_enum(d, 'status', SqlDashboardWidgetOutputStatus), widget_id=d.get('widget_id', None), widget_title=d.get('widget_title', None)) - - + return cls( + end_time=d.get("end_time", None), + error=_from_dict(d, "error", SqlOutputError), + output_link=d.get("output_link", None), + start_time=d.get("start_time", None), + status=_enum(d, "status", SqlDashboardWidgetOutputStatus), + widget_id=d.get("widget_id", None), + widget_title=d.get("widget_title", None), + ) class SqlDashboardWidgetOutputStatus(Enum): - - - CANCELLED = 'CANCELLED' - FAILED = 'FAILED' - PENDING = 'PENDING' - RUNNING = 'RUNNING' - SUCCESS = 'SUCCESS' + + CANCELLED = "CANCELLED" + FAILED = "FAILED" + PENDING = "PENDING" + RUNNING = "RUNNING" + SUCCESS = "SUCCESS" + @dataclass class SqlOutput: alert_output: Optional[SqlAlertOutput] = None """The output of a SQL alert task, if available.""" - + dashboard_output: Optional[SqlDashboardOutput] = None """The output of a SQL dashboard task, if available.""" - + query_output: Optional[SqlQueryOutput] = None """The output of a SQL query task, if available.""" - + def as_dict(self) -> dict: """Serializes the SqlOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_output: body['alert_output'] = self.alert_output.as_dict() - if self.dashboard_output: body['dashboard_output'] = self.dashboard_output.as_dict() - if self.query_output: body['query_output'] = self.query_output.as_dict() + if self.alert_output: + body["alert_output"] = self.alert_output.as_dict() + if self.dashboard_output: + body["dashboard_output"] = self.dashboard_output.as_dict() + if self.query_output: + body["query_output"] = self.query_output.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SqlOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_output: body['alert_output'] = self.alert_output - if self.dashboard_output: body['dashboard_output'] = self.dashboard_output - if self.query_output: body['query_output'] = self.query_output + if self.alert_output: + body["alert_output"] = self.alert_output + if self.dashboard_output: + body["dashboard_output"] = self.dashboard_output + if self.query_output: + body["query_output"] = self.query_output return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlOutput: """Deserializes the SqlOutput from a dictionary.""" - return cls(alert_output=_from_dict(d, 'alert_output', SqlAlertOutput), dashboard_output=_from_dict(d, 'dashboard_output', SqlDashboardOutput), query_output=_from_dict(d, 'query_output', 
SqlQueryOutput)) - - + return cls( + alert_output=_from_dict(d, "alert_output", SqlAlertOutput), + dashboard_output=_from_dict(d, "dashboard_output", SqlDashboardOutput), + query_output=_from_dict(d, "query_output", SqlQueryOutput), + ) @dataclass class SqlOutputError: message: Optional[str] = None """The error message when execution fails.""" - + def as_dict(self) -> dict: """Serializes the SqlOutputError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message + if self.message is not None: + body["message"] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the SqlOutputError into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message + if self.message is not None: + body["message"] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlOutputError: """Deserializes the SqlOutputError from a dictionary.""" - return cls(message=d.get('message', None)) - - + return cls(message=d.get("message", None)) @dataclass class SqlQueryOutput: endpoint_id: Optional[str] = None - + output_link: Optional[str] = None """The link to find the output results.""" - + query_text: Optional[str] = None """The text of the SQL query. Can Run permission of the SQL query is required to view this field.""" - + sql_statements: Optional[List[SqlStatementOutput]] = None """Information about SQL statements executed in the run.""" - + warehouse_id: Optional[str] = None """The canonical identifier of the SQL warehouse.""" - + def as_dict(self) -> dict: """Serializes the SqlQueryOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id - if self.output_link is not None: body['output_link'] = self.output_link - if self.query_text is not None: body['query_text'] = self.query_text - if self.sql_statements: body['sql_statements'] = [v.as_dict() for v in self.sql_statements] - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.endpoint_id is not None: + body["endpoint_id"] = self.endpoint_id + if self.output_link is not None: + body["output_link"] = self.output_link + if self.query_text is not None: + body["query_text"] = self.query_text + if self.sql_statements: + body["sql_statements"] = [v.as_dict() for v in self.sql_statements] + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlQueryOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id - if self.output_link is not None: body['output_link'] = self.output_link - if self.query_text is not None: body['query_text'] = self.query_text - if self.sql_statements: body['sql_statements'] = self.sql_statements - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.endpoint_id is not None: + body["endpoint_id"] = self.endpoint_id + if self.output_link is not None: + body["output_link"] = self.output_link + if self.query_text is not None: + body["query_text"] = self.query_text + if self.sql_statements: + body["sql_statements"] = self.sql_statements + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlQueryOutput: """Deserializes the SqlQueryOutput from 
a dictionary.""" - return cls(endpoint_id=d.get('endpoint_id', None), output_link=d.get('output_link', None), query_text=d.get('query_text', None), sql_statements=_repeated_dict(d, 'sql_statements', SqlStatementOutput), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + endpoint_id=d.get("endpoint_id", None), + output_link=d.get("output_link", None), + query_text=d.get("query_text", None), + sql_statements=_repeated_dict(d, "sql_statements", SqlStatementOutput), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class SqlStatementOutput: lookup_key: Optional[str] = None """A key that can be used to look up query details.""" - + def as_dict(self) -> dict: """Serializes the SqlStatementOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.lookup_key is not None: body['lookup_key'] = self.lookup_key + if self.lookup_key is not None: + body["lookup_key"] = self.lookup_key return body def as_shallow_dict(self) -> dict: """Serializes the SqlStatementOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.lookup_key is not None: body['lookup_key'] = self.lookup_key + if self.lookup_key is not None: + body["lookup_key"] = self.lookup_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlStatementOutput: """Deserializes the SqlStatementOutput from a dictionary.""" - return cls(lookup_key=d.get('lookup_key', None)) - - + return cls(lookup_key=d.get("lookup_key", None)) @dataclass @@ -6009,126 +7321,162 @@ class SqlTask: """The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.""" - + alert: Optional[SqlTaskAlert] = None """If alert, indicates that this job must refresh a SQL alert.""" - + dashboard: Optional[SqlTaskDashboard] = None """If dashboard, indicates that this job must refresh a SQL dashboard.""" - + file: Optional[SqlTaskFile] = None """If file, indicates that this job runs a SQL file in a remote Git repository.""" - - parameters: Optional[Dict[str,str]] = None + + parameters: Optional[Dict[str, str]] = None """Parameters to be used for each run of this job. 
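These output dataclasses are what callers receive when fetching a run's output; a hedged sketch (the run ID is invented, and this assumes the run's task was a SQL query task):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
output = w.jobs.get_run_output(run_id=1234)  # hypothetical run ID

if output.sql_output and output.sql_output.query_output:
    q = output.sql_output.query_output
    print(q.output_link, q.warehouse_id)
    for stmt in q.sql_statements or []:
        print(stmt.lookup_key)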
The SQL alert task does not support custom parameters.""" - + query: Optional[SqlTaskQuery] = None """If query, indicates that this job must execute a SQL query.""" - + def as_dict(self) -> dict: """Serializes the SqlTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert: body['alert'] = self.alert.as_dict() - if self.dashboard: body['dashboard'] = self.dashboard.as_dict() - if self.file: body['file'] = self.file.as_dict() - if self.parameters: body['parameters'] = self.parameters - if self.query: body['query'] = self.query.as_dict() - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.alert: + body["alert"] = self.alert.as_dict() + if self.dashboard: + body["dashboard"] = self.dashboard.as_dict() + if self.file: + body["file"] = self.file.as_dict() + if self.parameters: + body["parameters"] = self.parameters + if self.query: + body["query"] = self.query.as_dict() + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert: body['alert'] = self.alert - if self.dashboard: body['dashboard'] = self.dashboard - if self.file: body['file'] = self.file - if self.parameters: body['parameters'] = self.parameters - if self.query: body['query'] = self.query - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.alert: + body["alert"] = self.alert + if self.dashboard: + body["dashboard"] = self.dashboard + if self.file: + body["file"] = self.file + if self.parameters: + body["parameters"] = self.parameters + if self.query: + body["query"] = self.query + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTask: """Deserializes the SqlTask from a dictionary.""" - return cls(alert=_from_dict(d, 'alert', SqlTaskAlert), dashboard=_from_dict(d, 'dashboard', SqlTaskDashboard), file=_from_dict(d, 'file', SqlTaskFile), parameters=d.get('parameters', None), query=_from_dict(d, 'query', SqlTaskQuery), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + alert=_from_dict(d, "alert", SqlTaskAlert), + dashboard=_from_dict(d, "dashboard", SqlTaskDashboard), + file=_from_dict(d, "file", SqlTaskFile), + parameters=d.get("parameters", None), + query=_from_dict(d, "query", SqlTaskQuery), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class SqlTaskAlert: alert_id: str """The canonical identifier of the SQL alert.""" - + pause_subscriptions: Optional[bool] = None """If true, the alert notifications are not sent to subscribers.""" - + subscriptions: Optional[List[SqlTaskSubscription]] = None """If specified, alert notifications are sent to subscribers.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_id is not None: body['alert_id'] = self.alert_id - if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions - if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] + if self.alert_id is not None: + body["alert_id"] = self.alert_id + if self.pause_subscriptions is not None: + body["pause_subscriptions"] = self.pause_subscriptions + if self.subscriptions: + body["subscriptions"] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> 
dict: """Serializes the SqlTaskAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_id is not None: body['alert_id'] = self.alert_id - if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions - if self.subscriptions: body['subscriptions'] = self.subscriptions + if self.alert_id is not None: + body["alert_id"] = self.alert_id + if self.pause_subscriptions is not None: + body["pause_subscriptions"] = self.pause_subscriptions + if self.subscriptions: + body["subscriptions"] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskAlert: """Deserializes the SqlTaskAlert from a dictionary.""" - return cls(alert_id=d.get('alert_id', None), pause_subscriptions=d.get('pause_subscriptions', None), subscriptions=_repeated_dict(d, 'subscriptions', SqlTaskSubscription)) - - + return cls( + alert_id=d.get("alert_id", None), + pause_subscriptions=d.get("pause_subscriptions", None), + subscriptions=_repeated_dict(d, "subscriptions", SqlTaskSubscription), + ) @dataclass class SqlTaskDashboard: dashboard_id: str """The canonical identifier of the SQL dashboard.""" - + custom_subject: Optional[str] = None """Subject of the email sent to subscribers of this task.""" - + pause_subscriptions: Optional[bool] = None """If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.""" - + subscriptions: Optional[List[SqlTaskSubscription]] = None """If specified, dashboard snapshots are sent to subscriptions.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskDashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions - if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.pause_subscriptions is not None: + body["pause_subscriptions"] = self.pause_subscriptions + if self.subscriptions: + body["subscriptions"] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskDashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions - if self.subscriptions: body['subscriptions'] = self.subscriptions + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.pause_subscriptions is not None: + body["pause_subscriptions"] = self.pause_subscriptions + if self.subscriptions: + body["subscriptions"] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskDashboard: """Deserializes the SqlTaskDashboard from a dictionary.""" - return cls(custom_subject=d.get('custom_subject', None), dashboard_id=d.get('dashboard_id', None), pause_subscriptions=d.get('pause_subscriptions', None), subscriptions=_repeated_dict(d, 'subscriptions', 
SqlTaskSubscription)) - - + return cls( + custom_subject=d.get("custom_subject", None), + dashboard_id=d.get("dashboard_id", None), + pause_subscriptions=d.get("pause_subscriptions", None), + subscriptions=_repeated_dict(d, "subscriptions", SqlTaskSubscription), + ) @dataclass @@ -6136,7 +7484,7 @@ class SqlTaskFile: path: str """Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.""" - + source: Optional[Source] = None """Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved from the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a @@ -6145,52 +7493,54 @@ class SqlTaskFile: * `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in cloud Git provider.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskFile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path - if self.source is not None: body['source'] = self.source.value + if self.path is not None: + body["path"] = self.path + if self.source is not None: + body["source"] = self.source.value return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskFile into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path - if self.source is not None: body['source'] = self.source + if self.path is not None: + body["path"] = self.path + if self.source is not None: + body["source"] = self.source return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskFile: """Deserializes the SqlTaskFile from a dictionary.""" - return cls(path=d.get('path', None), source=_enum(d, 'source', Source)) - - + return cls(path=d.get("path", None), source=_enum(d, "source", Source)) @dataclass class SqlTaskQuery: query_id: str """The canonical identifier of the SQL query.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.query_id is not None: body['query_id'] = self.query_id + if self.query_id is not None: + body["query_id"] = self.query_id return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.query_id is not None: body['query_id'] = self.query_id + if self.query_id is not None: + body["query_id"] = self.query_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskQuery: """Deserializes the SqlTaskQuery from a dictionary.""" - return cls(query_id=d.get('query_id', None)) - - + return cls(query_id=d.get("query_id", None)) @dataclass @@ -6199,55 +7549,57 @@ class SqlTaskSubscription: """The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. You cannot set both destination_id and user_name for subscription notifications.""" - + user_name: Optional[str] = None """The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. 
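Putting the sub-task payloads together: a `SqlTask` carries exactly one of `alert`, `dashboard`, `file`, or `query`, plus a warehouse. A minimal sketch (all identifiers invented) of a saved-query task and a Git-sourced file task:

from databricks.sdk.service.jobs import Source, SqlTask, SqlTaskFile, SqlTaskQuery

query_task = SqlTask(
    warehouse_id="abcdef1234567890",  # hypothetical SQL warehouse ID
    query=SqlTaskQuery(query_id="12345678-1234-1234-1234-123456789012"),
)

file_task = SqlTask(
    warehouse_id="abcdef1234567890",
    file=SqlTaskFile(path="queries/daily_report.sql", source=Source.GIT),
    parameters={"report_date": "2025-06-05"},  # hypothetical parameter
)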
You cannot set both destination_id and user_name for subscription notifications.""" - + def as_dict(self) -> dict: """Serializes the SqlTaskSubscription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_id is not None: body['destination_id'] = self.destination_id - if self.user_name is not None: body['user_name'] = self.user_name + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the SqlTaskSubscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: body['destination_id'] = self.destination_id - if self.user_name is not None: body['user_name'] = self.user_name + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SqlTaskSubscription: """Deserializes the SqlTaskSubscription from a dictionary.""" - return cls(destination_id=d.get('destination_id', None), user_name=d.get('user_name', None)) - - + return cls(destination_id=d.get("destination_id", None), user_name=d.get("user_name", None)) class StorageMode(Enum): - - - DIRECT_QUERY = 'DIRECT_QUERY' - DUAL = 'DUAL' - IMPORT = 'IMPORT' + + DIRECT_QUERY = "DIRECT_QUERY" + DUAL = "DUAL" + IMPORT = "IMPORT" + @dataclass class SubmitRun: access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" - + budget_policy_id: Optional[str] = None """The user specified id of the budget policy to use for this one-time run. If not specified, the run will not be attributed to any budget policy.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the run begins or completes.""" - + environments: Optional[List[JobEnvironment]] = None """A list of task execution environment specifications that can be referenced by tasks of this run.""" - + git_source: Optional[GitSource] = None """An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. @@ -6257,10 +7609,10 @@ class SubmitRun: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + idempotency_token: Optional[str] = None """An optional token that can be used to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID @@ -6274,100 +7626,141 @@ class SubmitRun: For more information, see [How to ensure idempotency for jobs]. [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - + notification_settings: Optional[JobNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this run.""" - + queue: Optional[QueueSettings] = None """The queue settings of the one-time run.""" - + run_as: Optional[JobRunAs] = None """Specifies the user or service principal that the job runs as.
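The idempotency guarantee described above makes one-time submits safe to retry; a hedged sketch (run name, token, notebook path, and cluster ID are all invented) of the pattern the docstring recommends:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Retrying with the same token returns the existing run instead of creating a
# duplicate, so a timeout on the first attempt is safe to retry.
run = w.jobs.submit(
    run_name="nightly-backfill",
    idempotency_token="backfill-2025-06-05",
    tasks=[
        jobs.SubmitTask(
            task_key="backfill",
            notebook_task=jobs.NotebookTask(notebook_path="/Workspace/backfill"),
            existing_cluster_id="0605-123456-abcdefgh",
        )
    ],
).result()  # blocks until the run reaches a terminal state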
If not specified, the job runs as the user who submits the request.""" - + run_name: Optional[str] = None """An optional name for the run. The default value is `Untitled`.""" - + tasks: Optional[List[SubmitTask]] = None - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when the run begins or completes.""" - + def as_dict(self) -> dict: """Serializes the SubmitRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() - if self.environments: body['environments'] = [v.as_dict() for v in self.environments] - if self.git_source: body['git_source'] = self.git_source.as_dict() - if self.health: body['health'] = self.health.as_dict() - if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token - if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() - if self.queue: body['queue'] = self.queue.as_dict() - if self.run_as: body['run_as'] = self.run_as.as_dict() - if self.run_name is not None: body['run_name'] = self.run_name - if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.email_notifications: + body["email_notifications"] = self.email_notifications.as_dict() + if self.environments: + body["environments"] = [v.as_dict() for v in self.environments] + if self.git_source: + body["git_source"] = self.git_source.as_dict() + if self.health: + body["health"] = self.health.as_dict() + if self.idempotency_token is not None: + body["idempotency_token"] = self.idempotency_token + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.queue: + body["queue"] = self.queue.as_dict() + if self.run_as: + body["run_as"] = self.run_as.as_dict() + if self.run_name is not None: + body["run_name"] = self.run_name + if self.tasks: + body["tasks"] = [v.as_dict() for v in self.tasks] + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SubmitRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.email_notifications: body['email_notifications'] = self.email_notifications - if self.environments: body['environments'] = self.environments - if self.git_source: body['git_source'] = self.git_source - if self.health: body['health'] = self.health - if self.idempotency_token is not None: 
body['idempotency_token'] = self.idempotency_token - if self.notification_settings: body['notification_settings'] = self.notification_settings - if self.queue: body['queue'] = self.queue - if self.run_as: body['run_as'] = self.run_as - if self.run_name is not None: body['run_name'] = self.run_name - if self.tasks: body['tasks'] = self.tasks - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.email_notifications: + body["email_notifications"] = self.email_notifications + if self.environments: + body["environments"] = self.environments + if self.git_source: + body["git_source"] = self.git_source + if self.health: + body["health"] = self.health + if self.idempotency_token is not None: + body["idempotency_token"] = self.idempotency_token + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.queue: + body["queue"] = self.queue + if self.run_as: + body["run_as"] = self.run_as + if self.run_name is not None: + body["run_name"] = self.run_name + if self.tasks: + body["tasks"] = self.tasks + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubmitRun: """Deserializes the SubmitRun from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), budget_policy_id=d.get('budget_policy_id', None), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), git_source=_from_dict(d, 'git_source', GitSource), health=_from_dict(d, 'health', JobsHealthRules), idempotency_token=d.get('idempotency_token', None), notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings), queue=_from_dict(d, 'queue', QueueSettings), run_as=_from_dict(d, 'run_as', JobRunAs), run_name=d.get('run_name', None), tasks=_repeated_dict(d, 'tasks', SubmitTask), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), + budget_policy_id=d.get("budget_policy_id", None), + email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), + environments=_repeated_dict(d, "environments", JobEnvironment), + git_source=_from_dict(d, "git_source", GitSource), + health=_from_dict(d, "health", JobsHealthRules), + idempotency_token=d.get("idempotency_token", None), + notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), + queue=_from_dict(d, "queue", QueueSettings), + run_as=_from_dict(d, "run_as", JobRunAs), + run_name=d.get("run_name", None), + tasks=_repeated_dict(d, "tasks", SubmitTask), + timeout_seconds=d.get("timeout_seconds", None), + webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), + ) @dataclass class SubmitRunResponse: """Run was created and started successfully.""" - + run_id: Optional[int] = None """The canonical identifier for the newly submitted run.""" - 
+ def as_dict(self) -> dict: """Serializes the SubmitRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the SubmitRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubmitRunResponse: """Deserializes the SubmitRunResponse from a dictionary.""" - return cls(run_id=d.get('run_id', None)) - - + return cls(run_id=d.get("run_id", None)) @dataclass @@ -6376,95 +7769,95 @@ class SubmitTask: """A unique name for the task. This field is used to refer to this task from other tasks. This field is required and must be unique within its parent job. On Update or Reset, this field is used to reference the tasks to be updated or reset.""" - + clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present. [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html""" - + condition_task: Optional[ConditionTask] = None """The task evaluates a condition that can be used to control the execution of other tasks when the `condition_task` field is present. The condition task does not require a cluster to execute and does not support retries or notifications.""" - + dashboard_task: Optional[DashboardTask] = None """The task refreshes a dashboard and sends a snapshot to subscribers.""" - + dbt_cloud_task: Optional[DbtCloudTask] = None """Task type for dbt cloud""" - + dbt_task: Optional[DbtTask] = None """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.""" - + depends_on: Optional[List[TaskDependency]] = None """An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete successfully before executing this task. The key is `task_key`, and the value is the name assigned to the dependent task.""" - + description: Optional[str] = None """An optional description for this task.""" - + email_notifications: Optional[JobEmailNotifications] = None """An optional set of email addresses notified when the task run begins or completes. The default behavior is to not send any emails.""" - + environment_key: Optional[str] = None """The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.""" - + existing_cluster_id: Optional[str] = None """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops responding. 
We suggest running jobs and tasks on new clusters for greater reliability.""" - + for_each_task: Optional[ForEachTask] = None """The task executes a nested task for every input provided when the `for_each_task` field is present.""" - + gen_ai_compute_task: Optional[GenAiComputeTask] = None - + health: Optional[JobsHealthRules] = None """An optional set of health rules that can be defined for this job.""" - + libraries: Optional[List[compute.Library]] = None """An optional list of libraries to be installed on the cluster. The default value is an empty list.""" - + new_cluster: Optional[compute.ClusterSpec] = None """If new_cluster, a description of a new cluster that is created for each run.""" - + notebook_task: Optional[NotebookTask] = None """The task runs a notebook when the `notebook_task` field is present.""" - + notification_settings: Optional[TaskNotificationSettings] = None """Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task run.""" - + pipeline_task: Optional[PipelineTask] = None """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines configured to use triggered mode are supported.""" - + power_bi_task: Optional[PowerBiTask] = None """The task triggers a Power BI semantic model update when the `power_bi_task` field is present.""" - + python_wheel_task: Optional[PythonWheelTask] = None """The task runs a Python wheel when the `python_wheel_task` field is present.""" - + run_if: Optional[RunIf] = None """An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`. See :method:jobs/create for a list of possible values.""" - + run_job_task: Optional[RunJobTask] = None """The task triggers another job when the `run_job_task` field is present.""" - + spark_jar_task: Optional[SparkJarTask] = None """The task runs a JAR when the `spark_jar_task` field is present.""" - + spark_python_task: Optional[SparkPythonTask] = None """The task runs a Python file when the `spark_python_task` field is present.""" - + spark_submit_task: Optional[SparkSubmitTask] = None """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present. This task can run only on new clusters and is not compatible with serverless compute. @@ -6481,128 +7874,222 @@ class SubmitTask: to leave some room for off-heap usage. The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.""" - + sql_task: Optional[SqlTask] = None """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the `sql_task` field is present.""" - + timeout_seconds: Optional[int] = None """An optional timeout applied to each run of this job task. A value of `0` means no timeout.""" - + webhook_notifications: Optional[WebhookNotifications] = None """A collection of system notification IDs to notify when the run begins or completes. The default behavior is to not send any system notifications.
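The `depends_on` and `run_if` fields above are what turn a flat task list into a dependency graph; a small sketch (task keys and notebook paths invented; cluster settings omitted for brevity) of a downstream task that runs regardless of its upstream task's outcome:

from databricks.sdk.service.jobs import NotebookTask, RunIf, SubmitTask, TaskDependency

ingest = SubmitTask(
    task_key="ingest",
    notebook_task=NotebookTask(notebook_path="/Workspace/ingest"),  # hypothetical path
)

# ALL_DONE makes "report" run even if "ingest" failed, e.g. to emit a
# status summary either way.
report = SubmitTask(
    task_key="report",
    depends_on=[TaskDependency(task_key="ingest")],
    run_if=RunIf.ALL_DONE,
    notebook_task=NotebookTask(notebook_path="/Workspace/report"),  # hypothetical path
)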
Task webhooks respect the task notification settings.""" - + def as_dict(self) -> dict: """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() - if self.condition_task: body['condition_task'] = self.condition_task.as_dict() - if self.dashboard_task: body['dashboard_task'] = self.dashboard_task.as_dict() - if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task.as_dict() - if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() - if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] - if self.description is not None: body['description'] = self.description - if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict() - if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task.as_dict() - if self.health: body['health'] = self.health.as_dict() - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() - if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict() - if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() - if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict() - if self.power_bi_task: body['power_bi_task'] = self.power_bi_task.as_dict() - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() - if self.run_if is not None: body['run_if'] = self.run_if.value - if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() - if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() - if self.sql_task: body['sql_task'] = self.sql_task.as_dict() - if self.task_key is not None: body['task_key'] = self.task_key - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() + if self.clean_rooms_notebook_task: + body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict() + if self.condition_task: + body["condition_task"] = self.condition_task.as_dict() + if self.dashboard_task: + body["dashboard_task"] = self.dashboard_task.as_dict() + if self.dbt_cloud_task: + body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict() + if self.dbt_task: + body["dbt_task"] = self.dbt_task.as_dict() + if self.depends_on: + body["depends_on"] = [v.as_dict() for v in self.depends_on] + if self.description is not None: + body["description"] = self.description + if self.email_notifications: + body["email_notifications"] = self.email_notifications.as_dict() + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.for_each_task: + body["for_each_task"] = self.for_each_task.as_dict() + if 
self.gen_ai_compute_task: + body["gen_ai_compute_task"] = self.gen_ai_compute_task.as_dict() + if self.health: + body["health"] = self.health.as_dict() + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.new_cluster: + body["new_cluster"] = self.new_cluster.as_dict() + if self.notebook_task: + body["notebook_task"] = self.notebook_task.as_dict() + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.pipeline_task: + body["pipeline_task"] = self.pipeline_task.as_dict() + if self.power_bi_task: + body["power_bi_task"] = self.power_bi_task.as_dict() + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task.as_dict() + if self.run_if is not None: + body["run_if"] = self.run_if.value + if self.run_job_task: + body["run_job_task"] = self.run_job_task.as_dict() + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task.as_dict() + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task.as_dict() + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task.as_dict() + if self.sql_task: + body["sql_task"] = self.sql_task.as_dict() + if self.task_key is not None: + body["task_key"] = self.task_key + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SubmitTask into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task - if self.condition_task: body['condition_task'] = self.condition_task - if self.dashboard_task: body['dashboard_task'] = self.dashboard_task - if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task - if self.dbt_task: body['dbt_task'] = self.dbt_task - if self.depends_on: body['depends_on'] = self.depends_on - if self.description is not None: body['description'] = self.description - if self.email_notifications: body['email_notifications'] = self.email_notifications - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.for_each_task: body['for_each_task'] = self.for_each_task - if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task - if self.health: body['health'] = self.health - if self.libraries: body['libraries'] = self.libraries - if self.new_cluster: body['new_cluster'] = self.new_cluster - if self.notebook_task: body['notebook_task'] = self.notebook_task - if self.notification_settings: body['notification_settings'] = self.notification_settings - if self.pipeline_task: body['pipeline_task'] = self.pipeline_task - if self.power_bi_task: body['power_bi_task'] = self.power_bi_task - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task - if self.run_if is not None: body['run_if'] = self.run_if - if self.run_job_task: body['run_job_task'] = self.run_job_task - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task - if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task - if self.sql_task: body['sql_task'] = self.sql_task - if self.task_key is not None: body['task_key'] = 
self.task_key - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + if self.clean_rooms_notebook_task: + body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task + if self.condition_task: + body["condition_task"] = self.condition_task + if self.dashboard_task: + body["dashboard_task"] = self.dashboard_task + if self.dbt_cloud_task: + body["dbt_cloud_task"] = self.dbt_cloud_task + if self.dbt_task: + body["dbt_task"] = self.dbt_task + if self.depends_on: + body["depends_on"] = self.depends_on + if self.description is not None: + body["description"] = self.description + if self.email_notifications: + body["email_notifications"] = self.email_notifications + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.for_each_task: + body["for_each_task"] = self.for_each_task + if self.gen_ai_compute_task: + body["gen_ai_compute_task"] = self.gen_ai_compute_task + if self.health: + body["health"] = self.health + if self.libraries: + body["libraries"] = self.libraries + if self.new_cluster: + body["new_cluster"] = self.new_cluster + if self.notebook_task: + body["notebook_task"] = self.notebook_task + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.pipeline_task: + body["pipeline_task"] = self.pipeline_task + if self.power_bi_task: + body["power_bi_task"] = self.power_bi_task + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task + if self.run_if is not None: + body["run_if"] = self.run_if + if self.run_job_task: + body["run_job_task"] = self.run_job_task + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task + if self.sql_task: + body["sql_task"] = self.sql_task + if self.task_key is not None: + body["task_key"] = self.task_key + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubmitTask: """Deserializes the SubmitTask from a dictionary.""" - return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', CleanRoomsNotebookTask), condition_task=_from_dict(d, 'condition_task', ConditionTask), dashboard_task=_from_dict(d, 'dashboard_task', DashboardTask), dbt_cloud_task=_from_dict(d, 'dbt_cloud_task', DbtCloudTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environment_key=d.get('environment_key', None), existing_cluster_id=d.get('existing_cluster_id', None), for_each_task=_from_dict(d, 'for_each_task', ForEachTask), gen_ai_compute_task=_from_dict(d, 'gen_ai_compute_task', GenAiComputeTask), health=_from_dict(d, 'health', JobsHealthRules), libraries=_repeated_dict(d, 'libraries', compute.Library), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec), notebook_task=_from_dict(d, 'notebook_task', NotebookTask), notification_settings=_from_dict(d, 'notification_settings', 
TaskNotificationSettings), pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask), power_bi_task=_from_dict(d, 'power_bi_task', PowerBiTask), python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask), run_if=_enum(d, 'run_if', RunIf), run_job_task=_from_dict(d, 'run_job_task', RunJobTask), spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask), spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask), spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask), sql_task=_from_dict(d, 'sql_task', SqlTask), task_key=d.get('task_key', None), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) - - + return cls( + clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask), + condition_task=_from_dict(d, "condition_task", ConditionTask), + dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), + dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask), + dbt_task=_from_dict(d, "dbt_task", DbtTask), + depends_on=_repeated_dict(d, "depends_on", TaskDependency), + description=d.get("description", None), + email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), + environment_key=d.get("environment_key", None), + existing_cluster_id=d.get("existing_cluster_id", None), + for_each_task=_from_dict(d, "for_each_task", ForEachTask), + gen_ai_compute_task=_from_dict(d, "gen_ai_compute_task", GenAiComputeTask), + health=_from_dict(d, "health", JobsHealthRules), + libraries=_repeated_dict(d, "libraries", compute.Library), + new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec), + notebook_task=_from_dict(d, "notebook_task", NotebookTask), + notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings), + pipeline_task=_from_dict(d, "pipeline_task", PipelineTask), + power_bi_task=_from_dict(d, "power_bi_task", PowerBiTask), + python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask), + run_if=_enum(d, "run_if", RunIf), + run_job_task=_from_dict(d, "run_job_task", RunJobTask), + spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask), + spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask), + spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask), + sql_task=_from_dict(d, "sql_task", SqlTask), + task_key=d.get("task_key", None), + timeout_seconds=d.get("timeout_seconds", None), + webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), + ) @dataclass class Subscription: custom_subject: Optional[str] = None """Optional: Allows users to specify a custom subject line on the email sent to subscribers.""" - + paused: Optional[bool] = None """When true, the subscription will not send emails.""" - + subscribers: Optional[List[SubscriptionSubscriber]] = None """The list of subscribers to send the snapshot of the dashboard to.""" - + def as_dict(self) -> dict: """Serializes the Subscription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.paused is not None: body['paused'] = self.paused - if self.subscribers: body['subscribers'] = [v.as_dict() for v in self.subscribers] + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.paused is not None: + body["paused"] = self.paused + if self.subscribers: + body["subscribers"] = [v.as_dict() for v in self.subscribers] return 
body def as_shallow_dict(self) -> dict: """Serializes the Subscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.paused is not None: body['paused'] = self.paused - if self.subscribers: body['subscribers'] = self.subscribers + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.paused is not None: + body["paused"] = self.paused + if self.subscribers: + body["subscribers"] = self.subscribers return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Subscription: """Deserializes the Subscription from a dictionary.""" - return cls(custom_subject=d.get('custom_subject', None), paused=d.get('paused', None), subscribers=_repeated_dict(d, 'subscribers', SubscriptionSubscriber)) - - + return cls( + custom_subject=d.get("custom_subject", None), + paused=d.get("paused", None), + subscribers=_repeated_dict(d, "subscribers", SubscriptionSubscriber), + ) @dataclass @@ -6610,75 +8097,88 @@ class SubscriptionSubscriber: destination_id: Optional[str] = None """A snapshot of the dashboard will be sent to the destination when the `destination_id` field is present.""" - + user_name: Optional[str] = None """A snapshot of the dashboard will be sent to the user's email when the `user_name` field is present.""" - + def as_dict(self) -> dict: """Serializes the SubscriptionSubscriber into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_id is not None: body['destination_id'] = self.destination_id - if self.user_name is not None: body['user_name'] = self.user_name + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the SubscriptionSubscriber into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: body['destination_id'] = self.destination_id - if self.user_name is not None: body['user_name'] = self.user_name + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SubscriptionSubscriber: """Deserializes the SubscriptionSubscriber from a dictionary.""" - return cls(destination_id=d.get('destination_id', None), user_name=d.get('user_name', None)) - - + return cls(destination_id=d.get("destination_id", None), user_name=d.get("user_name", None)) @dataclass class TableUpdateTriggerConfiguration: condition: Optional[Condition] = None """The table(s) condition based on which to trigger a job run.""" - + min_time_between_triggers_seconds: Optional[int] = None """If set, the trigger starts a run only after the specified amount of time has passed since the last time the trigger fired. The minimum allowed value is 60 seconds.""" - + table_names: Optional[List[str]] = None """A list of Delta tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.""" - + wait_after_last_change_seconds: Optional[int] = None """If set, the trigger starts a run only after no table updates have occurred for the specified time and can be used to wait for a series of table updates before triggering a run. 
The minimum allowed value is 60 seconds."""
-
+
    def as_dict(self) -> dict:
        """Serializes the TableUpdateTriggerConfiguration into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.condition is not None: body['condition'] = self.condition.value
-        if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.table_names: body['table_names'] = [v for v in self.table_names]
-        if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        if self.condition is not None:
+            body["condition"] = self.condition.value
+        if self.min_time_between_triggers_seconds is not None:
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.table_names:
+            body["table_names"] = [v for v in self.table_names]
+        if self.wait_after_last_change_seconds is not None:
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TableUpdateTriggerConfiguration into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.condition is not None: body['condition'] = self.condition
-        if self.min_time_between_triggers_seconds is not None: body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
-        if self.table_names: body['table_names'] = self.table_names
-        if self.wait_after_last_change_seconds is not None: body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        if self.condition is not None:
+            body["condition"] = self.condition
+        if self.min_time_between_triggers_seconds is not None:
+            body["min_time_between_triggers_seconds"] = self.min_time_between_triggers_seconds
+        if self.table_names:
+            body["table_names"] = self.table_names
+        if self.wait_after_last_change_seconds is not None:
+            body["wait_after_last_change_seconds"] = self.wait_after_last_change_seconds
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TableUpdateTriggerConfiguration:
        """Deserializes the TableUpdateTriggerConfiguration from a dictionary."""
-        return cls(condition=_enum(d, 'condition', Condition), min_time_between_triggers_seconds=d.get('min_time_between_triggers_seconds', None), table_names=d.get('table_names', None), wait_after_last_change_seconds=d.get('wait_after_last_change_seconds', None))
-
-
+        return cls(
+            condition=_enum(d, "condition", Condition),
+            min_time_between_triggers_seconds=d.get("min_time_between_triggers_seconds", None),
+            table_names=d.get("table_names", None),
+            wait_after_last_change_seconds=d.get("wait_after_last_change_seconds", None),
+        )
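For orientation, a sketch of how the dataclass above round-trips through its serialization helpers; this is illustrative only and not part of the generated change, and the catalog/table name is invented:

    from databricks.sdk.service import jobs

    # Fire at most once every 10 minutes, and only after `orders` has been
    # quiet for at least 2 minutes.
    cfg = jobs.TableUpdateTriggerConfiguration(
        table_names=["main.analytics.orders"],  # hypothetical table
        condition=jobs.Condition.ANY_UPDATED,
        min_time_between_triggers_seconds=600,
        wait_after_last_change_seconds=120,
    )
    # as_dict() emits only populated fields; from_dict() restores an equal value.
    assert jobs.TableUpdateTriggerConfiguration.from_dict(cfg.as_dict()) == cfg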
@dataclass
@@ -6687,103 +8187,103 @@ class Task:
    """A unique name for the task. This field is used to refer to this task from other tasks. This
    field is required and must be unique within its parent job. On Update or Reset, this field is
    used to reference the tasks to be updated or reset."""
-
+
    clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
    """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.

    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
-
+
    condition_task: Optional[ConditionTask] = None
    """The task evaluates a condition that can be used to control the execution of other tasks when
    the `condition_task` field is present. The condition task does not require a cluster to execute
    and does not support retries or notifications."""
-
+
    dashboard_task: Optional[DashboardTask] = None
    """The task refreshes a dashboard and sends a snapshot to subscribers."""
-
+
    dbt_cloud_task: Optional[DbtCloudTask] = None
    """Task type for dbt Cloud"""
-
+
    dbt_task: Optional[DbtTask] = None
    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
-
+
    depends_on: Optional[List[TaskDependency]] = None
    """An optional array of objects specifying the dependency graph of the task. All tasks specified
    in this field must complete before executing this task. The task will run only if the `run_if`
    condition is true. The key is `task_key`, and the value is the name assigned to the dependent
    task."""
-
+
    description: Optional[str] = None
    """An optional description for this task."""
-
+
    disable_auto_optimization: Optional[bool] = None
    """An option to disable auto optimization in serverless"""
-
+
    email_notifications: Optional[TaskEmailNotifications] = None
    """An optional set of email addresses that is notified when runs of this task begin or complete
    as well as when this task is deleted. The default behavior is to not send any emails."""
-
+
    environment_key: Optional[str] = None
    """The key that references an environment spec in a job. This field is required for Python
    script, Python wheel and dbt tasks when using serverless compute."""
-
+
    existing_cluster_id: Optional[str] = None
    """If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running
    jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops
    responding. We suggest running jobs and tasks on new clusters for greater reliability."""
-
+
    for_each_task: Optional[ForEachTask] = None
    """The task executes a nested task for every input provided when the `for_each_task` field is
    present."""
-
+
    gen_ai_compute_task: Optional[GenAiComputeTask] = None
-
+
    health: Optional[JobsHealthRules] = None
    """An optional set of health rules that can be defined for this job."""
-
+
    job_cluster_key: Optional[str] = None
    """If job_cluster_key, this task is executed reusing the cluster specified in
    `job.settings.job_clusters`."""
-
+
    libraries: Optional[List[compute.Library]] = None
    """An optional list of libraries to be installed on the cluster. The default value is an empty
    list."""
-
+
    max_retries: Optional[int] = None
    """An optional maximum number of times to retry an unsuccessful run. A run is considered to be
    unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR`
    `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never
    retry."""
-
+
    min_retry_interval_millis: Optional[int] = None
    """An optional minimal interval in milliseconds between the start of the failed run and the
    subsequent retry run.
    The default behavior is that unsuccessful runs are immediately retried."""
-
+
    new_cluster: Optional[compute.ClusterSpec] = None
    """If new_cluster, a description of a new cluster that is created for each run."""
-
+
    notebook_task: Optional[NotebookTask] = None
    """The task runs a notebook when the `notebook_task` field is present."""
-
+
    notification_settings: Optional[TaskNotificationSettings] = None
    """Optional notification settings that are used when sending notifications to each of the
    `email_notifications` and `webhook_notifications` for this task."""
-
+
    pipeline_task: Optional[PipelineTask] = None
    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
    configured to use triggered mode are supported."""
-
+
    power_bi_task: Optional[PowerBiTask] = None
    """The task triggers a Power BI semantic model update when the `power_bi_task` field is present."""
-
+
    python_wheel_task: Optional[PythonWheelTask] = None
    """The task runs a Python wheel when the `python_wheel_task` field is present."""
-
+
    retry_on_timeout: Optional[bool] = None
    """An optional policy to specify whether to retry a job when it times out. The default behavior
    is to not retry on timeout."""
-
+
    run_if: Optional[RunIf] = None
    """An optional value specifying the condition determining whether the task is run once its
    dependencies have been completed.
@@ -6792,16 +8292,16 @@ class Task:
    one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at least
    one was executed * `ALL_DONE`: All dependencies have been completed * `AT_LEAST_ONE_FAILED`: At
    least one dependency failed * `ALL_FAILED`: All dependencies have failed"""
-
+
    run_job_task: Optional[RunJobTask] = None
    """The task triggers another job when the `run_job_task` field is present."""
-
+
    spark_jar_task: Optional[SparkJarTask] = None
    """The task runs a JAR when the `spark_jar_task` field is present."""
-
+
    spark_python_task: Optional[SparkPythonTask] = None
    """The task runs a Python file when the `spark_python_task` field is present."""
-
+
    spark_submit_task: Optional[SparkSubmitTask] = None
    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
    This task can run only on new clusters and is not compatible with serverless compute.
@@ -6818,133 +8318,236 @@ class Task:
    to leave some room for off-heap usage.

    The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
-
+
    sql_task: Optional[SqlTask] = None
    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
    the `sql_task` field is present."""
-
+
    timeout_seconds: Optional[int] = None
    """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
-
+
    webhook_notifications: Optional[WebhookNotifications] = None
    """A collection of system notification IDs to notify when runs of this task begin or complete.
The default behavior is to not send any system notifications.""" - + def as_dict(self) -> dict: """Serializes the Task into a dictionary suitable for use as a JSON request body.""" body = {} - if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict() - if self.condition_task: body['condition_task'] = self.condition_task.as_dict() - if self.dashboard_task: body['dashboard_task'] = self.dashboard_task.as_dict() - if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task.as_dict() - if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict() - if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on] - if self.description is not None: body['description'] = self.description - if self.disable_auto_optimization is not None: body['disable_auto_optimization'] = self.disable_auto_optimization - if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict() - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.for_each_task: body['for_each_task'] = self.for_each_task.as_dict() - if self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task.as_dict() - if self.health: body['health'] = self.health.as_dict() - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.max_retries is not None: body['max_retries'] = self.max_retries - if self.min_retry_interval_millis is not None: body['min_retry_interval_millis'] = self.min_retry_interval_millis - if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict() - if self.notebook_task: body['notebook_task'] = self.notebook_task.as_dict() - if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict() - if self.pipeline_task: body['pipeline_task'] = self.pipeline_task.as_dict() - if self.power_bi_task: body['power_bi_task'] = self.power_bi_task.as_dict() - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task.as_dict() - if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout - if self.run_if is not None: body['run_if'] = self.run_if.value - if self.run_job_task: body['run_job_task'] = self.run_job_task.as_dict() - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task.as_dict() - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task.as_dict() - if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task.as_dict() - if self.sql_task: body['sql_task'] = self.sql_task.as_dict() - if self.task_key is not None: body['task_key'] = self.task_key - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict() + if self.clean_rooms_notebook_task: + body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task.as_dict() + if self.condition_task: + body["condition_task"] = self.condition_task.as_dict() + if self.dashboard_task: + body["dashboard_task"] = self.dashboard_task.as_dict() + if self.dbt_cloud_task: + body["dbt_cloud_task"] = self.dbt_cloud_task.as_dict() + if self.dbt_task: + body["dbt_task"] = self.dbt_task.as_dict() + if self.depends_on: + body["depends_on"] = [v.as_dict() for v in 
self.depends_on] + if self.description is not None: + body["description"] = self.description + if self.disable_auto_optimization is not None: + body["disable_auto_optimization"] = self.disable_auto_optimization + if self.email_notifications: + body["email_notifications"] = self.email_notifications.as_dict() + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.for_each_task: + body["for_each_task"] = self.for_each_task.as_dict() + if self.gen_ai_compute_task: + body["gen_ai_compute_task"] = self.gen_ai_compute_task.as_dict() + if self.health: + body["health"] = self.health.as_dict() + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.max_retries is not None: + body["max_retries"] = self.max_retries + if self.min_retry_interval_millis is not None: + body["min_retry_interval_millis"] = self.min_retry_interval_millis + if self.new_cluster: + body["new_cluster"] = self.new_cluster.as_dict() + if self.notebook_task: + body["notebook_task"] = self.notebook_task.as_dict() + if self.notification_settings: + body["notification_settings"] = self.notification_settings.as_dict() + if self.pipeline_task: + body["pipeline_task"] = self.pipeline_task.as_dict() + if self.power_bi_task: + body["power_bi_task"] = self.power_bi_task.as_dict() + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task.as_dict() + if self.retry_on_timeout is not None: + body["retry_on_timeout"] = self.retry_on_timeout + if self.run_if is not None: + body["run_if"] = self.run_if.value + if self.run_job_task: + body["run_job_task"] = self.run_job_task.as_dict() + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task.as_dict() + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task.as_dict() + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task.as_dict() + if self.sql_task: + body["sql_task"] = self.sql_task.as_dict() + if self.task_key is not None: + body["task_key"] = self.task_key + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Task into a shallow dictionary of its immediate attributes.""" body = {} - if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task - if self.condition_task: body['condition_task'] = self.condition_task - if self.dashboard_task: body['dashboard_task'] = self.dashboard_task - if self.dbt_cloud_task: body['dbt_cloud_task'] = self.dbt_cloud_task - if self.dbt_task: body['dbt_task'] = self.dbt_task - if self.depends_on: body['depends_on'] = self.depends_on - if self.description is not None: body['description'] = self.description - if self.disable_auto_optimization is not None: body['disable_auto_optimization'] = self.disable_auto_optimization - if self.email_notifications: body['email_notifications'] = self.email_notifications - if self.environment_key is not None: body['environment_key'] = self.environment_key - if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id - if self.for_each_task: body['for_each_task'] = self.for_each_task - if 
self.gen_ai_compute_task: body['gen_ai_compute_task'] = self.gen_ai_compute_task - if self.health: body['health'] = self.health - if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key - if self.libraries: body['libraries'] = self.libraries - if self.max_retries is not None: body['max_retries'] = self.max_retries - if self.min_retry_interval_millis is not None: body['min_retry_interval_millis'] = self.min_retry_interval_millis - if self.new_cluster: body['new_cluster'] = self.new_cluster - if self.notebook_task: body['notebook_task'] = self.notebook_task - if self.notification_settings: body['notification_settings'] = self.notification_settings - if self.pipeline_task: body['pipeline_task'] = self.pipeline_task - if self.power_bi_task: body['power_bi_task'] = self.power_bi_task - if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task - if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout - if self.run_if is not None: body['run_if'] = self.run_if - if self.run_job_task: body['run_job_task'] = self.run_job_task - if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task - if self.spark_python_task: body['spark_python_task'] = self.spark_python_task - if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task - if self.sql_task: body['sql_task'] = self.sql_task - if self.task_key is not None: body['task_key'] = self.task_key - if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds - if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications + if self.clean_rooms_notebook_task: + body["clean_rooms_notebook_task"] = self.clean_rooms_notebook_task + if self.condition_task: + body["condition_task"] = self.condition_task + if self.dashboard_task: + body["dashboard_task"] = self.dashboard_task + if self.dbt_cloud_task: + body["dbt_cloud_task"] = self.dbt_cloud_task + if self.dbt_task: + body["dbt_task"] = self.dbt_task + if self.depends_on: + body["depends_on"] = self.depends_on + if self.description is not None: + body["description"] = self.description + if self.disable_auto_optimization is not None: + body["disable_auto_optimization"] = self.disable_auto_optimization + if self.email_notifications: + body["email_notifications"] = self.email_notifications + if self.environment_key is not None: + body["environment_key"] = self.environment_key + if self.existing_cluster_id is not None: + body["existing_cluster_id"] = self.existing_cluster_id + if self.for_each_task: + body["for_each_task"] = self.for_each_task + if self.gen_ai_compute_task: + body["gen_ai_compute_task"] = self.gen_ai_compute_task + if self.health: + body["health"] = self.health + if self.job_cluster_key is not None: + body["job_cluster_key"] = self.job_cluster_key + if self.libraries: + body["libraries"] = self.libraries + if self.max_retries is not None: + body["max_retries"] = self.max_retries + if self.min_retry_interval_millis is not None: + body["min_retry_interval_millis"] = self.min_retry_interval_millis + if self.new_cluster: + body["new_cluster"] = self.new_cluster + if self.notebook_task: + body["notebook_task"] = self.notebook_task + if self.notification_settings: + body["notification_settings"] = self.notification_settings + if self.pipeline_task: + body["pipeline_task"] = self.pipeline_task + if self.power_bi_task: + body["power_bi_task"] = self.power_bi_task + if self.python_wheel_task: + body["python_wheel_task"] = self.python_wheel_task + if 
self.retry_on_timeout is not None: + body["retry_on_timeout"] = self.retry_on_timeout + if self.run_if is not None: + body["run_if"] = self.run_if + if self.run_job_task: + body["run_job_task"] = self.run_job_task + if self.spark_jar_task: + body["spark_jar_task"] = self.spark_jar_task + if self.spark_python_task: + body["spark_python_task"] = self.spark_python_task + if self.spark_submit_task: + body["spark_submit_task"] = self.spark_submit_task + if self.sql_task: + body["sql_task"] = self.sql_task + if self.task_key is not None: + body["task_key"] = self.task_key + if self.timeout_seconds is not None: + body["timeout_seconds"] = self.timeout_seconds + if self.webhook_notifications: + body["webhook_notifications"] = self.webhook_notifications return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Task: """Deserializes the Task from a dictionary.""" - return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task', CleanRoomsNotebookTask), condition_task=_from_dict(d, 'condition_task', ConditionTask), dashboard_task=_from_dict(d, 'dashboard_task', DashboardTask), dbt_cloud_task=_from_dict(d, 'dbt_cloud_task', DbtCloudTask), dbt_task=_from_dict(d, 'dbt_task', DbtTask), depends_on=_repeated_dict(d, 'depends_on', TaskDependency), description=d.get('description', None), disable_auto_optimization=d.get('disable_auto_optimization', None), email_notifications=_from_dict(d, 'email_notifications', TaskEmailNotifications), environment_key=d.get('environment_key', None), existing_cluster_id=d.get('existing_cluster_id', None), for_each_task=_from_dict(d, 'for_each_task', ForEachTask), gen_ai_compute_task=_from_dict(d, 'gen_ai_compute_task', GenAiComputeTask), health=_from_dict(d, 'health', JobsHealthRules), job_cluster_key=d.get('job_cluster_key', None), libraries=_repeated_dict(d, 'libraries', compute.Library), max_retries=d.get('max_retries', None), min_retry_interval_millis=d.get('min_retry_interval_millis', None), new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec), notebook_task=_from_dict(d, 'notebook_task', NotebookTask), notification_settings=_from_dict(d, 'notification_settings', TaskNotificationSettings), pipeline_task=_from_dict(d, 'pipeline_task', PipelineTask), power_bi_task=_from_dict(d, 'power_bi_task', PowerBiTask), python_wheel_task=_from_dict(d, 'python_wheel_task', PythonWheelTask), retry_on_timeout=d.get('retry_on_timeout', None), run_if=_enum(d, 'run_if', RunIf), run_job_task=_from_dict(d, 'run_job_task', RunJobTask), spark_jar_task=_from_dict(d, 'spark_jar_task', SparkJarTask), spark_python_task=_from_dict(d, 'spark_python_task', SparkPythonTask), spark_submit_task=_from_dict(d, 'spark_submit_task', SparkSubmitTask), sql_task=_from_dict(d, 'sql_task', SqlTask), task_key=d.get('task_key', None), timeout_seconds=d.get('timeout_seconds', None), webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications)) - - + return cls( + clean_rooms_notebook_task=_from_dict(d, "clean_rooms_notebook_task", CleanRoomsNotebookTask), + condition_task=_from_dict(d, "condition_task", ConditionTask), + dashboard_task=_from_dict(d, "dashboard_task", DashboardTask), + dbt_cloud_task=_from_dict(d, "dbt_cloud_task", DbtCloudTask), + dbt_task=_from_dict(d, "dbt_task", DbtTask), + depends_on=_repeated_dict(d, "depends_on", TaskDependency), + description=d.get("description", None), + disable_auto_optimization=d.get("disable_auto_optimization", None), + email_notifications=_from_dict(d, "email_notifications", TaskEmailNotifications), + 
            environment_key=d.get("environment_key", None),
+            existing_cluster_id=d.get("existing_cluster_id", None),
+            for_each_task=_from_dict(d, "for_each_task", ForEachTask),
+            gen_ai_compute_task=_from_dict(d, "gen_ai_compute_task", GenAiComputeTask),
+            health=_from_dict(d, "health", JobsHealthRules),
+            job_cluster_key=d.get("job_cluster_key", None),
+            libraries=_repeated_dict(d, "libraries", compute.Library),
+            max_retries=d.get("max_retries", None),
+            min_retry_interval_millis=d.get("min_retry_interval_millis", None),
+            new_cluster=_from_dict(d, "new_cluster", compute.ClusterSpec),
+            notebook_task=_from_dict(d, "notebook_task", NotebookTask),
+            notification_settings=_from_dict(d, "notification_settings", TaskNotificationSettings),
+            pipeline_task=_from_dict(d, "pipeline_task", PipelineTask),
+            power_bi_task=_from_dict(d, "power_bi_task", PowerBiTask),
+            python_wheel_task=_from_dict(d, "python_wheel_task", PythonWheelTask),
+            retry_on_timeout=d.get("retry_on_timeout", None),
+            run_if=_enum(d, "run_if", RunIf),
+            run_job_task=_from_dict(d, "run_job_task", RunJobTask),
+            spark_jar_task=_from_dict(d, "spark_jar_task", SparkJarTask),
+            spark_python_task=_from_dict(d, "spark_python_task", SparkPythonTask),
+            spark_submit_task=_from_dict(d, "spark_submit_task", SparkSubmitTask),
+            sql_task=_from_dict(d, "sql_task", SqlTask),
+            task_key=d.get("task_key", None),
+            timeout_seconds=d.get("timeout_seconds", None),
+            webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications),
+        )


@dataclass
class TaskDependency:
    task_key: str
    """The name of the task this task depends on."""
-
+
    outcome: Optional[str] = None
    """Can only be specified on condition task dependencies. The outcome of the dependent task that
    must be met for this task to run."""
-
+
    def as_dict(self) -> dict:
        """Serializes the TaskDependency into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.outcome is not None: body['outcome'] = self.outcome
-        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.outcome is not None:
+            body["outcome"] = self.outcome
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TaskDependency into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.outcome is not None: body['outcome'] = self.outcome
-        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.outcome is not None:
+            body["outcome"] = self.outcome
+        if self.task_key is not None:
+            body["task_key"] = self.task_key
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TaskDependency:
        """Deserializes the TaskDependency from a dictionary."""
-        return cls(outcome=d.get('outcome', None), task_key=d.get('task_key', None))
-
-
+        return cls(outcome=d.get("outcome", None), task_key=d.get("task_key", None))
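As a minimal sketch of how Task and TaskDependency compose into a dependency graph (illustrative only; the notebook paths are assumptions, and TaskNotificationSettings is the class defined just below):

    from databricks.sdk.service import jobs

    ingest = jobs.Task(
        task_key="ingest",
        notebook_task=jobs.NotebookTask(notebook_path="/Workspace/pipelines/ingest"),
    )
    # "report" waits for "ingest" and, via run_if, fires only if it succeeded.
    report = jobs.Task(
        task_key="report",
        depends_on=[jobs.TaskDependency(task_key="ingest")],
        run_if=jobs.RunIf.ALL_SUCCESS,
        notebook_task=jobs.NotebookTask(notebook_path="/Workspace/pipelines/report"),
        # Boolean fields are guarded with `is not None` in as_dict(), so an
        # explicit False would survive serialization while unset fields are dropped.
        notification_settings=jobs.TaskNotificationSettings(alert_on_last_attempt=True),
    )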
@dataclass
@@ -6952,64 +8555,81 @@ class TaskEmailNotifications:
    no_alert_for_skipped_runs: Optional[bool] = None
    """If true, do not send email to recipients specified in `on_failure` if the run is skipped.
    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs`
    field."""
-
+
    on_duration_warning_threshold_exceeded: Optional[List[str]] = None
    """A list of email addresses to be notified when the duration of a run exceeds the threshold
    specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the
    `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications
    are not sent."""
-
+
    on_failure: Optional[List[str]] = None
    """A list of email addresses to be notified when a run unsuccessfully completes. A run is
    considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR`
    `life_cycle_state` or a `FAILED` or `TIMED_OUT` result_state. If this is not specified on job
    creation, reset, or update, the list is empty, and notifications are not sent."""
-
+
    on_start: Optional[List[str]] = None
    """A list of email addresses to be notified when a run begins. If not specified on job creation,
    reset, or update, the list is empty, and notifications are not sent."""
-
+
    on_streaming_backlog_exceeded: Optional[List[str]] = None
    """A list of email addresses to notify when any streaming backlog thresholds are exceeded for any
    stream. Streaming backlog thresholds can be set in the `health` field using the following
    metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or
    `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the
    issue persists, notifications are resent every 30 minutes."""
-
+
    on_success: Optional[List[str]] = None
    """A list of email addresses to be notified when a run successfully completes. A run is
    considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and
    a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty,
    and notifications are not sent."""
-
+
    def as_dict(self) -> dict:
        """Serializes the TaskEmailNotifications into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
-        if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = [v for v in self.on_duration_warning_threshold_exceeded]
-        if self.on_failure: body['on_failure'] = [v for v in self.on_failure]
-        if self.on_start: body['on_start'] = [v for v in self.on_start]
-        if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = [v for v in self.on_streaming_backlog_exceeded]
-        if self.on_success: body['on_success'] = [v for v in self.on_success]
+        if self.no_alert_for_skipped_runs is not None:
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
+        if self.on_duration_warning_threshold_exceeded:
+            body["on_duration_warning_threshold_exceeded"] = [v for v in self.on_duration_warning_threshold_exceeded]
+        if self.on_failure:
+            body["on_failure"] = [v for v in self.on_failure]
+        if self.on_start:
+            body["on_start"] = [v for v in self.on_start]
+        if self.on_streaming_backlog_exceeded:
+            body["on_streaming_backlog_exceeded"] = [v for v in self.on_streaming_backlog_exceeded]
+        if self.on_success:
+            body["on_success"] = [v for v in self.on_success]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TaskEmailNotifications into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
-        if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
-        if self.on_failure: body['on_failure'] = self.on_failure
-        if self.on_start: body['on_start'] = self.on_start
-        if self.on_streaming_backlog_exceeded:
body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
-        if self.on_success: body['on_success'] = self.on_success
+        if self.no_alert_for_skipped_runs is not None:
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
+        if self.on_duration_warning_threshold_exceeded:
+            body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure:
+            body["on_failure"] = self.on_failure
+        if self.on_start:
+            body["on_start"] = self.on_start
+        if self.on_streaming_backlog_exceeded:
+            body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded
+        if self.on_success:
+            body["on_success"] = self.on_success
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TaskEmailNotifications:
        """Deserializes the TaskEmailNotifications from a dictionary."""
-        return cls(no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None), on_duration_warning_threshold_exceeded=d.get('on_duration_warning_threshold_exceeded', None), on_failure=d.get('on_failure', None), on_start=d.get('on_start', None), on_streaming_backlog_exceeded=d.get('on_streaming_backlog_exceeded', None), on_success=d.get('on_success', None))
-
-
+        return cls(
+            no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None),
+            on_duration_warning_threshold_exceeded=d.get("on_duration_warning_threshold_exceeded", None),
+            on_failure=d.get("on_failure", None),
+            on_start=d.get("on_start", None),
+            on_streaming_backlog_exceeded=d.get("on_streaming_backlog_exceeded", None),
+            on_success=d.get("on_success", None),
+        )


@dataclass
@@ -7018,37 +8638,45 @@ class TaskNotificationSettings:
    """If true, do not send notifications to recipients specified in `on_start` for the retried runs
    and do not send notifications to recipients specified in `on_failure` until the last retry of
    the run."""
-
+
    no_alert_for_canceled_runs: Optional[bool] = None
    """If true, do not send notifications to recipients specified in `on_failure` if the run is
    canceled."""
-
+
    no_alert_for_skipped_runs: Optional[bool] = None
    """If true, do not send notifications to recipients specified in `on_failure` if the run is
    skipped."""
-
+
    def as_dict(self) -> dict:
        """Serializes the TaskNotificationSettings into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt
-        if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
-        if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        if self.alert_on_last_attempt is not None:
+            body["alert_on_last_attempt"] = self.alert_on_last_attempt
+        if self.no_alert_for_canceled_runs is not None:
+            body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs
+        if self.no_alert_for_skipped_runs is not None:
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TaskNotificationSettings into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt
-        if self.no_alert_for_canceled_runs is not None: body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
-        if self.no_alert_for_skipped_runs is not None: body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        if self.alert_on_last_attempt is not None:
+
            body["alert_on_last_attempt"] = self.alert_on_last_attempt
+        if self.no_alert_for_canceled_runs is not None:
+            body["no_alert_for_canceled_runs"] = self.no_alert_for_canceled_runs
+        if self.no_alert_for_skipped_runs is not None:
+            body["no_alert_for_skipped_runs"] = self.no_alert_for_skipped_runs
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TaskNotificationSettings:
        """Deserializes the TaskNotificationSettings from a dictionary."""
-        return cls(alert_on_last_attempt=d.get('alert_on_last_attempt', None), no_alert_for_canceled_runs=d.get('no_alert_for_canceled_runs', None), no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None))
-
-
+        return cls(
+            alert_on_last_attempt=d.get("alert_on_last_attempt", None),
+            no_alert_for_canceled_runs=d.get("no_alert_for_canceled_runs", None),
+            no_alert_for_skipped_runs=d.get("no_alert_for_skipped_runs", None),
+        )


class TerminationCodeCode(Enum):
@@ -7086,34 +8714,35 @@ class TerminationCodeCode(Enum):
    run failed due to a cloud provider issue. Refer to the state message for further details. *
    `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
    limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user.
-
+
    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
-
-    BUDGET_POLICY_LIMIT_EXCEEDED = 'BUDGET_POLICY_LIMIT_EXCEEDED'
-    CANCELED = 'CANCELED'
-    CLOUD_FAILURE = 'CLOUD_FAILURE'
-    CLUSTER_ERROR = 'CLUSTER_ERROR'
-    CLUSTER_REQUEST_LIMIT_EXCEEDED = 'CLUSTER_REQUEST_LIMIT_EXCEEDED'
-    DISABLED = 'DISABLED'
-    DRIVER_ERROR = 'DRIVER_ERROR'
-    FEATURE_DISABLED = 'FEATURE_DISABLED'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    INVALID_CLUSTER_REQUEST = 'INVALID_CLUSTER_REQUEST'
-    INVALID_RUN_CONFIGURATION = 'INVALID_RUN_CONFIGURATION'
-    LIBRARY_INSTALLATION_ERROR = 'LIBRARY_INSTALLATION_ERROR'
-    MAX_CONCURRENT_RUNS_EXCEEDED = 'MAX_CONCURRENT_RUNS_EXCEEDED'
-    MAX_JOB_QUEUE_SIZE_EXCEEDED = 'MAX_JOB_QUEUE_SIZE_EXCEEDED'
-    MAX_SPARK_CONTEXTS_EXCEEDED = 'MAX_SPARK_CONTEXTS_EXCEEDED'
-    REPOSITORY_CHECKOUT_FAILED = 'REPOSITORY_CHECKOUT_FAILED'
-    RESOURCE_NOT_FOUND = 'RESOURCE_NOT_FOUND'
-    RUN_EXECUTION_ERROR = 'RUN_EXECUTION_ERROR'
-    SKIPPED = 'SKIPPED'
-    STORAGE_ACCESS_ERROR = 'STORAGE_ACCESS_ERROR'
-    SUCCESS = 'SUCCESS'
-    SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
-    UNAUTHORIZED_ERROR = 'UNAUTHORIZED_ERROR'
-    USER_CANCELED = 'USER_CANCELED'
-    WORKSPACE_RUN_LIMIT_EXCEEDED = 'WORKSPACE_RUN_LIMIT_EXCEEDED'
+
+    BUDGET_POLICY_LIMIT_EXCEEDED = "BUDGET_POLICY_LIMIT_EXCEEDED"
+    CANCELED = "CANCELED"
+    CLOUD_FAILURE = "CLOUD_FAILURE"
+    CLUSTER_ERROR = "CLUSTER_ERROR"
+    CLUSTER_REQUEST_LIMIT_EXCEEDED = "CLUSTER_REQUEST_LIMIT_EXCEEDED"
+    DISABLED = "DISABLED"
+    DRIVER_ERROR = "DRIVER_ERROR"
+    FEATURE_DISABLED = "FEATURE_DISABLED"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    INVALID_CLUSTER_REQUEST = "INVALID_CLUSTER_REQUEST"
+    INVALID_RUN_CONFIGURATION = "INVALID_RUN_CONFIGURATION"
+    LIBRARY_INSTALLATION_ERROR = "LIBRARY_INSTALLATION_ERROR"
+    MAX_CONCURRENT_RUNS_EXCEEDED = "MAX_CONCURRENT_RUNS_EXCEEDED"
+    MAX_JOB_QUEUE_SIZE_EXCEEDED = "MAX_JOB_QUEUE_SIZE_EXCEEDED"
+    MAX_SPARK_CONTEXTS_EXCEEDED = "MAX_SPARK_CONTEXTS_EXCEEDED"
+    REPOSITORY_CHECKOUT_FAILED = "REPOSITORY_CHECKOUT_FAILED"
+    RESOURCE_NOT_FOUND = "RESOURCE_NOT_FOUND"
+    RUN_EXECUTION_ERROR = "RUN_EXECUTION_ERROR"
+    SKIPPED = "SKIPPED"
+    STORAGE_ACCESS_ERROR = "STORAGE_ACCESS_ERROR"
+    SUCCESS = "SUCCESS"
+    SUCCESS_WITH_FAILURES = "SUCCESS_WITH_FAILURES"
+    UNAUTHORIZED_ERROR = "UNAUTHORIZED_ERROR"
+    USER_CANCELED = "USER_CANCELED"
+    WORKSPACE_RUN_LIMIT_EXCEEDED = "WORKSPACE_RUN_LIMIT_EXCEEDED"
+

@dataclass
class TerminationDetails:
@@ -7154,11 +8783,11 @@ class TerminationDetails:
    limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user.

    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
-
+
    message: Optional[str] = None
    """A descriptive message with the termination details. This field is unstructured and the format
    might change."""
-
+
    type: Optional[TerminationTypeType] = None
    """* `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in
    the Databricks platform. Please look at the [status page] or contact support if the issue
    persists.
@@ -7167,29 +8796,37 @@ class TerminationDetails:
    provider.

    [status page]: https://status.databricks.com/"""
-
+
    def as_dict(self) -> dict:
        """Serializes the TerminationDetails into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.code is not None: body['code'] = self.code.value
-        if self.message is not None: body['message'] = self.message
-        if self.type is not None: body['type'] = self.type.value
+        if self.code is not None:
+            body["code"] = self.code.value
+        if self.message is not None:
+            body["message"] = self.message
+        if self.type is not None:
+            body["type"] = self.type.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TerminationDetails into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.code is not None: body['code'] = self.code
-        if self.message is not None: body['message'] = self.message
-        if self.type is not None: body['type'] = self.type
+        if self.code is not None:
+            body["code"] = self.code
+        if self.message is not None:
+            body["message"] = self.message
+        if self.type is not None:
+            body["type"] = self.type
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TerminationDetails:
        """Deserializes the TerminationDetails from a dictionary."""
-        return cls(code=_enum(d, 'code', TerminationCodeCode), message=d.get('message', None), type=_enum(d, 'type', TerminationTypeType))
-
-
+        return cls(
+            code=_enum(d, "code", TerminationCodeCode),
+            message=d.get("message", None),
+            type=_enum(d, "type", TerminationTypeType),
+        )


class TerminationTypeType(Enum):
@@ -7198,112 +8835,127 @@ class TerminationTypeType(Enum):
    * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job
    configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud
    provider.
-
+
    [status page]: https://status.databricks.com/"""
-
-    CLIENT_ERROR = 'CLIENT_ERROR'
-    CLOUD_FAILURE = 'CLOUD_FAILURE'
-    INTERNAL_ERROR = 'INTERNAL_ERROR'
-    SUCCESS = 'SUCCESS'
+
+    CLIENT_ERROR = "CLIENT_ERROR"
+    CLOUD_FAILURE = "CLOUD_FAILURE"
+    INTERNAL_ERROR = "INTERNAL_ERROR"
+    SUCCESS = "SUCCESS"
+

@dataclass
class TriggerInfo:
    """Additional details about what triggered the run"""
-
+
    run_id: Optional[int] = None
    """The run id of the Run Job task run"""
-
+
    def as_dict(self) -> dict:
        """Serializes the TriggerInfo into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TriggerInfo into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_id is not None:
+            body["run_id"] = self.run_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TriggerInfo:
        """Deserializes the TriggerInfo from a dictionary."""
-        return cls(run_id=d.get('run_id', None))
-
-
+        return cls(run_id=d.get("run_id", None))


@dataclass
class TriggerSettings:
    file_arrival: Optional[FileArrivalTriggerConfiguration] = None
    """File arrival trigger settings."""
-
+
    pause_status: Optional[PauseStatus] = None
    """Whether this trigger is paused or not."""
-
+
    periodic: Optional[PeriodicTriggerConfiguration] = None
    """Periodic trigger settings."""
-
+
    table: Optional[TableUpdateTriggerConfiguration] = None
    """Old table trigger settings name. Deprecated in favor of `table_update`."""
-
+
    table_update: Optional[TableUpdateTriggerConfiguration] = None
-
+
    def as_dict(self) -> dict:
        """Serializes the TriggerSettings into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
-        if self.periodic: body['periodic'] = self.periodic.as_dict()
-        if self.table: body['table'] = self.table.as_dict()
-        if self.table_update: body['table_update'] = self.table_update.as_dict()
+        if self.file_arrival:
+            body["file_arrival"] = self.file_arrival.as_dict()
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status.value
+        if self.periodic:
+            body["periodic"] = self.periodic.as_dict()
+        if self.table:
+            body["table"] = self.table.as_dict()
+        if self.table_update:
+            body["table_update"] = self.table_update.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TriggerSettings into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.file_arrival: body['file_arrival'] = self.file_arrival
-        if self.pause_status is not None: body['pause_status'] = self.pause_status
-        if self.periodic: body['periodic'] = self.periodic
-        if self.table: body['table'] = self.table
-        if self.table_update: body['table_update'] = self.table_update
+        if self.file_arrival:
+            body["file_arrival"] = self.file_arrival
+        if self.pause_status is not None:
+            body["pause_status"] = self.pause_status
+        if self.periodic:
+            body["periodic"] = self.periodic
+        if self.table:
+            body["table"] = self.table
+        if self.table_update:
+            body["table_update"] = self.table_update
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TriggerSettings:
        """Deserializes the TriggerSettings from a dictionary."""
-        return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerConfiguration), pause_status=_enum(d, 'pause_status', PauseStatus), periodic=_from_dict(d, 'periodic', PeriodicTriggerConfiguration), table=_from_dict(d, 'table', TableUpdateTriggerConfiguration), table_update=_from_dict(d, 'table_update', TableUpdateTriggerConfiguration))
-
-
+        return cls(
+            file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerConfiguration),
+            pause_status=_enum(d, "pause_status", PauseStatus),
+            periodic=_from_dict(d, "periodic", PeriodicTriggerConfiguration),
+            table=_from_dict(d, "table", TableUpdateTriggerConfiguration),
+            table_update=_from_dict(d, "table_update", TableUpdateTriggerConfiguration),
+        )
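To make the trigger plumbing concrete, a sketch under stated assumptions: the storage URL is invented, and `FileArrivalTriggerConfiguration` with a `url` field is assumed to be defined earlier in this module, outside the hunks shown here:

    from databricks.sdk.service import jobs

    trigger = jobs.TriggerSettings(
        file_arrival=jobs.FileArrivalTriggerConfiguration(
            url="s3://example-bucket/landing/",  # hypothetical external location
        ),
        pause_status=jobs.PauseStatus.UNPAUSED,
    )
    # Enum-valued fields serialize to their .value; nested configs go through as_dict().
    assert trigger.as_dict()["pause_status"] == "UNPAUSED"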
@dataclass
class TriggerStateProto:
    file_arrival: Optional[FileArrivalTriggerState] = None
-
+
    def as_dict(self) -> dict:
        """Serializes the TriggerStateProto into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.file_arrival: body['file_arrival'] = self.file_arrival.as_dict()
+        if self.file_arrival:
+            body["file_arrival"] = self.file_arrival.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TriggerStateProto into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.file_arrival: body['file_arrival'] = self.file_arrival
+        if self.file_arrival:
+            body["file_arrival"] = self.file_arrival
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TriggerStateProto:
        """Deserializes the TriggerStateProto from a dictionary."""
-        return cls(file_arrival=_from_dict(d, 'file_arrival', FileArrivalTriggerState))
-
-
+        return cls(file_arrival=_from_dict(d, "file_arrival", FileArrivalTriggerState))


class TriggerType(Enum):
    """The type of trigger that fired this run.
-
+
    * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`:
    One time triggers that fire a single run. This occurs when you trigger a single run on demand
    through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a
@@ -7312,23 +8964,24 @@ class TriggerType(Enum):
    Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
    triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to
    manually restart a continuous job run."""
-
-    FILE_ARRIVAL = 'FILE_ARRIVAL'
-    ONE_TIME = 'ONE_TIME'
-    PERIODIC = 'PERIODIC'
-    RETRY = 'RETRY'
-    RUN_JOB_TASK = 'RUN_JOB_TASK'
-    TABLE = 'TABLE'
+
+    FILE_ARRIVAL = "FILE_ARRIVAL"
+    ONE_TIME = "ONE_TIME"
+    PERIODIC = "PERIODIC"
+    RETRY = "RETRY"
+    RUN_JOB_TASK = "RUN_JOB_TASK"
+    TABLE = "TABLE"
+

@dataclass
class UpdateJob:
    job_id: int
    """The canonical identifier of the job to update. This field is required."""
-
+
    fields_to_remove: Optional[List[str]] = None
    """Remove top-level fields in the job settings. Removing nested fields is not supported, except
    for tasks and job clusters (`tasks/task_1`). This field is optional."""
-
+
    new_settings: Optional[JobSettings] = None
    """The new settings for the job.
@@ -7340,29 +8993,37 @@ class UpdateJob:
    Changes to the field `JobSettings.timeout_seconds` are applied to active runs.
    Changes to other fields are applied to future runs only."""
-
+
    def as_dict(self) -> dict:
        """Serializes the UpdateJob into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.fields_to_remove: body['fields_to_remove'] = [v for v in self.fields_to_remove]
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
+        if self.fields_to_remove:
+            body["fields_to_remove"] = [v for v in self.fields_to_remove]
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.new_settings:
+            body["new_settings"] = self.new_settings.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the UpdateJob into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.fields_to_remove: body['fields_to_remove'] = self.fields_to_remove
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.new_settings: body['new_settings'] = self.new_settings
+        if self.fields_to_remove:
+            body["fields_to_remove"] = self.fields_to_remove
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.new_settings:
+            body["new_settings"] = self.new_settings
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> UpdateJob:
        """Deserializes the UpdateJob from a dictionary."""
-        return cls(fields_to_remove=d.get('fields_to_remove', None), job_id=d.get('job_id', None), new_settings=_from_dict(d, 'new_settings', JobSettings))
-
-
+        return cls(
+            fields_to_remove=d.get("fields_to_remove", None),
+            job_id=d.get("job_id", None),
+            new_settings=_from_dict(d, "new_settings", JobSettings),
+        )
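As a usage sketch of the partial-update semantics described in the docstring above (assuming a configured WorkspaceClient whose jobs API exposes this request; the job ID and field paths are placeholders):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    # Raise the timeout (applied to active runs too, per the docstring) and
    # remove the schedule plus one task from the job's settings.
    w.jobs.update(
        job_id=123456789,
        new_settings=jobs.JobSettings(timeout_seconds=3600),
        fields_to_remove=["schedule", "tasks/task_1"],
    )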
* `DASHBOARD`: Dashboard view item.""" - - DASHBOARD = 'DASHBOARD' - NOTEBOOK = 'NOTEBOOK' + + DASHBOARD = "DASHBOARD" + NOTEBOOK = "NOTEBOOK" + class ViewsToExport(Enum): """* `CODE`: Code view of the notebook. * `DASHBOARDS`: All dashboard views of the notebook. * `ALL`: All views of the notebook.""" - - ALL = 'ALL' - CODE = 'CODE' - DASHBOARDS = 'DASHBOARDS' -@dataclass + ALL = "ALL" + CODE = "CODE" + DASHBOARDS = "DASHBOARDS" + + +@dataclass class Webhook: id: str - + def as_dict(self) -> dict: """Serializes the Webhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the Webhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Webhook: """Deserializes the Webhook from a dictionary.""" - return cls(id=d.get('id', None)) - - + return cls(id=d.get("id", None)) @dataclass @@ -7465,15 +9130,15 @@ class WebhookNotifications: """An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 destinations can be specified for the `on_duration_warning_threshold_exceeded` property.""" - + on_failure: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the `on_failure` property.""" - + on_start: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when the run starts. A maximum of 3 destinations can be specified for the `on_start` property.""" - + on_streaming_backlog_exceeded: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream. Streaming backlog thresholds can be set in the `health` field using the @@ -7481,200 +9146,221 @@ class WebhookNotifications: `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`. Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes. A maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.""" - + on_success: Optional[List[Webhook]] = None """An optional list of system notification IDs to call when the run completes successfully. 
A maximum of 3 destinations can be specified for the `on_success` property.""" - + def as_dict(self) -> dict: """Serializes the WebhookNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = [v.as_dict() for v in self.on_duration_warning_threshold_exceeded] - if self.on_failure: body['on_failure'] = [v.as_dict() for v in self.on_failure] - if self.on_start: body['on_start'] = [v.as_dict() for v in self.on_start] - if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = [v.as_dict() for v in self.on_streaming_backlog_exceeded] - if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success] + if self.on_duration_warning_threshold_exceeded: + body["on_duration_warning_threshold_exceeded"] = [ + v.as_dict() for v in self.on_duration_warning_threshold_exceeded + ] + if self.on_failure: + body["on_failure"] = [v.as_dict() for v in self.on_failure] + if self.on_start: + body["on_start"] = [v.as_dict() for v in self.on_start] + if self.on_streaming_backlog_exceeded: + body["on_streaming_backlog_exceeded"] = [v.as_dict() for v in self.on_streaming_backlog_exceeded] + if self.on_success: + body["on_success"] = [v.as_dict() for v in self.on_success] return body def as_shallow_dict(self) -> dict: """Serializes the WebhookNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.on_duration_warning_threshold_exceeded: body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded - if self.on_failure: body['on_failure'] = self.on_failure - if self.on_start: body['on_start'] = self.on_start - if self.on_streaming_backlog_exceeded: body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded - if self.on_success: body['on_success'] = self.on_success + if self.on_duration_warning_threshold_exceeded: + body["on_duration_warning_threshold_exceeded"] = self.on_duration_warning_threshold_exceeded + if self.on_failure: + body["on_failure"] = self.on_failure + if self.on_start: + body["on_start"] = self.on_start + if self.on_streaming_backlog_exceeded: + body["on_streaming_backlog_exceeded"] = self.on_streaming_backlog_exceeded + if self.on_success: + body["on_success"] = self.on_success return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WebhookNotifications: """Deserializes the WebhookNotifications from a dictionary.""" - return cls(on_duration_warning_threshold_exceeded=_repeated_dict(d, 'on_duration_warning_threshold_exceeded', Webhook), on_failure=_repeated_dict(d, 'on_failure', Webhook), on_start=_repeated_dict(d, 'on_start', Webhook), on_streaming_backlog_exceeded=_repeated_dict(d, 'on_streaming_backlog_exceeded', Webhook), on_success=_repeated_dict(d, 'on_success', Webhook)) - - + return cls( + on_duration_warning_threshold_exceeded=_repeated_dict(d, "on_duration_warning_threshold_exceeded", Webhook), + on_failure=_repeated_dict(d, "on_failure", Webhook), + on_start=_repeated_dict(d, "on_start", Webhook), + on_streaming_backlog_exceeded=_repeated_dict(d, "on_streaming_backlog_exceeded", Webhook), + on_success=_repeated_dict(d, "on_success", Webhook), + ) @dataclass class WidgetErrorDetail: message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the WidgetErrorDetail into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message + if self.message is not None: 
+ body["message"] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the WidgetErrorDetail into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message + if self.message is not None: + body["message"] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WidgetErrorDetail: """Deserializes the WidgetErrorDetail from a dictionary.""" - return cls(message=d.get('message', None)) - - - - + return cls(message=d.get("message", None)) class JobsAPI: """The Jobs API allows you to create, edit, and delete jobs. - + You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java applications. - + You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs. - + [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_run_job_terminated_or_skipped(self, run_id: int, - timeout=timedelta(minutes=20), callback: Optional[Callable[[Run], None]] = None) -> Run: - deadline = time.time() + timeout.total_seconds() - target_states = (RunLifeCycleState.TERMINATED, RunLifeCycleState.SKIPPED, ) - failure_states = (RunLifeCycleState.INTERNAL_ERROR, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get_run(run_id=run_id) - status = poll.state.life_cycle_state - status_message = f'current status: {status}' - if poll.state: - status_message = poll.state.state_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach TERMINATED or SKIPPED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"run_id={run_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - - def cancel_all_runs(self - - , * - , all_queued_runs: Optional[bool] = None, job_id: Optional[int] = None): + + def wait_get_run_job_terminated_or_skipped( + self, run_id: int, timeout=timedelta(minutes=20), callback: Optional[Callable[[Run], None]] = None + ) -> Run: + deadline = time.time() + timeout.total_seconds() + target_states = ( + RunLifeCycleState.TERMINATED, + RunLifeCycleState.SKIPPED, + ) + failure_states = (RunLifeCycleState.INTERNAL_ERROR,) + status_message = "polling..." 
+ attempt = 1 + while time.time() < deadline: + poll = self.get_run(run_id=run_id) + status = poll.state.life_cycle_state + status_message = f"current status: {status}" + if poll.state: + status_message = poll.state.state_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach TERMINATED or SKIPPED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"run_id={run_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def cancel_all_runs(self, *, all_queued_runs: Optional[bool] = None, job_id: Optional[int] = None): """Cancel all runs of a job. - + Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. - + :param all_queued_runs: bool (optional) Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in the workspace are canceled. :param job_id: int (optional) The canonical identifier of the job to cancel all runs of. - - + + """ body = {} - if all_queued_runs is not None: body['all_queued_runs'] = all_queued_runs - if job_id is not None: body['job_id'] = job_id - headers = {'Content-Type': 'application/json',} - - self._api.do('POST','/api/2.2/jobs/runs/cancel-all', body=body - - , headers=headers - ) - + if all_queued_runs is not None: + body["all_queued_runs"] = all_queued_runs + if job_id is not None: + body["job_id"] = job_id + headers = { + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.2/jobs/runs/cancel-all", body=body, headers=headers) - def cancel_run(self - , run_id: int - ) -> Wait[Run]: + def cancel_run(self, run_id: int) -> Wait[Run]: """Cancel a run. - + Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when this request completes. - + :param run_id: int This field is required. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. 
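        Example, as a minimal sketch (assumes an authenticated `WorkspaceClient` named `w`
        and an active `run_id`; calling `.result()` on the waiter is equivalent to using
        :method:cancel_run_and_wait):

            run = w.jobs.cancel_run(run_id=run_id).result()
            print(run.state.result_state)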
""" body = {} - if run_id is not None: body['run_id'] = run_id - headers = {'Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.2/jobs/runs/cancel', body=body - - , headers=headers - ) - return Wait(self.wait_get_run_job_terminated_or_skipped - , response = CancelRunResponse.from_dict(op_response) - , run_id=run_id) + if run_id is not None: + body["run_id"] = run_id + headers = { + "Content-Type": "application/json", + } - - def cancel_run_and_wait(self - , run_id: int - , - timeout=timedelta(minutes=20)) -> Run: + op_response = self._api.do("POST", "/api/2.2/jobs/runs/cancel", body=body, headers=headers) + return Wait( + self.wait_get_run_job_terminated_or_skipped, + response=CancelRunResponse.from_dict(op_response), + run_id=run_id, + ) + + def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run: return self.cancel_run(run_id=run_id).result(timeout=timeout) - - - - def create(self - - , * - , access_control_list: Optional[List[JobAccessControlRequest]] = None, budget_policy_id: Optional[str] = None, continuous: Optional[Continuous] = None, deployment: Optional[JobDeployment] = None, description: Optional[str] = None, edit_mode: Optional[JobEditMode] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, format: Optional[Format] = None, git_source: Optional[GitSource] = None, health: Optional[JobsHealthRules] = None, job_clusters: Optional[List[JobCluster]] = None, max_concurrent_runs: Optional[int] = None, name: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, parameters: Optional[List[JobParameterDefinition]] = None, performance_target: Optional[PerformanceTarget] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, schedule: Optional[CronSchedule] = None, tags: Optional[Dict[str,str]] = None, tasks: Optional[List[Task]] = None, timeout_seconds: Optional[int] = None, trigger: Optional[TriggerSettings] = None, webhook_notifications: Optional[WebhookNotifications] = None) -> CreateResponse: + def create( + self, + *, + access_control_list: Optional[List[JobAccessControlRequest]] = None, + budget_policy_id: Optional[str] = None, + continuous: Optional[Continuous] = None, + deployment: Optional[JobDeployment] = None, + description: Optional[str] = None, + edit_mode: Optional[JobEditMode] = None, + email_notifications: Optional[JobEmailNotifications] = None, + environments: Optional[List[JobEnvironment]] = None, + format: Optional[Format] = None, + git_source: Optional[GitSource] = None, + health: Optional[JobsHealthRules] = None, + job_clusters: Optional[List[JobCluster]] = None, + max_concurrent_runs: Optional[int] = None, + name: Optional[str] = None, + notification_settings: Optional[JobNotificationSettings] = None, + parameters: Optional[List[JobParameterDefinition]] = None, + performance_target: Optional[PerformanceTarget] = None, + queue: Optional[QueueSettings] = None, + run_as: Optional[JobRunAs] = None, + schedule: Optional[CronSchedule] = None, + tags: Optional[Dict[str, str]] = None, + tasks: Optional[List[Task]] = None, + timeout_seconds: Optional[int] = None, + trigger: Optional[TriggerSettings] = None, + webhook_notifications: Optional[WebhookNotifications] = None, + ) -> CreateResponse: """Create a new job. - + Create a new job. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. 
:param budget_policy_id: str (optional) @@ -7690,7 +9376,7 @@ def create(self An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. - + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified. :param email_notifications: :class:`JobEmailNotifications` (optional) @@ -7707,10 +9393,10 @@ def create(self :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -7737,7 +9423,7 @@ def create(self :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -7746,7 +9432,7 @@ def create(self :param run_as: :class:`JobRunAs` (optional) Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. - + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -7769,234 +9455,219 @@ def create(self `runNow`. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. 
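        Example, as a minimal sketch (assumes an authenticated `WorkspaceClient` named `w`;
        the notebook path is hypothetical, and compute configuration is omitted for brevity,
        so the task needs a cluster spec or a workspace with serverless jobs enabled):

            from databricks.sdk.service import jobs

            created = w.jobs.create(
                name="example-job",
                tasks=[
                    jobs.Task(
                        task_key="main",
                        notebook_task=jobs.NotebookTask(
                            notebook_path="/Workspace/Users/someone@example.com/example"
                        ),
                    )
                ],
            )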
- + :returns: :class:`CreateResponse` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - if continuous is not None: body['continuous'] = continuous.as_dict() - if deployment is not None: body['deployment'] = deployment.as_dict() - if description is not None: body['description'] = description - if edit_mode is not None: body['edit_mode'] = edit_mode.value - if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict() - if environments is not None: body['environments'] = [v.as_dict() for v in environments] - if format is not None: body['format'] = format.value - if git_source is not None: body['git_source'] = git_source.as_dict() - if health is not None: body['health'] = health.as_dict() - if job_clusters is not None: body['job_clusters'] = [v.as_dict() for v in job_clusters] - if max_concurrent_runs is not None: body['max_concurrent_runs'] = max_concurrent_runs - if name is not None: body['name'] = name - if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict() - if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters] - if performance_target is not None: body['performance_target'] = performance_target.value - if queue is not None: body['queue'] = queue.as_dict() - if run_as is not None: body['run_as'] = run_as.as_dict() - if schedule is not None: body['schedule'] = schedule.as_dict() - if tags is not None: body['tags'] = tags - if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks] - if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds - if trigger is not None: body['trigger'] = trigger.as_dict() - if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.2/jobs/create', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + if continuous is not None: + body["continuous"] = continuous.as_dict() + if deployment is not None: + body["deployment"] = deployment.as_dict() + if description is not None: + body["description"] = description + if edit_mode is not None: + body["edit_mode"] = edit_mode.value + if email_notifications is not None: + body["email_notifications"] = email_notifications.as_dict() + if environments is not None: + body["environments"] = [v.as_dict() for v in environments] + if format is not None: + body["format"] = format.value + if git_source is not None: + body["git_source"] = git_source.as_dict() + if health is not None: + body["health"] = health.as_dict() + if job_clusters is not None: + body["job_clusters"] = [v.as_dict() for v in job_clusters] + if max_concurrent_runs is not None: + body["max_concurrent_runs"] = max_concurrent_runs + if name is not None: + body["name"] = name + if notification_settings is not None: + body["notification_settings"] = notification_settings.as_dict() + if parameters is not None: + body["parameters"] = [v.as_dict() for v in parameters] + if performance_target is not None: + body["performance_target"] = performance_target.value + if queue is not None: + body["queue"] = queue.as_dict() + if run_as is not None: + body["run_as"] 
= run_as.as_dict() + if schedule is not None: + body["schedule"] = schedule.as_dict() + if tags is not None: + body["tags"] = tags + if tasks is not None: + body["tasks"] = [v.as_dict() for v in tasks] + if timeout_seconds is not None: + body["timeout_seconds"] = timeout_seconds + if trigger is not None: + body["trigger"] = trigger.as_dict() + if webhook_notifications is not None: + body["webhook_notifications"] = webhook_notifications.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.2/jobs/create", body=body, headers=headers) return CreateResponse.from_dict(res) - - - - - def delete(self - , job_id: int - ): + def delete(self, job_id: int): """Delete a job. - + Deletes a job. - + :param job_id: int The canonical identifier of the job to delete. This field is required. - - + + """ body = {} - if job_id is not None: body['job_id'] = job_id - headers = {'Content-Type': 'application/json',} - - self._api.do('POST','/api/2.2/jobs/delete', body=body - - , headers=headers - ) - + if job_id is not None: + body["job_id"] = job_id + headers = { + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.2/jobs/delete", body=body, headers=headers) - def delete_run(self - , run_id: int - ): + def delete_run(self, run_id: int): """Delete a job run. - + Deletes a non-active run. Returns an error if the run is active. - + :param run_id: int ID of the run to delete. - - + + """ body = {} - if run_id is not None: body['run_id'] = run_id - headers = {'Content-Type': 'application/json',} - - self._api.do('POST','/api/2.2/jobs/runs/delete', body=body - - , headers=headers - ) - + if run_id is not None: + body["run_id"] = run_id + headers = { + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.2/jobs/runs/delete", body=body, headers=headers) - def export_run(self - , run_id: int - , * - , views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput: + def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] = None) -> ExportRunOutput: """Export and retrieve a job run. - + Export and retrieve the job run task. - + :param run_id: int The canonical identifier for the run. This field is required. :param views_to_export: :class:`ViewsToExport` (optional) Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE. - + :returns: :class:`ExportRunOutput` """ - + query = {} - if run_id is not None: query['run_id'] = run_id - if views_to_export is not None: query['views_to_export'] = views_to_export.value - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.2/jobs/runs/export', query=query - - , headers=headers - ) + if run_id is not None: + query["run_id"] = run_id + if views_to_export is not None: + query["views_to_export"] = views_to_export.value + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.2/jobs/runs/export", query=query, headers=headers) return ExportRunOutput.from_dict(res) - - - - - def get(self - , job_id: int - , * - , page_token: Optional[str] = None) -> Job: + def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job: """Get a single job. - + Retrieves the details for a single job. - + Large arrays in the results will be paginated when they exceed 100 elements. A request for a single job will return all properties for that job, and the first 100 elements of array properties (`tasks`, `job_clusters`, `environments` and `parameters`). 
Use the `next_page_token` field to check for more results and pass its value as the `page_token` in subsequent requests. If any array properties have more than 100 elements, additional results will be returned on subsequent requests. Arrays without additional results will be empty on later pages. - + :param job_id: int The canonical identifier of the job to retrieve information about. This field is required. :param page_token: str (optional) Use `next_page_token` returned from the previous GetJob response to request the next page of the job's array properties. - + :returns: :class:`Job` """ - + query = {} - if job_id is not None: query['job_id'] = job_id - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.2/jobs/get', query=query - - , headers=headers - ) + if job_id is not None: + query["job_id"] = job_id + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.2/jobs/get", query=query, headers=headers) return Job.from_dict(res) - - - - - def get_permission_levels(self - , job_id: str - ) -> GetJobPermissionLevelsResponse: + def get_permission_levels(self, job_id: str) -> GetJobPermissionLevelsResponse: """Get job permission levels. - + Gets the permission levels that a user can have on an object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`GetJobPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/jobs/{job_id}/permissionLevels' - - , headers=headers - ) - return GetJobPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_permissions(self - , job_id: str - ) -> JobPermissions: + res = self._api.do("GET", f"/api/2.0/permissions/jobs/{job_id}/permissionLevels", headers=headers) + return GetJobPermissionLevelsResponse.from_dict(res) + + def get_permissions(self, job_id: str) -> JobPermissions: """Get job permissions. - + Gets the permissions of a job. Jobs can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`JobPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/jobs/{job_id}' - - , headers=headers - ) - return JobPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_run(self - , run_id: int - , * - , include_history: Optional[bool] = None, include_resolved_values: Optional[bool] = None, page_token: Optional[str] = None) -> Run: + res = self._api.do("GET", f"/api/2.0/permissions/jobs/{job_id}", headers=headers) + return JobPermissions.from_dict(res) + + def get_run( + self, + run_id: int, + *, + include_history: Optional[bool] = None, + include_resolved_values: Optional[bool] = None, + page_token: Optional[str] = None, + ) -> Run: """Get a single job run. - + Retrieves the metadata of a run. - + Large arrays in the results will be paginated when they exceed 100 elements. A request for a single run will return all properties for that run, and the first 100 elements of array properties (`tasks`, `job_clusters`, `job_parameters` and `repair_history`). Use the next_page_token field to check for more results and pass its value as the page_token in subsequent requests. 
If any array properties have more than 100 elements, additional results will be returned on subsequent requests. Arrays without additional results will be empty on later pages. - + :param run_id: int The canonical identifier of the run for which to retrieve the metadata. This field is required. :param include_history: bool (optional) @@ -8006,69 +9677,67 @@ def get_run(self :param page_token: str (optional) Use `next_page_token` returned from the previous GetRun response to request the next page of the run's array properties. - + :returns: :class:`Run` """ - + query = {} - if include_history is not None: query['include_history'] = include_history - if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values - if page_token is not None: query['page_token'] = page_token - if run_id is not None: query['run_id'] = run_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.2/jobs/runs/get', query=query - - , headers=headers - ) + if include_history is not None: + query["include_history"] = include_history + if include_resolved_values is not None: + query["include_resolved_values"] = include_resolved_values + if page_token is not None: + query["page_token"] = page_token + if run_id is not None: + query["run_id"] = run_id + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.2/jobs/runs/get", query=query, headers=headers) return Run.from_dict(res) - - - - - def get_run_output(self - , run_id: int - ) -> RunOutput: + def get_run_output(self, run_id: int) -> RunOutput: """Get the output for a single run. - + Retrieve the output and metadata of a single task run. When a notebook task returns a value through the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks restricts this API to returning the first 5 MB of the output. To return a larger result, you can store job results in a cloud storage service. - + This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you want to reference them beyond 60 days, you must save old run results before they expire. - + :param run_id: int The canonical identifier for the run. - + :returns: :class:`RunOutput` """ - + query = {} - if run_id is not None: query['run_id'] = run_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.2/jobs/runs/get-output', query=query - - , headers=headers - ) - return RunOutput.from_dict(res) + if run_id is not None: + query["run_id"] = run_id + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.2/jobs/runs/get-output", query=query, headers=headers) + return RunOutput.from_dict(res) - def list(self - - , * - , expand_tasks: Optional[bool] = None, limit: Optional[int] = None, name: Optional[str] = None, offset: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[BaseJob]: + def list( + self, + *, + expand_tasks: Optional[bool] = None, + limit: Optional[int] = None, + name: Optional[str] = None, + offset: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[BaseJob]: + """List jobs. - + Retrieves a list of jobs. - + :param expand_tasks: bool (optional) Whether to include task and cluster details in the response. Note that only the first 100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
@@ -8083,45 +9752,52 @@ def list(self :param page_token: str (optional) Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or previous page of jobs respectively. - + :returns: Iterator over :class:`BaseJob` """ - - query = {} - if expand_tasks is not None: query['expand_tasks'] = expand_tasks - if limit is not None: query['limit'] = limit - if name is not None: query['name'] = name - if offset is not None: query['offset'] = offset - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.2/jobs/list', query=query - - , headers=headers - ) - if 'jobs' in json: - for v in json['jobs']: - yield BaseJob.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if expand_tasks is not None: + query["expand_tasks"] = expand_tasks + if limit is not None: + query["limit"] = limit + if name is not None: + query["name"] = name + if offset is not None: + query["offset"] = offset + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def list_runs(self - - , * - , active_only: Optional[bool] = None, completed_only: Optional[bool] = None, expand_tasks: Optional[bool] = None, job_id: Optional[int] = None, limit: Optional[int] = None, offset: Optional[int] = None, page_token: Optional[str] = None, run_type: Optional[RunType] = None, start_time_from: Optional[int] = None, start_time_to: Optional[int] = None) -> Iterator[BaseRun]: + while True: + json = self._api.do("GET", "/api/2.2/jobs/list", query=query, headers=headers) + if "jobs" in json: + for v in json["jobs"]: + yield BaseJob.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_runs( + self, + *, + active_only: Optional[bool] = None, + completed_only: Optional[bool] = None, + expand_tasks: Optional[bool] = None, + job_id: Optional[int] = None, + limit: Optional[int] = None, + offset: Optional[int] = None, + page_token: Optional[str] = None, + run_type: Optional[RunType] = None, + start_time_from: Optional[int] = None, + start_time_to: Optional[int] = None, + ) -> Iterator[BaseRun]: """List job runs. - + List runs in descending order by start time. - + :param active_only: bool (optional) If active_only is `true`, only active runs are included in the results; otherwise, lists both active and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or `TERMINATING` state. @@ -8151,51 +9827,68 @@ def list_runs(self :param start_time_to: int (optional) Show runs that started _at or before_ this value. The value must be a UTC timestamp in milliseconds. Can be combined with _start_time_from_ to filter by a time range.
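        Example, as a minimal sketch (assumes an authenticated `WorkspaceClient` named `w`
        and an existing `job_id`; the returned iterator follows `next_page_token` transparently):

            for run in w.jobs.list_runs(job_id=job_id, active_only=True):
                print(run.run_id, run.state.life_cycle_state)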
- + :returns: Iterator over :class:`BaseRun` """ - - query = {} - if active_only is not None: query['active_only'] = active_only - if completed_only is not None: query['completed_only'] = completed_only - if expand_tasks is not None: query['expand_tasks'] = expand_tasks - if job_id is not None: query['job_id'] = job_id - if limit is not None: query['limit'] = limit - if offset is not None: query['offset'] = offset - if page_token is not None: query['page_token'] = page_token - if run_type is not None: query['run_type'] = run_type.value - if start_time_from is not None: query['start_time_from'] = start_time_from - if start_time_to is not None: query['start_time_to'] = start_time_to - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.2/jobs/runs/list', query=query - - , headers=headers - ) - if 'runs' in json: - for v in json['runs']: - yield BaseRun.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if active_only is not None: + query["active_only"] = active_only + if completed_only is not None: + query["completed_only"] = completed_only + if expand_tasks is not None: + query["expand_tasks"] = expand_tasks + if job_id is not None: + query["job_id"] = job_id + if limit is not None: + query["limit"] = limit + if offset is not None: + query["offset"] = offset + if page_token is not None: + query["page_token"] = page_token + if run_type is not None: + query["run_type"] = run_type.value + if start_time_from is not None: + query["start_time_from"] = start_time_from + if start_time_to is not None: + query["start_time_to"] = start_time_to + headers = { + "Accept": "application/json", + } - def repair_run(self - , run_id: int - , * - , dbt_commands: Optional[List[str]] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, latest_repair_id: Optional[int] = None, notebook_params: Optional[Dict[str,str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, rerun_all_failed_tasks: Optional[bool] = None, rerun_dependent_tasks: Optional[bool] = None, rerun_tasks: Optional[List[str]] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None) -> Wait[Run]: + while True: + json = self._api.do("GET", "/api/2.2/jobs/runs/list", query=query, headers=headers) + if "runs" in json: + for v in json["runs"]: + yield BaseRun.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def repair_run( + self, + run_id: int, + *, + dbt_commands: Optional[List[str]] = None, + jar_params: Optional[List[str]] = None, + job_parameters: Optional[Dict[str, str]] = None, + latest_repair_id: Optional[int] = None, + notebook_params: Optional[Dict[str, str]] = None, + performance_target: Optional[PerformanceTarget] = None, + pipeline_params: Optional[PipelineParams] = None, + python_named_params: Optional[Dict[str, str]] = None, + python_params: Optional[List[str]] = None, + rerun_all_failed_tasks: Optional[bool] = None, + rerun_dependent_tasks: Optional[bool] = None, + rerun_tasks: Optional[List[str]] = None, + spark_submit_params: Optional[List[str]] = None, + sql_params: Optional[Dict[str, str]] = None, + ) -> Wait[Run]: """Repair a job run. 
- + Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job and task settings, and can be viewed in the history for the original job run. - + :param run_id: int The job run ID of the run to repair. The run must not be in progress. :param dbt_commands: List[str] (optional) @@ -8207,9 +9900,9 @@ def repair_run(self task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` @@ -8220,23 +9913,23 @@ def repair_run(self A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -8248,15 +9941,15 @@ def repair_run(self The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param rerun_all_failed_tasks: bool (optional) If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used. @@ -8271,102 +9964,154 @@ def repair_run(self as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). 
Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. """ body = {} - if dbt_commands is not None: body['dbt_commands'] = [v for v in dbt_commands] - if jar_params is not None: body['jar_params'] = [v for v in jar_params] - if job_parameters is not None: body['job_parameters'] = job_parameters - if latest_repair_id is not None: body['latest_repair_id'] = latest_repair_id - if notebook_params is not None: body['notebook_params'] = notebook_params - if performance_target is not None: body['performance_target'] = performance_target.value - if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict() - if python_named_params is not None: body['python_named_params'] = python_named_params - if python_params is not None: body['python_params'] = [v for v in python_params] - if rerun_all_failed_tasks is not None: body['rerun_all_failed_tasks'] = rerun_all_failed_tasks - if rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = rerun_dependent_tasks - if rerun_tasks is not None: body['rerun_tasks'] = [v for v in rerun_tasks] - if run_id is not None: body['run_id'] = run_id - if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params] - if sql_params is not None: body['sql_params'] = sql_params - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.2/jobs/runs/repair', body=body - - , headers=headers - ) - return Wait(self.wait_get_run_job_terminated_or_skipped - , response = RepairRunResponse.from_dict(op_response) - , run_id=run_id) - - - def repair_run_and_wait(self - , run_id: int - , * - , dbt_commands: Optional[List[str]] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, latest_repair_id: Optional[int] = None, notebook_params: Optional[Dict[str,str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, rerun_all_failed_tasks: Optional[bool] = None, rerun_dependent_tasks: Optional[bool] = None, rerun_tasks: Optional[List[str]] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None, - timeout=timedelta(minutes=20)) -> Run: - return self.repair_run(dbt_commands=dbt_commands, jar_params=jar_params, job_parameters=job_parameters, latest_repair_id=latest_repair_id, notebook_params=notebook_params, performance_target=performance_target, pipeline_params=pipeline_params, python_named_params=python_named_params, python_params=python_params, rerun_all_failed_tasks=rerun_all_failed_tasks, rerun_dependent_tasks=rerun_dependent_tasks, rerun_tasks=rerun_tasks, run_id=run_id, spark_submit_params=spark_submit_params, sql_params=sql_params).result(timeout=timeout) - - - - - def reset(self - , job_id: int, new_settings: JobSettings - ): + if dbt_commands is not None: + body["dbt_commands"] = [v for v in dbt_commands] + if 
jar_params is not None: + body["jar_params"] = [v for v in jar_params] + if job_parameters is not None: + body["job_parameters"] = job_parameters + if latest_repair_id is not None: + body["latest_repair_id"] = latest_repair_id + if notebook_params is not None: + body["notebook_params"] = notebook_params + if performance_target is not None: + body["performance_target"] = performance_target.value + if pipeline_params is not None: + body["pipeline_params"] = pipeline_params.as_dict() + if python_named_params is not None: + body["python_named_params"] = python_named_params + if python_params is not None: + body["python_params"] = [v for v in python_params] + if rerun_all_failed_tasks is not None: + body["rerun_all_failed_tasks"] = rerun_all_failed_tasks + if rerun_dependent_tasks is not None: + body["rerun_dependent_tasks"] = rerun_dependent_tasks + if rerun_tasks is not None: + body["rerun_tasks"] = [v for v in rerun_tasks] + if run_id is not None: + body["run_id"] = run_id + if spark_submit_params is not None: + body["spark_submit_params"] = [v for v in spark_submit_params] + if sql_params is not None: + body["sql_params"] = sql_params + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.2/jobs/runs/repair", body=body, headers=headers) + return Wait( + self.wait_get_run_job_terminated_or_skipped, + response=RepairRunResponse.from_dict(op_response), + run_id=run_id, + ) + + def repair_run_and_wait( + self, + run_id: int, + *, + dbt_commands: Optional[List[str]] = None, + jar_params: Optional[List[str]] = None, + job_parameters: Optional[Dict[str, str]] = None, + latest_repair_id: Optional[int] = None, + notebook_params: Optional[Dict[str, str]] = None, + performance_target: Optional[PerformanceTarget] = None, + pipeline_params: Optional[PipelineParams] = None, + python_named_params: Optional[Dict[str, str]] = None, + python_params: Optional[List[str]] = None, + rerun_all_failed_tasks: Optional[bool] = None, + rerun_dependent_tasks: Optional[bool] = None, + rerun_tasks: Optional[List[str]] = None, + spark_submit_params: Optional[List[str]] = None, + sql_params: Optional[Dict[str, str]] = None, + timeout=timedelta(minutes=20), + ) -> Run: + return self.repair_run( + dbt_commands=dbt_commands, + jar_params=jar_params, + job_parameters=job_parameters, + latest_repair_id=latest_repair_id, + notebook_params=notebook_params, + performance_target=performance_target, + pipeline_params=pipeline_params, + python_named_params=python_named_params, + python_params=python_params, + rerun_all_failed_tasks=rerun_all_failed_tasks, + rerun_dependent_tasks=rerun_dependent_tasks, + rerun_tasks=rerun_tasks, + run_id=run_id, + spark_submit_params=spark_submit_params, + sql_params=sql_params, + ).result(timeout=timeout) + + def reset(self, job_id: int, new_settings: JobSettings): """Update all job settings (reset). - + Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update job settings partially. - + :param job_id: int The canonical identifier of the job to reset. This field is required. :param new_settings: :class:`JobSettings` The new settings of the job. These settings completely replace the old settings. - + Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. 
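        Example, as a minimal sketch (assumes an authenticated `WorkspaceClient` named `w` and
        a fully specified `JobSettings` instance named `settings`; any field omitted from
        `settings` is cleared rather than preserved, unlike a partial update):

            w.jobs.reset(job_id=job_id, new_settings=settings)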
- - - """ - body = {} - if job_id is not None: body['job_id'] = job_id - if new_settings is not None: body['new_settings'] = new_settings.as_dict() - headers = {'Content-Type': 'application/json',} - - self._api.do('POST','/api/2.2/jobs/reset', body=body - - , headers=headers - ) - - - - - def run_now(self - , job_id: int - , * - , dbt_commands: Optional[List[str]] = None, idempotency_token: Optional[str] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, notebook_params: Optional[Dict[str,str]] = None, only: Optional[List[str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, queue: Optional[QueueSettings] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None) -> Wait[Run]: + """ + body = {} + if job_id is not None: + body["job_id"] = job_id + if new_settings is not None: + body["new_settings"] = new_settings.as_dict() + headers = { + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.2/jobs/reset", body=body, headers=headers) + + def run_now( + self, + job_id: int, + *, + dbt_commands: Optional[List[str]] = None, + idempotency_token: Optional[str] = None, + jar_params: Optional[List[str]] = None, + job_parameters: Optional[Dict[str, str]] = None, + notebook_params: Optional[Dict[str, str]] = None, + only: Optional[List[str]] = None, + performance_target: Optional[PerformanceTarget] = None, + pipeline_params: Optional[PipelineParams] = None, + python_named_params: Optional[Dict[str, str]] = None, + python_params: Optional[List[str]] = None, + queue: Optional[QueueSettings] = None, + spark_submit_params: Optional[List[str]] = None, + sql_params: Optional[Dict[str, str]] = None, + ) -> Wait[Run]: """Trigger a new job run. - + Run a job and return the `run_id` of the triggered run. - + :param job_id: int The ID of the job to be executed :param dbt_commands: List[str] (optional) @@ -8376,14 +10121,14 @@ def run_now(self An optional token to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. - + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param jar_params: List[str] (optional) A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. @@ -8391,9 +10136,9 @@ def run_now(self task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. 
for example `"param": "overriding_val"` @@ -8401,16 +10146,16 @@ def run_now(self A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param only: List[str] (optional) @@ -8420,7 +10165,7 @@ def run_now(self The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -8432,15 +10177,15 @@ def run_now(self The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param queue: :class:`QueueSettings` (optional) The queue settings of the run. @@ -8450,99 +10195,150 @@ def run_now(self as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. 
""" body = {} - if dbt_commands is not None: body['dbt_commands'] = [v for v in dbt_commands] - if idempotency_token is not None: body['idempotency_token'] = idempotency_token - if jar_params is not None: body['jar_params'] = [v for v in jar_params] - if job_id is not None: body['job_id'] = job_id - if job_parameters is not None: body['job_parameters'] = job_parameters - if notebook_params is not None: body['notebook_params'] = notebook_params - if only is not None: body['only'] = [v for v in only] - if performance_target is not None: body['performance_target'] = performance_target.value - if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict() - if python_named_params is not None: body['python_named_params'] = python_named_params - if python_params is not None: body['python_params'] = [v for v in python_params] - if queue is not None: body['queue'] = queue.as_dict() - if spark_submit_params is not None: body['spark_submit_params'] = [v for v in spark_submit_params] - if sql_params is not None: body['sql_params'] = sql_params - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.2/jobs/run-now', body=body - - , headers=headers - ) - return Wait(self.wait_get_run_job_terminated_or_skipped - , response = RunNowResponse.from_dict(op_response) - , run_id=op_response['run_id']) - - - def run_now_and_wait(self - , job_id: int - , * - , dbt_commands: Optional[List[str]] = None, idempotency_token: Optional[str] = None, jar_params: Optional[List[str]] = None, job_parameters: Optional[Dict[str,str]] = None, notebook_params: Optional[Dict[str,str]] = None, only: Optional[List[str]] = None, performance_target: Optional[PerformanceTarget] = None, pipeline_params: Optional[PipelineParams] = None, python_named_params: Optional[Dict[str,str]] = None, python_params: Optional[List[str]] = None, queue: Optional[QueueSettings] = None, spark_submit_params: Optional[List[str]] = None, sql_params: Optional[Dict[str,str]] = None, - timeout=timedelta(minutes=20)) -> Run: - return self.run_now(dbt_commands=dbt_commands, idempotency_token=idempotency_token, jar_params=jar_params, job_id=job_id, job_parameters=job_parameters, notebook_params=notebook_params, only=only, performance_target=performance_target, pipeline_params=pipeline_params, python_named_params=python_named_params, python_params=python_params, queue=queue, spark_submit_params=spark_submit_params, sql_params=sql_params).result(timeout=timeout) - - - - - def set_permissions(self - , job_id: str - , * - , access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions: + if dbt_commands is not None: + body["dbt_commands"] = [v for v in dbt_commands] + if idempotency_token is not None: + body["idempotency_token"] = idempotency_token + if jar_params is not None: + body["jar_params"] = [v for v in jar_params] + if job_id is not None: + body["job_id"] = job_id + if job_parameters is not None: + body["job_parameters"] = job_parameters + if notebook_params is not None: + body["notebook_params"] = notebook_params + if only is not None: + body["only"] = [v for v in only] + if performance_target is not None: + body["performance_target"] = performance_target.value + if pipeline_params is not None: + body["pipeline_params"] = pipeline_params.as_dict() + if python_named_params is not None: + body["python_named_params"] = python_named_params + if python_params is not None: + body["python_params"] = [v for v in python_params] + if queue is not None: 
+ body["queue"] = queue.as_dict() + if spark_submit_params is not None: + body["spark_submit_params"] = [v for v in spark_submit_params] + if sql_params is not None: + body["sql_params"] = sql_params + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.2/jobs/run-now", body=body, headers=headers) + return Wait( + self.wait_get_run_job_terminated_or_skipped, + response=RunNowResponse.from_dict(op_response), + run_id=op_response["run_id"], + ) + + def run_now_and_wait( + self, + job_id: int, + *, + dbt_commands: Optional[List[str]] = None, + idempotency_token: Optional[str] = None, + jar_params: Optional[List[str]] = None, + job_parameters: Optional[Dict[str, str]] = None, + notebook_params: Optional[Dict[str, str]] = None, + only: Optional[List[str]] = None, + performance_target: Optional[PerformanceTarget] = None, + pipeline_params: Optional[PipelineParams] = None, + python_named_params: Optional[Dict[str, str]] = None, + python_params: Optional[List[str]] = None, + queue: Optional[QueueSettings] = None, + spark_submit_params: Optional[List[str]] = None, + sql_params: Optional[Dict[str, str]] = None, + timeout=timedelta(minutes=20), + ) -> Run: + return self.run_now( + dbt_commands=dbt_commands, + idempotency_token=idempotency_token, + jar_params=jar_params, + job_id=job_id, + job_parameters=job_parameters, + notebook_params=notebook_params, + only=only, + performance_target=performance_target, + pipeline_params=pipeline_params, + python_named_params=python_named_params, + python_params=python_params, + queue=queue, + spark_submit_params=spark_submit_params, + sql_params=sql_params, + ).result(timeout=timeout) + + def set_permissions( + self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None + ) -> JobPermissions: """Set job permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. 
:param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/jobs/{job_id}', body=body - - , headers=headers - ) - return JobPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", f"/api/2.0/permissions/jobs/{job_id}", body=body, headers=headers) + return JobPermissions.from_dict(res) - def submit(self - - , * - , access_control_list: Optional[List[JobAccessControlRequest]] = None, budget_policy_id: Optional[str] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, health: Optional[JobsHealthRules] = None, idempotency_token: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, run_name: Optional[str] = None, tasks: Optional[List[SubmitTask]] = None, timeout_seconds: Optional[int] = None, webhook_notifications: Optional[WebhookNotifications] = None) -> Wait[Run]: + def submit( + self, + *, + access_control_list: Optional[List[JobAccessControlRequest]] = None, + budget_policy_id: Optional[str] = None, + email_notifications: Optional[JobEmailNotifications] = None, + environments: Optional[List[JobEnvironment]] = None, + git_source: Optional[GitSource] = None, + health: Optional[JobsHealthRules] = None, + idempotency_token: Optional[str] = None, + notification_settings: Optional[JobNotificationSettings] = None, + queue: Optional[QueueSettings] = None, + run_as: Optional[JobRunAs] = None, + run_name: Optional[str] = None, + tasks: Optional[List[SubmitTask]] = None, + timeout_seconds: Optional[int] = None, + webhook_notifications: Optional[WebhookNotifications] = None, + ) -> Wait[Run]: """Create and trigger a one-time run. - + Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param budget_policy_id: str (optional) @@ -8555,10 +10351,10 @@ def submit(self :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -8567,14 +10363,14 @@ def submit(self An optional token that can be used to guarantee the idempotency of job run requests. 
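The idempotency guarantee described here (and spelled out just below) is what makes blind client-side retries safe. A sketch, assuming a configured `WorkspaceClient`; the notebook path and cluster ID are placeholders:

import uuid

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Sketch, not part of this patch: one token per logical run means retrying
# submit() with the same token can never launch a second run; the service
# returns the existing run's ID instead.
token = str(uuid.uuid4())
waiter = w.jobs.submit(
    run_name=f"one-time-run-{token}",
    idempotency_token=token,
    tasks=[
        jobs.SubmitTask(
            task_key="main",
            notebook_task=jobs.NotebookTask(notebook_path="/Shared/etl"),  # placeholder path
            existing_cluster_id="0123-456789-abcdefgh",  # placeholder cluster
        )
    ],
)
run = waiter.result()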
If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. - + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param notification_settings: :class:`JobNotificationSettings` (optional) Optional notification settings that are used when sending notifications to each of the @@ -8591,56 +10387,96 @@ def submit(self An optional timeout applied to each run of this job. A value of `0` means no timeout. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when the run begins or completes. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict() - if environments is not None: body['environments'] = [v.as_dict() for v in environments] - if git_source is not None: body['git_source'] = git_source.as_dict() - if health is not None: body['health'] = health.as_dict() - if idempotency_token is not None: body['idempotency_token'] = idempotency_token - if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict() - if queue is not None: body['queue'] = queue.as_dict() - if run_as is not None: body['run_as'] = run_as.as_dict() - if run_name is not None: body['run_name'] = run_name - if tasks is not None: body['tasks'] = [v.as_dict() for v in tasks] - if timeout_seconds is not None: body['timeout_seconds'] = timeout_seconds - if webhook_notifications is not None: body['webhook_notifications'] = webhook_notifications.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.2/jobs/runs/submit', body=body - - , headers=headers - ) - return Wait(self.wait_get_run_job_terminated_or_skipped - , response = SubmitRunResponse.from_dict(op_response) - , run_id=op_response['run_id']) - - - def submit_and_wait(self - - , * - , access_control_list: Optional[List[JobAccessControlRequest]] = None, budget_policy_id: Optional[str] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, health: Optional[JobsHealthRules] = None, idempotency_token: Optional[str] = None, notification_settings: Optional[JobNotificationSettings] = None, queue: Optional[QueueSettings] = None, run_as: Optional[JobRunAs] = None, run_name: Optional[str] = None, tasks: Optional[List[SubmitTask]] = None, timeout_seconds: Optional[int] = None, webhook_notifications: Optional[WebhookNotifications] = None, - timeout=timedelta(minutes=20)) -> Run: - return self.submit(access_control_list=access_control_list, budget_policy_id=budget_policy_id, email_notifications=email_notifications, environments=environments, 
git_source=git_source, health=health, idempotency_token=idempotency_token, notification_settings=notification_settings, queue=queue, run_as=run_as, run_name=run_name, tasks=tasks, timeout_seconds=timeout_seconds, webhook_notifications=webhook_notifications).result(timeout=timeout) - - - - - def update(self - , job_id: int - , * - , fields_to_remove: Optional[List[str]] = None, new_settings: Optional[JobSettings] = None): + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + if email_notifications is not None: + body["email_notifications"] = email_notifications.as_dict() + if environments is not None: + body["environments"] = [v.as_dict() for v in environments] + if git_source is not None: + body["git_source"] = git_source.as_dict() + if health is not None: + body["health"] = health.as_dict() + if idempotency_token is not None: + body["idempotency_token"] = idempotency_token + if notification_settings is not None: + body["notification_settings"] = notification_settings.as_dict() + if queue is not None: + body["queue"] = queue.as_dict() + if run_as is not None: + body["run_as"] = run_as.as_dict() + if run_name is not None: + body["run_name"] = run_name + if tasks is not None: + body["tasks"] = [v.as_dict() for v in tasks] + if timeout_seconds is not None: + body["timeout_seconds"] = timeout_seconds + if webhook_notifications is not None: + body["webhook_notifications"] = webhook_notifications.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.2/jobs/runs/submit", body=body, headers=headers) + return Wait( + self.wait_get_run_job_terminated_or_skipped, + response=SubmitRunResponse.from_dict(op_response), + run_id=op_response["run_id"], + ) + + def submit_and_wait( + self, + *, + access_control_list: Optional[List[JobAccessControlRequest]] = None, + budget_policy_id: Optional[str] = None, + email_notifications: Optional[JobEmailNotifications] = None, + environments: Optional[List[JobEnvironment]] = None, + git_source: Optional[GitSource] = None, + health: Optional[JobsHealthRules] = None, + idempotency_token: Optional[str] = None, + notification_settings: Optional[JobNotificationSettings] = None, + queue: Optional[QueueSettings] = None, + run_as: Optional[JobRunAs] = None, + run_name: Optional[str] = None, + tasks: Optional[List[SubmitTask]] = None, + timeout_seconds: Optional[int] = None, + webhook_notifications: Optional[WebhookNotifications] = None, + timeout=timedelta(minutes=20), + ) -> Run: + return self.submit( + access_control_list=access_control_list, + budget_policy_id=budget_policy_id, + email_notifications=email_notifications, + environments=environments, + git_source=git_source, + health=health, + idempotency_token=idempotency_token, + notification_settings=notification_settings, + queue=queue, + run_as=run_as, + run_name=run_name, + tasks=tasks, + timeout_seconds=timeout_seconds, + webhook_notifications=webhook_notifications, + ).result(timeout=timeout) + + def update( + self, job_id: int, *, fields_to_remove: Optional[List[str]] = None, new_settings: Optional[JobSettings] = None + ): """Update job settings partially. - + Add, update, or remove specific settings of an existing job. Use the [_Reset_ endpoint](:method:jobs/reset) to overwrite all job settings. - + :param job_id: int The canonical identifier of the job to update. 
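For the partial-update semantics spelled out in the surrounding parameter docs, a short sketch, assuming a configured `WorkspaceClient`; the job ID is a placeholder and `schedule` stands in for any removable top-level setting:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Sketch, not part of this patch. Top-level fields present in new_settings
# replace their current values; task arrays are merged by task_key.
# fields_to_remove drops settings entirely ("schedule" is illustrative).
w.jobs.update(
    job_id=1234,
    new_settings=jobs.JobSettings(name="nightly-etl", timeout_seconds=3600),
    fields_to_remove=["schedule"],
)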
This field is required. :param fields_to_remove: List[str] (optional) @@ -8648,154 +10484,132 @@ def update(self tasks and job clusters (`tasks/task_1`). This field is optional. :param new_settings: :class:`JobSettings` (optional) The new settings for the job. - + Top-level fields specified in `new_settings` are completely replaced, except for arrays which are merged. That is, new and existing entries are completely replaced based on the respective key fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept. - + Partially updating nested fields is not supported. - + Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. - - - """ - body = {} - if fields_to_remove is not None: body['fields_to_remove'] = [v for v in fields_to_remove] - if job_id is not None: body['job_id'] = job_id - if new_settings is not None: body['new_settings'] = new_settings.as_dict() - headers = {'Content-Type': 'application/json',} - - self._api.do('POST','/api/2.2/jobs/update', body=body - - , headers=headers - ) - - - - - def update_permissions(self - , job_id: str - , * - , access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions: + """ + body = {} + if fields_to_remove is not None: + body["fields_to_remove"] = [v for v in fields_to_remove] + if job_id is not None: + body["job_id"] = job_id + if new_settings is not None: + body["new_settings"] = new_settings.as_dict() + headers = { + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.2/jobs/update", body=body, headers=headers) + + def update_permissions( + self, job_id: str, *, access_control_list: Optional[List[JobAccessControlRequest]] = None + ) -> JobPermissions: """Update job permissions. - + Updates the permissions on a job. Jobs can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/jobs/{job_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/permissions/jobs/{job_id}", body=body, headers=headers) return JobPermissions.from_dict(res) - - + class PolicyComplianceForJobsAPI: """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace. This API currently only supports compliance controls for cluster policies. - + A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last edited. The job is considered out of compliance if any of its clusters no longer comply with their updated policies. - + The get and list compliance APIs allow you to view the policy compliance status of a job. 
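The `set_permissions`/`update_permissions` pair above differs only in replace-versus-merge semantics (PUT against PATCH on `/api/2.0/permissions/jobs/{job_id}`). A sketch with placeholder IDs, assuming a configured `WorkspaceClient`:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Sketch, not part of this patch: group name and job ID are placeholders.
acl = [
    jobs.JobAccessControlRequest(
        group_name="data-engineers",
        permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN,
    )
]

# PUT semantics: replaces all direct permissions on the job with this list.
w.jobs.set_permissions(job_id="1234", access_control_list=acl)

# PATCH semantics: merges these entries into whatever is already there.
w.jobs.update_permissions(job_id="1234", access_control_list=acl)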
     The enforce compliance API allows you to update a job so that it becomes compliant with all of its
     policies."""
-
+
     def __init__(self, api_client):
         self._api = api_client
 
-
-
-
-
-
-
-
-
-    def enforce_compliance(self
-        , job_id: int
-        , *
-        , validate_only: Optional[bool] = None) -> EnforcePolicyComplianceResponse:
+    def enforce_compliance(
+        self, job_id: int, *, validate_only: Optional[bool] = None
+    ) -> EnforcePolicyComplianceResponse:
         """Enforce job policy compliance.
-
+
         Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
         are compliant with the current versions of their respective cluster policies. All-purpose clusters
         used in the job will not be updated.
-
+
         :param job_id: int
           The ID of the job you want to enforce policy compliance on.
         :param validate_only: bool (optional)
           If set, previews changes made to the job to comply with its policy, but does not update the job.
-
+
         :returns: :class:`EnforcePolicyComplianceResponse`
         """
         body = {}
-        if job_id is not None: body['job_id'] = job_id
-        if validate_only is not None: body['validate_only'] = validate_only
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-
-        res = self._api.do('POST','/api/2.0/policies/jobs/enforce-compliance', body=body
-
-        , headers=headers
-        )
-        return EnforcePolicyComplianceResponse.from_dict(res)
+        if job_id is not None:
+            body["job_id"] = job_id
+        if validate_only is not None:
+            body["validate_only"] = validate_only
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
-
-
-
+        res = self._api.do("POST", "/api/2.0/policies/jobs/enforce-compliance", body=body, headers=headers)
+        return EnforcePolicyComplianceResponse.from_dict(res)
 
-    def get_compliance(self
-        , job_id: int
-        ) -> GetPolicyComplianceResponse:
+    def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse:
         """Get job policy compliance.
-
+
         Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
         they use was updated after the job was last edited and some of its job clusters no longer comply with
         their updated policies.
-
+
         :param job_id: int
           The ID of the job whose compliance status you are requesting.
-
+
         :returns: :class:`GetPolicyComplianceResponse`
         """
-
+
         query = {}
-        if job_id is not None: query['job_id'] = job_id
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET','/api/2.0/policies/jobs/get-compliance', query=query
-
-        , headers=headers
-        )
-        return GetPolicyComplianceResponse.from_dict(res)
+        if job_id is not None:
+            query["job_id"] = job_id
+        headers = {
+            "Accept": "application/json",
+        }
-
-
-
+        res = self._api.do("GET", "/api/2.0/policies/jobs/get-compliance", query=query, headers=headers)
+        return GetPolicyComplianceResponse.from_dict(res)
 
-    def list_compliance(self
-        , policy_id: str
-        , *
-        , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[JobCompliance]:
+    def list_compliance(
+        self, policy_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[JobCompliance]:
         """List job policy compliance.
-
+
         Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
         compliance if a cluster policy they use was updated after the job was last edited and its job
         clusters no longer comply with the updated policy.
-
+
         :param policy_id: str
           Canonical unique identifier for the cluster policy.
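Tying the three compliance methods together: they are exposed on `WorkspaceClient` as `policy_compliance_for_jobs` in current SDK releases, and the IDs below are placeholders. A sketch:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Sketch, not part of this patch. Check one job, then preview enforcement
# with validate_only before applying it for real.
status = w.policy_compliance_for_jobs.get_compliance(job_id=1234)
if not status.is_compliant:
    preview = w.policy_compliance_for_jobs.enforce_compliance(job_id=1234, validate_only=True)
    if preview.has_changes:
        w.policy_compliance_for_jobs.enforce_compliance(job_id=1234)

# Page through every job governed by one cluster policy; the iterator follows
# next_page_token transparently, as in the generated loop shown here.
for c in w.policy_compliance_for_jobs.list_compliance(policy_id="ABC123456789", page_size=50):
    print(c.job_id, c.is_compliant)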
         :param page_size: int (optional)
@@ -8804,30 +10618,26 @@ def list_compliance(self
         :param page_token: str (optional)
           A page token that can be used to navigate to the next page or previous page as returned by
           `next_page_token` or `prev_page_token`.
-
+
         :returns: Iterator over :class:`JobCompliance`
         """
-
+
         query = {}
-        if page_size is not None: query['page_size'] = page_size
-        if page_token is not None: query['page_token'] = page_token
-        if policy_id is not None: query['policy_id'] = policy_id
-        headers = {'Accept': 'application/json',}
-
-
-
-        while True:
-            json = self._api.do('GET','/api/2.0/policies/jobs/list-compliance', query=query
-
-            , headers=headers
-            )
-            if 'jobs' in json:
-                for v in json['jobs']:
-                    yield JobCompliance.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        if policy_id is not None:
+            query["policy_id"] = policy_id
+        headers = {
+            "Accept": "application/json",
+        }
-
-
\ No newline at end of file
+        while True:
+            json = self._api.do("GET", "/api/2.0/policies/jobs/list-compliance", query=query, headers=headers)
+            if "jobs" in json:
+                for v in json["jobs"]:
+                    yield JobCompliance.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py
index 156e8b91e..6d3a8815b 100755
--- a/databricks/sdk/service/marketplace.py
+++ b/databricks/sdk/service/marketplace.py
@@ -1,663 +1,719 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 from __future__ import annotations
+
+import logging
 from dataclasses import dataclass
-from datetime import timedelta
 from enum import Enum
-from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
-import time
-import random
-import logging
-import requests
-import threading
-
-from ..errors import OperationTimeout, OperationFailed
-from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter
-from ..oauth import Token
+from typing import Any, Dict, Iterator, List, Optional
 
-_LOG = logging.getLogger('databricks.sdk')
+from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
 
+_LOG = logging.getLogger("databricks.sdk")
 
 # all definitions in this file are in alphabetical order
 
+
 @dataclass
 class AddExchangeForListingRequest:
     listing_id: str
-
+
     exchange_id: str
-
+
     def as_dict(self) -> dict:
         """Serializes the AddExchangeForListingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
         return body
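The marketplace dataclasses that follow all share the serialization contract shown in `AddExchangeForListingRequest` above (its `from_dict` counterpart appears next). A round-trip sketch with placeholder IDs:

from databricks.sdk.service.marketplace import AddExchangeForListingRequest

# Sketch, not part of this patch: IDs are placeholders.
req = AddExchangeForListingRequest(listing_id="lst-123", exchange_id="exc-456")

# as_dict() deep-serializes for use as a JSON request body; from_dict() is its
# inverse, so a round trip reproduces an equal dataclass instance.
body = req.as_dict()
assert body == {"exchange_id": "exc-456", "listing_id": "lst-123"}
assert AddExchangeForListingRequest.from_dict(body) == req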
@classmethod def from_dict(cls, d: Dict[str, Any]) -> AddExchangeForListingRequest: """Deserializes the AddExchangeForListingRequest from a dictionary.""" - return cls(exchange_id=d.get('exchange_id', None), listing_id=d.get('listing_id', None)) - - + return cls(exchange_id=d.get("exchange_id", None), listing_id=d.get("listing_id", None)) @dataclass class AddExchangeForListingResponse: exchange_for_listing: Optional[ExchangeListing] = None - + def as_dict(self) -> dict: """Serializes the AddExchangeForListingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing.as_dict() + if self.exchange_for_listing: + body["exchange_for_listing"] = self.exchange_for_listing.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AddExchangeForListingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing + if self.exchange_for_listing: + body["exchange_for_listing"] = self.exchange_for_listing return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AddExchangeForListingResponse: """Deserializes the AddExchangeForListingResponse from a dictionary.""" - return cls(exchange_for_listing=_from_dict(d, 'exchange_for_listing', ExchangeListing)) - - + return cls(exchange_for_listing=_from_dict(d, "exchange_for_listing", ExchangeListing)) class AssetType(Enum): - - - ASSET_TYPE_APP = 'ASSET_TYPE_APP' - ASSET_TYPE_DATA_TABLE = 'ASSET_TYPE_DATA_TABLE' - ASSET_TYPE_GIT_REPO = 'ASSET_TYPE_GIT_REPO' - ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA' - ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL' - ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK' - ASSET_TYPE_PARTNER_INTEGRATION = 'ASSET_TYPE_PARTNER_INTEGRATION' - + ASSET_TYPE_APP = "ASSET_TYPE_APP" + ASSET_TYPE_DATA_TABLE = "ASSET_TYPE_DATA_TABLE" + ASSET_TYPE_GIT_REPO = "ASSET_TYPE_GIT_REPO" + ASSET_TYPE_MEDIA = "ASSET_TYPE_MEDIA" + ASSET_TYPE_MODEL = "ASSET_TYPE_MODEL" + ASSET_TYPE_NOTEBOOK = "ASSET_TYPE_NOTEBOOK" + ASSET_TYPE_PARTNER_INTEGRATION = "ASSET_TYPE_PARTNER_INTEGRATION" @dataclass class BatchGetListingsResponse: listings: Optional[List[Listing]] = None - + def as_dict(self) -> dict: """Serializes the BatchGetListingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listings: body['listings'] = [v.as_dict() for v in self.listings] + if self.listings: + body["listings"] = [v.as_dict() for v in self.listings] return body def as_shallow_dict(self) -> dict: """Serializes the BatchGetListingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.listings: body['listings'] = self.listings + if self.listings: + body["listings"] = self.listings return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BatchGetListingsResponse: """Deserializes the BatchGetListingsResponse from a dictionary.""" - return cls(listings=_repeated_dict(d, 'listings', Listing)) - - - - - + return cls(listings=_repeated_dict(d, "listings", Listing)) @dataclass class BatchGetProvidersResponse: providers: Optional[List[ProviderInfo]] = None - + def as_dict(self) -> dict: """Serializes the BatchGetProvidersResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.providers: body['providers'] = [v.as_dict() for v in self.providers] + if self.providers: + body["providers"] = [v.as_dict() for v in self.providers] return body def as_shallow_dict(self) -> 
dict: """Serializes the BatchGetProvidersResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.providers: body['providers'] = self.providers + if self.providers: + body["providers"] = self.providers return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BatchGetProvidersResponse: """Deserializes the BatchGetProvidersResponse from a dictionary.""" - return cls(providers=_repeated_dict(d, 'providers', ProviderInfo)) - - + return cls(providers=_repeated_dict(d, "providers", ProviderInfo)) class Category(Enum): - - - ADVERTISING_AND_MARKETING = 'ADVERTISING_AND_MARKETING' - CLIMATE_AND_ENVIRONMENT = 'CLIMATE_AND_ENVIRONMENT' - COMMERCE = 'COMMERCE' - DEMOGRAPHICS = 'DEMOGRAPHICS' - ECONOMICS = 'ECONOMICS' - EDUCATION = 'EDUCATION' - ENERGY = 'ENERGY' - FINANCIAL = 'FINANCIAL' - GAMING = 'GAMING' - GEOSPATIAL = 'GEOSPATIAL' - HEALTH = 'HEALTH' - LOOKUP_TABLES = 'LOOKUP_TABLES' - MANUFACTURING = 'MANUFACTURING' - MEDIA = 'MEDIA' - OTHER = 'OTHER' - PUBLIC_SECTOR = 'PUBLIC_SECTOR' - RETAIL = 'RETAIL' - SCIENCE_AND_RESEARCH = 'SCIENCE_AND_RESEARCH' - SECURITY = 'SECURITY' - SPORTS = 'SPORTS' - TRANSPORTATION_AND_LOGISTICS = 'TRANSPORTATION_AND_LOGISTICS' - TRAVEL_AND_TOURISM = 'TRAVEL_AND_TOURISM' + + ADVERTISING_AND_MARKETING = "ADVERTISING_AND_MARKETING" + CLIMATE_AND_ENVIRONMENT = "CLIMATE_AND_ENVIRONMENT" + COMMERCE = "COMMERCE" + DEMOGRAPHICS = "DEMOGRAPHICS" + ECONOMICS = "ECONOMICS" + EDUCATION = "EDUCATION" + ENERGY = "ENERGY" + FINANCIAL = "FINANCIAL" + GAMING = "GAMING" + GEOSPATIAL = "GEOSPATIAL" + HEALTH = "HEALTH" + LOOKUP_TABLES = "LOOKUP_TABLES" + MANUFACTURING = "MANUFACTURING" + MEDIA = "MEDIA" + OTHER = "OTHER" + PUBLIC_SECTOR = "PUBLIC_SECTOR" + RETAIL = "RETAIL" + SCIENCE_AND_RESEARCH = "SCIENCE_AND_RESEARCH" + SECURITY = "SECURITY" + SPORTS = "SPORTS" + TRANSPORTATION_AND_LOGISTICS = "TRANSPORTATION_AND_LOGISTICS" + TRAVEL_AND_TOURISM = "TRAVEL_AND_TOURISM" + @dataclass class ConsumerTerms: version: str - + def as_dict(self) -> dict: """Serializes the ConsumerTerms into a dictionary suitable for use as a JSON request body.""" body = {} - if self.version is not None: body['version'] = self.version + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ConsumerTerms into a shallow dictionary of its immediate attributes.""" body = {} - if self.version is not None: body['version'] = self.version + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ConsumerTerms: """Deserializes the ConsumerTerms from a dictionary.""" - return cls(version=d.get('version', None)) - - + return cls(version=d.get("version", None)) @dataclass class ContactInfo: """contact info for the consumer requesting data or performing a listing installation""" - + company: Optional[str] = None - + email: Optional[str] = None - + first_name: Optional[str] = None - + last_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ContactInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.company is not None: body['company'] = self.company - if self.email is not None: body['email'] = self.email - if self.first_name is not None: body['first_name'] = self.first_name - if self.last_name is not None: body['last_name'] = self.last_name + if self.company is not None: + body["company"] = self.company + if self.email is not None: + body["email"] = self.email + if self.first_name is 
not None: + body["first_name"] = self.first_name + if self.last_name is not None: + body["last_name"] = self.last_name return body def as_shallow_dict(self) -> dict: """Serializes the ContactInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.company is not None: body['company'] = self.company - if self.email is not None: body['email'] = self.email - if self.first_name is not None: body['first_name'] = self.first_name - if self.last_name is not None: body['last_name'] = self.last_name + if self.company is not None: + body["company"] = self.company + if self.email is not None: + body["email"] = self.email + if self.first_name is not None: + body["first_name"] = self.first_name + if self.last_name is not None: + body["last_name"] = self.last_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ContactInfo: """Deserializes the ContactInfo from a dictionary.""" - return cls(company=d.get('company', None), email=d.get('email', None), first_name=d.get('first_name', None), last_name=d.get('last_name', None)) - - + return cls( + company=d.get("company", None), + email=d.get("email", None), + first_name=d.get("first_name", None), + last_name=d.get("last_name", None), + ) class Cost(Enum): - - - FREE = 'FREE' - PAID = 'PAID' + + FREE = "FREE" + PAID = "PAID" + @dataclass class CreateExchangeFilterRequest: filter: ExchangeFilter - + def as_dict(self) -> dict: """Serializes the CreateExchangeFilterRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter: body['filter'] = self.filter.as_dict() + if self.filter: + body["filter"] = self.filter.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter: body['filter'] = self.filter + if self.filter: + body["filter"] = self.filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterRequest: """Deserializes the CreateExchangeFilterRequest from a dictionary.""" - return cls(filter=_from_dict(d, 'filter', ExchangeFilter)) - - + return cls(filter=_from_dict(d, "filter", ExchangeFilter)) @dataclass class CreateExchangeFilterResponse: filter_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateExchangeFilterResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter_id is not None: body['filter_id'] = self.filter_id + if self.filter_id is not None: + body["filter_id"] = self.filter_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeFilterResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter_id is not None: body['filter_id'] = self.filter_id + if self.filter_id is not None: + body["filter_id"] = self.filter_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterResponse: """Deserializes the CreateExchangeFilterResponse from a dictionary.""" - return cls(filter_id=d.get('filter_id', None)) - - + return cls(filter_id=d.get("filter_id", None)) @dataclass class CreateExchangeRequest: exchange: Exchange - + def as_dict(self) -> dict: """Serializes the CreateExchangeRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange: body['exchange'] = self.exchange.as_dict() + if self.exchange: + body["exchange"] = self.exchange.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeRequest 
into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange: body['exchange'] = self.exchange + if self.exchange: + body["exchange"] = self.exchange return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeRequest: """Deserializes the CreateExchangeRequest from a dictionary.""" - return cls(exchange=_from_dict(d, 'exchange', Exchange)) - - + return cls(exchange=_from_dict(d, "exchange", Exchange)) @dataclass class CreateExchangeResponse: exchange_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateExchangeResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.exchange_id is not None: + body["exchange_id"] = self.exchange_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateExchangeResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange_id is not None: body['exchange_id'] = self.exchange_id + if self.exchange_id is not None: + body["exchange_id"] = self.exchange_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeResponse: """Deserializes the CreateExchangeResponse from a dictionary.""" - return cls(exchange_id=d.get('exchange_id', None)) - - + return cls(exchange_id=d.get("exchange_id", None)) @dataclass class CreateFileRequest: file_parent: FileParent - + marketplace_file_type: MarketplaceFileType - + mime_type: str - + display_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateFileRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.file_parent: body['file_parent'] = self.file_parent.as_dict() - if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type.value - if self.mime_type is not None: body['mime_type'] = self.mime_type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.file_parent: + body["file_parent"] = self.file_parent.as_dict() + if self.marketplace_file_type is not None: + body["marketplace_file_type"] = self.marketplace_file_type.value + if self.mime_type is not None: + body["mime_type"] = self.mime_type return body def as_shallow_dict(self) -> dict: """Serializes the CreateFileRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.file_parent: body['file_parent'] = self.file_parent - if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type - if self.mime_type is not None: body['mime_type'] = self.mime_type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.file_parent: + body["file_parent"] = self.file_parent + if self.marketplace_file_type is not None: + body["marketplace_file_type"] = self.marketplace_file_type + if self.mime_type is not None: + body["mime_type"] = self.mime_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateFileRequest: """Deserializes the CreateFileRequest from a dictionary.""" - return cls(display_name=d.get('display_name', None), file_parent=_from_dict(d, 'file_parent', FileParent), marketplace_file_type=_enum(d, 'marketplace_file_type', MarketplaceFileType), mime_type=d.get('mime_type', None)) - - + return cls( + 
display_name=d.get("display_name", None), + file_parent=_from_dict(d, "file_parent", FileParent), + marketplace_file_type=_enum(d, "marketplace_file_type", MarketplaceFileType), + mime_type=d.get("mime_type", None), + ) @dataclass class CreateFileResponse: file_info: Optional[FileInfo] = None - + upload_url: Optional[str] = None """Pre-signed POST URL to blob storage""" - + def as_dict(self) -> dict: """Serializes the CreateFileResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file_info: body['file_info'] = self.file_info.as_dict() - if self.upload_url is not None: body['upload_url'] = self.upload_url + if self.file_info: + body["file_info"] = self.file_info.as_dict() + if self.upload_url is not None: + body["upload_url"] = self.upload_url return body def as_shallow_dict(self) -> dict: """Serializes the CreateFileResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.file_info: body['file_info'] = self.file_info - if self.upload_url is not None: body['upload_url'] = self.upload_url + if self.file_info: + body["file_info"] = self.file_info + if self.upload_url is not None: + body["upload_url"] = self.upload_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateFileResponse: """Deserializes the CreateFileResponse from a dictionary.""" - return cls(file_info=_from_dict(d, 'file_info', FileInfo), upload_url=d.get('upload_url', None)) - - + return cls(file_info=_from_dict(d, "file_info", FileInfo), upload_url=d.get("upload_url", None)) @dataclass class CreateInstallationRequest: accepted_consumer_terms: Optional[ConsumerTerms] = None - + catalog_name: Optional[str] = None - + listing_id: Optional[str] = None - + recipient_type: Optional[DeltaSharingRecipientType] = None - + repo_detail: Optional[RepoInstallation] = None """for git repo installations""" - + share_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateInstallationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms.as_dict() - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value - if self.repo_detail: body['repo_detail'] = self.repo_detail.as_dict() - if self.share_name is not None: body['share_name'] = self.share_name + if self.accepted_consumer_terms: + body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.recipient_type is not None: + body["recipient_type"] = self.recipient_type.value + if self.repo_detail: + body["repo_detail"] = self.repo_detail.as_dict() + if self.share_name is not None: + body["share_name"] = self.share_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.recipient_type is not None: body['recipient_type'] = self.recipient_type - if 
self.repo_detail: body['repo_detail'] = self.repo_detail - if self.share_name is not None: body['share_name'] = self.share_name + if self.accepted_consumer_terms: + body["accepted_consumer_terms"] = self.accepted_consumer_terms + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.recipient_type is not None: + body["recipient_type"] = self.recipient_type + if self.repo_detail: + body["repo_detail"] = self.repo_detail + if self.share_name is not None: + body["share_name"] = self.share_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateInstallationRequest: """Deserializes the CreateInstallationRequest from a dictionary.""" - return cls(accepted_consumer_terms=_from_dict(d, 'accepted_consumer_terms', ConsumerTerms), catalog_name=d.get('catalog_name', None), listing_id=d.get('listing_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), repo_detail=_from_dict(d, 'repo_detail', RepoInstallation), share_name=d.get('share_name', None)) - - + return cls( + accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms), + catalog_name=d.get("catalog_name", None), + listing_id=d.get("listing_id", None), + recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), + repo_detail=_from_dict(d, "repo_detail", RepoInstallation), + share_name=d.get("share_name", None), + ) @dataclass class CreateListingRequest: listing: Listing - + def as_dict(self) -> dict: """Serializes the CreateListingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listing: body['listing'] = self.listing.as_dict() + if self.listing: + body["listing"] = self.listing.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.listing: body['listing'] = self.listing + if self.listing: + body["listing"] = self.listing return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateListingRequest: """Deserializes the CreateListingRequest from a dictionary.""" - return cls(listing=_from_dict(d, 'listing', Listing)) - - + return cls(listing=_from_dict(d, "listing", Listing)) @dataclass class CreateListingResponse: listing_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateListingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_id is not None: + body["listing_id"] = self.listing_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateListingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.listing_id is not None: body['listing_id'] = self.listing_id + if self.listing_id is not None: + body["listing_id"] = self.listing_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateListingResponse: """Deserializes the CreateListingResponse from a dictionary.""" - return cls(listing_id=d.get('listing_id', None)) - - + return cls(listing_id=d.get("listing_id", None)) @dataclass class CreatePersonalizationRequest: """Data request messages also creates a lead (maybe)""" - + intended_use: str - + accepted_consumer_terms: ConsumerTerms - + comment: Optional[str] = None - + company: Optional[str] = None - + first_name: Optional[str] = None - + is_from_lighthouse: Optional[bool] = None - + 
last_name: Optional[str] = None - + listing_id: Optional[str] = None - + recipient_type: Optional[DeltaSharingRecipientType] = None - + def as_dict(self) -> dict: """Serializes the CreatePersonalizationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms.as_dict() - if self.comment is not None: body['comment'] = self.comment - if self.company is not None: body['company'] = self.company - if self.first_name is not None: body['first_name'] = self.first_name - if self.intended_use is not None: body['intended_use'] = self.intended_use - if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse - if self.last_name is not None: body['last_name'] = self.last_name - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value + if self.accepted_consumer_terms: + body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.company is not None: + body["company"] = self.company + if self.first_name is not None: + body["first_name"] = self.first_name + if self.intended_use is not None: + body["intended_use"] = self.intended_use + if self.is_from_lighthouse is not None: + body["is_from_lighthouse"] = self.is_from_lighthouse + if self.last_name is not None: + body["last_name"] = self.last_name + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.recipient_type is not None: + body["recipient_type"] = self.recipient_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms - if self.comment is not None: body['comment'] = self.comment - if self.company is not None: body['company'] = self.company - if self.first_name is not None: body['first_name'] = self.first_name - if self.intended_use is not None: body['intended_use'] = self.intended_use - if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse - if self.last_name is not None: body['last_name'] = self.last_name - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.recipient_type is not None: body['recipient_type'] = self.recipient_type + if self.accepted_consumer_terms: + body["accepted_consumer_terms"] = self.accepted_consumer_terms + if self.comment is not None: + body["comment"] = self.comment + if self.company is not None: + body["company"] = self.company + if self.first_name is not None: + body["first_name"] = self.first_name + if self.intended_use is not None: + body["intended_use"] = self.intended_use + if self.is_from_lighthouse is not None: + body["is_from_lighthouse"] = self.is_from_lighthouse + if self.last_name is not None: + body["last_name"] = self.last_name + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.recipient_type is not None: + body["recipient_type"] = self.recipient_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequest: """Deserializes the CreatePersonalizationRequest from a dictionary.""" - return cls(accepted_consumer_terms=_from_dict(d, 'accepted_consumer_terms', ConsumerTerms), comment=d.get('comment', None), 
company=d.get('company', None), first_name=d.get('first_name', None), intended_use=d.get('intended_use', None), is_from_lighthouse=d.get('is_from_lighthouse', None), last_name=d.get('last_name', None), listing_id=d.get('listing_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType)) - - + return cls( + accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms), + comment=d.get("comment", None), + company=d.get("company", None), + first_name=d.get("first_name", None), + intended_use=d.get("intended_use", None), + is_from_lighthouse=d.get("is_from_lighthouse", None), + last_name=d.get("last_name", None), + listing_id=d.get("listing_id", None), + recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), + ) @dataclass class CreatePersonalizationRequestResponse: id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreatePersonalizationRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the CreatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequestResponse: """Deserializes the CreatePersonalizationRequestResponse from a dictionary.""" - return cls(id=d.get('id', None)) - - + return cls(id=d.get("id", None)) @dataclass class CreateProviderRequest: provider: ProviderInfo - + def as_dict(self) -> dict: """Serializes the CreateProviderRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.provider: body['provider'] = self.provider.as_dict() + if self.provider: + body["provider"] = self.provider.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.provider: body['provider'] = self.provider + if self.provider: + body["provider"] = self.provider return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateProviderRequest: """Deserializes the CreateProviderRequest from a dictionary.""" - return cls(provider=_from_dict(d, 'provider', ProviderInfo)) - - + return cls(provider=_from_dict(d, "provider", ProviderInfo)) @dataclass class CreateProviderResponse: id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CreateProviderResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the CreateProviderResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateProviderResponse: """Deserializes the CreateProviderResponse from a dictionary.""" - return cls(id=d.get('id', None)) - - + return cls(id=d.get("id", None)) class DataRefresh(Enum): - - - DAILY = 'DAILY' - HOURLY = 'HOURLY' - MINUTE = 'MINUTE' - MONTHLY = 'MONTHLY' - NONE = 'NONE' - QUARTERLY = 'QUARTERLY' - SECOND = 'SECOND' - WEEKLY = 'WEEKLY' - YEARLY = 'YEARLY' + + DAILY = "DAILY" + HOURLY = "HOURLY" + 
MINUTE = "MINUTE" + MONTHLY = "MONTHLY" + NONE = "NONE" + QUARTERLY = "QUARTERLY" + SECOND = "SECOND" + WEEKLY = "WEEKLY" + YEARLY = "YEARLY" + @dataclass class DataRefreshInfo: interval: int - + unit: DataRefresh - + def as_dict(self) -> dict: """Serializes the DataRefreshInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.interval is not None: body['interval'] = self.interval - if self.unit is not None: body['unit'] = self.unit.value + if self.interval is not None: + body["interval"] = self.interval + if self.unit is not None: + body["unit"] = self.unit.value return body def as_shallow_dict(self) -> dict: """Serializes the DataRefreshInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.interval is not None: body['interval'] = self.interval - if self.unit is not None: body['unit'] = self.unit + if self.interval is not None: + body["interval"] = self.interval + if self.unit is not None: + body["unit"] = self.unit return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataRefreshInfo: """Deserializes the DataRefreshInfo from a dictionary.""" - return cls(interval=d.get('interval', None), unit=_enum(d, 'unit', DataRefresh)) - - - - - + return cls(interval=d.get("interval", None), unit=_enum(d, "unit", DataRefresh)) @dataclass @@ -676,11 +732,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteExchangeFilterResponse: """Deserializes the DeleteExchangeFilterResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -699,11 +750,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteExchangeResponse: """Deserializes the DeleteExchangeResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -722,11 +768,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteFileResponse: """Deserializes the DeleteFileResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -745,11 +786,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteInstallationResponse: """Deserializes the DeleteInstallationResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -768,11 +804,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteListingResponse: """Deserializes the DeleteListingResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -791,1069 +822,1215 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteProviderResponse: """Deserializes the DeleteProviderResponse from a dictionary.""" return cls() - - class DeltaSharingRecipientType(Enum): - - - DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS = 'DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS' - DELTA_SHARING_RECIPIENT_TYPE_OPEN = 'DELTA_SHARING_RECIPIENT_TYPE_OPEN' + + DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS = "DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS" + DELTA_SHARING_RECIPIENT_TYPE_OPEN = "DELTA_SHARING_RECIPIENT_TYPE_OPEN" + @dataclass class Exchange: name: str - + comment: Optional[str] = None - + created_at: Optional[int] = None - + created_by: Optional[str] = None - + filters: Optional[List[ExchangeFilter]] = None - + id: Optional[str] = None - + linked_listings: Optional[List[ExchangeListing]] = None - + updated_at: Optional[int] = None - + updated_by: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the Exchange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = 
self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.filters: body['filters'] = [v.as_dict() for v in self.filters] - if self.id is not None: body['id'] = self.id - if self.linked_listings: body['linked_listings'] = [v.as_dict() for v in self.linked_listings] - if self.name is not None: body['name'] = self.name - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.filters: + body["filters"] = [v.as_dict() for v in self.filters] + if self.id is not None: + body["id"] = self.id + if self.linked_listings: + body["linked_listings"] = [v.as_dict() for v in self.linked_listings] + if self.name is not None: + body["name"] = self.name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the Exchange into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.filters: body['filters'] = self.filters - if self.id is not None: body['id'] = self.id - if self.linked_listings: body['linked_listings'] = self.linked_listings - if self.name is not None: body['name'] = self.name - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.filters: + body["filters"] = self.filters + if self.id is not None: + body["id"] = self.id + if self.linked_listings: + body["linked_listings"] = self.linked_listings + if self.name is not None: + body["name"] = self.name + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Exchange: """Deserializes the Exchange from a dictionary.""" - return cls(comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), filters=_repeated_dict(d, 'filters', ExchangeFilter), id=d.get('id', None), linked_listings=_repeated_dict(d, 'linked_listings', ExchangeListing), name=d.get('name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + filters=_repeated_dict(d, "filters", ExchangeFilter), + id=d.get("id", None), + linked_listings=_repeated_dict(d, "linked_listings", ExchangeListing), + name=d.get("name", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class ExchangeFilter: exchange_id: str - + filter_value: str - + filter_type: ExchangeFilterType - + created_at: Optional[int] = None - + 
     created_by: Optional[str] = None
-
+
     id: Optional[str] = None
-
+
     name: Optional[str] = None
-
+
     updated_at: Optional[int] = None
-
+
     updated_by: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ExchangeFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.filter_type is not None: body['filter_type'] = self.filter_type.value
-        if self.filter_value is not None: body['filter_value'] = self.filter_value
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.filter_type is not None:
+            body["filter_type"] = self.filter_type.value
+        if self.filter_value is not None:
+            body["filter_value"] = self.filter_value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ExchangeFilter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.filter_type is not None: body['filter_type'] = self.filter_type
-        if self.filter_value is not None: body['filter_value'] = self.filter_value
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.filter_type is not None:
+            body["filter_type"] = self.filter_type
+        if self.filter_value is not None:
+            body["filter_value"] = self.filter_value
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ExchangeFilter:
         """Deserializes the ExchangeFilter from a dictionary."""
-        return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), exchange_id=d.get('exchange_id', None), filter_type=_enum(d, 'filter_type', ExchangeFilterType), filter_value=d.get('filter_value', None), id=d.get('id', None), name=d.get('name', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
-
-
+        return cls(
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            exchange_id=d.get("exchange_id", None),
+            filter_type=_enum(d, "filter_type", ExchangeFilterType),
+            filter_value=d.get("filter_value", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )


 class ExchangeFilterType(Enum):
-
-
-    GLOBAL_METASTORE_ID = 'GLOBAL_METASTORE_ID'
+
+    GLOBAL_METASTORE_ID = "GLOBAL_METASTORE_ID"
+

 @dataclass
 class ExchangeListing:
     created_at: Optional[int] = None
-
+
     created_by: Optional[str] = None
-
+
     exchange_id: Optional[str] = None
-
+
     exchange_name: Optional[str] = None
-
+
     id: Optional[str] = None
-
+
     listing_id: Optional[str] = None
-
+
     listing_name: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ExchangeListing into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.exchange_name is not None: body['exchange_name'] = self.exchange_name
-        if self.id is not None: body['id'] = self.id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.exchange_name is not None:
+            body["exchange_name"] = self.exchange_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ExchangeListing into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
-        if self.exchange_name is not None: body['exchange_name'] = self.exchange_name
-        if self.id is not None: body['id'] = self.id
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.exchange_id is not None:
+            body["exchange_id"] = self.exchange_id
+        if self.exchange_name is not None:
+            body["exchange_name"] = self.exchange_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ExchangeListing:
         """Deserializes the ExchangeListing from a dictionary."""
-        return cls(created_at=d.get('created_at', None), created_by=d.get('created_by', None), exchange_id=d.get('exchange_id', None), exchange_name=d.get('exchange_name', None), id=d.get('id', None), listing_id=d.get('listing_id', None), listing_name=d.get('listing_name', None))
-
-
+        return cls(
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            exchange_id=d.get("exchange_id", None),
+            exchange_name=d.get("exchange_name", None),
+            id=d.get("id", None),
+            listing_id=d.get("listing_id", None),
+            listing_name=d.get("listing_name", None),
+        )


 @dataclass
 class FileInfo:
     created_at: Optional[int] = None
-
+
     display_name: Optional[str] = None
     """Name displayed to users for applicable files, e.g. embedded notebooks"""
-
+
     download_link: Optional[str] = None
-
+
     file_parent: Optional[FileParent] = None
-
+
     id: Optional[str] = None
-
+
     marketplace_file_type: Optional[MarketplaceFileType] = None
-
+
     mime_type: Optional[str] = None
-
+
     status: Optional[FileStatus] = None
-
+
     status_message: Optional[str] = None
     """Populated if status is in a failed state with more information on reason for the failure."""
-
+
     updated_at: Optional[int] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the FileInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.download_link is not None: body['download_link'] = self.download_link
-        if self.file_parent: body['file_parent'] = self.file_parent.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type.value
-        if self.mime_type is not None: body['mime_type'] = self.mime_type
-        if self.status is not None: body['status'] = self.status.value
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.download_link is not None:
+            body["download_link"] = self.download_link
+        if self.file_parent:
+            body["file_parent"] = self.file_parent.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.marketplace_file_type is not None:
+            body["marketplace_file_type"] = self.marketplace_file_type.value
+        if self.mime_type is not None:
+            body["mime_type"] = self.mime_type
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.download_link is not None: body['download_link'] = self.download_link
-        if self.file_parent: body['file_parent'] = self.file_parent
-        if self.id is not None: body['id'] = self.id
-        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type
-        if self.mime_type is not None: body['mime_type'] = self.mime_type
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.download_link is not None:
+            body["download_link"] = self.download_link
+        if self.file_parent:
+            body["file_parent"] = self.file_parent
+        if self.id is not None:
+            body["id"] = self.id
+        if self.marketplace_file_type is not None:
+            body["marketplace_file_type"] = self.marketplace_file_type
+        if self.mime_type is not None:
+            body["mime_type"] = self.mime_type
+        if self.status is not None:
+            body["status"] = self.status
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
-        return cls(created_at=d.get('created_at', None), display_name=d.get('display_name', None), download_link=d.get('download_link', None), file_parent=_from_dict(d, 'file_parent', FileParent), id=d.get('id', None), marketplace_file_type=_enum(d, 'marketplace_file_type', MarketplaceFileType), mime_type=d.get('mime_type', None), status=_enum(d, 'status', FileStatus), status_message=d.get('status_message', None), updated_at=d.get('updated_at', None))
-
-
+        return cls(
+            created_at=d.get("created_at", None),
+            display_name=d.get("display_name", None),
+            download_link=d.get("download_link", None),
+            file_parent=_from_dict(d, "file_parent", FileParent),
+            id=d.get("id", None),
+            marketplace_file_type=_enum(d, "marketplace_file_type", MarketplaceFileType),
+            mime_type=d.get("mime_type", None),
+            status=_enum(d, "status", FileStatus),
+            status_message=d.get("status_message", None),
+            updated_at=d.get("updated_at", None),
+        )


 @dataclass
 class FileParent:
     file_parent_type: Optional[FileParentType] = None
-
+
     parent_id: Optional[str] = None
     """TODO make the following fields required"""
-
+
     def as_dict(self) -> dict:
         """Serializes the FileParent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type.value
-        if self.parent_id is not None: body['parent_id'] = self.parent_id
+        if self.file_parent_type is not None:
+            body["file_parent_type"] = self.file_parent_type.value
+        if self.parent_id is not None:
+            body["parent_id"] = self.parent_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FileParent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type
-        if self.parent_id is not None: body['parent_id'] = self.parent_id
+        if self.file_parent_type is not None:
+            body["file_parent_type"] = self.file_parent_type
+        if self.parent_id is not None:
+            body["parent_id"] = self.parent_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FileParent:
         """Deserializes the FileParent from a dictionary."""
-        return cls(file_parent_type=_enum(d, 'file_parent_type', FileParentType), parent_id=d.get('parent_id', None))
-
-
+        return cls(file_parent_type=_enum(d, "file_parent_type", FileParentType), parent_id=d.get("parent_id", None))


 class FileParentType(Enum):
-
-
-    LISTING = 'LISTING'
-    LISTING_RESOURCE = 'LISTING_RESOURCE'
-    PROVIDER = 'PROVIDER'
+
+    LISTING = "LISTING"
+    LISTING_RESOURCE = "LISTING_RESOURCE"
+    PROVIDER = "PROVIDER"
+

 class FileStatus(Enum):
-
-
-    FILE_STATUS_PUBLISHED = 'FILE_STATUS_PUBLISHED'
-    FILE_STATUS_SANITIZATION_FAILED = 'FILE_STATUS_SANITIZATION_FAILED'
-    FILE_STATUS_SANITIZING = 'FILE_STATUS_SANITIZING'
-    FILE_STATUS_STAGING = 'FILE_STATUS_STAGING'
-class FulfillmentType(Enum):
-
-
-    INSTALL = 'INSTALL'
-    REQUEST_ACCESS = 'REQUEST_ACCESS'
+    FILE_STATUS_PUBLISHED = "FILE_STATUS_PUBLISHED"
+    FILE_STATUS_SANITIZATION_FAILED = "FILE_STATUS_SANITIZATION_FAILED"
+    FILE_STATUS_SANITIZING = "FILE_STATUS_SANITIZING"
+    FILE_STATUS_STAGING = "FILE_STATUS_STAGING"
+class FulfillmentType(Enum):
+
+    INSTALL = "INSTALL"
+    REQUEST_ACCESS = "REQUEST_ACCESS"


 @dataclass
 class GetExchangeResponse:
     exchange: Optional[Exchange] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange.as_dict()
+        if self.exchange:
+            body["exchange"] = self.exchange.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange: body['exchange'] = self.exchange
+        if self.exchange:
+            body["exchange"] = self.exchange
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetExchangeResponse:
         """Deserializes the GetExchangeResponse from a dictionary."""
-        return cls(exchange=_from_dict(d, 'exchange', Exchange))
-
-
-
-
-
+        return cls(exchange=_from_dict(d, "exchange", Exchange))


 @dataclass
 class GetFileResponse:
     file_info: Optional[FileInfo] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetFileResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_info: body['file_info'] = self.file_info.as_dict()
+        if self.file_info:
+            body["file_info"] = self.file_info.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetFileResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_info: body['file_info'] = self.file_info
+        if self.file_info:
+            body["file_info"] = self.file_info
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetFileResponse:
         """Deserializes the GetFileResponse from a dictionary."""
-        return cls(file_info=_from_dict(d, 'file_info', FileInfo))
-
-
+        return cls(file_info=_from_dict(d, "file_info", FileInfo))


 @dataclass
 class GetLatestVersionProviderAnalyticsDashboardResponse:
     version: Optional[int] = None
     """version here is latest logical version of the dashboard template"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.version is not None: body['version'] = self.version
+        if self.version is not None:
+            body["version"] = self.version
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.version is not None: body['version'] = self.version
+        if self.version is not None:
+            body["version"] = self.version
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionProviderAnalyticsDashboardResponse:
         """Deserializes the GetLatestVersionProviderAnalyticsDashboardResponse from a dictionary."""
-        return cls(version=d.get('version', None))
-
-
-
-
-
+        return cls(version=d.get("version", None))


 @dataclass
 class GetListingContentMetadataResponse:
     next_page_token: Optional[str] = None
-
+
     shared_data_objects: Optional[List[SharedDataObject]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetListingContentMetadataResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shared_data_objects: body['shared_data_objects'] = [v.as_dict() for v in self.shared_data_objects]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shared_data_objects:
+            body["shared_data_objects"] = [v.as_dict() for v in self.shared_data_objects]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetListingContentMetadataResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shared_data_objects: body['shared_data_objects'] = self.shared_data_objects
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shared_data_objects:
+            body["shared_data_objects"] = self.shared_data_objects
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetListingContentMetadataResponse:
         """Deserializes the GetListingContentMetadataResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), shared_data_objects=_repeated_dict(d, 'shared_data_objects', SharedDataObject))
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            shared_data_objects=_repeated_dict(d, "shared_data_objects", SharedDataObject),
+        )


 @dataclass
 class GetListingResponse:
     listing: Optional[Listing] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listing: body['listing'] = self.listing.as_dict()
+        if self.listing:
+            body["listing"] = self.listing.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listing: body['listing'] = self.listing
+        if self.listing:
+            body["listing"] = self.listing
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetListingResponse:
         """Deserializes the GetListingResponse from a dictionary."""
-        return cls(listing=_from_dict(d, 'listing', Listing))
-
-
-
-
-
+        return cls(listing=_from_dict(d, "listing", Listing))


 @dataclass
 class GetListingsResponse:
     listings: Optional[List[Listing]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings: body['listings'] = self.listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = self.listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetListingsResponse:
         """Deserializes the GetListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, 'listings', Listing), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(listings=_repeated_dict(d, "listings", Listing), next_page_token=d.get("next_page_token", None))


 @dataclass
 class GetPersonalizationRequestResponse:
     personalization_requests: Optional[List[PersonalizationRequest]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetPersonalizationRequestResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.personalization_requests: body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
+        if self.personalization_requests:
+            body["personalization_requests"] = [v.as_dict() for v in self.personalization_requests]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetPersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        if self.personalization_requests:
+            body["personalization_requests"] = self.personalization_requests
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetPersonalizationRequestResponse:
         """Deserializes the GetPersonalizationRequestResponse from a dictionary."""
-        return cls(personalization_requests=_repeated_dict(d, 'personalization_requests', PersonalizationRequest))
-
-
-
-
-
+        return cls(personalization_requests=_repeated_dict(d, "personalization_requests", PersonalizationRequest))


 @dataclass
 class GetProviderResponse:
     provider: Optional[ProviderInfo] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the GetProviderResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.provider: body['provider'] = self.provider.as_dict()
+        if self.provider:
+            body["provider"] = self.provider.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetProviderResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.provider: body['provider'] = self.provider
+        if self.provider:
+            body["provider"] = self.provider
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetProviderResponse:
         """Deserializes the GetProviderResponse from a dictionary."""
-        return cls(provider=_from_dict(d, 'provider', ProviderInfo))
-
-
+        return cls(provider=_from_dict(d, "provider", ProviderInfo))


 @dataclass
 class Installation:
     installation: Optional[InstallationDetail] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the Installation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installation: body['installation'] = self.installation.as_dict()
+        if self.installation:
+            body["installation"] = self.installation.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Installation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installation: body['installation'] = self.installation
+        if self.installation:
+            body["installation"] = self.installation
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Installation:
         """Deserializes the Installation from a dictionary."""
-        return cls(installation=_from_dict(d, 'installation', InstallationDetail))
-
-
+        return cls(installation=_from_dict(d, "installation", InstallationDetail))


 @dataclass
 class InstallationDetail:
     catalog_name: Optional[str] = None
-
+
     error_message: Optional[str] = None
-
+
     id: Optional[str] = None
-
+
     installed_on: Optional[int] = None
-
+
     listing_id: Optional[str] = None
-
+
     listing_name: Optional[str] = None
-
+
     recipient_type: Optional[DeltaSharingRecipientType] = None
-
+
     repo_name: Optional[str] = None
-
+
     repo_path: Optional[str] = None
-
+
     share_name: Optional[str] = None
-
+
     status: Optional[InstallationStatus] = None
-
+
     token_detail: Optional[TokenDetail] = None
-
+
     tokens: Optional[List[TokenInfo]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the InstallationDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.id is not None: body['id'] = self.id
-        if self.installed_on is not None: body['installed_on'] = self.installed_on
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.status is not None: body['status'] = self.status.value
-        if self.token_detail: body['token_detail'] = self.token_detail.as_dict()
-        if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens]
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.id is not None:
+            body["id"] = self.id
+        if self.installed_on is not None:
+            body["installed_on"] = self.installed_on
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.token_detail:
+            body["token_detail"] = self.token_detail.as_dict()
+        if self.tokens:
+            body["tokens"] = [v.as_dict() for v in self.tokens]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the InstallationDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.id is not None: body['id'] = self.id
-        if self.installed_on is not None: body['installed_on'] = self.installed_on
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
-        if self.share_name is not None: body['share_name'] = self.share_name
-        if self.status is not None: body['status'] = self.status
-        if self.token_detail: body['token_detail'] = self.token_detail
-        if self.tokens: body['tokens'] = self.tokens
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.id is not None:
+            body["id"] = self.id
+        if self.installed_on is not None:
+            body["installed_on"] = self.installed_on
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
+        if self.share_name is not None:
+            body["share_name"] = self.share_name
+        if self.status is not None:
+            body["status"] = self.status
+        if self.token_detail:
+            body["token_detail"] = self.token_detail
+        if self.tokens:
+            body["tokens"] = self.tokens
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> InstallationDetail:
         """Deserializes the InstallationDetail from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None), error_message=d.get('error_message', None), id=d.get('id', None), installed_on=d.get('installed_on', None), listing_id=d.get('listing_id', None), listing_name=d.get('listing_name', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), repo_name=d.get('repo_name', None), repo_path=d.get('repo_path', None), share_name=d.get('share_name', None), status=_enum(d, 'status', InstallationStatus), token_detail=_from_dict(d, 'token_detail', TokenDetail), tokens=_repeated_dict(d, 'tokens', TokenInfo))
-
-
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            error_message=d.get("error_message", None),
+            id=d.get("id", None),
+            installed_on=d.get("installed_on", None),
+            listing_id=d.get("listing_id", None),
+            listing_name=d.get("listing_name", None),
+            recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType),
+            repo_name=d.get("repo_name", None),
+            repo_path=d.get("repo_path", None),
+            share_name=d.get("share_name", None),
+            status=_enum(d, "status", InstallationStatus),
+            token_detail=_from_dict(d, "token_detail", TokenDetail),
+            tokens=_repeated_dict(d, "tokens", TokenInfo),
+        )


 class InstallationStatus(Enum):
-
-
-    FAILED = 'FAILED'
-    INSTALLED = 'INSTALLED'
-
+    FAILED = "FAILED"
+    INSTALLED = "INSTALLED"


 @dataclass
 class ListAllInstallationsResponse:
     installations: Optional[List[InstallationDetail]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListAllInstallationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installations: body['installations'] = [v.as_dict() for v in self.installations]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = [v.as_dict() for v in self.installations]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListAllInstallationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installations: body['installations'] = self.installations
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = self.installations
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAllInstallationsResponse:
         """Deserializes the ListAllInstallationsResponse from a dictionary."""
-        return cls(installations=_repeated_dict(d, 'installations', InstallationDetail), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            installations=_repeated_dict(d, "installations", InstallationDetail),
+            next_page_token=d.get("next_page_token", None),
+        )


 @dataclass
 class ListAllPersonalizationRequestsResponse:
     next_page_token: Optional[str] = None
-
+
     personalization_requests: Optional[List[PersonalizationRequest]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListAllPersonalizationRequestsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.personalization_requests: body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.personalization_requests:
+            body["personalization_requests"] = [v.as_dict() for v in self.personalization_requests]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListAllPersonalizationRequestsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.personalization_requests:
+            body["personalization_requests"] = self.personalization_requests
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAllPersonalizationRequestsResponse:
         """Deserializes the ListAllPersonalizationRequestsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), personalization_requests=_repeated_dict(d, 'personalization_requests', PersonalizationRequest))
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            personalization_requests=_repeated_dict(d, "personalization_requests", PersonalizationRequest),
+        )


 @dataclass
 class ListExchangeFiltersResponse:
     filters: Optional[List[ExchangeFilter]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListExchangeFiltersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.filters: body['filters'] = [v.as_dict() for v in self.filters]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.filters:
+            body["filters"] = [v.as_dict() for v in self.filters]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListExchangeFiltersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.filters: body['filters'] = self.filters
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.filters:
+            body["filters"] = self.filters
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListExchangeFiltersResponse:
         """Deserializes the ListExchangeFiltersResponse from a dictionary."""
-        return cls(filters=_repeated_dict(d, 'filters', ExchangeFilter), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(filters=_repeated_dict(d, "filters", ExchangeFilter), next_page_token=d.get("next_page_token", None))


 @dataclass
 class ListExchangesForListingResponse:
     exchange_listing: Optional[List[ExchangeListing]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListExchangesForListingResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_listing: body['exchange_listing'] = [v.as_dict() for v in self.exchange_listing]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listing:
+            body["exchange_listing"] = [v.as_dict() for v in self.exchange_listing]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListExchangesForListingResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_listing: body['exchange_listing'] = self.exchange_listing
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listing:
+            body["exchange_listing"] = self.exchange_listing
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListExchangesForListingResponse:
         """Deserializes the ListExchangesForListingResponse from a dictionary."""
-        return cls(exchange_listing=_repeated_dict(d, 'exchange_listing', ExchangeListing), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            exchange_listing=_repeated_dict(d, "exchange_listing", ExchangeListing),
+            next_page_token=d.get("next_page_token", None),
+        )


 @dataclass
 class ListExchangesResponse:
     exchanges: Optional[List[Exchange]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListExchangesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchanges: body['exchanges'] = [v.as_dict() for v in self.exchanges]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchanges:
+            body["exchanges"] = [v.as_dict() for v in self.exchanges]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListExchangesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchanges: body['exchanges'] = self.exchanges
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchanges:
+            body["exchanges"] = self.exchanges
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListExchangesResponse:
         """Deserializes the ListExchangesResponse from a dictionary."""
-        return cls(exchanges=_repeated_dict(d, 'exchanges', Exchange), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(exchanges=_repeated_dict(d, "exchanges", Exchange), next_page_token=d.get("next_page_token", None))


 @dataclass
 class ListFilesResponse:
     file_infos: Optional[List[FileInfo]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListFilesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.file_infos: body['file_infos'] = [v.as_dict() for v in self.file_infos]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.file_infos:
+            body["file_infos"] = [v.as_dict() for v in self.file_infos]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListFilesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.file_infos: body['file_infos'] = self.file_infos
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.file_infos:
+            body["file_infos"] = self.file_infos
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListFilesResponse:
         """Deserializes the ListFilesResponse from a dictionary."""
-        return cls(file_infos=_repeated_dict(d, 'file_infos', FileInfo), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(file_infos=_repeated_dict(d, "file_infos", FileInfo), next_page_token=d.get("next_page_token", None))


 @dataclass
 class ListFulfillmentsResponse:
     fulfillments: Optional[List[ListingFulfillment]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListFulfillmentsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.fulfillments: body['fulfillments'] = [v.as_dict() for v in self.fulfillments]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.fulfillments:
+            body["fulfillments"] = [v.as_dict() for v in self.fulfillments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListFulfillmentsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.fulfillments: body['fulfillments'] = self.fulfillments
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.fulfillments:
+            body["fulfillments"] = self.fulfillments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListFulfillmentsResponse:
         """Deserializes the ListFulfillmentsResponse from a dictionary."""
-        return cls(fulfillments=_repeated_dict(d, 'fulfillments', ListingFulfillment), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            fulfillments=_repeated_dict(d, "fulfillments", ListingFulfillment),
+            next_page_token=d.get("next_page_token", None),
+        )


 @dataclass
 class ListInstallationsResponse:
     installations: Optional[List[InstallationDetail]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListInstallationsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.installations: body['installations'] = [v.as_dict() for v in self.installations]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = [v.as_dict() for v in self.installations]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListInstallationsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.installations: body['installations'] = self.installations
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.installations:
+            body["installations"] = self.installations
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListInstallationsResponse:
         """Deserializes the ListInstallationsResponse from a dictionary."""
-        return cls(installations=_repeated_dict(d, 'installations', InstallationDetail), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            installations=_repeated_dict(d, "installations", InstallationDetail),
+            next_page_token=d.get("next_page_token", None),
+        )


 @dataclass
 class ListListingsForExchangeResponse:
     exchange_listings: Optional[List[ExchangeListing]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListListingsForExchangeResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.exchange_listings: body['exchange_listings'] = [v.as_dict() for v in self.exchange_listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listings:
+            body["exchange_listings"] = [v.as_dict() for v in self.exchange_listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListListingsForExchangeResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.exchange_listings: body['exchange_listings'] = self.exchange_listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.exchange_listings:
+            body["exchange_listings"] = self.exchange_listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListListingsForExchangeResponse:
         """Deserializes the ListListingsForExchangeResponse from a dictionary."""
-        return cls(exchange_listings=_repeated_dict(d, 'exchange_listings', ExchangeListing), next_page_token=d.get('next_page_token', None))
-
-
-
-
-
+        return cls(
+            exchange_listings=_repeated_dict(d, "exchange_listings", ExchangeListing),
+            next_page_token=d.get("next_page_token", None),
+        )


 @dataclass
 class ListListingsResponse:
     listings: Optional[List[Listing]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings: body['listings'] = self.listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = self.listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListListingsResponse:
         """Deserializes the ListListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, 'listings', Listing), next_page_token=d.get('next_page_token', None))
-
-
+        return cls(listings=_repeated_dict(d, "listings", Listing), next_page_token=d.get("next_page_token", None))


 @dataclass
 class ListProviderAnalyticsDashboardResponse:
     id: str
-
+
     dashboard_id: str
     """dashboard_id will be used to open Lakeview dashboard."""
-
+
     version: Optional[int] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.id is not None: body['id'] = self.id
-        if self.version is not None: body['version'] = self.version
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.version is not None:
+            body["version"] = self.version
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProviderAnalyticsDashboardResponse:
         """Deserializes the ListProviderAnalyticsDashboardResponse from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None), id=d.get('id', None), version=d.get('version', None))
-
-
-
-
-
+        return cls(dashboard_id=d.get("dashboard_id", None), id=d.get("id", None), version=d.get("version", None))


 @dataclass
 class ListProvidersResponse:
     next_page_token: Optional[str] = None
-
+
     providers: Optional[List[ProviderInfo]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = [v.as_dict() for v in self.providers]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = self.providers
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = self.providers
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), providers=_repeated_dict(d, 'providers', ProviderInfo))
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None), providers=_repeated_dict(d, "providers", ProviderInfo)
+        )


 @dataclass
 class Listing:
     summary: ListingSummary
-
+
     detail: Optional[ListingDetail] = None
-
+
     id: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the Listing into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.detail: body['detail'] = self.detail.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.summary: body['summary'] = self.summary.as_dict()
+        if self.detail:
+            body["detail"] = self.detail.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.summary:
+            body["summary"] = self.summary.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Listing into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.detail: body['detail'] = self.detail
-        if self.id is not None: body['id'] = self.id
-        if self.summary: body['summary'] = self.summary
+        if self.detail:
+            body["detail"] = self.detail
+        if self.id is not None:
+            body["id"] = self.id
+        if self.summary:
+            body["summary"] = self.summary
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Listing:
         """Deserializes the Listing from a dictionary."""
-        return cls(detail=_from_dict(d, 'detail', ListingDetail), id=d.get('id', None), summary=_from_dict(d, 'summary', ListingSummary))
-
-
+        return cls(
+            detail=_from_dict(d, "detail", ListingDetail),
+            id=d.get("id", None),
+            summary=_from_dict(d, "summary", ListingSummary),
+        )


 @dataclass
 class ListingDetail:
     assets: Optional[List[AssetType]] = None
     """Type of assets included in the listing. eg. GIT_REPO, DATA_TABLE, MODEL, NOTEBOOK"""
-
+
     collection_date_end: Optional[int] = None
     """The ending date timestamp for when the data spans"""
-
+
     collection_date_start: Optional[int] = None
     """The starting date timestamp for when the data spans"""
-
+
     collection_granularity: Optional[DataRefreshInfo] = None
     """Smallest unit of time in the dataset"""
-
+
     cost: Optional[Cost] = None
     """Whether the dataset is free or paid"""
-
+
     data_source: Optional[str] = None
     """Where/how the data is sourced"""
-
+
     description: Optional[str] = None
-
+
     documentation_link: Optional[str] = None
-
+
     embedded_notebook_file_infos: Optional[List[FileInfo]] = None
-
+
     file_ids: Optional[List[str]] = None
-
+
     geographical_coverage: Optional[str] = None
     """Which geo region the listing data is collected from"""
-
+
     license: Optional[str] = None
     """ID 20, 21 removed don't use License of the data asset - Required for listings with model based assets"""
-
+
     pricing_model: Optional[str] = None
     """What the pricing model is (e.g. paid, subscription, paid upfront); should only be present if cost is paid TODO: Not used yet, should deprecate if we will never use it"""
-
+
     privacy_policy_link: Optional[str] = None
-
+
     size: Optional[float] = None
     """size of the dataset in GB"""
-
+
     support_link: Optional[str] = None
-
+
     tags: Optional[List[ListingTag]] = None
     """Listing tags - Simple key value pair to annotate listings. When should I use tags vs dedicated
     fields? Using tags avoids the need to add new columns in the database for new annotations.
@@ -1861,518 +2038,734 @@ class ListingDetail:
     1. If the field is optional and won't need to have NOT NULL integrity check
     2. The value is fairly fixed, static and low cardinality (eg. enums).
     3. The value won't be used in filters or joins with other tables."""
-
+
     terms_of_service: Optional[str] = None
-
+
     update_frequency: Optional[DataRefreshInfo] = None
     """How often data is updated"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.assets: body['assets'] = [v.value for v in self.assets]
-        if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end
-        if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start
-        if self.collection_granularity: body['collection_granularity'] = self.collection_granularity.as_dict()
-        if self.cost is not None: body['cost'] = self.cost.value
-        if self.data_source is not None: body['data_source'] = self.data_source
-        if self.description is not None: body['description'] = self.description
-        if self.documentation_link is not None: body['documentation_link'] = self.documentation_link
-        if self.embedded_notebook_file_infos: body['embedded_notebook_file_infos'] = [v.as_dict() for v in self.embedded_notebook_file_infos]
-        if self.file_ids: body['file_ids'] = [v for v in self.file_ids]
-        if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage
-        if self.license is not None: body['license'] = self.license
-        if self.pricing_model is not None: body['pricing_model'] = self.pricing_model
-        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
-        if self.size is not None: body['size'] = self.size
-        if self.support_link is not None: body['support_link'] = self.support_link
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
-        if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service
-        if self.update_frequency: body['update_frequency'] = self.update_frequency.as_dict()
+        if self.assets:
+            body["assets"] = [v.value for v in self.assets]
+        if self.collection_date_end is not None:
+            body["collection_date_end"] = self.collection_date_end
+        if self.collection_date_start is not None:
+            body["collection_date_start"] = self.collection_date_start
+        if self.collection_granularity:
+            body["collection_granularity"] = self.collection_granularity.as_dict()
+        if self.cost is not None:
+            body["cost"] = self.cost.value
+        if self.data_source is not None:
+            body["data_source"] = self.data_source
+        if self.description is not None:
+            body["description"] = self.description
+        if self.documentation_link is not None:
+            body["documentation_link"] = self.documentation_link
+        if self.embedded_notebook_file_infos:
+            body["embedded_notebook_file_infos"] = [v.as_dict() for v in self.embedded_notebook_file_infos]
+        if self.file_ids:
+            body["file_ids"] = [v for v in self.file_ids]
+        if self.geographical_coverage is not None:
+            body["geographical_coverage"] = self.geographical_coverage
+        if self.license is not None:
+            body["license"] = self.license
+        if self.pricing_model is not None:
+            body["pricing_model"] = self.pricing_model
+        if self.privacy_policy_link is not None:
+            body["privacy_policy_link"] = self.privacy_policy_link
+        if self.size is not None:
+            body["size"] = self.size
+        if self.support_link is not None:
+            body["support_link"] = self.support_link
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
+        if self.terms_of_service is not None:
+            body["terms_of_service"] = self.terms_of_service
+        if self.update_frequency:
+            body["update_frequency"] = self.update_frequency.as_dict()
return body def as_shallow_dict(self) -> dict: """Serializes the ListingDetail into a shallow dictionary of its immediate attributes.""" body = {} - if self.assets: body['assets'] = self.assets - if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end - if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start - if self.collection_granularity: body['collection_granularity'] = self.collection_granularity - if self.cost is not None: body['cost'] = self.cost - if self.data_source is not None: body['data_source'] = self.data_source - if self.description is not None: body['description'] = self.description - if self.documentation_link is not None: body['documentation_link'] = self.documentation_link - if self.embedded_notebook_file_infos: body['embedded_notebook_file_infos'] = self.embedded_notebook_file_infos - if self.file_ids: body['file_ids'] = self.file_ids - if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage - if self.license is not None: body['license'] = self.license - if self.pricing_model is not None: body['pricing_model'] = self.pricing_model - if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link - if self.size is not None: body['size'] = self.size - if self.support_link is not None: body['support_link'] = self.support_link - if self.tags: body['tags'] = self.tags - if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service - if self.update_frequency: body['update_frequency'] = self.update_frequency + if self.assets: + body["assets"] = self.assets + if self.collection_date_end is not None: + body["collection_date_end"] = self.collection_date_end + if self.collection_date_start is not None: + body["collection_date_start"] = self.collection_date_start + if self.collection_granularity: + body["collection_granularity"] = self.collection_granularity + if self.cost is not None: + body["cost"] = self.cost + if self.data_source is not None: + body["data_source"] = self.data_source + if self.description is not None: + body["description"] = self.description + if self.documentation_link is not None: + body["documentation_link"] = self.documentation_link + if self.embedded_notebook_file_infos: + body["embedded_notebook_file_infos"] = self.embedded_notebook_file_infos + if self.file_ids: + body["file_ids"] = self.file_ids + if self.geographical_coverage is not None: + body["geographical_coverage"] = self.geographical_coverage + if self.license is not None: + body["license"] = self.license + if self.pricing_model is not None: + body["pricing_model"] = self.pricing_model + if self.privacy_policy_link is not None: + body["privacy_policy_link"] = self.privacy_policy_link + if self.size is not None: + body["size"] = self.size + if self.support_link is not None: + body["support_link"] = self.support_link + if self.tags: + body["tags"] = self.tags + if self.terms_of_service is not None: + body["terms_of_service"] = self.terms_of_service + if self.update_frequency: + body["update_frequency"] = self.update_frequency return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListingDetail: """Deserializes the ListingDetail from a dictionary.""" - return cls(assets=_repeated_enum(d, 'assets', AssetType), collection_date_end=d.get('collection_date_end', None), collection_date_start=d.get('collection_date_start', None), collection_granularity=_from_dict(d, 'collection_granularity', DataRefreshInfo), 
-
-
+        return cls(
+            assets=_repeated_enum(d, "assets", AssetType),
+            collection_date_end=d.get("collection_date_end", None),
+            collection_date_start=d.get("collection_date_start", None),
+            collection_granularity=_from_dict(d, "collection_granularity", DataRefreshInfo),
+            cost=_enum(d, "cost", Cost),
+            data_source=d.get("data_source", None),
+            description=d.get("description", None),
+            documentation_link=d.get("documentation_link", None),
+            embedded_notebook_file_infos=_repeated_dict(d, "embedded_notebook_file_infos", FileInfo),
+            file_ids=d.get("file_ids", None),
+            geographical_coverage=d.get("geographical_coverage", None),
+            license=d.get("license", None),
+            pricing_model=d.get("pricing_model", None),
+            privacy_policy_link=d.get("privacy_policy_link", None),
+            size=d.get("size", None),
+            support_link=d.get("support_link", None),
+            tags=_repeated_dict(d, "tags", ListingTag),
+            terms_of_service=d.get("terms_of_service", None),
+            update_frequency=_from_dict(d, "update_frequency", DataRefreshInfo),
+        )


 @dataclass
 class ListingFulfillment:
     listing_id: str
-
+
     fulfillment_type: Optional[FulfillmentType] = None
-
+
     recipient_type: Optional[DeltaSharingRecipientType] = None
-
+
     repo_info: Optional[RepoInfo] = None
-
+
     share_info: Optional[ShareInfo] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingFulfillment into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type.value
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
-        if self.repo_info: body['repo_info'] = self.repo_info.as_dict()
-        if self.share_info: body['share_info'] = self.share_info.as_dict()
+        if self.fulfillment_type is not None:
+            body["fulfillment_type"] = self.fulfillment_type.value
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
+        if self.repo_info:
+            body["repo_info"] = self.repo_info.as_dict()
+        if self.share_info:
+            body["share_info"] = self.share_info.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListingFulfillment into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
-        if self.repo_info: body['repo_info'] = self.repo_info
-        if self.share_info: body['share_info'] = self.share_info
+        if self.fulfillment_type is not None:
+            body["fulfillment_type"] = self.fulfillment_type
+        if self.listing_id is not None:
body["listing_id"] = self.listing_id + if self.recipient_type is not None: + body["recipient_type"] = self.recipient_type + if self.repo_info: + body["repo_info"] = self.repo_info + if self.share_info: + body["share_info"] = self.share_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListingFulfillment: """Deserializes the ListingFulfillment from a dictionary.""" - return cls(fulfillment_type=_enum(d, 'fulfillment_type', FulfillmentType), listing_id=d.get('listing_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), repo_info=_from_dict(d, 'repo_info', RepoInfo), share_info=_from_dict(d, 'share_info', ShareInfo)) - - + return cls( + fulfillment_type=_enum(d, "fulfillment_type", FulfillmentType), + listing_id=d.get("listing_id", None), + recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), + repo_info=_from_dict(d, "repo_info", RepoInfo), + share_info=_from_dict(d, "share_info", ShareInfo), + ) @dataclass class ListingSetting: visibility: Optional[Visibility] = None - + def as_dict(self) -> dict: """Serializes the ListingSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.visibility is not None: body['visibility'] = self.visibility.value + if self.visibility is not None: + body["visibility"] = self.visibility.value return body def as_shallow_dict(self) -> dict: """Serializes the ListingSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.visibility is not None: body['visibility'] = self.visibility + if self.visibility is not None: + body["visibility"] = self.visibility return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListingSetting: """Deserializes the ListingSetting from a dictionary.""" - return cls(visibility=_enum(d, 'visibility', Visibility)) - - + return cls(visibility=_enum(d, "visibility", Visibility)) class ListingShareType(Enum): - - - FULL = 'FULL' - SAMPLE = 'SAMPLE' + + FULL = "FULL" + SAMPLE = "SAMPLE" + class ListingStatus(Enum): """Enums""" - - DRAFT = 'DRAFT' - PENDING = 'PENDING' - PUBLISHED = 'PUBLISHED' - SUSPENDED = 'SUSPENDED' + + DRAFT = "DRAFT" + PENDING = "PENDING" + PUBLISHED = "PUBLISHED" + SUSPENDED = "SUSPENDED" + @dataclass class ListingSummary: name: str - + listing_type: ListingType - + categories: Optional[List[Category]] = None - + created_at: Optional[int] = None - + created_by: Optional[str] = None - + created_by_id: Optional[int] = None - + exchange_ids: Optional[List[str]] = None - + git_repo: Optional[RepoInfo] = None """if a git repo is being created, a listing will be initialized with this field as opposed to a share""" - + provider_id: Optional[str] = None - + provider_region: Optional[RegionInfo] = None - + published_at: Optional[int] = None - + published_by: Optional[str] = None - + setting: Optional[ListingSetting] = None - + share: Optional[ShareInfo] = None - + status: Optional[ListingStatus] = None """Enums""" - + subtitle: Optional[str] = None - + updated_at: Optional[int] = None - + updated_by: Optional[str] = None - + updated_by_id: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the ListingSummary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.categories: body['categories'] = [v.value for v in self.categories] - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.created_by_id is not None: body['created_by_id'] = self.created_by_id - if 
-        if self.exchange_ids: body['exchange_ids'] = [v for v in self.exchange_ids]
-        if self.git_repo: body['git_repo'] = self.git_repo.as_dict()
-        if self.listing_type is not None: body['listingType'] = self.listing_type.value
-        if self.name is not None: body['name'] = self.name
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.provider_region: body['provider_region'] = self.provider_region.as_dict()
-        if self.published_at is not None: body['published_at'] = self.published_at
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.setting: body['setting'] = self.setting.as_dict()
-        if self.share: body['share'] = self.share.as_dict()
-        if self.status is not None: body['status'] = self.status.value
-        if self.subtitle is not None: body['subtitle'] = self.subtitle
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
+        if self.categories:
+            body["categories"] = [v.value for v in self.categories]
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.exchange_ids:
+            body["exchange_ids"] = [v for v in self.exchange_ids]
+        if self.git_repo:
+            body["git_repo"] = self.git_repo.as_dict()
+        if self.listing_type is not None:
+            body["listingType"] = self.listing_type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.provider_id is not None:
+            body["provider_id"] = self.provider_id
+        if self.provider_region:
+            body["provider_region"] = self.provider_region.as_dict()
+        if self.published_at is not None:
+            body["published_at"] = self.published_at
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
+        if self.share:
+            body["share"] = self.share.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.subtitle is not None:
+            body["subtitle"] = self.subtitle
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.updated_by_id is not None:
+            body["updated_by_id"] = self.updated_by_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListingSummary into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.categories: body['categories'] = self.categories
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
-        if self.exchange_ids: body['exchange_ids'] = self.exchange_ids
-        if self.git_repo: body['git_repo'] = self.git_repo
-        if self.listing_type is not None: body['listingType'] = self.listing_type
-        if self.name is not None: body['name'] = self.name
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.provider_region: body['provider_region'] = self.provider_region
-        if self.published_at is not None: body['published_at'] = self.published_at
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.setting: body['setting'] = self.setting
-        if self.share: body['share'] = self.share
-        if self.status is not None: body['status'] = self.status
-        if self.subtitle is not None: body['subtitle'] = self.subtitle
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
+        if self.categories:
+            body["categories"] = self.categories
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.exchange_ids:
+            body["exchange_ids"] = self.exchange_ids
+        if self.git_repo:
+            body["git_repo"] = self.git_repo
+        if self.listing_type is not None:
+            body["listingType"] = self.listing_type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.provider_id is not None:
+            body["provider_id"] = self.provider_id
+        if self.provider_region:
+            body["provider_region"] = self.provider_region
+        if self.published_at is not None:
+            body["published_at"] = self.published_at
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.setting:
+            body["setting"] = self.setting
+        if self.share:
+            body["share"] = self.share
+        if self.status is not None:
+            body["status"] = self.status
+        if self.subtitle is not None:
+            body["subtitle"] = self.subtitle
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
+        if self.updated_by_id is not None:
+            body["updated_by_id"] = self.updated_by_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListingSummary:
         """Deserializes the ListingSummary from a dictionary."""
-        return cls(categories=_repeated_enum(d, 'categories', Category), created_at=d.get('created_at', None), created_by=d.get('created_by', None), created_by_id=d.get('created_by_id', None), exchange_ids=d.get('exchange_ids', None), git_repo=_from_dict(d, 'git_repo', RepoInfo), listing_type=_enum(d, 'listingType', ListingType), name=d.get('name', None), provider_id=d.get('provider_id', None), provider_region=_from_dict(d, 'provider_region', RegionInfo), published_at=d.get('published_at', None), published_by=d.get('published_by', None), setting=_from_dict(d, 'setting', ListingSetting), share=_from_dict(d, 'share', ShareInfo), status=_enum(d, 'status', ListingStatus), subtitle=d.get('subtitle', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None), updated_by_id=d.get('updated_by_id', None))
-
-
+        return cls(
+            categories=_repeated_enum(d, "categories", Category),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            created_by_id=d.get("created_by_id", None),
+            exchange_ids=d.get("exchange_ids", None),
+            git_repo=_from_dict(d, "git_repo", RepoInfo),
+            listing_type=_enum(d, "listingType", ListingType),
+            name=d.get("name", None),
+            provider_id=d.get("provider_id", None),
+            provider_region=_from_dict(d, "provider_region", RegionInfo),
+            published_at=d.get("published_at", None),
+            published_by=d.get("published_by", None),
+            setting=_from_dict(d, "setting", ListingSetting),
+            share=_from_dict(d, "share", ShareInfo),
+            status=_enum(d, "status", ListingStatus),
+            subtitle=d.get("subtitle", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+            updated_by_id=d.get("updated_by_id", None),
+        )


 @dataclass
 class ListingTag:
     tag_name: Optional[ListingTagType] = None
     """Tag name (enum)"""
-
+
     tag_values: Optional[List[str]] = None
     """String representation of the tag value. Values should be string literals (no complex
     types)"""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListingTag into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.tag_name is not None: body['tag_name'] = self.tag_name.value
-        if self.tag_values: body['tag_values'] = [v for v in self.tag_values]
+        if self.tag_name is not None:
+            body["tag_name"] = self.tag_name.value
+        if self.tag_values:
+            body["tag_values"] = [v for v in self.tag_values]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListingTag into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.tag_name is not None: body['tag_name'] = self.tag_name
-        if self.tag_values: body['tag_values'] = self.tag_values
+        if self.tag_name is not None:
+            body["tag_name"] = self.tag_name
+        if self.tag_values:
+            body["tag_values"] = self.tag_values
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListingTag:
         """Deserializes the ListingTag from a dictionary."""
-        return cls(tag_name=_enum(d, 'tag_name', ListingTagType), tag_values=d.get('tag_values', None))
-
-
+        return cls(tag_name=_enum(d, "tag_name", ListingTagType), tag_values=d.get("tag_values", None))


 class ListingTagType(Enum):
-
-
-    LISTING_TAG_TYPE_LANGUAGE = 'LISTING_TAG_TYPE_LANGUAGE'
-    LISTING_TAG_TYPE_TASK = 'LISTING_TAG_TYPE_TASK'
+
+    LISTING_TAG_TYPE_LANGUAGE = "LISTING_TAG_TYPE_LANGUAGE"
+    LISTING_TAG_TYPE_TASK = "LISTING_TAG_TYPE_TASK"
+

 class ListingType(Enum):
-
-
-    PERSONALIZED = 'PERSONALIZED'
-    STANDARD = 'STANDARD'
+
+    PERSONALIZED = "PERSONALIZED"
+    STANDARD = "STANDARD"
+

 class MarketplaceFileType(Enum):
-
-
-    APP = 'APP'
-    EMBEDDED_NOTEBOOK = 'EMBEDDED_NOTEBOOK'
-    PROVIDER_ICON = 'PROVIDER_ICON'
+
+    APP = "APP"
+    EMBEDDED_NOTEBOOK = "EMBEDDED_NOTEBOOK"
+    PROVIDER_ICON = "PROVIDER_ICON"
+

 @dataclass
 class PersonalizationRequest:
     consumer_region: RegionInfo
-
+
     comment: Optional[str] = None
-
+
     contact_info: Optional[ContactInfo] = None
     """contact info for the consumer requesting data or performing a listing installation"""
-
+
     created_at: Optional[int] = None
-
+
     id: Optional[str] = None
-
+
     intended_use: Optional[str] = None
-
+
     is_from_lighthouse: Optional[bool] = None
-
+
     listing_id: Optional[str] = None
-
+
     listing_name: Optional[str] = None
-
+
     metastore_id: Optional[str] = None
-
+
     provider_id: Optional[str] = None
-
+
     recipient_type: Optional[DeltaSharingRecipientType] = None
-
+
     share: Optional[ShareInfo] = None
-
+
     status: Optional[PersonalizationRequestStatus] = None
-
+
     status_message: Optional[str] = None
-
+
     updated_at: Optional[int] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the PersonalizationRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.consumer_region: body['consumer_region'] = self.consumer_region.as_dict()
-        if self.contact_info: body['contact_info'] = self.contact_info.as_dict()
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.intended_use is not None: body['intended_use'] = self.intended_use
-        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
-        if self.share: body['share'] = self.share.as_dict()
-        if self.status is not None: body['status'] = self.status.value
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.consumer_region:
+            body["consumer_region"] = self.consumer_region.as_dict()
+        if self.contact_info:
+            body["contact_info"] = self.contact_info.as_dict()
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.intended_use is not None:
+            body["intended_use"] = self.intended_use
+        if self.is_from_lighthouse is not None:
+            body["is_from_lighthouse"] = self.is_from_lighthouse
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
+        if self.listing_name is not None:
+            body["listing_name"] = self.listing_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.provider_id is not None:
+            body["provider_id"] = self.provider_id
+        if self.recipient_type is not None:
+            body["recipient_type"] = self.recipient_type.value
+        if self.share:
+            body["share"] = self.share.as_dict()
+        if self.status is not None:
+            body["status"] = self.status.value
+        if self.status_message is not None:
+            body["status_message"] = self.status_message
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PersonalizationRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.consumer_region: body['consumer_region'] = self.consumer_region
-        if self.contact_info: body['contact_info'] = self.contact_info
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.intended_use is not None: body['intended_use'] = self.intended_use
-        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
-        if self.listing_id is not None: body['listing_id'] = self.listing_id
-        if self.listing_name is not None: body['listing_name'] = self.listing_name
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.provider_id is not None: body['provider_id'] = self.provider_id
-        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
-        if self.share: body['share'] = self.share
-        if self.status is not None: body['status'] = self.status
-        if self.status_message is not None: body['status_message'] = self.status_message
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.consumer_region:
+            body["consumer_region"] = self.consumer_region
+        if self.contact_info:
+            body["contact_info"] = self.contact_info
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.intended_use is not None:
+            body["intended_use"] = self.intended_use
+        if self.is_from_lighthouse is not None:
+            body["is_from_lighthouse"] = self.is_from_lighthouse
+        if self.listing_id is not None:
+            body["listing_id"] = self.listing_id
None: + body["listing_name"] = self.listing_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.provider_id is not None: + body["provider_id"] = self.provider_id + if self.recipient_type is not None: + body["recipient_type"] = self.recipient_type + if self.share: + body["share"] = self.share + if self.status is not None: + body["status"] = self.status + if self.status_message is not None: + body["status_message"] = self.status_message + if self.updated_at is not None: + body["updated_at"] = self.updated_at return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PersonalizationRequest: """Deserializes the PersonalizationRequest from a dictionary.""" - return cls(comment=d.get('comment', None), consumer_region=_from_dict(d, 'consumer_region', RegionInfo), contact_info=_from_dict(d, 'contact_info', ContactInfo), created_at=d.get('created_at', None), id=d.get('id', None), intended_use=d.get('intended_use', None), is_from_lighthouse=d.get('is_from_lighthouse', None), listing_id=d.get('listing_id', None), listing_name=d.get('listing_name', None), metastore_id=d.get('metastore_id', None), provider_id=d.get('provider_id', None), recipient_type=_enum(d, 'recipient_type', DeltaSharingRecipientType), share=_from_dict(d, 'share', ShareInfo), status=_enum(d, 'status', PersonalizationRequestStatus), status_message=d.get('status_message', None), updated_at=d.get('updated_at', None)) - - + return cls( + comment=d.get("comment", None), + consumer_region=_from_dict(d, "consumer_region", RegionInfo), + contact_info=_from_dict(d, "contact_info", ContactInfo), + created_at=d.get("created_at", None), + id=d.get("id", None), + intended_use=d.get("intended_use", None), + is_from_lighthouse=d.get("is_from_lighthouse", None), + listing_id=d.get("listing_id", None), + listing_name=d.get("listing_name", None), + metastore_id=d.get("metastore_id", None), + provider_id=d.get("provider_id", None), + recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), + share=_from_dict(d, "share", ShareInfo), + status=_enum(d, "status", PersonalizationRequestStatus), + status_message=d.get("status_message", None), + updated_at=d.get("updated_at", None), + ) class PersonalizationRequestStatus(Enum): - - - DENIED = 'DENIED' - FULFILLED = 'FULFILLED' - NEW = 'NEW' - REQUEST_PENDING = 'REQUEST_PENDING' + + DENIED = "DENIED" + FULFILLED = "FULFILLED" + NEW = "NEW" + REQUEST_PENDING = "REQUEST_PENDING" + @dataclass class ProviderAnalyticsDashboard: id: str - + def as_dict(self) -> dict: """Serializes the ProviderAnalyticsDashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the ProviderAnalyticsDashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ProviderAnalyticsDashboard: """Deserializes the ProviderAnalyticsDashboard from a dictionary.""" - return cls(id=d.get('id', None)) - - + return cls(id=d.get("id", None)) @dataclass class ProviderInfo: name: str - + business_contact_email: str - + term_of_service_link: str - + privacy_policy_link: str - + company_website_link: Optional[str] = None - + dark_mode_icon_file_id: Optional[str] = None - + dark_mode_icon_file_path: Optional[str] = None - + description: 
     description: Optional[str] = None
-
+
     icon_file_id: Optional[str] = None
-
+
     icon_file_path: Optional[str] = None
-
+
     id: Optional[str] = None
-
+
     is_featured: Optional[bool] = None
     """is_featured is accessible by consumers only"""
-
+
     published_by: Optional[str] = None
     """published_by is only applicable to data aggregators (e.g. Crux)"""
-
+
     support_contact_email: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.business_contact_email is not None: body['business_contact_email'] = self.business_contact_email
-        if self.company_website_link is not None: body['company_website_link'] = self.company_website_link
-        if self.dark_mode_icon_file_id is not None: body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id
-        if self.dark_mode_icon_file_path is not None: body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path
-        if self.description is not None: body['description'] = self.description
-        if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id
-        if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path
-        if self.id is not None: body['id'] = self.id
-        if self.is_featured is not None: body['is_featured'] = self.is_featured
-        if self.name is not None: body['name'] = self.name
-        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email
-        if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
+        if self.business_contact_email is not None:
+            body["business_contact_email"] = self.business_contact_email
+        if self.company_website_link is not None:
+            body["company_website_link"] = self.company_website_link
+        if self.dark_mode_icon_file_id is not None:
+            body["dark_mode_icon_file_id"] = self.dark_mode_icon_file_id
+        if self.dark_mode_icon_file_path is not None:
+            body["dark_mode_icon_file_path"] = self.dark_mode_icon_file_path
+        if self.description is not None:
+            body["description"] = self.description
+        if self.icon_file_id is not None:
+            body["icon_file_id"] = self.icon_file_id
+        if self.icon_file_path is not None:
+            body["icon_file_path"] = self.icon_file_path
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_featured is not None:
+            body["is_featured"] = self.is_featured
+        if self.name is not None:
+            body["name"] = self.name
+        if self.privacy_policy_link is not None:
+            body["privacy_policy_link"] = self.privacy_policy_link
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.support_contact_email is not None:
+            body["support_contact_email"] = self.support_contact_email
+        if self.term_of_service_link is not None:
+            body["term_of_service_link"] = self.term_of_service_link
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.business_contact_email is not None: body['business_contact_email'] = self.business_contact_email
-        if self.company_website_link is not None: body['company_website_link'] = self.company_website_link
-        if self.dark_mode_icon_file_id is not None: body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id
-        if self.dark_mode_icon_file_path is not None: body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path
-        if self.description is not None: body['description'] = self.description
-        if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id
-        if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path
-        if self.id is not None: body['id'] = self.id
-        if self.is_featured is not None: body['is_featured'] = self.is_featured
-        if self.name is not None: body['name'] = self.name
-        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
-        if self.published_by is not None: body['published_by'] = self.published_by
-        if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email
-        if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
+        if self.business_contact_email is not None:
+            body["business_contact_email"] = self.business_contact_email
+        if self.company_website_link is not None:
+            body["company_website_link"] = self.company_website_link
+        if self.dark_mode_icon_file_id is not None:
+            body["dark_mode_icon_file_id"] = self.dark_mode_icon_file_id
+        if self.dark_mode_icon_file_path is not None:
+            body["dark_mode_icon_file_path"] = self.dark_mode_icon_file_path
+        if self.description is not None:
+            body["description"] = self.description
+        if self.icon_file_id is not None:
+            body["icon_file_id"] = self.icon_file_id
+        if self.icon_file_path is not None:
+            body["icon_file_path"] = self.icon_file_path
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_featured is not None:
+            body["is_featured"] = self.is_featured
+        if self.name is not None:
+            body["name"] = self.name
+        if self.privacy_policy_link is not None:
+            body["privacy_policy_link"] = self.privacy_policy_link
+        if self.published_by is not None:
+            body["published_by"] = self.published_by
+        if self.support_contact_email is not None:
+            body["support_contact_email"] = self.support_contact_email
+        if self.term_of_service_link is not None:
+            body["term_of_service_link"] = self.term_of_service_link
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
-        return cls(business_contact_email=d.get('business_contact_email', None), company_website_link=d.get('company_website_link', None), dark_mode_icon_file_id=d.get('dark_mode_icon_file_id', None), dark_mode_icon_file_path=d.get('dark_mode_icon_file_path', None), description=d.get('description', None), icon_file_id=d.get('icon_file_id', None), icon_file_path=d.get('icon_file_path', None), id=d.get('id', None), is_featured=d.get('is_featured', None), name=d.get('name', None), privacy_policy_link=d.get('privacy_policy_link', None), published_by=d.get('published_by', None), support_contact_email=d.get('support_contact_email', None), term_of_service_link=d.get('term_of_service_link', None))
-
-
+        return cls(
+            business_contact_email=d.get("business_contact_email", None),
+            company_website_link=d.get("company_website_link", None),
+            dark_mode_icon_file_id=d.get("dark_mode_icon_file_id", None),
+            dark_mode_icon_file_path=d.get("dark_mode_icon_file_path", None),
+            description=d.get("description", None),
+            icon_file_id=d.get("icon_file_id", None),
+            icon_file_path=d.get("icon_file_path", None),
+            id=d.get("id", None),
+            is_featured=d.get("is_featured", None),
+            name=d.get("name", None),
+            privacy_policy_link=d.get("privacy_policy_link", None),
+            published_by=d.get("published_by", None),
+            support_contact_email=d.get("support_contact_email", None),
term_of_service_link=d.get("term_of_service_link", None), + ) @dataclass class RegionInfo: cloud: Optional[str] = None - + region: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the RegionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cloud is not None: body['cloud'] = self.cloud - if self.region is not None: body['region'] = self.region + if self.cloud is not None: + body["cloud"] = self.cloud + if self.region is not None: + body["region"] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the RegionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cloud is not None: body['cloud'] = self.cloud - if self.region is not None: body['region'] = self.region + if self.cloud is not None: + body["cloud"] = self.cloud + if self.region is not None: + body["region"] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegionInfo: """Deserializes the RegionInfo from a dictionary.""" - return cls(cloud=d.get('cloud', None), region=d.get('region', None)) - - - - - + return cls(cloud=d.get("cloud", None), region=d.get("region", None)) @dataclass @@ -2391,191 +2784,205 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RemoveExchangeForListingResponse: """Deserializes the RemoveExchangeForListingResponse from a dictionary.""" return cls() - - @dataclass class RepoInfo: git_repo_url: str """the git repo url e.g. https://github.com/databrickslabs/dolly.git""" - + def as_dict(self) -> dict: """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url + if self.git_repo_url is not None: + body["git_repo_url"] = self.git_repo_url return body def as_shallow_dict(self) -> dict: """Serializes the RepoInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url + if self.git_repo_url is not None: + body["git_repo_url"] = self.git_repo_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoInfo: """Deserializes the RepoInfo from a dictionary.""" - return cls(git_repo_url=d.get('git_repo_url', None)) - - + return cls(git_repo_url=d.get("git_repo_url", None)) @dataclass class RepoInstallation: repo_name: str """the user-specified repo name for their installed git repo listing""" - + repo_path: str """refers to the full url file path that navigates the user to the repo's entrypoint (e.g. 
     a README.md file, or the repo file view in the unified UI) should just be a relative path"""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoInstallation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoInstallation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.repo_name is not None: body['repo_name'] = self.repo_name
-        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        if self.repo_name is not None:
+            body["repo_name"] = self.repo_name
+        if self.repo_path is not None:
+            body["repo_path"] = self.repo_path
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoInstallation:
         """Deserializes the RepoInstallation from a dictionary."""
-        return cls(repo_name=d.get('repo_name', None), repo_path=d.get('repo_path', None))
-
-
-
-
+        return cls(repo_name=d.get("repo_name", None), repo_path=d.get("repo_path", None))


 @dataclass
 class SearchListingsResponse:
     listings: Optional[List[Listing]] = None
-
+
     next_page_token: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the SearchListingsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = [v.as_dict() for v in self.listings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SearchListingsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.listings: body['listings'] = self.listings
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.listings:
+            body["listings"] = self.listings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SearchListingsResponse:
         """Deserializes the SearchListingsResponse from a dictionary."""
-        return cls(listings=_repeated_dict(d, 'listings', Listing), next_page_token=d.get('next_page_token', None))
-
-
+        return cls(listings=_repeated_dict(d, "listings", Listing), next_page_token=d.get("next_page_token", None))


 @dataclass
 class ShareInfo:
     name: str
-
+
     type: ListingShareType
-
+
     def as_dict(self) -> dict:
         """Serializes the ShareInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.type is not None: body['type'] = self.type
+        if self.name is not None:
+            body["name"] = self.name
+        if self.type is not None:
+            body["type"] = self.type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
-        return cls(name=d.get('name', None), type=_enum(d, 'type', ListingShareType))
-
-
+        return cls(name=d.get("name", None), type=_enum(d, "type", ListingShareType))


 @dataclass
 class SharedDataObject:
     data_object_type: Optional[str] = None
     """The type of the data object. Could be one of: TABLE, SCHEMA, NOTEBOOK_FILE, MODEL, VOLUME"""
-
+
     name: Optional[str] = None
     """Name of the shared object"""
-
+
     def as_dict(self) -> dict:
         """Serializes the SharedDataObject into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
-        if self.name is not None: body['name'] = self.name
+        if self.data_object_type is not None:
+            body["data_object_type"] = self.data_object_type
+        if self.name is not None:
+            body["name"] = self.name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
-        if self.name is not None: body['name'] = self.name
+        if self.data_object_type is not None:
+            body["data_object_type"] = self.data_object_type
+        if self.name is not None:
+            body["name"] = self.name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
-        return cls(data_object_type=d.get('data_object_type', None), name=d.get('name', None))
-
-
+        return cls(data_object_type=d.get("data_object_type", None), name=d.get("name", None))


 @dataclass
 class TokenDetail:
     bearer_token: Optional[str] = None
-
+
     endpoint: Optional[str] = None
-
+
     expiration_time: Optional[str] = None
-
+
     share_credentials_version: Optional[int] = None
     """These field names must follow the delta sharing protocol.
     Original message: RetrieveToken.Response in managed-catalog/api/messages/recipient.proto"""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenDetail into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
-        if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version
+        if self.bearer_token is not None:
+            body["bearerToken"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.expiration_time is not None:
+            body["expirationTime"] = self.expiration_time
+        if self.share_credentials_version is not None:
+            body["shareCredentialsVersion"] = self.share_credentials_version
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenDetail into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
-        if self.endpoint is not None: body['endpoint'] = self.endpoint
-        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
-        if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version
+        if self.bearer_token is not None:
+            body["bearerToken"] = self.bearer_token
+        if self.endpoint is not None:
+            body["endpoint"] = self.endpoint
+        if self.expiration_time is not None:
+            body["expirationTime"] = self.expiration_time
+        if self.share_credentials_version is not None:
+            body["shareCredentialsVersion"] = self.share_credentials_version
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenDetail:
         """Deserializes the TokenDetail from a dictionary."""
-        return cls(bearer_token=d.get('bearerToken', None), endpoint=d.get('endpoint', None), expiration_time=d.get('expirationTime', None), share_credentials_version=d.get('shareCredentialsVersion', None))
-
-
+        return cls(
+            bearer_token=d.get("bearerToken", None),
+            endpoint=d.get("endpoint", None),
+            expiration_time=d.get("expirationTime", None),
+            share_credentials_version=d.get("shareCredentialsVersion", None),
+        )


 @dataclass
@@ -2583,577 +2990,617 @@ class TokenInfo:
     activation_url: Optional[str] = None
     """Full activation url to retrieve the access token.
     It will be empty if the token is already retrieved."""
-
+
     created_at: Optional[int] = None
     """Time at which this Recipient Token was created, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of Recipient Token creator."""
-
+
     expiration_time: Optional[int] = None
     """Expiration timestamp of the token in epoch milliseconds."""
-
+
     id: Optional[str] = None
     """Unique id of the Recipient Token."""
-
+
     updated_at: Optional[int] = None
     """Time at which this Recipient Token was updated, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of Recipient Token updater."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.id is not None: body['id'] = self.id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.activation_url is not None: body['activation_url'] = self.activation_url
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
-        if self.id is not None: body['id'] = self.id
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.activation_url is not None:
+            body["activation_url"] = self.activation_url
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.expiration_time is not None:
+            body["expiration_time"] = self.expiration_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
-        return cls(activation_url=d.get('activation_url', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), expiration_time=d.get('expiration_time', None), id=d.get('id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
-
-
+        return cls(
+            activation_url=d.get("activation_url", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            expiration_time=d.get("expiration_time", None),
+            id=d.get("id", None),
updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class UpdateExchangeFilterRequest: filter: ExchangeFilter - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateExchangeFilterRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter: body['filter'] = self.filter.as_dict() - if self.id is not None: body['id'] = self.id + if self.filter: + body["filter"] = self.filter.as_dict() + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter: body['filter'] = self.filter - if self.id is not None: body['id'] = self.id + if self.filter: + body["filter"] = self.filter + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterRequest: """Deserializes the UpdateExchangeFilterRequest from a dictionary.""" - return cls(filter=_from_dict(d, 'filter', ExchangeFilter), id=d.get('id', None)) - - + return cls(filter=_from_dict(d, "filter", ExchangeFilter), id=d.get("id", None)) @dataclass class UpdateExchangeFilterResponse: filter: Optional[ExchangeFilter] = None - + def as_dict(self) -> dict: """Serializes the UpdateExchangeFilterResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter: body['filter'] = self.filter.as_dict() + if self.filter: + body["filter"] = self.filter.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExchangeFilterResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter: body['filter'] = self.filter + if self.filter: + body["filter"] = self.filter return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterResponse: """Deserializes the UpdateExchangeFilterResponse from a dictionary.""" - return cls(filter=_from_dict(d, 'filter', ExchangeFilter)) - - + return cls(filter=_from_dict(d, "filter", ExchangeFilter)) @dataclass class UpdateExchangeRequest: exchange: Exchange - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateExchangeRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange: body['exchange'] = self.exchange.as_dict() - if self.id is not None: body['id'] = self.id + if self.exchange: + body["exchange"] = self.exchange.as_dict() + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange: body['exchange'] = self.exchange - if self.id is not None: body['id'] = self.id + if self.exchange: + body["exchange"] = self.exchange + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeRequest: """Deserializes the UpdateExchangeRequest from a dictionary.""" - return cls(exchange=_from_dict(d, 'exchange', Exchange), id=d.get('id', None)) - - + return cls(exchange=_from_dict(d, "exchange", Exchange), id=d.get("id", None)) @dataclass class UpdateExchangeResponse: exchange: Optional[Exchange] = None - + def as_dict(self) -> dict: """Serializes the UpdateExchangeResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exchange: body['exchange'] = self.exchange.as_dict() + if 
self.exchange: + body["exchange"] = self.exchange.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExchangeResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.exchange: body['exchange'] = self.exchange + if self.exchange: + body["exchange"] = self.exchange return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeResponse: """Deserializes the UpdateExchangeResponse from a dictionary.""" - return cls(exchange=_from_dict(d, 'exchange', Exchange)) - - + return cls(exchange=_from_dict(d, "exchange", Exchange)) @dataclass class UpdateInstallationRequest: installation: InstallationDetail - + installation_id: Optional[str] = None - + listing_id: Optional[str] = None - + rotate_token: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the UpdateInstallationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.installation: body['installation'] = self.installation.as_dict() - if self.installation_id is not None: body['installation_id'] = self.installation_id - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.rotate_token is not None: body['rotate_token'] = self.rotate_token + if self.installation: + body["installation"] = self.installation.as_dict() + if self.installation_id is not None: + body["installation_id"] = self.installation_id + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.rotate_token is not None: + body["rotate_token"] = self.rotate_token return body def as_shallow_dict(self) -> dict: """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.installation: body['installation'] = self.installation - if self.installation_id is not None: body['installation_id'] = self.installation_id - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.rotate_token is not None: body['rotate_token'] = self.rotate_token + if self.installation: + body["installation"] = self.installation + if self.installation_id is not None: + body["installation_id"] = self.installation_id + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.rotate_token is not None: + body["rotate_token"] = self.rotate_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateInstallationRequest: """Deserializes the UpdateInstallationRequest from a dictionary.""" - return cls(installation=_from_dict(d, 'installation', InstallationDetail), installation_id=d.get('installation_id', None), listing_id=d.get('listing_id', None), rotate_token=d.get('rotate_token', None)) - - + return cls( + installation=_from_dict(d, "installation", InstallationDetail), + installation_id=d.get("installation_id", None), + listing_id=d.get("listing_id", None), + rotate_token=d.get("rotate_token", None), + ) @dataclass class UpdateInstallationResponse: installation: Optional[InstallationDetail] = None - + def as_dict(self) -> dict: """Serializes the UpdateInstallationResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.installation: body['installation'] = self.installation.as_dict() + if self.installation: + body["installation"] = self.installation.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateInstallationResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.installation: body['installation'] = self.installation + if 
self.installation: + body["installation"] = self.installation return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateInstallationResponse: """Deserializes the UpdateInstallationResponse from a dictionary.""" - return cls(installation=_from_dict(d, 'installation', InstallationDetail)) - - + return cls(installation=_from_dict(d, "installation", InstallationDetail)) @dataclass class UpdateListingRequest: listing: Listing - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateListingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.listing: body['listing'] = self.listing.as_dict() + if self.id is not None: + body["id"] = self.id + if self.listing: + body["listing"] = self.listing.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.listing: body['listing'] = self.listing + if self.id is not None: + body["id"] = self.id + if self.listing: + body["listing"] = self.listing return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateListingRequest: """Deserializes the UpdateListingRequest from a dictionary.""" - return cls(id=d.get('id', None), listing=_from_dict(d, 'listing', Listing)) - - + return cls(id=d.get("id", None), listing=_from_dict(d, "listing", Listing)) @dataclass class UpdateListingResponse: listing: Optional[Listing] = None - + def as_dict(self) -> dict: """Serializes the UpdateListingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listing: body['listing'] = self.listing.as_dict() + if self.listing: + body["listing"] = self.listing.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateListingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.listing: body['listing'] = self.listing + if self.listing: + body["listing"] = self.listing return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateListingResponse: """Deserializes the UpdateListingResponse from a dictionary.""" - return cls(listing=_from_dict(d, 'listing', Listing)) - - + return cls(listing=_from_dict(d, "listing", Listing)) @dataclass class UpdatePersonalizationRequestRequest: status: PersonalizationRequestStatus - + listing_id: Optional[str] = None - + reason: Optional[str] = None - + request_id: Optional[str] = None - + share: Optional[ShareInfo] = None - + def as_dict(self) -> dict: """Serializes the UpdatePersonalizationRequestRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.reason is not None: body['reason'] = self.reason - if self.request_id is not None: body['request_id'] = self.request_id - if self.share: body['share'] = self.share.as_dict() - if self.status is not None: body['status'] = self.status.value + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.reason is not None: + body["reason"] = self.reason + if self.request_id is not None: + body["request_id"] = self.request_id + if self.share: + body["share"] = self.share.as_dict() + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate 
attributes.""" body = {} - if self.listing_id is not None: body['listing_id'] = self.listing_id - if self.reason is not None: body['reason'] = self.reason - if self.request_id is not None: body['request_id'] = self.request_id - if self.share: body['share'] = self.share - if self.status is not None: body['status'] = self.status + if self.listing_id is not None: + body["listing_id"] = self.listing_id + if self.reason is not None: + body["reason"] = self.reason + if self.request_id is not None: + body["request_id"] = self.request_id + if self.share: + body["share"] = self.share + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestRequest: """Deserializes the UpdatePersonalizationRequestRequest from a dictionary.""" - return cls(listing_id=d.get('listing_id', None), reason=d.get('reason', None), request_id=d.get('request_id', None), share=_from_dict(d, 'share', ShareInfo), status=_enum(d, 'status', PersonalizationRequestStatus)) - - + return cls( + listing_id=d.get("listing_id", None), + reason=d.get("reason", None), + request_id=d.get("request_id", None), + share=_from_dict(d, "share", ShareInfo), + status=_enum(d, "status", PersonalizationRequestStatus), + ) @dataclass class UpdatePersonalizationRequestResponse: request: Optional[PersonalizationRequest] = None - + def as_dict(self) -> dict: """Serializes the UpdatePersonalizationRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.request: body['request'] = self.request.as_dict() + if self.request: + body["request"] = self.request.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.request: body['request'] = self.request + if self.request: + body["request"] = self.request return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestResponse: """Deserializes the UpdatePersonalizationRequestResponse from a dictionary.""" - return cls(request=_from_dict(d, 'request', PersonalizationRequest)) - - + return cls(request=_from_dict(d, "request", PersonalizationRequest)) @dataclass class UpdateProviderAnalyticsDashboardRequest: id: Optional[str] = None """id is immutable property and can't be updated.""" - + version: Optional[int] = None """this is the version of the dashboard template we want to update our user to current expectation is that it should be equal to latest version of the dashboard template""" - + def as_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.version is not None: body['version'] = self.version + if self.id is not None: + body["id"] = self.id + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.version is not None: body['version'] = self.version + if self.id is not None: + body["id"] = self.id + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardRequest: """Deserializes the UpdateProviderAnalyticsDashboardRequest from 
a dictionary.""" - return cls(id=d.get('id', None), version=d.get('version', None)) - - + return cls(id=d.get("id", None), version=d.get("version", None)) @dataclass class UpdateProviderAnalyticsDashboardResponse: id: str """id & version should be the same as the request""" - + dashboard_id: str """this is newly created Lakeview dashboard for the user""" - + version: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.id is not None: body['id'] = self.id - if self.version is not None: body['version'] = self.version + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.id is not None: + body["id"] = self.id + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.id is not None: body['id'] = self.id - if self.version is not None: body['version'] = self.version + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.id is not None: + body["id"] = self.id + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardResponse: """Deserializes the UpdateProviderAnalyticsDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), id=d.get('id', None), version=d.get('version', None)) - - + return cls(dashboard_id=d.get("dashboard_id", None), id=d.get("id", None), version=d.get("version", None)) @dataclass class UpdateProviderRequest: provider: ProviderInfo - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateProviderRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.provider: body['provider'] = self.provider.as_dict() + if self.id is not None: + body["id"] = self.id + if self.provider: + body["provider"] = self.provider.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.provider: body['provider'] = self.provider + if self.id is not None: + body["id"] = self.id + if self.provider: + body["provider"] = self.provider return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderRequest: """Deserializes the UpdateProviderRequest from a dictionary.""" - return cls(id=d.get('id', None), provider=_from_dict(d, 'provider', ProviderInfo)) - - + return cls(id=d.get("id", None), provider=_from_dict(d, "provider", ProviderInfo)) @dataclass class UpdateProviderResponse: provider: Optional[ProviderInfo] = None - + def as_dict(self) -> dict: """Serializes the UpdateProviderResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.provider: body['provider'] = self.provider.as_dict() + if self.provider: + body["provider"] = self.provider.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProviderResponse into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.provider: body['provider'] = self.provider + if self.provider: + body["provider"] = self.provider return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderResponse: """Deserializes the UpdateProviderResponse from a dictionary.""" - return cls(provider=_from_dict(d, 'provider', ProviderInfo)) - - + return cls(provider=_from_dict(d, "provider", ProviderInfo)) class Visibility(Enum): - - - PRIVATE = 'PRIVATE' - PUBLIC = 'PUBLIC' + PRIVATE = "PRIVATE" + PUBLIC = "PUBLIC" class ConsumerFulfillmentsAPI: """Fulfillments are entities that allow consumers to preview installations.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - , listing_id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[SharedDataObject]: + def get( + self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[SharedDataObject]: """Get listing content metadata. - + Get a high-level preview of the metadata of listing installable content. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`SharedDataObject` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/content', query=query - - , headers=headers - ) - if 'shared_data_objects' in json: - for v in json['shared_data_objects']: - yield SharedDataObject.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def list(self - , listing_id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListingFulfillment]: + json = self._api.do( + "GET", f"/api/2.1/marketplace-consumer/listings/{listing_id}/content", query=query, headers=headers + ) + if "shared_data_objects" in json: + for v in json["shared_data_objects"]: + yield SharedDataObject.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list( + self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ListingFulfillment]: """List all listing fulfillments. - + Get all listing fulfillments associated with a listing. A _fulfillment_ is a potential installation. Standard installations contain metadata about the attached share or git repo. Only one of these fields will be present. Personalized installations contain metadata about the attached share or git repo, as well as the Delta Sharing recipient type.
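The fulfillments iterator above hides the next_page_token loop from callers entirely. A minimal consumer-side sketch, assuming a configured WorkspaceClient; the listing ID is hypothetical and the share_info/repo_info field names are taken from the ListingFulfillment dataclass elsewhere in this module:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# "abc123" is a hypothetical listing ID, used purely for illustration.
for fulfillment in w.consumer_fulfillments.list(listing_id="abc123"):
    # A standard fulfillment carries either a share or a repo, not both.
    if fulfillment.share_info:
        print("share-backed:", fulfillment.share_info)
    elif fulfillment.repo_info:
        print("repo-backed:", fulfillment.repo_info)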
- + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListingFulfillment` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/fulfillments', query=query - - , headers=headers - ) - if 'fulfillments' in json: - for v in json['fulfillments']: - yield ListingFulfillment.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + json = self._api.do( + "GET", f"/api/2.1/marketplace-consumer/listings/{listing_id}/fulfillments", query=query, headers=headers + ) + if "fulfillments" in json: + for v in json["fulfillments"]: + yield ListingFulfillment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + class ConsumerInstallationsAPI: """Installations are entities that allow consumers to interact with Databricks Marketplace listings.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , listing_id: str - , * - , accepted_consumer_terms: Optional[ConsumerTerms] = None, catalog_name: Optional[str] = None, recipient_type: Optional[DeltaSharingRecipientType] = None, repo_detail: Optional[RepoInstallation] = None, share_name: Optional[str] = None) -> Installation: + def create( + self, + listing_id: str, + *, + accepted_consumer_terms: Optional[ConsumerTerms] = None, + catalog_name: Optional[str] = None, + recipient_type: Optional[DeltaSharingRecipientType] = None, + repo_detail: Optional[RepoInstallation] = None, + share_name: Optional[str] = None, + ) -> Installation: """Install from a listing. - + Install payload associated with a Databricks Marketplace listing. 
- + :param listing_id: str :param accepted_consumer_terms: :class:`ConsumerTerms` (optional) :param catalog_name: str (optional) @@ -3161,234 +3608,221 @@ def create(self :param repo_detail: :class:`RepoInstallation` (optional) for git repo installations :param share_name: str (optional) - + :returns: :class:`Installation` """ body = {} - if accepted_consumer_terms is not None: body['accepted_consumer_terms'] = accepted_consumer_terms.as_dict() - if catalog_name is not None: body['catalog_name'] = catalog_name - if recipient_type is not None: body['recipient_type'] = recipient_type.value - if repo_detail is not None: body['repo_detail'] = repo_detail.as_dict() - if share_name is not None: body['share_name'] = share_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations', body=body - - , headers=headers - ) + if accepted_consumer_terms is not None: + body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict() + if catalog_name is not None: + body["catalog_name"] = catalog_name + if recipient_type is not None: + body["recipient_type"] = recipient_type.value + if repo_detail is not None: + body["repo_detail"] = repo_detail.as_dict() + if share_name is not None: + body["share_name"] = share_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations", body=body, headers=headers + ) return Installation.from_dict(res) - - - - - def delete(self - , listing_id: str, installation_id: str - ): + def delete(self, listing_id: str, installation_id: str): """Uninstall from a listing. - + Uninstall an installation associated with a Databricks Marketplace listing. - + :param listing_id: str :param installation_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}' - - , headers=headers - ) - - - - - - - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[InstallationDetail]: + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}", + headers=headers, + ) + + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[InstallationDetail]: """List all installations. - + List all installations across all listings. 
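To make the create/list pair concrete, here is a minimal sketch of installing from a listing and then enumerating all installations; the WorkspaceClient setup, listing ID, and catalog name are illustrative assumptions, not values from this patch:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Hypothetical listing ID and catalog name.
created = w.consumer_installations.create(
    listing_id="abc123",
    catalog_name="marketplace_data",
)
print(created.installation)

# The returned iterator follows next_page_token across pages transparently.
for detail in w.consumer_installations.list():
    print(detail.listing_id, detail.status)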
- + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`InstallationDetail` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.1/marketplace-consumer/installations', query=query - - , headers=headers - ) - if 'installations' in json: - for v in json['installations']: - yield InstallationDetail.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def list_listing_installations(self - , listing_id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[InstallationDetail]: + json = self._api.do("GET", "/api/2.1/marketplace-consumer/installations", query=query, headers=headers) + if "installations" in json: + for v in json["installations"]: + yield InstallationDetail.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_listing_installations( + self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[InstallationDetail]: """List installations for a listing. - + List all installations for a particular listing. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`InstallationDetail` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations', query=query - - , headers=headers - ) - if 'installations' in json: - for v in json['installations']: - yield InstallationDetail.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , listing_id: str, installation_id: str, installation: InstallationDetail - , * - , rotate_token: Optional[bool] = None) -> UpdateInstallationResponse: + json = self._api.do( + "GET", + f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations", + query=query, + headers=headers, + ) + if "installations" in json: + for v in json["installations"]: + yield InstallationDetail.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + listing_id: str, + installation_id: str, + installation: InstallationDetail, + *, + rotate_token: Optional[bool] = None, + ) -> UpdateInstallationResponse: """Update an installation. - + This is an update API that will update the fields defined in the installation table, and will also interact with external services according to the fields not included in the installation table: 1. the token will be rotated if the rotateToken flag is true; 2.
the token will be forcibly rotated if the rotateToken flag is true and the tokenInfo field is empty - + :param listing_id: str :param installation_id: str :param installation: :class:`InstallationDetail` :param rotate_token: bool (optional) - + :returns: :class:`UpdateInstallationResponse` """ body = {} - if installation is not None: body['installation'] = installation.as_dict() - if rotate_token is not None: body['rotate_token'] = rotate_token - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}', body=body - - , headers=headers - ) + if installation is not None: + body["installation"] = installation.as_dict() + if rotate_token is not None: + body["rotate_token"] = rotate_token + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.1/marketplace-consumer/listings/{listing_id}/installations/{installation_id}", + body=body, + headers=headers, + ) return UpdateInstallationResponse.from_dict(res) - - + class ConsumerListingsAPI: """Listings are the core entities in the Marketplace. They represent the products that are available for consumption.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def batch_get(self - - , * - , ids: Optional[List[str]] = None) -> BatchGetListingsResponse: + def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetListingsResponse: """Get one batch of listings. One may specify up to 50 IDs per request. - + Batch get a published listing in the Databricks Marketplace that the consumer has access to. - + :param ids: List[str] (optional) - + :returns: :class:`BatchGetListingsResponse` """ - + query = {} - if ids is not None: query['ids'] = [v for v in ids] - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/marketplace-consumer/listings:batchGet', query=query - - , headers=headers - ) - return BatchGetListingsResponse.from_dict(res) + if ids is not None: + query["ids"] = [v for v in ids] + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.1/marketplace-consumer/listings:batchGet", query=query, headers=headers) + return BatchGetListingsResponse.from_dict(res) - def get(self - , id: str - ) -> GetListingResponse: + def get(self, id: str) -> GetListingResponse: """Get listing. - + Get a published listing in the Databricks Marketplace that the consumer has access to.
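The rotate_token behaviour documented on update() above means token rotation is requested purely through a flag. A minimal sketch; the listing ID is hypothetical, and the installation detail is taken from list_listing_installations() rather than hand-built:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

listing_id = "abc123"  # hypothetical
for detail in w.consumer_installations.list_listing_installations(listing_id=listing_id):
    # rotate_token=True asks the service to rotate the installation's token.
    resp = w.consumer_installations.update(
        listing_id=listing_id,
        installation_id=detail.id,
        installation=detail,
        rotate_token=True,
    )
    print(resp.installation)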
- + :param id: str - + :returns: :class:`GetListingResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{id}' - - , headers=headers - ) - return GetListingResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/marketplace-consumer/listings/{id}", headers=headers) + return GetListingResponse.from_dict(res) - def list(self - - , * - , assets: Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, is_staff_pick: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, provider_ids: Optional[List[str]] = None, tags: Optional[List[ListingTag]] = None) -> Iterator[Listing]: + def list( + self, + *, + assets: Optional[List[AssetType]] = None, + categories: Optional[List[Category]] = None, + is_free: Optional[bool] = None, + is_private_exchange: Optional[bool] = None, + is_staff_pick: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + provider_ids: Optional[List[str]] = None, + tags: Optional[List[ListingTag]] = None, + ) -> Iterator[Listing]: """List listings. - + List all published listings in the Databricks Marketplace that the consumer has access to. - + :param assets: List[:class:`AssetType`] (optional) Matches any of the following asset types :param categories: List[:class:`Category`] (optional) @@ -3405,50 +3839,59 @@ def list(self Matches any of the following provider ids :param tags: List[:class:`ListingTag`] (optional) Matches any of the following tags - + :returns: Iterator over :class:`Listing` """ - + query = {} - if assets is not None: query['assets'] = [v.value for v in assets] - if categories is not None: query['categories'] = [v.value for v in categories] - if is_free is not None: query['is_free'] = is_free - if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange - if is_staff_pick is not None: query['is_staff_pick'] = is_staff_pick - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids] - if tags is not None: query['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json',} - - - + if assets is not None: + query["assets"] = [v.value for v in assets] + if categories is not None: + query["categories"] = [v.value for v in categories] + if is_free is not None: + query["is_free"] = is_free + if is_private_exchange is not None: + query["is_private_exchange"] = is_private_exchange + if is_staff_pick is not None: + query["is_staff_pick"] = is_staff_pick + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if provider_ids is not None: + query["provider_ids"] = [v for v in provider_ids] + if tags is not None: + query["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.1/marketplace-consumer/listings', query=query - - , headers=headers - ) - if 'listings' in json: - for v in json['listings']: - yield Listing.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def search(self - , query: str - , * - , assets: 
Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, provider_ids: Optional[List[str]] = None) -> Iterator[Listing]: + json = self._api.do("GET", "/api/2.1/marketplace-consumer/listings", query=query, headers=headers) + if "listings" in json: + for v in json["listings"]: + yield Listing.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def search( + self, + query: str, + *, + assets: Optional[List[AssetType]] = None, + categories: Optional[List[Category]] = None, + is_free: Optional[bool] = None, + is_private_exchange: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + provider_ids: Optional[List[str]] = None, + ) -> Iterator[Listing]: """Search listings. - + Search published listings in the Databricks Marketplace that the consumer has access to. This query supports a variety of different search parameters and performs fuzzy matching. - + :param query: str Fuzzy matches query :param assets: List[:class:`AssetType`] (optional) @@ -3461,60 +3904,64 @@ def search(self :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - + :returns: Iterator over :class:`Listing` """ - + query = {} - if assets is not None: query['assets'] = [v.value for v in assets] - if categories is not None: query['categories'] = [v.value for v in categories] - if is_free is not None: query['is_free'] = is_free - if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids] - if query is not None: query['query'] = query - headers = {'Accept': 'application/json',} - - - + if assets is not None: + query["assets"] = [v.value for v in assets] + if categories is not None: + query["categories"] = [v.value for v in categories] + if is_free is not None: + query["is_free"] = is_free + if is_private_exchange is not None: + query["is_private_exchange"] = is_private_exchange + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + if provider_ids is not None: + query["provider_ids"] = [v for v in provider_ids] + if query is not None: + query["query"] = query + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.1/marketplace-consumer/search-listings', query=query - - , headers=headers - ) - if 'listings' in json: - for v in json['listings']: - yield Listing.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + json = self._api.do("GET", "/api/2.1/marketplace-consumer/search-listings", query=query, headers=headers) + if "listings" in json: + for v in json["listings"]: + yield Listing.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + class ConsumerPersonalizationRequestsAPI: """Personalization Requests allow customers to interact with the individualized Marketplace listing flow.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - 
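list() and search() above both return the same lazy Listing iterator, so a filtered fuzzy search reads just like a plain walk of the catalog. A minimal sketch; the query string, asset filter, and summary field access are assumptions for illustration:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import AssetType

w = WorkspaceClient()

# Fuzzy-match "forecast" against free notebook listings.
for listing in w.consumer_listings.search(
    query="forecast",
    assets=[AssetType.ASSET_TYPE_NOTEBOOK],
    is_free=True,
):
    print(listing.id, listing.summary.name if listing.summary else None)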
- def create(self - , listing_id: str, intended_use: str, accepted_consumer_terms: ConsumerTerms - , * - , comment: Optional[str] = None, company: Optional[str] = None, first_name: Optional[str] = None, is_from_lighthouse: Optional[bool] = None, last_name: Optional[str] = None, recipient_type: Optional[DeltaSharingRecipientType] = None) -> CreatePersonalizationRequestResponse: + def create( + self, + listing_id: str, + intended_use: str, + accepted_consumer_terms: ConsumerTerms, + *, + comment: Optional[str] = None, + company: Optional[str] = None, + first_name: Optional[str] = None, + is_from_lighthouse: Optional[bool] = None, + last_name: Optional[str] = None, + recipient_type: Optional[DeltaSharingRecipientType] = None, + ) -> CreatePersonalizationRequestResponse: """Create a personalization request. - + Create a personalization request for a listing. - + :param listing_id: str :param intended_use: str :param accepted_consumer_terms: :class:`ConsumerTerms` @@ -3524,1213 +3971,976 @@ def create(self :param is_from_lighthouse: bool (optional) :param last_name: str (optional) :param recipient_type: :class:`DeltaSharingRecipientType` (optional) - + :returns: :class:`CreatePersonalizationRequestResponse` """ body = {} - if accepted_consumer_terms is not None: body['accepted_consumer_terms'] = accepted_consumer_terms.as_dict() - if comment is not None: body['comment'] = comment - if company is not None: body['company'] = company - if first_name is not None: body['first_name'] = first_name - if intended_use is not None: body['intended_use'] = intended_use - if is_from_lighthouse is not None: body['is_from_lighthouse'] = is_from_lighthouse - if last_name is not None: body['last_name'] = last_name - if recipient_type is not None: body['recipient_type'] = recipient_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests', body=body - - , headers=headers - ) + if accepted_consumer_terms is not None: + body["accepted_consumer_terms"] = accepted_consumer_terms.as_dict() + if comment is not None: + body["comment"] = comment + if company is not None: + body["company"] = company + if first_name is not None: + body["first_name"] = first_name + if intended_use is not None: + body["intended_use"] = intended_use + if is_from_lighthouse is not None: + body["is_from_lighthouse"] = is_from_lighthouse + if last_name is not None: + body["last_name"] = last_name + if recipient_type is not None: + body["recipient_type"] = recipient_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests", + body=body, + headers=headers, + ) return CreatePersonalizationRequestResponse.from_dict(res) - - - - - def get(self - , listing_id: str - ) -> GetPersonalizationRequestResponse: + def get(self, listing_id: str) -> GetPersonalizationRequestResponse: """Get the personalization request for a listing. - + Get the personalization request for a listing. Each consumer can make at *most* one personalization request for a listing. 
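Since each consumer can hold at most one personalization request per listing, create-then-get is the natural flow. A minimal sketch with hypothetical values, including the ConsumerTerms version string:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import ConsumerTerms

w = WorkspaceClient()

listing_id = "abc123"  # hypothetical
w.consumer_personalization_requests.create(
    listing_id=listing_id,
    intended_use="Evaluate the dataset for internal analytics",
    accepted_consumer_terms=ConsumerTerms(version="1.0"),  # illustrative version
)

# No request ID needed: at most one request exists per listing and consumer.
resp = w.consumer_personalization_requests.get(listing_id=listing_id)
print(resp.personalization_requests)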
- + :param listing_id: str - + :returns: :class:`GetPersonalizationRequestResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests' - - , headers=headers - ) - return GetPersonalizationRequestResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/marketplace-consumer/listings/{listing_id}/personalization-requests", headers=headers + ) + return GetPersonalizationRequestResponse.from_dict(res) - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PersonalizationRequest]: + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[PersonalizationRequest]: """List all personalization requests. - + List personalization requests for a consumer across all listings. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.1/marketplace-consumer/personalization-requests', query=query - - , headers=headers - ) - if 'personalization_requests' in json: - for v in json['personalization_requests']: - yield PersonalizationRequest.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + json = self._api.do( + "GET", "/api/2.1/marketplace-consumer/personalization-requests", query=query, headers=headers + ) + if "personalization_requests" in json: + for v in json["personalization_requests"]: + yield PersonalizationRequest.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + class ConsumerProvidersAPI: """Providers are the entities that publish listings to the Marketplace.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def batch_get(self - - , * - , ids: Optional[List[str]] = None) -> BatchGetProvidersResponse: + def batch_get(self, *, ids: Optional[List[str]] = None) -> BatchGetProvidersResponse: """Get one batch of providers. One may specify up to 50 IDs per request. - + Batch get a provider in the Databricks Marketplace with at least one visible listing. - + :param ids: List[str] (optional) - + :returns: :class:`BatchGetProvidersResponse` """ - + query = {} - if ids is not None: query['ids'] = [v for v in ids] - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.1/marketplace-consumer/providers:batchGet', query=query - - , headers=headers - ) - return BatchGetProvidersResponse.from_dict(res) + if ids is not None: + query["ids"] = [v for v in ids] + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.1/marketplace-consumer/providers:batchGet", query=query, headers=headers) + return BatchGetProvidersResponse.from_dict(res) - def get(self - , id: str - ) -> GetProviderResponse: + def get(self, id: str) -> GetProviderResponse: """Get a provider. 
- + Get a provider in the Databricks Marketplace with at least one visible listing. - + :param id: str - + :returns: :class:`GetProviderResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/marketplace-consumer/providers/{id}' - - , headers=headers - ) - return GetProviderResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , is_featured: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: + res = self._api.do("GET", f"/api/2.1/marketplace-consumer/providers/{id}", headers=headers) + return GetProviderResponse.from_dict(res) + + def list( + self, *, is_featured: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ProviderInfo]: """List providers. - + List all providers in the Databricks Marketplace with at least one visible listing. - + :param is_featured: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` """ - + query = {} - if is_featured is not None: query['is_featured'] = is_featured - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if is_featured is not None: + query["is_featured"] = is_featured + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.1/marketplace-consumer/providers', query=query - - , headers=headers - ) - if 'providers' in json: - for v in json['providers']: - yield ProviderInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + json = self._api.do("GET", "/api/2.1/marketplace-consumer/providers", query=query, headers=headers) + if "providers" in json: + for v in json["providers"]: + yield ProviderInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + class ProviderExchangeFiltersAPI: """Marketplace exchange filters curate which groups can access an exchange.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , filter: ExchangeFilter - ) -> CreateExchangeFilterResponse: + def create(self, filter: ExchangeFilter) -> CreateExchangeFilterResponse: """Create a new exchange filter. - + Add an exchange filter. - + :param filter: :class:`ExchangeFilter` - + :returns: :class:`CreateExchangeFilterResponse` """ body = {} - if filter is not None: body['filter'] = filter.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/marketplace-exchange/filters', body=body - - , headers=headers - ) - return CreateExchangeFilterResponse.from_dict(res) + if filter is not None: + body["filter"] = filter.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/marketplace-exchange/filters", body=body, headers=headers) + return CreateExchangeFilterResponse.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Delete an exchange filter.
- + Delete an exchange filter - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/marketplace-exchange/filters/{id}' - - , headers=headers - ) - - - - - - - def list(self - , exchange_id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExchangeFilter]: + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/marketplace-exchange/filters/{id}", headers=headers) + + def list( + self, exchange_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ExchangeFilter]: """List exchange filters. - + List exchange filters - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeFilter` """ - + query = {} - if exchange_id is not None: query['exchange_id'] = exchange_id - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if exchange_id is not None: + query["exchange_id"] = exchange_id + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-exchange/filters', query=query - - , headers=headers - ) - if 'filters' in json: - for v in json['filters']: - yield ExchangeFilter.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , id: str, filter: ExchangeFilter - ) -> UpdateExchangeFilterResponse: + json = self._api.do("GET", "/api/2.0/marketplace-exchange/filters", query=query, headers=headers) + if "filters" in json: + for v in json["filters"]: + yield ExchangeFilter.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse: """Update exchange filter. - + Update an exchange filter. - + :param id: str :param filter: :class:`ExchangeFilter` - + :returns: :class:`UpdateExchangeFilterResponse` """ body = {} - if filter is not None: body['filter'] = filter.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/marketplace-exchange/filters/{id}', body=body - - , headers=headers - ) + if filter is not None: + body["filter"] = filter.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/marketplace-exchange/filters/{id}", body=body, headers=headers) return UpdateExchangeFilterResponse.from_dict(res) - - + class ProviderExchangesAPI: """Marketplace exchanges allow providers to share their listings with a curated set of customers.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def add_listing_to_exchange(self - , listing_id: str, exchange_id: str - ) -> AddExchangeForListingResponse: + def add_listing_to_exchange(self, listing_id: str, exchange_id: str) -> AddExchangeForListingResponse: """Add an exchange for listing.
- + Associate an exchange with a listing - + :param listing_id: str :param exchange_id: str - + :returns: :class:`AddExchangeForListingResponse` """ body = {} - if exchange_id is not None: body['exchange_id'] = exchange_id - if listing_id is not None: body['listing_id'] = listing_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/marketplace-exchange/exchanges-for-listing', body=body - - , headers=headers - ) - return AddExchangeForListingResponse.from_dict(res) + if exchange_id is not None: + body["exchange_id"] = exchange_id + if listing_id is not None: + body["listing_id"] = listing_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/marketplace-exchange/exchanges-for-listing", body=body, headers=headers) + return AddExchangeForListingResponse.from_dict(res) - def create(self - , exchange: Exchange - ) -> CreateExchangeResponse: + def create(self, exchange: Exchange) -> CreateExchangeResponse: """Create an exchange. - + Create an exchange - + :param exchange: :class:`Exchange` - + :returns: :class:`CreateExchangeResponse` """ body = {} - if exchange is not None: body['exchange'] = exchange.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/marketplace-exchange/exchanges', body=body - - , headers=headers - ) - return CreateExchangeResponse.from_dict(res) + if exchange is not None: + body["exchange"] = exchange.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/marketplace-exchange/exchanges", body=body, headers=headers) + return CreateExchangeResponse.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Delete an exchange. - + This removes a listing from the marketplace. - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/marketplace-exchange/exchanges/{id}' - - , headers=headers - ) - - - - - - - def delete_listing_from_exchange(self - , id: str - ): + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/marketplace-exchange/exchanges/{id}", headers=headers) + + def delete_listing_from_exchange(self, id: str): """Remove an exchange for listing. - + Disassociate an exchange from a listing - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/marketplace-exchange/exchanges-for-listing/{id}' - - , headers=headers - ) - - - - - - - def get(self - , id: str - ) -> GetExchangeResponse: + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/marketplace-exchange/exchanges-for-listing/{id}", headers=headers) + + def get(self, id: str) -> GetExchangeResponse: """Get an exchange.
- + :param id: str - + :returns: :class:`GetExchangeResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/marketplace-exchange/exchanges/{id}' - - , headers=headers - ) - return GetExchangeResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Exchange]: + res = self._api.do("GET", f"/api/2.0/marketplace-exchange/exchanges/{id}", headers=headers) + return GetExchangeResponse.from_dict(res) + + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Exchange]: """List exchanges. - + List exchanges visible to provider - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Exchange` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-exchange/exchanges', query=query - - , headers=headers - ) - if 'exchanges' in json: - for v in json['exchanges']: - yield Exchange.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def list_exchanges_for_listing(self - , listing_id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExchangeListing]: + json = self._api.do("GET", "/api/2.0/marketplace-exchange/exchanges", query=query, headers=headers) + if "exchanges" in json: + for v in json["exchanges"]: + yield Exchange.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_exchanges_for_listing( + self, listing_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ExchangeListing]: """List exchanges for listing. 
- + List exchanges associated with a listing - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` """ - + query = {} - if listing_id is not None: query['listing_id'] = listing_id - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if listing_id is not None: + query["listing_id"] = listing_id + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-exchange/exchanges-for-listing', query=query - - , headers=headers - ) - if 'exchange_listing' in json: - for v in json['exchange_listing']: - yield ExchangeListing.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def list_listings_for_exchange(self - , exchange_id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ExchangeListing]: + json = self._api.do( + "GET", "/api/2.0/marketplace-exchange/exchanges-for-listing", query=query, headers=headers + ) + if "exchange_listing" in json: + for v in json["exchange_listing"]: + yield ExchangeListing.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_listings_for_exchange( + self, exchange_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ExchangeListing]: """List listings for exchange. - + List listings associated with an exchange - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` """ - + query = {} - if exchange_id is not None: query['exchange_id'] = exchange_id - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if exchange_id is not None: + query["exchange_id"] = exchange_id + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-exchange/listings-for-exchange', query=query - - , headers=headers - ) - if 'exchange_listings' in json: - for v in json['exchange_listings']: - yield ExchangeListing.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , id: str, exchange: Exchange - ) -> UpdateExchangeResponse: + json = self._api.do( + "GET", "/api/2.0/marketplace-exchange/listings-for-exchange", query=query, headers=headers + ) + if "exchange_listings" in json: + for v in json["exchange_listings"]: + yield ExchangeListing.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, id: str, exchange: Exchange) -> UpdateExchangeResponse: """Update exchange. 
- + Update an exchange - + :param id: str :param exchange: :class:`Exchange` - + :returns: :class:`UpdateExchangeResponse` """ body = {} - if exchange is not None: body['exchange'] = exchange.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/marketplace-exchange/exchanges/{id}', body=body - - , headers=headers - ) + if exchange is not None: + body["exchange"] = exchange.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/marketplace-exchange/exchanges/{id}", body=body, headers=headers) return UpdateExchangeResponse.from_dict(res) - - + class ProviderFilesAPI: """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , file_parent: FileParent, marketplace_file_type: MarketplaceFileType, mime_type: str - , * - , display_name: Optional[str] = None) -> CreateFileResponse: + def create( + self, + file_parent: FileParent, + marketplace_file_type: MarketplaceFileType, + mime_type: str, + *, + display_name: Optional[str] = None, + ) -> CreateFileResponse: """Create a file. - + Create a file. Currently, only provider icons and attached notebooks are supported. - + :param file_parent: :class:`FileParent` :param marketplace_file_type: :class:`MarketplaceFileType` :param mime_type: str :param display_name: str (optional) - + :returns: :class:`CreateFileResponse` """ body = {} - if display_name is not None: body['display_name'] = display_name - if file_parent is not None: body['file_parent'] = file_parent.as_dict() - if marketplace_file_type is not None: body['marketplace_file_type'] = marketplace_file_type.value - if mime_type is not None: body['mime_type'] = mime_type - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/marketplace-provider/files', body=body - - , headers=headers - ) + if display_name is not None: + body["display_name"] = display_name + if file_parent is not None: + body["file_parent"] = file_parent.as_dict() + if marketplace_file_type is not None: + body["marketplace_file_type"] = marketplace_file_type.value + if mime_type is not None: + body["mime_type"] = mime_type + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/marketplace-provider/files", body=body, headers=headers) return CreateFileResponse.from_dict(res) - - - - - def delete(self - , file_id: str - ): + def delete(self, file_id: str): """Delete a file. - + Delete a file - + :param file_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/marketplace-provider/files/{file_id}' - - , headers=headers - ) - - - - - - - def get(self - , file_id: str - ) -> GetFileResponse: + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/marketplace-provider/files/{file_id}", headers=headers) + + def get(self, file_id: str) -> GetFileResponse: """Get a file. 
- + Get a file - + :param file_id: str - + :returns: :class:`GetFileResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/marketplace-provider/files/{file_id}' - - , headers=headers - ) - return GetFileResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/marketplace-provider/files/{file_id}", headers=headers) + return GetFileResponse.from_dict(res) - def list(self - , file_parent: FileParent - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FileInfo]: + def list( + self, file_parent: FileParent, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[FileInfo]: """List files. - + List files attached to a parent entity. - + :param file_parent: :class:`FileParent` :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FileInfo` """ - + query = {} - if file_parent is not None: query['file_parent'] = file_parent.as_dict() - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if file_parent is not None: + query["file_parent"] = file_parent.as_dict() + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-provider/files', query=query - - , headers=headers - ) - if 'file_infos' in json: - for v in json['file_infos']: - yield FileInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + json = self._api.do("GET", "/api/2.0/marketplace-provider/files", query=query, headers=headers) + if "file_infos" in json: + for v in json["file_infos"]: + yield FileInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + class ProviderListingsAPI: """Listings are the core entities in the Marketplace. They represent the products that are available for consumption.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , listing: Listing - ) -> CreateListingResponse: + def create(self, listing: Listing) -> CreateListingResponse: """Create a listing. - + Create a new listing - + :param listing: :class:`Listing` - + :returns: :class:`CreateListingResponse` """ body = {} - if listing is not None: body['listing'] = listing.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/marketplace-provider/listing', body=body - - , headers=headers - ) - return CreateListingResponse.from_dict(res) + if listing is not None: + body["listing"] = listing.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/marketplace-provider/listing", body=body, headers=headers) + return CreateListingResponse.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a listing. 
- + Delete a listing - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/marketplace-provider/listings/{id}' - - , headers=headers - ) - - - - - - - def get(self - , id: str - ) -> GetListingResponse: + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/marketplace-provider/listings/{id}", headers=headers) + + def get(self, id: str) -> GetListingResponse: """Get a listing. - + Get a listing - + :param id: str - + :returns: :class:`GetListingResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/marketplace-provider/listings/{id}' - - , headers=headers - ) - return GetListingResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/marketplace-provider/listings/{id}", headers=headers) + return GetListingResponse.from_dict(res) - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Listing]: + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Listing]: """List listings. - + List listings owned by this provider - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Listing` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-provider/listings', query=query - - , headers=headers - ) - if 'listings' in json: - for v in json['listings']: - yield Listing.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , id: str, listing: Listing - ) -> UpdateListingResponse: + json = self._api.do("GET", "/api/2.0/marketplace-provider/listings", query=query, headers=headers) + if "listings" in json: + for v in json["listings"]: + yield Listing.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, id: str, listing: Listing) -> UpdateListingResponse: """Update listing. - + Update a listing - + :param id: str :param listing: :class:`Listing` - + :returns: :class:`UpdateListingResponse` """ body = {} - if listing is not None: body['listing'] = listing.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/marketplace-provider/listings/{id}', body=body - - , headers=headers - ) + if listing is not None: + body["listing"] = listing.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/marketplace-provider/listings/{id}", body=body, headers=headers) return UpdateListingResponse.from_dict(res) - - + class ProviderPersonalizationRequestsAPI: """Personalization requests are an alternate to instantly available listings. 
Control the lifecycle of personalized solutions.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PersonalizationRequest]: + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[PersonalizationRequest]: """All personalization requests across all listings. - + List personalization requests to this provider. This will return all personalization requests, regardless of which listing they are for. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-provider/personalization-requests', query=query - - , headers=headers - ) - if 'personalization_requests' in json: - for v in json['personalization_requests']: - yield PersonalizationRequest.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , listing_id: str, request_id: str, status: PersonalizationRequestStatus - , * - , reason: Optional[str] = None, share: Optional[ShareInfo] = None) -> UpdatePersonalizationRequestResponse: + json = self._api.do( + "GET", "/api/2.0/marketplace-provider/personalization-requests", query=query, headers=headers + ) + if "personalization_requests" in json: + for v in json["personalization_requests"]: + yield PersonalizationRequest.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + listing_id: str, + request_id: str, + status: PersonalizationRequestStatus, + *, + reason: Optional[str] = None, + share: Optional[ShareInfo] = None, + ) -> UpdatePersonalizationRequestResponse: """Update personalization request status. - + Update personalization request. This method only permits updating the status of the request. 
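Since the provider-side update() above only moves a request through its status lifecycle, a review loop is the typical shape. A minimal sketch, with the status members assumed from the PersonalizationRequestStatus enum defined earlier in this module and the denial reason purely illustrative:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import PersonalizationRequestStatus

w = WorkspaceClient()

for req in w.provider_personalization_requests.list():
    if req.status == PersonalizationRequestStatus.NEW:
        # Deny new requests with an explanatory reason (illustrative policy).
        w.provider_personalization_requests.update(
            listing_id=req.listing_id,
            request_id=req.id,
            status=PersonalizationRequestStatus.DENIED,
            reason="Listing is not yet available in this region",
        )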
- + :param listing_id: str :param request_id: str :param status: :class:`PersonalizationRequestStatus` :param reason: str (optional) :param share: :class:`ShareInfo` (optional) - + :returns: :class:`UpdatePersonalizationRequestResponse` """ body = {} - if reason is not None: body['reason'] = reason - if share is not None: body['share'] = share.as_dict() - if status is not None: body['status'] = status.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/marketplace-provider/listings/{listing_id}/personalization-requests/{request_id}/request-status', body=body - - , headers=headers - ) + if reason is not None: + body["reason"] = reason + if share is not None: + body["share"] = share.as_dict() + if status is not None: + body["status"] = status.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/marketplace-provider/listings/{listing_id}/personalization-requests/{request_id}/request-status", + body=body, + headers=headers, + ) return UpdatePersonalizationRequestResponse.from_dict(res) - - + class ProviderProviderAnalyticsDashboardsAPI: """Manage templated analytics solution for providers.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def create(self) -> ProviderAnalyticsDashboard: """Create provider analytics dashboard. - + Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the Lakeview dashboard id. - + :returns: :class:`ProviderAnalyticsDashboard` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('POST','/api/2.0/marketplace-provider/analytics_dashboard' - , headers=headers - ) - return ProviderAnalyticsDashboard.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("POST", "/api/2.0/marketplace-provider/analytics_dashboard", headers=headers) + return ProviderAnalyticsDashboard.from_dict(res) def get(self) -> ListProviderAnalyticsDashboardResponse: """Get provider analytics dashboard. - + Get provider analytics dashboard. - + :returns: :class:`ListProviderAnalyticsDashboardResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/marketplace-provider/analytics_dashboard' - , headers=headers - ) - return ListProviderAnalyticsDashboardResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/marketplace-provider/analytics_dashboard", headers=headers) + return ListProviderAnalyticsDashboardResponse.from_dict(res) def get_latest_version(self) -> GetLatestVersionProviderAnalyticsDashboardResponse: """Get latest version of provider analytics dashboard. - + Get latest version of provider analytics dashboard. 
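# --- Editor's note: a hedged sketch of the status-update call documented above.
# The enum member FULFILLED is an assumption about PersonalizationRequestStatus;
# only the parameter names are confirmed by this diff.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import PersonalizationRequestStatus

w = WorkspaceClient()
w.provider_personalization_requests.update(
    listing_id="<listing-id>",
    request_id="<request-id>",
    status=PersonalizationRequestStatus.FULFILLED,  # assumed member name
    reason="Approved after review",  # optional free-text rationale
)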
- + :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/marketplace-provider/analytics_dashboard/latest' , headers=headers ) - return GetLatestVersionProviderAnalyticsDashboardResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/marketplace-provider/analytics_dashboard/latest", headers=headers) + return GetLatestVersionProviderAnalyticsDashboardResponse.from_dict(res) - def update(self - , id: str - , * - , version: Optional[int] = None) -> UpdateProviderAnalyticsDashboardResponse: + def update(self, id: str, *, version: Optional[int] = None) -> UpdateProviderAnalyticsDashboardResponse: """Update provider analytics dashboard. - + Update provider analytics dashboard. - + :param id: str id is an immutable property and can't be updated. :param version: int (optional) the version of the dashboard template to upgrade the user to; this is expected to equal the latest version of the dashboard template - + :returns: :class:`UpdateProviderAnalyticsDashboardResponse` """ body = {} - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/marketplace-provider/analytics_dashboard/{id}', body=body - - , headers=headers - ) + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/marketplace-provider/analytics_dashboard/{id}", body=body, headers=headers) return UpdateProviderAnalyticsDashboardResponse.from_dict(res) - - + class ProviderProvidersAPI: """Providers are entities that manage assets in Marketplace.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , provider: ProviderInfo - ) -> CreateProviderResponse: + def create(self, provider: ProviderInfo) -> CreateProviderResponse: """Create a provider. - + Create a provider - + :param provider: :class:`ProviderInfo` - + :returns: :class:`CreateProviderResponse` """ body = {} - if provider is not None: body['provider'] = provider.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/marketplace-provider/provider', body=body - - , headers=headers - ) - return CreateProviderResponse.from_dict(res) + if provider is not None: + body["provider"] = provider.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/marketplace-provider/provider", body=body, headers=headers) + return CreateProviderResponse.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Delete provider. - + Delete provider - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/marketplace-provider/providers/{id}' - - , headers=headers - ) - - - - - - - def get(self - , id: str - ) -> GetProviderResponse: + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/marketplace-provider/providers/{id}", headers=headers) + + def get(self, id: str) -> GetProviderResponse: """Get provider.
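# --- Editor's note: get(), get_latest_version() and update() above compose into a
# "bump the dashboard template if stale" flow. A sketch under the assumption that
# both responses expose `id`/`version` fields; the exact shapes of the response
# dataclasses are not shown in this hunk.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
dashboards = w.provider_provider_analytics_dashboards
current = dashboards.get()
latest = dashboards.get_latest_version()
if current.version != latest.version:  # field names assumed
    # `id` is immutable; update() only moves the template version forward.
    dashboards.update(id=current.id, version=latest.version)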
- + Get provider profile - + :param id: str - + :returns: :class:`GetProviderResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/marketplace-provider/providers/{id}' - - , headers=headers - ) - return GetProviderResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: + res = self._api.do("GET", f"/api/2.0/marketplace-provider/providers/{id}", headers=headers) + return GetProviderResponse.from_dict(res) + + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: """List providers. - + List provider profiles for account. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/marketplace-provider/providers', query=query - - , headers=headers - ) - if 'providers' in json: - for v in json['providers']: - yield ProviderInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , id: str, provider: ProviderInfo - ) -> UpdateProviderResponse: + json = self._api.do("GET", "/api/2.0/marketplace-provider/providers", query=query, headers=headers) + if "providers" in json: + for v in json["providers"]: + yield ProviderInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, id: str, provider: ProviderInfo) -> UpdateProviderResponse: """Update provider. - + Update provider profile - + :param id: str :param provider: :class:`ProviderInfo` - + :returns: :class:`UpdateProviderResponse` """ body = {} - if provider is not None: body['provider'] = provider.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/marketplace-provider/providers/{id}', body=body - - , headers=headers - ) - return UpdateProviderResponse.from_dict(res) + if provider is not None: + body["provider"] = provider.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - \ No newline at end of file + res = self._api.do("PUT", f"/api/2.0/marketplace-provider/providers/{id}", body=body, headers=headers) + return UpdateProviderResponse.from_dict(res) diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 483e59300..b5a58078d 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -1,30 +1,28 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
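# --- Editor's note: every generated list() method in this patch shares the
# next_page_token loop visible in ProviderProvidersAPI.list above. A standalone
# sketch of that pattern, with a stub fetch_page standing in for self._api.do():
from typing import Any, Callable, Dict, Iterator, Optional


def paginate(fetch_page: Callable[[Dict[str, Any]], Dict[str, Any]], key: str) -> Iterator[Any]:
    """Yield items under `key` from successive pages until next_page_token runs out."""
    query: Dict[str, Any] = {}
    while True:
        page = fetch_page(query)
        for item in page.get(key, []):
            yield item
        token: Optional[str] = page.get("next_page_token")
        if not token:
            return
        query["page_token"] = token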
from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Callable, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class Activity: """Activity recorded for the action.""" - + activity_type: Optional[ActivityType] = None """Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage transition. @@ -39,13 +37,13 @@ class Activity: * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model versions in a stage.""" - + comment: Optional[str] = None """User-provided comment associated with the activity.""" - + creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + from_stage: Optional[Stage] = None """Source stage of the transition (if the activity is stage transition related). Valid values are: @@ -56,18 +54,18 @@ class Activity: * `Production`: Production stage. * `Archived`: Archived stage.""" - + id: Optional[str] = None """Unique identifier for the object.""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + system_comment: Optional[str] = None """Comment made by system, for example explaining an activity of type `SYSTEM_TRANSITION`. It usually describes a side effect, such as a version being archived as part of another version's stage transition, and may not be returned for some activity types.""" - + to_stage: Optional[Stage] = None """Target stage of the transition (if the activity is stage transition related). Valid values are: @@ -78,89 +76,117 @@ class Activity: * `Production`: Production stage. 
* `Archived`: Archived stage.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the Activity into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activity_type is not None: body['activity_type'] = self.activity_type.value - if self.comment is not None: body['comment'] = self.comment - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.from_stage is not None: body['from_stage'] = self.from_stage.value - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.system_comment is not None: body['system_comment'] = self.system_comment - if self.to_stage is not None: body['to_stage'] = self.to_stage.value - if self.user_id is not None: body['user_id'] = self.user_id + if self.activity_type is not None: + body["activity_type"] = self.activity_type.value + if self.comment is not None: + body["comment"] = self.comment + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.from_stage is not None: + body["from_stage"] = self.from_stage.value + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.system_comment is not None: + body["system_comment"] = self.system_comment + if self.to_stage is not None: + body["to_stage"] = self.to_stage.value + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the Activity into a shallow dictionary of its immediate attributes.""" body = {} - if self.activity_type is not None: body['activity_type'] = self.activity_type - if self.comment is not None: body['comment'] = self.comment - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.from_stage is not None: body['from_stage'] = self.from_stage - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.system_comment is not None: body['system_comment'] = self.system_comment - if self.to_stage is not None: body['to_stage'] = self.to_stage - if self.user_id is not None: body['user_id'] = self.user_id + if self.activity_type is not None: + body["activity_type"] = self.activity_type + if self.comment is not None: + body["comment"] = self.comment + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.from_stage is not None: + body["from_stage"] = self.from_stage + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.system_comment is not None: + body["system_comment"] = self.system_comment + if self.to_stage is not None: + body["to_stage"] = self.to_stage + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Activity: """Deserializes the Activity from a dictionary.""" - return cls(activity_type=_enum(d, 'activity_type', ActivityType), comment=d.get('comment', None), creation_timestamp=d.get('creation_timestamp', None), from_stage=_enum(d, 'from_stage', Stage), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', 
None), system_comment=d.get('system_comment', None), to_stage=_enum(d, 'to_stage', Stage), user_id=d.get('user_id', None)) - - + return cls( + activity_type=_enum(d, "activity_type", ActivityType), + comment=d.get("comment", None), + creation_timestamp=d.get("creation_timestamp", None), + from_stage=_enum(d, "from_stage", Stage), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + system_comment=d.get("system_comment", None), + to_stage=_enum(d, "to_stage", Stage), + user_id=d.get("user_id", None), + ) class ActivityAction(Enum): """An action that a user (with sufficient permissions) could take on an activity. Valid values are: * `APPROVE_TRANSITION_REQUEST`: Approve a transition request - + * `REJECT_TRANSITION_REQUEST`: Reject a transition request - + * `CANCEL_TRANSITION_REQUEST`: Cancel (delete) a transition request""" - - APPROVE_TRANSITION_REQUEST = 'APPROVE_TRANSITION_REQUEST' - CANCEL_TRANSITION_REQUEST = 'CANCEL_TRANSITION_REQUEST' - REJECT_TRANSITION_REQUEST = 'REJECT_TRANSITION_REQUEST' + + APPROVE_TRANSITION_REQUEST = "APPROVE_TRANSITION_REQUEST" + CANCEL_TRANSITION_REQUEST = "CANCEL_TRANSITION_REQUEST" + REJECT_TRANSITION_REQUEST = "REJECT_TRANSITION_REQUEST" + class ActivityType(Enum): """Type of activity. Valid values are: * `APPLIED_TRANSITION`: User applied the corresponding stage transition. - + * `REQUESTED_TRANSITION`: User requested the corresponding stage transition. - + * `CANCELLED_REQUEST`: User cancelled an existing transition request. - + * `APPROVED_REQUEST`: User approved the corresponding stage transition. - + * `REJECTED_REQUEST`: User rejected the corresponding stage transition. - + * `SYSTEM_TRANSITION`: For events performed as a side effect, such as archiving existing model versions in a stage.""" - - APPLIED_TRANSITION = 'APPLIED_TRANSITION' - APPROVED_REQUEST = 'APPROVED_REQUEST' - CANCELLED_REQUEST = 'CANCELLED_REQUEST' - NEW_COMMENT = 'NEW_COMMENT' - REJECTED_REQUEST = 'REJECTED_REQUEST' - REQUESTED_TRANSITION = 'REQUESTED_TRANSITION' - SYSTEM_TRANSITION = 'SYSTEM_TRANSITION' + + APPLIED_TRANSITION = "APPLIED_TRANSITION" + APPROVED_REQUEST = "APPROVED_REQUEST" + CANCELLED_REQUEST = "CANCELLED_REQUEST" + NEW_COMMENT = "NEW_COMMENT" + REJECTED_REQUEST = "REJECTED_REQUEST" + REQUESTED_TRANSITION = "REQUESTED_TRANSITION" + SYSTEM_TRANSITION = "SYSTEM_TRANSITION" + @dataclass class ApproveTransitionRequest: name: str """Name of the model.""" - + version: str """Version of the model.""" - + stage: Stage """Target stage of the transition. Valid values are: @@ -171,249 +197,293 @@ class ApproveTransitionRequest: * `Production`: Production stage.
* `Archived`: Archived stage.""" - + archive_existing_versions: bool """Specifies whether to archive all current model versions in the target stage.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the ApproveTransitionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage.value - if self.version is not None: body['version'] = self.version + if self.archive_existing_versions is not None: + body["archive_existing_versions"] = self.archive_existing_versions + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage.value + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage - if self.version is not None: body['version'] = self.version + if self.archive_existing_versions is not None: + body["archive_existing_versions"] = self.archive_existing_versions + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequest: """Deserializes the ApproveTransitionRequest from a dictionary.""" - return cls(archive_existing_versions=d.get('archive_existing_versions', None), comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None)) - - + return cls( + archive_existing_versions=d.get("archive_existing_versions", None), + comment=d.get("comment", None), + name=d.get("name", None), + stage=_enum(d, "stage", Stage), + version=d.get("version", None), + ) @dataclass class ApproveTransitionRequestResponse: activity: Optional[Activity] = None """Activity recorded for the action.""" - + def as_dict(self) -> dict: """Serializes the ApproveTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activity: body['activity'] = self.activity.as_dict() + if self.activity: + body["activity"] = self.activity.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ApproveTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.activity: body['activity'] = self.activity + if self.activity: + body["activity"] = self.activity return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequestResponse: """Deserializes the ApproveTransitionRequestResponse from a dictionary.""" - return cls(activity=_from_dict(d, 'activity', Activity)) - - + return cls(activity=_from_dict(d, "activity", Activity)) class 
CommentActivityAction(Enum): """An action that a user (with sufficient permissions) could take on a comment. Valid values are: * `EDIT_COMMENT`: Edit the comment - + * `DELETE_COMMENT`: Delete the comment""" - - DELETE_COMMENT = 'DELETE_COMMENT' - EDIT_COMMENT = 'EDIT_COMMENT' + + DELETE_COMMENT = "DELETE_COMMENT" + EDIT_COMMENT = "EDIT_COMMENT" + @dataclass class CommentObject: """Comment details.""" - + available_actions: Optional[List[CommentActivityAction]] = None """Array of actions on the activity allowed for the current viewer.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + id: Optional[str] = None """Comment ID""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the CommentObject into a dictionary suitable for use as a JSON request body.""" body = {} - if self.available_actions: body['available_actions'] = [v.value for v in self.available_actions] - if self.comment is not None: body['comment'] = self.comment - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.user_id is not None: body['user_id'] = self.user_id + if self.available_actions: + body["available_actions"] = [v.value for v in self.available_actions] + if self.comment is not None: + body["comment"] = self.comment + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the CommentObject into a shallow dictionary of its immediate attributes.""" body = {} - if self.available_actions: body['available_actions'] = self.available_actions - if self.comment is not None: body['comment'] = self.comment - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.user_id is not None: body['user_id'] = self.user_id + if self.available_actions: + body["available_actions"] = self.available_actions + if self.comment is not None: + body["comment"] = self.comment + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CommentObject: """Deserializes the CommentObject from a dictionary.""" - return cls(available_actions=_repeated_enum(d, 'available_actions', CommentActivityAction), comment=d.get('comment', None), creation_timestamp=d.get('creation_timestamp', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', 
None), user_id=d.get('user_id', None)) - - + return cls( + available_actions=_repeated_enum(d, "available_actions", CommentActivityAction), + comment=d.get("comment", None), + creation_timestamp=d.get("creation_timestamp", None), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + user_id=d.get("user_id", None), + ) @dataclass class CreateComment: name: str """Name of the model.""" - + version: str """Version of the model.""" - + comment: str """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the CreateComment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the CreateComment into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateComment: """Deserializes the CreateComment from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), version=d.get('version', None)) - - + return cls(comment=d.get("comment", None), name=d.get("name", None), version=d.get("version", None)) @dataclass class CreateCommentResponse: comment: Optional[CommentObject] = None """Comment details.""" - + def as_dict(self) -> dict: """Serializes the CreateCommentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment: body['comment'] = self.comment.as_dict() + if self.comment: + body["comment"] = self.comment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateCommentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment: body['comment'] = self.comment + if self.comment: + body["comment"] = self.comment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCommentResponse: """Deserializes the CreateCommentResponse from a dictionary.""" - return cls(comment=_from_dict(d, 'comment', CommentObject)) - - + return cls(comment=_from_dict(d, "comment", CommentObject)) @dataclass class CreateExperiment: name: str """Experiment name.""" - + artifact_location: Optional[str] = None """Location where all artifacts for the experiment are stored. If not provided, the remote server will select an appropriate default.""" - + tags: Optional[List[ExperimentTag]] = None """A collection of tags to set on the experiment. Maximum tag size and number of tags per request depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250 bytes in size and tag values up to 5000 bytes in size. 
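# --- Editor's note: CreateComment/CreateCommentResponse above back the model
# registry's comment endpoint; `w.model_registry.create_comment` exists in this
# SDK, though the values below are illustrative.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.model_registry.create_comment(
    name="my_model",  # registered model name
    version="1",      # model version is passed as a string
    comment="Validated against the holdout set",
)
print(resp.comment.id)  # CommentObject carries the server-assigned comment ID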
All storage backends are also guaranteed to support up to 20 tags per request.""" - + def as_dict(self) -> dict: """Serializes the CreateExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_location is not None: body['artifact_location'] = self.artifact_location - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.artifact_location is not None: + body["artifact_location"] = self.artifact_location + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_location is not None: body['artifact_location'] = self.artifact_location - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = self.tags + if self.artifact_location is not None: + body["artifact_location"] = self.artifact_location + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExperiment: """Deserializes the CreateExperiment from a dictionary.""" - return cls(artifact_location=d.get('artifact_location', None), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ExperimentTag)) - - + return cls( + artifact_location=d.get("artifact_location", None), + name=d.get("name", None), + tags=_repeated_dict(d, "tags", ExperimentTag), + ) @dataclass class CreateExperimentResponse: experiment_id: Optional[str] = None """Unique identifier for the experiment.""" - + def as_dict(self) -> dict: """Serializes the CreateExperimentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateExperimentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateExperimentResponse: """Deserializes the CreateExperimentResponse from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None)) - - + return cls(experiment_id=d.get("experiment_id", None)) @dataclass @@ -421,356 +491,446 @@ class CreateForecastingExperimentRequest: train_data_path: str """The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used as training data for the forecasting model.""" - + target_column: str """The column in the input training table used as the prediction target for model training. The values in this column are used as the ground truth for model training.""" - + time_column: str """The column in the input training table that represents each row's timestamp.""" - + forecast_granularity: str """The time interval between consecutive rows in the time series data. 
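# --- Editor's note: CreateExperiment maps onto w.experiments.create_experiment();
# the tag limits quoted above (250-byte keys, 5000-byte values, 20 tags per
# request) apply to this call. Names and paths below are illustrative.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import ExperimentTag

w = WorkspaceClient()
resp = w.experiments.create_experiment(
    name="/Users/someone@example.com/churn-experiment",
    tags=[ExperimentTag(key="team", value="growth")],
)
print(resp.experiment_id)  # unique identifier for the new experiment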
Possible values include: '1 second', '1 minute', '5 minutes', '10 minutes', '15 minutes', '30 minutes', 'Hourly', 'Daily', 'Weekly', 'Monthly', 'Quarterly', 'Yearly'.""" - + forecast_horizon: int """The number of time steps into the future to make predictions, calculated as a multiple of forecast_granularity. This value represents how far ahead the model should forecast.""" - + custom_weights_column: Optional[str] = None """The column in the training table used to customize weights for each time series.""" - + experiment_path: Optional[str] = None """The path in the workspace to store the created experiment.""" - + future_feature_data_path: Optional[str] = None """The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used to store future feature data for predictions.""" - + holiday_regions: Optional[List[str]] = None """The region code(s) to automatically add holiday features. Currently supports only one region.""" - + include_features: Optional[List[str]] = None """Specifies the list of feature columns to include in model training. These columns must exist in the training data and be of type string, numerical, or boolean. If not specified, no additional features will be included. Note: Certain columns are automatically handled: - Automatically excluded: split_column, target_column, custom_weights_column. - Automatically included: time_column.""" - + max_runtime: Optional[int] = None """The maximum duration for the experiment in minutes. The experiment stops automatically if it exceeds this limit.""" - + prediction_data_path: Optional[str] = None """The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used to store predictions.""" - + primary_metric: Optional[str] = None """The evaluation metric used to optimize the forecasting model.""" - + register_to: Optional[str] = None """The fully qualified path of a Unity Catalog model, formatted as catalog_name.schema_name.model_name, used to store the best model.""" - + split_column: Optional[str] = None """The column in the training table used for custom data splits. Values must be 'train', 'validate', or 'test'.""" - + timeseries_identifier_columns: Optional[List[str]] = None """The column in the training table used to group the dataset for predicting individual time series.""" - + training_frameworks: Optional[List[str]] = None """List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', 'DeepAR'.
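# --- Editor's note: a hedged sketch of submitting the forecasting request whose
# fields are documented above. `w.forecasting.create_experiment` is assumed from
# the SDK's generated-service conventions; table and column names are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
op = w.forecasting.create_experiment(
    train_data_path="catalog.schema.sales_history",
    target_column="units_sold",
    time_column="week_start",
    forecast_granularity="Weekly",
    forecast_horizon=12,        # forecast 12 weeks ahead
    holiday_regions=["US"],     # currently limited to a single region
    register_to="catalog.schema.sales_forecaster",
)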
An empty list includes all supported frameworks.""" - + def as_dict(self) -> dict: """Serializes the CreateForecastingExperimentRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_weights_column is not None: body['custom_weights_column'] = self.custom_weights_column - if self.experiment_path is not None: body['experiment_path'] = self.experiment_path - if self.forecast_granularity is not None: body['forecast_granularity'] = self.forecast_granularity - if self.forecast_horizon is not None: body['forecast_horizon'] = self.forecast_horizon - if self.future_feature_data_path is not None: body['future_feature_data_path'] = self.future_feature_data_path - if self.holiday_regions: body['holiday_regions'] = [v for v in self.holiday_regions] - if self.include_features: body['include_features'] = [v for v in self.include_features] - if self.max_runtime is not None: body['max_runtime'] = self.max_runtime - if self.prediction_data_path is not None: body['prediction_data_path'] = self.prediction_data_path - if self.primary_metric is not None: body['primary_metric'] = self.primary_metric - if self.register_to is not None: body['register_to'] = self.register_to - if self.split_column is not None: body['split_column'] = self.split_column - if self.target_column is not None: body['target_column'] = self.target_column - if self.time_column is not None: body['time_column'] = self.time_column - if self.timeseries_identifier_columns: body['timeseries_identifier_columns'] = [v for v in self.timeseries_identifier_columns] - if self.train_data_path is not None: body['train_data_path'] = self.train_data_path - if self.training_frameworks: body['training_frameworks'] = [v for v in self.training_frameworks] + if self.custom_weights_column is not None: + body["custom_weights_column"] = self.custom_weights_column + if self.experiment_path is not None: + body["experiment_path"] = self.experiment_path + if self.forecast_granularity is not None: + body["forecast_granularity"] = self.forecast_granularity + if self.forecast_horizon is not None: + body["forecast_horizon"] = self.forecast_horizon + if self.future_feature_data_path is not None: + body["future_feature_data_path"] = self.future_feature_data_path + if self.holiday_regions: + body["holiday_regions"] = [v for v in self.holiday_regions] + if self.include_features: + body["include_features"] = [v for v in self.include_features] + if self.max_runtime is not None: + body["max_runtime"] = self.max_runtime + if self.prediction_data_path is not None: + body["prediction_data_path"] = self.prediction_data_path + if self.primary_metric is not None: + body["primary_metric"] = self.primary_metric + if self.register_to is not None: + body["register_to"] = self.register_to + if self.split_column is not None: + body["split_column"] = self.split_column + if self.target_column is not None: + body["target_column"] = self.target_column + if self.time_column is not None: + body["time_column"] = self.time_column + if self.timeseries_identifier_columns: + body["timeseries_identifier_columns"] = [v for v in self.timeseries_identifier_columns] + if self.train_data_path is not None: + body["train_data_path"] = self.train_data_path + if self.training_frameworks: + body["training_frameworks"] = [v for v in self.training_frameworks] return body def as_shallow_dict(self) -> dict: """Serializes the CreateForecastingExperimentRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_weights_column is not None: 
body['custom_weights_column'] = self.custom_weights_column - if self.experiment_path is not None: body['experiment_path'] = self.experiment_path - if self.forecast_granularity is not None: body['forecast_granularity'] = self.forecast_granularity - if self.forecast_horizon is not None: body['forecast_horizon'] = self.forecast_horizon - if self.future_feature_data_path is not None: body['future_feature_data_path'] = self.future_feature_data_path - if self.holiday_regions: body['holiday_regions'] = self.holiday_regions - if self.include_features: body['include_features'] = self.include_features - if self.max_runtime is not None: body['max_runtime'] = self.max_runtime - if self.prediction_data_path is not None: body['prediction_data_path'] = self.prediction_data_path - if self.primary_metric is not None: body['primary_metric'] = self.primary_metric - if self.register_to is not None: body['register_to'] = self.register_to - if self.split_column is not None: body['split_column'] = self.split_column - if self.target_column is not None: body['target_column'] = self.target_column - if self.time_column is not None: body['time_column'] = self.time_column - if self.timeseries_identifier_columns: body['timeseries_identifier_columns'] = self.timeseries_identifier_columns - if self.train_data_path is not None: body['train_data_path'] = self.train_data_path - if self.training_frameworks: body['training_frameworks'] = self.training_frameworks + if self.custom_weights_column is not None: + body["custom_weights_column"] = self.custom_weights_column + if self.experiment_path is not None: + body["experiment_path"] = self.experiment_path + if self.forecast_granularity is not None: + body["forecast_granularity"] = self.forecast_granularity + if self.forecast_horizon is not None: + body["forecast_horizon"] = self.forecast_horizon + if self.future_feature_data_path is not None: + body["future_feature_data_path"] = self.future_feature_data_path + if self.holiday_regions: + body["holiday_regions"] = self.holiday_regions + if self.include_features: + body["include_features"] = self.include_features + if self.max_runtime is not None: + body["max_runtime"] = self.max_runtime + if self.prediction_data_path is not None: + body["prediction_data_path"] = self.prediction_data_path + if self.primary_metric is not None: + body["primary_metric"] = self.primary_metric + if self.register_to is not None: + body["register_to"] = self.register_to + if self.split_column is not None: + body["split_column"] = self.split_column + if self.target_column is not None: + body["target_column"] = self.target_column + if self.time_column is not None: + body["time_column"] = self.time_column + if self.timeseries_identifier_columns: + body["timeseries_identifier_columns"] = self.timeseries_identifier_columns + if self.train_data_path is not None: + body["train_data_path"] = self.train_data_path + if self.training_frameworks: + body["training_frameworks"] = self.training_frameworks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentRequest: """Deserializes the CreateForecastingExperimentRequest from a dictionary.""" - return cls(custom_weights_column=d.get('custom_weights_column', None), experiment_path=d.get('experiment_path', None), forecast_granularity=d.get('forecast_granularity', None), forecast_horizon=d.get('forecast_horizon', None), future_feature_data_path=d.get('future_feature_data_path', None), holiday_regions=d.get('holiday_regions', None), include_features=d.get('include_features', None), 
max_runtime=d.get('max_runtime', None), prediction_data_path=d.get('prediction_data_path', None), primary_metric=d.get('primary_metric', None), register_to=d.get('register_to', None), split_column=d.get('split_column', None), target_column=d.get('target_column', None), time_column=d.get('time_column', None), timeseries_identifier_columns=d.get('timeseries_identifier_columns', None), train_data_path=d.get('train_data_path', None), training_frameworks=d.get('training_frameworks', None)) - - + return cls( + custom_weights_column=d.get("custom_weights_column", None), + experiment_path=d.get("experiment_path", None), + forecast_granularity=d.get("forecast_granularity", None), + forecast_horizon=d.get("forecast_horizon", None), + future_feature_data_path=d.get("future_feature_data_path", None), + holiday_regions=d.get("holiday_regions", None), + include_features=d.get("include_features", None), + max_runtime=d.get("max_runtime", None), + prediction_data_path=d.get("prediction_data_path", None), + primary_metric=d.get("primary_metric", None), + register_to=d.get("register_to", None), + split_column=d.get("split_column", None), + target_column=d.get("target_column", None), + time_column=d.get("time_column", None), + timeseries_identifier_columns=d.get("timeseries_identifier_columns", None), + train_data_path=d.get("train_data_path", None), + training_frameworks=d.get("training_frameworks", None), + ) @dataclass class CreateForecastingExperimentResponse: experiment_id: Optional[str] = None """The unique ID of the created forecasting experiment""" - + def as_dict(self) -> dict: """Serializes the CreateForecastingExperimentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateForecastingExperimentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentResponse: """Deserializes the CreateForecastingExperimentResponse from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None)) - - + return cls(experiment_id=d.get("experiment_id", None)) @dataclass class CreateLoggedModelRequest: experiment_id: str """The ID of the experiment that owns the model.""" - + model_type: Optional[str] = None """The type of the model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``.""" - + name: Optional[str] = None """The name of the model (optional). 
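# --- Editor's note: the generated dataclasses in this file all follow one
# contract: as_dict() recursively serializes nested dataclasses and enums into a
# JSON-ready body, as_shallow_dict() keeps them as Python objects, and from_dict()
# inverts as_dict(). A round-trip sketch using CreateLoggedModelRequest
# (LoggedModelTag's key/value constructor arguments are assumed from its sibling
# tag classes in this module):
from databricks.sdk.service.ml import CreateLoggedModelRequest, LoggedModelTag

req = CreateLoggedModelRequest(
    experiment_id="123",
    model_type="Agent",
    tags=[LoggedModelTag(key="owner", value="ml-platform")],
)
body = req.as_dict()  # tags are now plain dicts, ready for the request body
assert CreateLoggedModelRequest.from_dict(body).experiment_id == req.experiment_id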
If not specified one will be generated.""" - + params: Optional[List[LoggedModelParameter]] = None """Parameters attached to the model.""" - + source_run_id: Optional[str] = None """The ID of the run that created the model.""" - + tags: Optional[List[LoggedModelTag]] = None """Tags attached to the model.""" - + def as_dict(self) -> dict: """Serializes the CreateLoggedModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.model_type is not None: body['model_type'] = self.model_type - if self.name is not None: body['name'] = self.name - if self.params: body['params'] = [v.as_dict() for v in self.params] - if self.source_run_id is not None: body['source_run_id'] = self.source_run_id - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.model_type is not None: + body["model_type"] = self.model_type + if self.name is not None: + body["name"] = self.name + if self.params: + body["params"] = [v.as_dict() for v in self.params] + if self.source_run_id is not None: + body["source_run_id"] = self.source_run_id + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreateLoggedModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.model_type is not None: body['model_type'] = self.model_type - if self.name is not None: body['name'] = self.name - if self.params: body['params'] = self.params - if self.source_run_id is not None: body['source_run_id'] = self.source_run_id - if self.tags: body['tags'] = self.tags + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.model_type is not None: + body["model_type"] = self.model_type + if self.name is not None: + body["name"] = self.name + if self.params: + body["params"] = self.params + if self.source_run_id is not None: + body["source_run_id"] = self.source_run_id + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelRequest: """Deserializes the CreateLoggedModelRequest from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None), model_type=d.get('model_type', None), name=d.get('name', None), params=_repeated_dict(d, 'params', LoggedModelParameter), source_run_id=d.get('source_run_id', None), tags=_repeated_dict(d, 'tags', LoggedModelTag)) - - + return cls( + experiment_id=d.get("experiment_id", None), + model_type=d.get("model_type", None), + name=d.get("name", None), + params=_repeated_dict(d, "params", LoggedModelParameter), + source_run_id=d.get("source_run_id", None), + tags=_repeated_dict(d, "tags", LoggedModelTag), + ) @dataclass class CreateLoggedModelResponse: model: Optional[LoggedModel] = None """The newly created logged model.""" - + def as_dict(self) -> dict: """Serializes the CreateLoggedModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model: body['model'] = self.model.as_dict() + if self.model: + body["model"] = self.model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateLoggedModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model: body['model'] = self.model + if self.model: + body["model"] = 
self.model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelResponse: """Deserializes the CreateLoggedModelResponse from a dictionary.""" - return cls(model=_from_dict(d, 'model', LoggedModel)) - - + return cls(model=_from_dict(d, "model", LoggedModel)) @dataclass class CreateModelRequest: name: str """Register models under this name""" - + description: Optional[str] = None """Optional description for registered model.""" - + tags: Optional[List[ModelTag]] = None """Additional metadata for registered model.""" - + def as_dict(self) -> dict: """Serializes the CreateModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = self.tags + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateModelRequest: """Deserializes the CreateModelRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ModelTag)) - - + return cls( + description=d.get("description", None), name=d.get("name", None), tags=_repeated_dict(d, "tags", ModelTag) + ) @dataclass class CreateModelResponse: registered_model: Optional[Model] = None - + def as_dict(self) -> dict: """Serializes the CreateModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.registered_model: body['registered_model'] = self.registered_model.as_dict() + if self.registered_model: + body["registered_model"] = self.registered_model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.registered_model: body['registered_model'] = self.registered_model + if self.registered_model: + body["registered_model"] = self.registered_model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateModelResponse: """Deserializes the CreateModelResponse from a dictionary.""" - return cls(registered_model=_from_dict(d, 'registered_model', Model)) - - + return cls(registered_model=_from_dict(d, "registered_model", Model)) @dataclass class CreateModelVersionRequest: name: str """Register model under this name""" - + source: str """URI indicating the location of the model artifacts.""" - + description: Optional[str] = None """Optional description for model version.""" - + run_id: Optional[str] = None """MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow tracking server""" - + run_link: Optional[str] = None """MLflow run link - this is the exact link of the run that generated this model version, potentially hosted at another instance of MLflow.""" - + tags: 
Optional[List[ModelVersionTag]] = None """Additional metadata for model version.""" - + def as_dict(self) -> dict: """Serializes the CreateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_link is not None: body['run_link'] = self.run_link - if self.source is not None: body['source'] = self.source - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_link is not None: + body["run_link"] = self.run_link + if self.source is not None: + body["source"] = self.source + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_link is not None: body['run_link'] = self.run_link - if self.source is not None: body['source'] = self.source - if self.tags: body['tags'] = self.tags + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_link is not None: + body["run_link"] = self.run_link + if self.source is not None: + body["source"] = self.source + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionRequest: """Deserializes the CreateModelVersionRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None), run_id=d.get('run_id', None), run_link=d.get('run_link', None), source=d.get('source', None), tags=_repeated_dict(d, 'tags', ModelVersionTag)) - - + return cls( + description=d.get("description", None), + name=d.get("name", None), + run_id=d.get("run_id", None), + run_link=d.get("run_link", None), + source=d.get("source", None), + tags=_repeated_dict(d, "tags", ModelVersionTag), + ) @dataclass class CreateModelVersionResponse: model_version: Optional[ModelVersion] = None """Return new version number generated for this model in registry.""" - + def as_dict(self) -> dict: """Serializes the CreateModelVersionResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_version: body['model_version'] = self.model_version.as_dict() + if self.model_version: + body["model_version"] = self.model_version.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_version: body['model_version'] = self.model_version + if self.model_version: + body["model_version"] = self.model_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionResponse: """Deserializes the CreateModelVersionResponse from a dictionary.""" - return cls(model_version=_from_dict(d, 'model_version', ModelVersion)) - - + return cls(model_version=_from_dict(d, "model_version", 
ModelVersion)) @dataclass @@ -804,18 +964,18 @@ class CreateRegistryWebhook: to production. * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - + description: Optional[str] = None """User-specified description for the webhook.""" - + http_url_spec: Optional[HttpUrlSpec] = None - + job_spec: Optional[JobSpec] = None - + model_name: Optional[str] = None """If model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.""" - + status: Optional[RegistryWebhookStatus] = None """Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. @@ -824,116 +984,147 @@ class CreateRegistryWebhook: * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event.""" - + def as_dict(self) -> dict: """Serializes the CreateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.events: body['events'] = [v.value for v in self.events] - if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict() - if self.job_spec: body['job_spec'] = self.job_spec.as_dict() - if self.model_name is not None: body['model_name'] = self.model_name - if self.status is not None: body['status'] = self.status.value + if self.description is not None: + body["description"] = self.description + if self.events: + body["events"] = [v.value for v in self.events] + if self.http_url_spec: + body["http_url_spec"] = self.http_url_spec.as_dict() + if self.job_spec: + body["job_spec"] = self.job_spec.as_dict() + if self.model_name is not None: + body["model_name"] = self.model_name + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.events: body['events'] = self.events - if self.http_url_spec: body['http_url_spec'] = self.http_url_spec - if self.job_spec: body['job_spec'] = self.job_spec - if self.model_name is not None: body['model_name'] = self.model_name - if self.status is not None: body['status'] = self.status + if self.description is not None: + body["description"] = self.description + if self.events: + body["events"] = self.events + if self.http_url_spec: + body["http_url_spec"] = self.http_url_spec + if self.job_spec: + body["job_spec"] = self.job_spec + if self.model_name is not None: + body["model_name"] = self.model_name + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRegistryWebhook: """Deserializes the CreateRegistryWebhook from a dictionary.""" - return cls(description=d.get('description', None), events=_repeated_enum(d, 'events', RegistryWebhookEvent), http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpec), job_spec=_from_dict(d, 'job_spec', JobSpec), model_name=d.get('model_name', None), status=_enum(d, 'status', RegistryWebhookStatus)) - - + return cls( + description=d.get("description", None), + events=_repeated_enum(d, "events", RegistryWebhookEvent), + http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec), + job_spec=_from_dict(d, "job_spec", JobSpec), + model_name=d.get("model_name", 
None), + status=_enum(d, "status", RegistryWebhookStatus), + ) @dataclass class CreateRun: experiment_id: Optional[str] = None """ID of the associated experiment.""" - + run_name: Optional[str] = None """The name of the run.""" - + start_time: Optional[int] = None """Unix timestamp in milliseconds of when the run started.""" - + tags: Optional[List[RunTag]] = None """Additional metadata for run.""" - + user_id: Optional[str] = None """ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in a future MLflow release. Use 'mlflow.user' tag instead.""" - + def as_dict(self) -> dict: """Serializes the CreateRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.start_time is not None: body['start_time'] = self.start_time - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.user_id is not None: body['user_id'] = self.user_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.start_time is not None: + body["start_time"] = self.start_time + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.start_time is not None: body['start_time'] = self.start_time - if self.tags: body['tags'] = self.tags - if self.user_id is not None: body['user_id'] = self.user_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.start_time is not None: + body["start_time"] = self.start_time + if self.tags: + body["tags"] = self.tags + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRun: """Deserializes the CreateRun from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None), run_name=d.get('run_name', None), start_time=d.get('start_time', None), tags=_repeated_dict(d, 'tags', RunTag), user_id=d.get('user_id', None)) - - + return cls( + experiment_id=d.get("experiment_id", None), + run_name=d.get("run_name", None), + start_time=d.get("start_time", None), + tags=_repeated_dict(d, "tags", RunTag), + user_id=d.get("user_id", None), + ) @dataclass class CreateRunResponse: run: Optional[Run] = None """The newly created run.""" - + def as_dict(self) -> dict: """Serializes the CreateRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run: body['run'] = self.run.as_dict() + if self.run: + body["run"] = self.run.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.run: body['run'] = self.run + if self.run: + body["run"] = self.run return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRunResponse: """Deserializes the CreateRunResponse from a dictionary.""" - return cls(run=_from_dict(d, 'run', Run)) - - + return cls(run=_from_dict(d, "run", 
Run)) @dataclass class CreateTransitionRequest: name: str """Name of the model.""" - + version: str """Version of the model.""" - + stage: Stage """Target stage of the transition. Valid values are: @@ -944,175 +1135,202 @@ class CreateTransitionRequest: * `Production`: Production stage. * `Archived`: Archived stage.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the CreateTransitionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage.value - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage.value + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequest: """Deserializes the CreateTransitionRequest from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None)) - - + return cls( + comment=d.get("comment", None), + name=d.get("name", None), + stage=_enum(d, "stage", Stage), + version=d.get("version", None), + ) @dataclass class CreateTransitionRequestResponse: request: Optional[TransitionRequest] = None """Transition request details.""" - + def as_dict(self) -> dict: """Serializes the CreateTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.request: body['request'] = self.request.as_dict() + if self.request: + body["request"] = self.request.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.request: body['request'] = self.request + if self.request: + body["request"] = self.request return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequestResponse: """Deserializes the CreateTransitionRequestResponse from a dictionary.""" - return cls(request=_from_dict(d, 'request', TransitionRequest)) - - + return cls(request=_from_dict(d, "request", TransitionRequest)) @dataclass class CreateWebhookResponse: webhook: Optional[RegistryWebhook] = None - + def as_dict(self) -> dict: """Serializes the CreateWebhookResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.webhook: body['webhook'] = self.webhook.as_dict() + if self.webhook: + body["webhook"] = self.webhook.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateWebhookResponse into a shallow dictionary of its 
immediate attributes.""" body = {} - if self.webhook: body['webhook'] = self.webhook + if self.webhook: + body["webhook"] = self.webhook return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWebhookResponse: """Deserializes the CreateWebhookResponse from a dictionary.""" - return cls(webhook=_from_dict(d, 'webhook', RegistryWebhook)) - - + return cls(webhook=_from_dict(d, "webhook", RegistryWebhook)) @dataclass class Dataset: """Dataset. Represents a reference to data used for training, testing, or evaluation during the model development process.""" - + name: str """The name of the dataset. E.g. “my.uc.table@2” “nyc-taxi-dataset”, “fantastic-elk-3”""" - + digest: str """Dataset digest, e.g. an md5 hash of the dataset that uniquely identifies it within datasets of the same name.""" - + source_type: str """The type of the dataset source, e.g. ‘databricks-uc-table’, ‘DBFS’, ‘S3’, ...""" - + source: str """Source information for the dataset. Note that the source may not exactly reproduce the dataset if it was transformed / modified before use with MLflow.""" - + profile: Optional[str] = None """The profile of the dataset. Summary statistics for the dataset, such as the number of rows in a table, the mean / std / mode of each column in a table, or the number of elements in an array.""" - + schema: Optional[str] = None """The schema of the dataset. E.g., MLflow ColSpec JSON for a dataframe, MLflow TensorSpec JSON for an ndarray, or another schema format.""" - + def as_dict(self) -> dict: """Serializes the Dataset into a dictionary suitable for use as a JSON request body.""" body = {} - if self.digest is not None: body['digest'] = self.digest - if self.name is not None: body['name'] = self.name - if self.profile is not None: body['profile'] = self.profile - if self.schema is not None: body['schema'] = self.schema - if self.source is not None: body['source'] = self.source - if self.source_type is not None: body['source_type'] = self.source_type + if self.digest is not None: + body["digest"] = self.digest + if self.name is not None: + body["name"] = self.name + if self.profile is not None: + body["profile"] = self.profile + if self.schema is not None: + body["schema"] = self.schema + if self.source is not None: + body["source"] = self.source + if self.source_type is not None: + body["source_type"] = self.source_type return body def as_shallow_dict(self) -> dict: """Serializes the Dataset into a shallow dictionary of its immediate attributes.""" body = {} - if self.digest is not None: body['digest'] = self.digest - if self.name is not None: body['name'] = self.name - if self.profile is not None: body['profile'] = self.profile - if self.schema is not None: body['schema'] = self.schema - if self.source is not None: body['source'] = self.source - if self.source_type is not None: body['source_type'] = self.source_type + if self.digest is not None: + body["digest"] = self.digest + if self.name is not None: + body["name"] = self.name + if self.profile is not None: + body["profile"] = self.profile + if self.schema is not None: + body["schema"] = self.schema + if self.source is not None: + body["source"] = self.source + if self.source_type is not None: + body["source_type"] = self.source_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Dataset: """Deserializes the Dataset from a dictionary.""" - return cls(digest=d.get('digest', None), name=d.get('name', None), profile=d.get('profile', None), schema=d.get('schema', None), source=d.get('source', None), 
source_type=d.get('source_type', None)) - - + return cls( + digest=d.get("digest", None), + name=d.get("name", None), + profile=d.get("profile", None), + schema=d.get("schema", None), + source=d.get("source", None), + source_type=d.get("source_type", None), + ) @dataclass class DatasetInput: """DatasetInput. Represents a dataset and input tags.""" - + dataset: Dataset """The dataset being used as a Run input.""" - + tags: Optional[List[InputTag]] = None """A list of tags for the dataset input, e.g. a “context” tag with value “training”""" - + def as_dict(self) -> dict: """Serializes the DatasetInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset: body['dataset'] = self.dataset.as_dict() - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.dataset: + body["dataset"] = self.dataset.as_dict() + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the DatasetInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset: body['dataset'] = self.dataset - if self.tags: body['tags'] = self.tags + if self.dataset: + body["dataset"] = self.dataset + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatasetInput: """Deserializes the DatasetInput from a dictionary.""" - return cls(dataset=_from_dict(d, 'dataset', Dataset), tags=_repeated_dict(d, 'tags', InputTag)) - - - - - + return cls(dataset=_from_dict(d, "dataset", Dataset), tags=_repeated_dict(d, "tags", InputTag)) @dataclass @@ -1131,33 +1349,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCommentResponse: """Deserializes the DeleteCommentResponse from a dictionary.""" return cls() - - @dataclass class DeleteExperiment: experiment_id: str """ID of the associated experiment.""" - + def as_dict(self) -> dict: """Serializes the DeleteExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteExperiment: """Deserializes the DeleteExperiment from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None)) - - + return cls(experiment_id=d.get("experiment_id", None)) @dataclass @@ -1176,11 +1392,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteExperimentResponse: """Deserializes the DeleteExperimentResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1199,11 +1410,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteLoggedModelResponse: """Deserializes the DeleteLoggedModelResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1222,11 +1428,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteLoggedModelTagResponse: """Deserializes the DeleteLoggedModelTagResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1245,11 +1446,6 @@ def as_shallow_dict(self) -> dict: def 
from_dict(cls, d: Dict[str, Any]) -> DeleteModelResponse: """Deserializes the DeleteModelResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1268,11 +1464,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteModelTagResponse: """Deserializes the DeleteModelTagResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1291,11 +1482,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteModelVersionResponse: """Deserializes the DeleteModelVersionResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1314,33 +1500,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteModelVersionTagResponse: """Deserializes the DeleteModelVersionTagResponse from a dictionary.""" return cls() - - @dataclass class DeleteRun: run_id: str """ID of the run to delete.""" - + def as_dict(self) -> dict: """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRun: """Deserializes the DeleteRun from a dictionary.""" - return cls(run_id=d.get('run_id', None)) - - + return cls(run_id=d.get("run_id", None)) @dataclass @@ -1359,100 +1543,108 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: """Deserializes the DeleteRunResponse from a dictionary.""" return cls() - - @dataclass class DeleteRuns: experiment_id: str """The ID of the experiment containing the runs to delete.""" - + max_timestamp_millis: int """The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only runs created prior to or at this timestamp are deleted.""" - + max_runs: Optional[int] = None """An optional positive integer indicating the maximum number of runs to delete. 
The maximum allowed value for max_runs is 10000.""" - + def as_dict(self) -> dict: """Serializes the DeleteRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.max_runs is not None: body['max_runs'] = self.max_runs - if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.max_runs is not None: + body["max_runs"] = self.max_runs + if self.max_timestamp_millis is not None: + body["max_timestamp_millis"] = self.max_timestamp_millis return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.max_runs is not None: body['max_runs'] = self.max_runs - if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.max_runs is not None: + body["max_runs"] = self.max_runs + if self.max_timestamp_millis is not None: + body["max_timestamp_millis"] = self.max_timestamp_millis return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRuns: """Deserializes the DeleteRuns from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None), max_runs=d.get('max_runs', None), max_timestamp_millis=d.get('max_timestamp_millis', None)) - - + return cls( + experiment_id=d.get("experiment_id", None), + max_runs=d.get("max_runs", None), + max_timestamp_millis=d.get("max_timestamp_millis", None), + ) @dataclass class DeleteRunsResponse: runs_deleted: Optional[int] = None """The number of runs deleted.""" - + def as_dict(self) -> dict: """Serializes the DeleteRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted + if self.runs_deleted is not None: + body["runs_deleted"] = self.runs_deleted return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted + if self.runs_deleted is not None: + body["runs_deleted"] = self.runs_deleted return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRunsResponse: """Deserializes the DeleteRunsResponse from a dictionary.""" - return cls(runs_deleted=d.get('runs_deleted', None)) - - + return cls(runs_deleted=d.get("runs_deleted", None)) @dataclass class DeleteTag: run_id: str """ID of the run that the tag was logged under. Must be provided.""" - + key: str """Name of the tag. Maximum size is 255 bytes. 
Must be provided.""" - + def as_dict(self) -> dict: """Serializes the DeleteTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.run_id is not None: body['run_id'] = self.run_id + if self.key is not None: + body["key"] = self.key + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the DeleteTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.run_id is not None: body['run_id'] = self.run_id + if self.key is not None: + body["key"] = self.key + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteTag: """Deserializes the DeleteTag from a dictionary.""" - return cls(key=d.get('key', None), run_id=d.get('run_id', None)) - - + return cls(key=d.get("key", None), run_id=d.get("run_id", None)) @dataclass @@ -1471,11 +1663,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteTagResponse: """Deserializes the DeleteTagResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1494,19 +1681,14 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteTransitionRequestResponse: """Deserializes the DeleteTransitionRequestResponse from a dictionary.""" return cls() - - class DeleteTransitionRequestStage(Enum): - - - ARCHIVED = 'Archived' - NONE = 'None' - PRODUCTION = 'Production' - STAGING = 'Staging' - + ARCHIVED = "Archived" + NONE = "None" + PRODUCTION = "Production" + STAGING = "Staging" @dataclass @@ -1525,350 +1707,426 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteWebhookResponse: """Deserializes the DeleteWebhookResponse from a dictionary.""" return cls() - - @dataclass class Experiment: """An experiment and its metadata.""" - + artifact_location: Optional[str] = None """Location where artifacts for the experiment are stored.""" - + creation_time: Optional[int] = None """Creation time""" - + experiment_id: Optional[str] = None """Unique identifier for the experiment.""" - + last_update_time: Optional[int] = None """Last update time""" - + lifecycle_stage: Optional[str] = None """Current life cycle stage of the experiment: "active" or "deleted". 
Deleted experiments are not returned by APIs.""" - + name: Optional[str] = None """Human readable name that identifies the experiment.""" - + tags: Optional[List[ExperimentTag]] = None """Tags: Additional metadata key-value pairs.""" - + def as_dict(self) -> dict: """Serializes the Experiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_location is not None: body['artifact_location'] = self.artifact_location - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.last_update_time is not None: body['last_update_time'] = self.last_update_time - if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.artifact_location is not None: + body["artifact_location"] = self.artifact_location + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.last_update_time is not None: + body["last_update_time"] = self.last_update_time + if self.lifecycle_stage is not None: + body["lifecycle_stage"] = self.lifecycle_stage + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Experiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_location is not None: body['artifact_location'] = self.artifact_location - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.last_update_time is not None: body['last_update_time'] = self.last_update_time - if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = self.tags + if self.artifact_location is not None: + body["artifact_location"] = self.artifact_location + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.last_update_time is not None: + body["last_update_time"] = self.last_update_time + if self.lifecycle_stage is not None: + body["lifecycle_stage"] = self.lifecycle_stage + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Experiment: """Deserializes the Experiment from a dictionary.""" - return cls(artifact_location=d.get('artifact_location', None), creation_time=d.get('creation_time', None), experiment_id=d.get('experiment_id', None), last_update_time=d.get('last_update_time', None), lifecycle_stage=d.get('lifecycle_stage', None), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ExperimentTag)) - - + return cls( + artifact_location=d.get("artifact_location", None), + creation_time=d.get("creation_time", None), + experiment_id=d.get("experiment_id", None), + last_update_time=d.get("last_update_time", None), + lifecycle_stage=d.get("lifecycle_stage", None), + name=d.get("name", None), + tags=_repeated_dict(d, "tags", ExperimentTag), + ) @dataclass class ExperimentAccessControlRequest: group_name: Optional[str] = None 
"""name of the group""" - + permission_level: Optional[ExperimentPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ExperimentAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentAccessControlRequest: """Deserializes the ExperimentAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", ExperimentPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class ExperimentAccessControlResponse: all_permissions: Optional[List[ExperimentPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ExperimentAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: 
body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentAccessControlResponse: """Deserializes the ExperimentAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', ExperimentPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", ExperimentPermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class ExperimentPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[ExperimentPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ExperimentPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if 
self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermission: """Deserializes the ExperimentPermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", ExperimentPermissionLevel), + ) class ExperimentPermissionLevel(Enum): """Permission level""" - - CAN_EDIT = 'CAN_EDIT' - CAN_MANAGE = 'CAN_MANAGE' - CAN_READ = 'CAN_READ' + + CAN_EDIT = "CAN_EDIT" + CAN_MANAGE = "CAN_MANAGE" + CAN_READ = "CAN_READ" + @dataclass class ExperimentPermissions: access_control_list: Optional[List[ExperimentAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ExperimentPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissions: """Deserializes the ExperimentPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', ExperimentAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class ExperimentPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[ExperimentPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ExperimentPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + 
body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsDescription: """Deserializes the ExperimentPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ExperimentPermissionLevel)) - - + return cls( + description=d.get("description", None), + permission_level=_enum(d, "permission_level", ExperimentPermissionLevel), + ) @dataclass class ExperimentPermissionsRequest: access_control_list: Optional[List[ExperimentAccessControlRequest]] = None - + experiment_id: Optional[str] = None """The experiment for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the ExperimentPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsRequest: """Deserializes the ExperimentPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', ExperimentAccessControlRequest), experiment_id=d.get('experiment_id', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlRequest), + experiment_id=d.get("experiment_id", None), + ) @dataclass class ExperimentTag: """A tag for an experiment.""" - + key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the ExperimentTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ExperimentTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if 
self.value is not None:
+ body["value"] = self.value
 return body

 @classmethod
 def from_dict(cls, d: Dict[str, Any]) -> ExperimentTag:
 """Deserializes the ExperimentTag from a dictionary."""
- return cls(key=d.get('key', None), value=d.get('value', None))
-
-
+ return cls(key=d.get("key", None), value=d.get("value", None))


 @dataclass
 class FileInfo:
 """Metadata of a single artifact file or directory."""
-
+
 file_size: Optional[int] = None
 """The size in bytes of the file. Unset for directories."""
-
+
 is_dir: Optional[bool] = None
 """Whether the path is a directory."""
-
+
 path: Optional[str] = None
 """The path relative to the run's root artifact directory."""
-
+
 def as_dict(self) -> dict:
 """Serializes the FileInfo into a dictionary suitable for use as a JSON request body."""
 body = {}
- if self.file_size is not None: body['file_size'] = self.file_size
- if self.is_dir is not None: body['is_dir'] = self.is_dir
- if self.path is not None: body['path'] = self.path
+ if self.file_size is not None:
+ body["file_size"] = self.file_size
+ if self.is_dir is not None:
+ body["is_dir"] = self.is_dir
+ if self.path is not None:
+ body["path"] = self.path
 return body

 def as_shallow_dict(self) -> dict:
 """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
 body = {}
- if self.file_size is not None: body['file_size'] = self.file_size
- if self.is_dir is not None: body['is_dir'] = self.is_dir
- if self.path is not None: body['path'] = self.path
+ if self.file_size is not None:
+ body["file_size"] = self.file_size
+ if self.is_dir is not None:
+ body["is_dir"] = self.is_dir
+ if self.path is not None:
+ body["path"] = self.path
 return body

 @classmethod
 def from_dict(cls, d: Dict[str, Any]) -> FileInfo:
 """Deserializes the FileInfo from a dictionary."""
- return cls(file_size=d.get('file_size', None), is_dir=d.get('is_dir', None), path=d.get('path', None))
-
-
+ return cls(file_size=d.get("file_size", None), is_dir=d.get("is_dir", None), path=d.get("path", None))


 @dataclass
@@ -1876,224 +2134,218 @@ class FinalizeLoggedModelRequest:
 status: LoggedModelStatus
 """Whether or not the model is ready for use. 
``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that something went wrong when logging the model weights / agent code.""" - + model_id: Optional[str] = None """The ID of the logged model to finalize.""" - + def as_dict(self) -> dict: """Serializes the FinalizeLoggedModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id - if self.status is not None: body['status'] = self.status.value + if self.model_id is not None: + body["model_id"] = self.model_id + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the FinalizeLoggedModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id - if self.status is not None: body['status'] = self.status + if self.model_id is not None: + body["model_id"] = self.model_id + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FinalizeLoggedModelRequest: """Deserializes the FinalizeLoggedModelRequest from a dictionary.""" - return cls(model_id=d.get('model_id', None), status=_enum(d, 'status', LoggedModelStatus)) - - + return cls(model_id=d.get("model_id", None), status=_enum(d, "status", LoggedModelStatus)) @dataclass class FinalizeLoggedModelResponse: model: Optional[LoggedModel] = None """The updated logged model.""" - + def as_dict(self) -> dict: """Serializes the FinalizeLoggedModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model: body['model'] = self.model.as_dict() + if self.model: + body["model"] = self.model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the FinalizeLoggedModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model: body['model'] = self.model + if self.model: + body["model"] = self.model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FinalizeLoggedModelResponse: """Deserializes the FinalizeLoggedModelResponse from a dictionary.""" - return cls(model=_from_dict(d, 'model', LoggedModel)) - - + return cls(model=_from_dict(d, "model", LoggedModel)) @dataclass class ForecastingExperiment: """Represents a forecasting experiment with its unique identifier, URL, and state.""" - + experiment_id: Optional[str] = None """The unique ID for the forecasting experiment.""" - + experiment_page_url: Optional[str] = None """The URL to the forecasting experiment page.""" - + state: Optional[ForecastingExperimentState] = None """The current state of the forecasting experiment.""" - + def as_dict(self) -> dict: """Serializes the ForecastingExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.experiment_page_url is not None: body['experiment_page_url'] = self.experiment_page_url - if self.state is not None: body['state'] = self.state.value + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.experiment_page_url is not None: + body["experiment_page_url"] = self.experiment_page_url + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the ForecastingExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = 
self.experiment_id - if self.experiment_page_url is not None: body['experiment_page_url'] = self.experiment_page_url - if self.state is not None: body['state'] = self.state + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.experiment_page_url is not None: + body["experiment_page_url"] = self.experiment_page_url + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ForecastingExperiment: """Deserializes the ForecastingExperiment from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None), experiment_page_url=d.get('experiment_page_url', None), state=_enum(d, 'state', ForecastingExperimentState)) - - + return cls( + experiment_id=d.get("experiment_id", None), + experiment_page_url=d.get("experiment_page_url", None), + state=_enum(d, "state", ForecastingExperimentState), + ) class ForecastingExperimentState(Enum): - - - CANCELLED = 'CANCELLED' - FAILED = 'FAILED' - PENDING = 'PENDING' - RUNNING = 'RUNNING' - SUCCEEDED = 'SUCCEEDED' - + CANCELLED = "CANCELLED" + FAILED = "FAILED" + PENDING = "PENDING" + RUNNING = "RUNNING" + SUCCEEDED = "SUCCEEDED" @dataclass class GetExperimentByNameResponse: experiment: Optional[Experiment] = None """Experiment details.""" - + def as_dict(self) -> dict: """Serializes the GetExperimentByNameResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment: body['experiment'] = self.experiment.as_dict() + if self.experiment: + body["experiment"] = self.experiment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetExperimentByNameResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment: body['experiment'] = self.experiment + if self.experiment: + body["experiment"] = self.experiment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetExperimentByNameResponse: """Deserializes the GetExperimentByNameResponse from a dictionary.""" - return cls(experiment=_from_dict(d, 'experiment', Experiment)) - - - - - + return cls(experiment=_from_dict(d, "experiment", Experiment)) @dataclass class GetExperimentPermissionLevelsResponse: permission_levels: Optional[List[ExperimentPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetExperimentPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetExperimentPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetExperimentPermissionLevelsResponse: """Deserializes the GetExperimentPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', ExperimentPermissionsDescription)) - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", ExperimentPermissionsDescription)) @dataclass class GetExperimentResponse: experiment: Optional[Experiment] = None """Experiment 
details.""" - + def as_dict(self) -> dict: """Serializes the GetExperimentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment: body['experiment'] = self.experiment.as_dict() + if self.experiment: + body["experiment"] = self.experiment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetExperimentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment: body['experiment'] = self.experiment + if self.experiment: + body["experiment"] = self.experiment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetExperimentResponse: """Deserializes the GetExperimentResponse from a dictionary.""" - return cls(experiment=_from_dict(d, 'experiment', Experiment)) - - - - - - - - + return cls(experiment=_from_dict(d, "experiment", Experiment)) @dataclass class GetLatestVersionsRequest: name: str """Registered model unique name identifier.""" - + stages: Optional[List[str]] = None """List of stages.""" - + def as_dict(self) -> dict: """Serializes the GetLatestVersionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.stages: body['stages'] = [v for v in self.stages] + if self.name is not None: + body["name"] = self.name + if self.stages: + body["stages"] = [v for v in self.stages] return body def as_shallow_dict(self) -> dict: """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.stages: body['stages'] = self.stages + if self.name is not None: + body["name"] = self.name + if self.stages: + body["stages"] = self.stages return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionsRequest: """Deserializes the GetLatestVersionsRequest from a dictionary.""" - return cls(name=d.get('name', None), stages=d.get('stages', None)) - - + return cls(name=d.get("name", None), stages=d.get("stages", None)) @dataclass @@ -2101,53 +2353,50 @@ class GetLatestVersionsResponse: model_versions: Optional[List[ModelVersion]] = None """Latest version models for each requests stage. Only return models with current `READY` status. 
If no `stages` provided, returns the latest version for each stage, including `"None"`.""" - + def as_dict(self) -> dict: """Serializes the GetLatestVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions] + if self.model_versions: + body["model_versions"] = [v.as_dict() for v in self.model_versions] return body def as_shallow_dict(self) -> dict: """Serializes the GetLatestVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_versions: body['model_versions'] = self.model_versions + if self.model_versions: + body["model_versions"] = self.model_versions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionsResponse: """Deserializes the GetLatestVersionsResponse from a dictionary.""" - return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersion)) - - - - - + return cls(model_versions=_repeated_dict(d, "model_versions", ModelVersion)) @dataclass class GetLoggedModelResponse: model: Optional[LoggedModel] = None """The retrieved logged model.""" - + def as_dict(self) -> dict: """Serializes the GetLoggedModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model: body['model'] = self.model.as_dict() + if self.model: + body["model"] = self.model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetLoggedModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model: body['model'] = self.model + if self.model: + body["model"] = self.model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetLoggedModelResponse: """Deserializes the GetLoggedModelResponse from a dictionary.""" - return cls(model=_from_dict(d, 'model', LoggedModel)) - - + return cls(model=_from_dict(d, "model", LoggedModel)) @dataclass @@ -2156,219 +2405,214 @@ class GetMetricHistoryResponse: """All logged values for this metric if `max_results` is not specified in the request or if the total count of metrics returned is less than the service level pagination threshold. Otherwise, this is one page of results.""" - + next_page_token: Optional[str] = None """A token that can be used to issue a query for the next page of metric history values. 
A missing token indicates that no additional metrics are available to fetch.""" - + def as_dict(self) -> dict: """Serializes the GetMetricHistoryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.metrics: + body["metrics"] = [v.as_dict() for v in self.metrics] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetMetricHistoryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: body['metrics'] = self.metrics - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.metrics: + body["metrics"] = self.metrics + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetMetricHistoryResponse: """Deserializes the GetMetricHistoryResponse from a dictionary.""" - return cls(metrics=_repeated_dict(d, 'metrics', Metric), next_page_token=d.get('next_page_token', None)) - - - - - + return cls(metrics=_repeated_dict(d, "metrics", Metric), next_page_token=d.get("next_page_token", None)) @dataclass class GetModelResponse: registered_model_databricks: Optional[ModelDatabricks] = None - + def as_dict(self) -> dict: """Serializes the GetModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.registered_model_databricks: body['registered_model_databricks'] = self.registered_model_databricks.as_dict() + if self.registered_model_databricks: + body["registered_model_databricks"] = self.registered_model_databricks.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.registered_model_databricks: body['registered_model_databricks'] = self.registered_model_databricks + if self.registered_model_databricks: + body["registered_model_databricks"] = self.registered_model_databricks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetModelResponse: """Deserializes the GetModelResponse from a dictionary.""" - return cls(registered_model_databricks=_from_dict(d, 'registered_model_databricks', ModelDatabricks)) - - - - - + return cls(registered_model_databricks=_from_dict(d, "registered_model_databricks", ModelDatabricks)) @dataclass class GetModelVersionDownloadUriResponse: artifact_uri: Optional[str] = None """URI corresponding to where artifacts for this model version are stored.""" - + def as_dict(self) -> dict: """Serializes the GetModelVersionDownloadUriResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri + if self.artifact_uri is not None: + body["artifact_uri"] = self.artifact_uri return body def as_shallow_dict(self) -> dict: """Serializes the GetModelVersionDownloadUriResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri + if self.artifact_uri is not None: + body["artifact_uri"] = self.artifact_uri return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetModelVersionDownloadUriResponse: """Deserializes the GetModelVersionDownloadUriResponse from a 
dictionary.""" - return cls(artifact_uri=d.get('artifact_uri', None)) - - - - - + return cls(artifact_uri=d.get("artifact_uri", None)) @dataclass class GetModelVersionResponse: model_version: Optional[ModelVersion] = None - + def as_dict(self) -> dict: """Serializes the GetModelVersionResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_version: body['model_version'] = self.model_version.as_dict() + if self.model_version: + body["model_version"] = self.model_version.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetModelVersionResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_version: body['model_version'] = self.model_version + if self.model_version: + body["model_version"] = self.model_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetModelVersionResponse: """Deserializes the GetModelVersionResponse from a dictionary.""" - return cls(model_version=_from_dict(d, 'model_version', ModelVersion)) - - - - - + return cls(model_version=_from_dict(d, "model_version", ModelVersion)) @dataclass class GetRegisteredModelPermissionLevelsResponse: permission_levels: Optional[List[RegisteredModelPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetRegisteredModelPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetRegisteredModelPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRegisteredModelPermissionLevelsResponse: """Deserializes the GetRegisteredModelPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', RegisteredModelPermissionsDescription)) - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", RegisteredModelPermissionsDescription)) @dataclass class GetRunResponse: run: Optional[Run] = None """Run metadata (name, start time, etc) and data (metrics, params, and tags).""" - + def as_dict(self) -> dict: """Serializes the GetRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run: body['run'] = self.run.as_dict() + if self.run: + body["run"] = self.run.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.run: body['run'] = self.run + if self.run: + body["run"] = self.run return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRunResponse: """Deserializes the GetRunResponse from a dictionary.""" - return cls(run=_from_dict(d, 'run', Run)) - - + return cls(run=_from_dict(d, "run", Run)) @dataclass class HttpUrlSpec: url: str """External HTTPS URL called on event trigger (by using a POST request).""" - + authorization: Optional[str] = None """Value of the authorization header that should be sent in the request sent by the wehbook. 
It should be of the form `"<auth type> <credentials>"`. If set to an empty string, no authorization header will be included in the request.""" - + enable_ssl_verification: Optional[bool] = None """Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. For security purposes, it is encouraged to perform secret validation with the HMAC-encoded portion of the payload and acknowledge the risk associated with disabling hostname validation whereby it becomes more likely that requests can be maliciously routed to an unintended host.""" - + secret: Optional[str] = None """Shared secret required for HMAC encoding payload. The HMAC-encoded payload will be sent in the header as: { "X-Databricks-Signature": $encoded_payload }.""" - + def as_dict(self) -> dict: """Serializes the HttpUrlSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authorization is not None: body['authorization'] = self.authorization - if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification - if self.secret is not None: body['secret'] = self.secret - if self.url is not None: body['url'] = self.url + if self.authorization is not None: + body["authorization"] = self.authorization + if self.enable_ssl_verification is not None: + body["enable_ssl_verification"] = self.enable_ssl_verification + if self.secret is not None: + body["secret"] = self.secret + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the HttpUrlSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.authorization is not None: body['authorization'] = self.authorization - if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification - if self.secret is not None: body['secret'] = self.secret - if self.url is not None: body['url'] = self.url + if self.authorization is not None: + body["authorization"] = self.authorization + if self.enable_ssl_verification is not None: + body["enable_ssl_verification"] = self.enable_ssl_verification + if self.secret is not None: + body["secret"] = self.secret + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> HttpUrlSpec: """Deserializes the HttpUrlSpec from a dictionary.""" - return cls(authorization=d.get('authorization', None), enable_ssl_verification=d.get('enable_ssl_verification', None), secret=d.get('secret', None), url=d.get('url', None)) - - + return cls( + authorization=d.get("authorization", None), + enable_ssl_verification=d.get("enable_ssl_verification", None), + secret=d.get("secret", None), + url=d.get("url", None), + ) @dataclass @@ -2379,295 +2623,315 @@ class HttpUrlSpecWithoutSecret: security purposes, it is encouraged to perform secret validation with the HMAC-encoded portion of the payload and acknowledge the risk associated with disabling hostname validation whereby it becomes more likely that requests can be maliciously routed to an unintended host.""" - + url: Optional[str] = None """External HTTPS URL called on event trigger (by using a POST request).""" - + def as_dict(self) -> dict: """Serializes the HttpUrlSpecWithoutSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification - if self.url is not
None: body['url'] = self.url + if self.enable_ssl_verification is not None: + body["enable_ssl_verification"] = self.enable_ssl_verification + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the HttpUrlSpecWithoutSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.enable_ssl_verification is not None: body['enable_ssl_verification'] = self.enable_ssl_verification - if self.url is not None: body['url'] = self.url + if self.enable_ssl_verification is not None: + body["enable_ssl_verification"] = self.enable_ssl_verification + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> HttpUrlSpecWithoutSecret: """Deserializes the HttpUrlSpecWithoutSecret from a dictionary.""" - return cls(enable_ssl_verification=d.get('enable_ssl_verification', None), url=d.get('url', None)) - - + return cls(enable_ssl_verification=d.get("enable_ssl_verification", None), url=d.get("url", None)) @dataclass class InputTag: """Tag for a dataset input.""" - + key: str """The tag key.""" - + value: str """The tag value.""" - + def as_dict(self) -> dict: """Serializes the InputTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the InputTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> InputTag: """Deserializes the InputTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class JobSpec: job_id: str """ID of the job that the webhook runs.""" - + access_token: str """The personal access token used to authorize webhook's job runs.""" - + workspace_url: Optional[str] = None """URL of the workspace containing the job that this webhook runs. 
If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.""" - + def as_dict(self) -> dict: """Serializes the JobSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_token is not None: body['access_token'] = self.access_token - if self.job_id is not None: body['job_id'] = self.job_id - if self.workspace_url is not None: body['workspace_url'] = self.workspace_url + if self.access_token is not None: + body["access_token"] = self.access_token + if self.job_id is not None: + body["job_id"] = self.job_id + if self.workspace_url is not None: + body["workspace_url"] = self.workspace_url return body def as_shallow_dict(self) -> dict: """Serializes the JobSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_token is not None: body['access_token'] = self.access_token - if self.job_id is not None: body['job_id'] = self.job_id - if self.workspace_url is not None: body['workspace_url'] = self.workspace_url + if self.access_token is not None: + body["access_token"] = self.access_token + if self.job_id is not None: + body["job_id"] = self.job_id + if self.workspace_url is not None: + body["workspace_url"] = self.workspace_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSpec: """Deserializes the JobSpec from a dictionary.""" - return cls(access_token=d.get('access_token', None), job_id=d.get('job_id', None), workspace_url=d.get('workspace_url', None)) - - + return cls( + access_token=d.get("access_token", None), + job_id=d.get("job_id", None), + workspace_url=d.get("workspace_url", None), + ) @dataclass class JobSpecWithoutSecret: job_id: Optional[str] = None """ID of the job that the webhook runs.""" - + workspace_url: Optional[str] = None """URL of the workspace containing the job that this webhook runs. Defaults to the workspace URL in which the webhook is created. 
If not specified, the job’s workspace is assumed to be the same as the webhook’s.""" - + def as_dict(self) -> dict: """Serializes the JobSpecWithoutSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id - if self.workspace_url is not None: body['workspace_url'] = self.workspace_url + if self.job_id is not None: + body["job_id"] = self.job_id + if self.workspace_url is not None: + body["workspace_url"] = self.workspace_url return body def as_shallow_dict(self) -> dict: """Serializes the JobSpecWithoutSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.job_id is not None: body['job_id'] = self.job_id - if self.workspace_url is not None: body['workspace_url'] = self.workspace_url + if self.job_id is not None: + body["job_id"] = self.job_id + if self.workspace_url is not None: + body["workspace_url"] = self.workspace_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> JobSpecWithoutSecret: """Deserializes the JobSpecWithoutSecret from a dictionary.""" - return cls(job_id=d.get('job_id', None), workspace_url=d.get('workspace_url', None)) - - - - - + return cls(job_id=d.get("job_id", None), workspace_url=d.get("workspace_url", None)) @dataclass class ListArtifactsResponse: files: Optional[List[FileInfo]] = None """The file location and metadata for artifacts.""" - + next_page_token: Optional[str] = None """The token that can be used to retrieve the next page of artifact results.""" - + root_uri: Optional[str] = None """The root artifact directory for the run.""" - + def as_dict(self) -> dict: """Serializes the ListArtifactsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.files: body['files'] = [v.as_dict() for v in self.files] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.root_uri is not None: body['root_uri'] = self.root_uri + if self.files: + body["files"] = [v.as_dict() for v in self.files] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.root_uri is not None: + body["root_uri"] = self.root_uri return body def as_shallow_dict(self) -> dict: """Serializes the ListArtifactsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.files: body['files'] = self.files - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.root_uri is not None: body['root_uri'] = self.root_uri + if self.files: + body["files"] = self.files + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.root_uri is not None: + body["root_uri"] = self.root_uri return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListArtifactsResponse: """Deserializes the ListArtifactsResponse from a dictionary.""" - return cls(files=_repeated_dict(d, 'files', FileInfo), next_page_token=d.get('next_page_token', None), root_uri=d.get('root_uri', None)) - - - - - + return cls( + files=_repeated_dict(d, "files", FileInfo), + next_page_token=d.get("next_page_token", None), + root_uri=d.get("root_uri", None), + ) @dataclass class ListExperimentsResponse: experiments: Optional[List[Experiment]] = None """Paginated Experiments beginning with the first item on the requested page.""" - + next_page_token: Optional[str] = None """Token that can be used to retrieve the next page of experiments. 
Empty token means no more experiment is available for retrieval.""" - + def as_dict(self) -> dict: """Serializes the ListExperimentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiments: body['experiments'] = [v.as_dict() for v in self.experiments] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.experiments: + body["experiments"] = [v.as_dict() for v in self.experiments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListExperimentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiments: body['experiments'] = self.experiments - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.experiments: + body["experiments"] = self.experiments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListExperimentsResponse: """Deserializes the ListExperimentsResponse from a dictionary.""" - return cls(experiments=_repeated_dict(d, 'experiments', Experiment), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + experiments=_repeated_dict(d, "experiments", Experiment), next_page_token=d.get("next_page_token", None) + ) @dataclass class ListModelsResponse: next_page_token: Optional[str] = None """Pagination token to request next page of models for the same query.""" - + registered_models: Optional[List[Model]] = None - + def as_dict(self) -> dict: """Serializes the ListModelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.registered_models: + body["registered_models"] = [v.as_dict() for v in self.registered_models] return body def as_shallow_dict(self) -> dict: """Serializes the ListModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.registered_models: body['registered_models'] = self.registered_models + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.registered_models: + body["registered_models"] = self.registered_models return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListModelsResponse: """Deserializes the ListModelsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), registered_models=_repeated_dict(d, 'registered_models', Model)) - - + return cls( + next_page_token=d.get("next_page_token", None), + registered_models=_repeated_dict(d, "registered_models", Model), + ) @dataclass class ListRegistryWebhooks: next_page_token: Optional[str] = None """Token that can be used to retrieve the next page of artifact results""" - + webhooks: Optional[List[RegistryWebhook]] = None """Array of registry webhooks.""" - + def as_dict(self) -> dict: """Serializes the ListRegistryWebhooks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.webhooks: body['webhooks'] 
= [v.as_dict() for v in self.webhooks] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.webhooks: + body["webhooks"] = [v.as_dict() for v in self.webhooks] return body def as_shallow_dict(self) -> dict: """Serializes the ListRegistryWebhooks into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.webhooks: body['webhooks'] = self.webhooks + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.webhooks: + body["webhooks"] = self.webhooks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListRegistryWebhooks: """Deserializes the ListRegistryWebhooks from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), webhooks=_repeated_dict(d, 'webhooks', RegistryWebhook)) - - - - - + return cls( + next_page_token=d.get("next_page_token", None), webhooks=_repeated_dict(d, "webhooks", RegistryWebhook) + ) @dataclass class ListTransitionRequestsResponse: requests: Optional[List[Activity]] = None """Array of open transition requests.""" - + def as_dict(self) -> dict: """Serializes the ListTransitionRequestsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.requests: body['requests'] = [v.as_dict() for v in self.requests] + if self.requests: + body["requests"] = [v.as_dict() for v in self.requests] return body def as_shallow_dict(self) -> dict: """Serializes the ListTransitionRequestsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.requests: body['requests'] = self.requests + if self.requests: + body["requests"] = self.requests return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListTransitionRequestsResponse: """Deserializes the ListTransitionRequestsResponse from a dictionary.""" - return cls(requests=_repeated_dict(d, 'requests', Activity)) - - - - - + return cls(requests=_repeated_dict(d, "requests", Activity)) @dataclass @@ -2675,42 +2939,53 @@ class LogBatch: metrics: Optional[List[Metric]] = None """Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and tags in total.""" - + params: Optional[List[Param]] = None """Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, and tags in total.""" - + run_id: Optional[str] = None """ID of the run to log under""" - + tags: Optional[List[RunTag]] = None """Tags to log. 
A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags in total.""" - + def as_dict(self) -> dict: """Serializes the LogBatch into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] - if self.params: body['params'] = [v.as_dict() for v in self.params] - if self.run_id is not None: body['run_id'] = self.run_id - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.metrics: + body["metrics"] = [v.as_dict() for v in self.metrics] + if self.params: + body["params"] = [v.as_dict() for v in self.params] + if self.run_id is not None: + body["run_id"] = self.run_id + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the LogBatch into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: body['metrics'] = self.metrics - if self.params: body['params'] = self.params - if self.run_id is not None: body['run_id'] = self.run_id - if self.tags: body['tags'] = self.tags + if self.metrics: + body["metrics"] = self.metrics + if self.params: + body["params"] = self.params + if self.run_id is not None: + body["run_id"] = self.run_id + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogBatch: """Deserializes the LogBatch from a dictionary.""" - return cls(metrics=_repeated_dict(d, 'metrics', Metric), params=_repeated_dict(d, 'params', Param), run_id=d.get('run_id', None), tags=_repeated_dict(d, 'tags', RunTag)) - - + return cls( + metrics=_repeated_dict(d, "metrics", Metric), + params=_repeated_dict(d, "params", Param), + run_id=d.get("run_id", None), + tags=_repeated_dict(d, "tags", RunTag), + ) @dataclass @@ -2729,43 +3004,49 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogBatchResponse: """Deserializes the LogBatchResponse from a dictionary.""" return cls() - - @dataclass class LogInputs: run_id: str """ID of the run to log under""" - + datasets: Optional[List[DatasetInput]] = None """Dataset inputs""" - + models: Optional[List[ModelInput]] = None """Model inputs""" - + def as_dict(self) -> dict: """Serializes the LogInputs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.datasets: body['datasets'] = [v.as_dict() for v in self.datasets] - if self.models: body['models'] = [v.as_dict() for v in self.models] - if self.run_id is not None: body['run_id'] = self.run_id + if self.datasets: + body["datasets"] = [v.as_dict() for v in self.datasets] + if self.models: + body["models"] = [v.as_dict() for v in self.models] + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the LogInputs into a shallow dictionary of its immediate attributes.""" body = {} - if self.datasets: body['datasets'] = self.datasets - if self.models: body['models'] = self.models - if self.run_id is not None: body['run_id'] = self.run_id + if self.datasets: + body["datasets"] = self.datasets + if self.models: + body["models"] = self.models + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogInputs: """Deserializes the LogInputs from a dictionary.""" - return cls(datasets=_repeated_dict(d, 'datasets', DatasetInput), models=_repeated_dict(d, 'models', ModelInput), run_id=d.get('run_id', None)) - - + return cls( + datasets=_repeated_dict(d, 
"datasets", DatasetInput), + models=_repeated_dict(d, "models", ModelInput), + run_id=d.get("run_id", None), + ) @dataclass @@ -2784,38 +3065,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogInputsResponse: """Deserializes the LogInputsResponse from a dictionary.""" return cls() - - @dataclass class LogLoggedModelParamsRequest: model_id: Optional[str] = None """The ID of the logged model to log params for.""" - + params: Optional[List[LoggedModelParameter]] = None """Parameters to attach to the model.""" - + def as_dict(self) -> dict: """Serializes the LogLoggedModelParamsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id - if self.params: body['params'] = [v.as_dict() for v in self.params] + if self.model_id is not None: + body["model_id"] = self.model_id + if self.params: + body["params"] = [v.as_dict() for v in self.params] return body def as_shallow_dict(self) -> dict: """Serializes the LogLoggedModelParamsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id - if self.params: body['params'] = self.params + if self.model_id is not None: + body["model_id"] = self.model_id + if self.params: + body["params"] = self.params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequest: """Deserializes the LogLoggedModelParamsRequest from a dictionary.""" - return cls(model_id=d.get('model_id', None), params=_repeated_dict(d, 'params', LoggedModelParameter)) - - + return cls(model_id=d.get("model_id", None), params=_repeated_dict(d, "params", LoggedModelParameter)) @dataclass @@ -2834,76 +3115,100 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequestResponse: """Deserializes the LogLoggedModelParamsRequestResponse from a dictionary.""" return cls() - - @dataclass class LogMetric: key: str """Name of the metric.""" - + value: float """Double value of the metric being logged.""" - + timestamp: int """Unix timestamp in milliseconds at the time metric was logged.""" - + dataset_digest: Optional[str] = None """Dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that uniquely identifies it within datasets of the same name.""" - + dataset_name: Optional[str] = None """The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, “fantastic-elk-3”""" - + model_id: Optional[str] = None """ID of the logged model associated with the metric, if applicable""" - + run_id: Optional[str] = None """ID of the run under which to log the metric. Must be provided.""" - + run_uuid: Optional[str] = None """[Deprecated, use `run_id` instead] ID of the run under which to log the metric. 
This field will be removed in a future MLflow version.""" - + step: Optional[int] = None """Step at which to log the metric""" - + def as_dict(self) -> dict: """Serializes the LogMetric into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.key is not None: body['key'] = self.key - if self.model_id is not None: body['model_id'] = self.model_id - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.step is not None: body['step'] = self.step - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.value is not None: body['value'] = self.value + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.key is not None: + body["key"] = self.key + if self.model_id is not None: + body["model_id"] = self.model_id + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.step is not None: + body["step"] = self.step + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the LogMetric into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.key is not None: body['key'] = self.key - if self.model_id is not None: body['model_id'] = self.model_id - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.step is not None: body['step'] = self.step - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.value is not None: body['value'] = self.value + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.key is not None: + body["key"] = self.key + if self.model_id is not None: + body["model_id"] = self.model_id + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.step is not None: + body["step"] = self.step + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogMetric: """Deserializes the LogMetric from a dictionary.""" - return cls(dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None), key=d.get('key', None), model_id=d.get('model_id', None), run_id=d.get('run_id', None), run_uuid=d.get('run_uuid', None), step=d.get('step', None), timestamp=d.get('timestamp', None), value=d.get('value', None)) - - + return cls( + dataset_digest=d.get("dataset_digest", None), + dataset_name=d.get("dataset_name", None), + key=d.get("key", None), + model_id=d.get("model_id", None), + run_id=d.get("run_id", None), + run_uuid=d.get("run_uuid", None), + step=d.get("step", None), + timestamp=d.get("timestamp", None), + value=d.get("value", None), + ) @dataclass @@ -2922,38 +3227,38 
@@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogMetricResponse: """Deserializes the LogMetricResponse from a dictionary.""" return cls() - - @dataclass class LogModel: model_json: Optional[str] = None """MLmodel file in json format.""" - + run_id: Optional[str] = None """ID of the run to log under""" - + def as_dict(self) -> dict: """Serializes the LogModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_json is not None: body['model_json'] = self.model_json - if self.run_id is not None: body['run_id'] = self.run_id + if self.model_json is not None: + body["model_json"] = self.model_json + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the LogModel into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_json is not None: body['model_json'] = self.model_json - if self.run_id is not None: body['run_id'] = self.run_id + if self.model_json is not None: + body["model_json"] = self.model_json + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogModel: """Deserializes the LogModel from a dictionary.""" - return cls(model_json=d.get('model_json', None), run_id=d.get('run_id', None)) - - + return cls(model_json=d.get("model_json", None), run_id=d.get("run_id", None)) @dataclass @@ -2972,38 +3277,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogModelResponse: """Deserializes the LogModelResponse from a dictionary.""" return cls() - - @dataclass class LogOutputsRequest: run_id: str """The ID of the Run from which to log outputs.""" - + models: Optional[List[ModelOutput]] = None """The model outputs from the Run.""" - + def as_dict(self) -> dict: """Serializes the LogOutputsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.models: body['models'] = [v.as_dict() for v in self.models] - if self.run_id is not None: body['run_id'] = self.run_id + if self.models: + body["models"] = [v.as_dict() for v in self.models] + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the LogOutputsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.models: body['models'] = self.models - if self.run_id is not None: body['run_id'] = self.run_id + if self.models: + body["models"] = self.models + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogOutputsRequest: """Deserializes the LogOutputsRequest from a dictionary.""" - return cls(models=_repeated_dict(d, 'models', ModelOutput), run_id=d.get('run_id', None)) - - + return cls(models=_repeated_dict(d, "models", ModelOutput), run_id=d.get("run_id", None)) @dataclass @@ -3022,49 +3327,58 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogOutputsResponse: """Deserializes the LogOutputsResponse from a dictionary.""" return cls() - - @dataclass class LogParam: key: str """Name of the param. Maximum size is 255 bytes.""" - + value: str """String value of the param being logged. Maximum size is 500 bytes.""" - + run_id: Optional[str] = None """ID of the run under which to log the param. Must be provided.""" - + run_uuid: Optional[str] = None """[Deprecated, use `run_id` instead] ID of the run under which to log the param. 
This field will be removed in a future MLflow version.""" - + def as_dict(self) -> dict: """Serializes the LogParam into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the LogParam into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LogParam: """Deserializes the LogParam from a dictionary.""" - return cls(key=d.get('key', None), run_id=d.get('run_id', None), run_uuid=d.get('run_uuid', None), value=d.get('value', None)) - - + return cls( + key=d.get("key", None), + run_id=d.get("run_id", None), + run_uuid=d.get("run_uuid", None), + value=d.get("value", None), + ) @dataclass @@ -3083,595 +3397,750 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> LogParamResponse: """Deserializes the LogParamResponse from a dictionary.""" return cls() - - @dataclass class LoggedModel: """A logged model message includes logged model attributes, tags, registration info, params, and linked run metrics.""" - + data: Optional[LoggedModelData] = None """The params and metrics attached to the logged model.""" - + info: Optional[LoggedModelInfo] = None """The logged model attributes such as model ID, status, tags, etc.""" - + def as_dict(self) -> dict: """Serializes the LoggedModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data: body['data'] = self.data.as_dict() - if self.info: body['info'] = self.info.as_dict() + if self.data: + body["data"] = self.data.as_dict() + if self.info: + body["info"] = self.info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModel into a shallow dictionary of its immediate attributes.""" body = {} - if self.data: body['data'] = self.data - if self.info: body['info'] = self.info + if self.data: + body["data"] = self.data + if self.info: + body["info"] = self.info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModel: """Deserializes the LoggedModel from a dictionary.""" - return cls(data=_from_dict(d, 'data', LoggedModelData), info=_from_dict(d, 'info', LoggedModelInfo)) - - + return cls(data=_from_dict(d, "data", LoggedModelData), info=_from_dict(d, "info", LoggedModelInfo)) @dataclass class LoggedModelData: """A LoggedModelData message includes logged model params and linked metrics.""" - + metrics: Optional[List[Metric]] = None """Performance metrics linked to the model.""" - + params: Optional[List[LoggedModelParameter]] = None """Immutable string key-value pairs of the model.""" - 
+ def as_dict(self) -> dict: """Serializes the LoggedModelData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] - if self.params: body['params'] = [v.as_dict() for v in self.params] + if self.metrics: + body["metrics"] = [v.as_dict() for v in self.metrics] + if self.params: + body["params"] = [v.as_dict() for v in self.params] return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelData into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: body['metrics'] = self.metrics - if self.params: body['params'] = self.params + if self.metrics: + body["metrics"] = self.metrics + if self.params: + body["params"] = self.params return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelData: """Deserializes the LoggedModelData from a dictionary.""" - return cls(metrics=_repeated_dict(d, 'metrics', Metric), params=_repeated_dict(d, 'params', LoggedModelParameter)) - - + return cls( + metrics=_repeated_dict(d, "metrics", Metric), params=_repeated_dict(d, "params", LoggedModelParameter) + ) @dataclass class LoggedModelInfo: """A LoggedModelInfo includes logged model attributes, tags, and registration info.""" - + artifact_uri: Optional[str] = None """The URI of the directory where model artifacts are stored.""" - + creation_timestamp_ms: Optional[int] = None """The timestamp when the model was created in milliseconds since the UNIX epoch.""" - + creator_id: Optional[int] = None """The ID of the user or principal that created the model.""" - + experiment_id: Optional[str] = None """The ID of the experiment that owns the model.""" - + last_updated_timestamp_ms: Optional[int] = None """The timestamp when the model was last updated in milliseconds since the UNIX epoch.""" - + model_id: Optional[str] = None """The unique identifier for the logged model.""" - + model_type: Optional[str] = None """The type of model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``.""" - + name: Optional[str] = None """The name of the model.""" - + source_run_id: Optional[str] = None """The ID of the run that created the model.""" - + status: Optional[LoggedModelStatus] = None """The status of whether or not the model is ready for use.""" - + status_message: Optional[str] = None """Details on the current model status.""" - + tags: Optional[List[LoggedModelTag]] = None """Mutable string key-value pairs set on the model.""" - + def as_dict(self) -> dict: """Serializes the LoggedModelInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri - if self.creation_timestamp_ms is not None: body['creation_timestamp_ms'] = self.creation_timestamp_ms - if self.creator_id is not None: body['creator_id'] = self.creator_id - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.last_updated_timestamp_ms is not None: body['last_updated_timestamp_ms'] = self.last_updated_timestamp_ms - if self.model_id is not None: body['model_id'] = self.model_id - if self.model_type is not None: body['model_type'] = self.model_type - if self.name is not None: body['name'] = self.name - if self.source_run_id is not None: body['source_run_id'] = self.source_run_id - if self.status is not None: body['status'] = self.status.value - if self.status_message is not None: body['status_message'] = self.status_message - if self.tags: body['tags'] = [v.as_dict() for v in 
self.tags] + if self.artifact_uri is not None: + body["artifact_uri"] = self.artifact_uri + if self.creation_timestamp_ms is not None: + body["creation_timestamp_ms"] = self.creation_timestamp_ms + if self.creator_id is not None: + body["creator_id"] = self.creator_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.last_updated_timestamp_ms is not None: + body["last_updated_timestamp_ms"] = self.last_updated_timestamp_ms + if self.model_id is not None: + body["model_id"] = self.model_id + if self.model_type is not None: + body["model_type"] = self.model_type + if self.name is not None: + body["name"] = self.name + if self.source_run_id is not None: + body["source_run_id"] = self.source_run_id + if self.status is not None: + body["status"] = self.status.value + if self.status_message is not None: + body["status_message"] = self.status_message + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri - if self.creation_timestamp_ms is not None: body['creation_timestamp_ms'] = self.creation_timestamp_ms - if self.creator_id is not None: body['creator_id'] = self.creator_id - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.last_updated_timestamp_ms is not None: body['last_updated_timestamp_ms'] = self.last_updated_timestamp_ms - if self.model_id is not None: body['model_id'] = self.model_id - if self.model_type is not None: body['model_type'] = self.model_type - if self.name is not None: body['name'] = self.name - if self.source_run_id is not None: body['source_run_id'] = self.source_run_id - if self.status is not None: body['status'] = self.status - if self.status_message is not None: body['status_message'] = self.status_message - if self.tags: body['tags'] = self.tags + if self.artifact_uri is not None: + body["artifact_uri"] = self.artifact_uri + if self.creation_timestamp_ms is not None: + body["creation_timestamp_ms"] = self.creation_timestamp_ms + if self.creator_id is not None: + body["creator_id"] = self.creator_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.last_updated_timestamp_ms is not None: + body["last_updated_timestamp_ms"] = self.last_updated_timestamp_ms + if self.model_id is not None: + body["model_id"] = self.model_id + if self.model_type is not None: + body["model_type"] = self.model_type + if self.name is not None: + body["name"] = self.name + if self.source_run_id is not None: + body["source_run_id"] = self.source_run_id + if self.status is not None: + body["status"] = self.status + if self.status_message is not None: + body["status_message"] = self.status_message + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelInfo: """Deserializes the LoggedModelInfo from a dictionary.""" - return cls(artifact_uri=d.get('artifact_uri', None), creation_timestamp_ms=d.get('creation_timestamp_ms', None), creator_id=d.get('creator_id', None), experiment_id=d.get('experiment_id', None), last_updated_timestamp_ms=d.get('last_updated_timestamp_ms', None), model_id=d.get('model_id', None), model_type=d.get('model_type', None), name=d.get('name', None), source_run_id=d.get('source_run_id', None), status=_enum(d, 'status', LoggedModelStatus), 
status_message=d.get('status_message', None), tags=_repeated_dict(d, 'tags', LoggedModelTag)) - - + return cls( + artifact_uri=d.get("artifact_uri", None), + creation_timestamp_ms=d.get("creation_timestamp_ms", None), + creator_id=d.get("creator_id", None), + experiment_id=d.get("experiment_id", None), + last_updated_timestamp_ms=d.get("last_updated_timestamp_ms", None), + model_id=d.get("model_id", None), + model_type=d.get("model_type", None), + name=d.get("name", None), + source_run_id=d.get("source_run_id", None), + status=_enum(d, "status", LoggedModelStatus), + status_message=d.get("status_message", None), + tags=_repeated_dict(d, "tags", LoggedModelTag), + ) @dataclass class LoggedModelParameter: """Parameter associated with a LoggedModel.""" - + key: Optional[str] = None """The key identifying this param.""" - + value: Optional[str] = None """The value of this param.""" - + def as_dict(self) -> dict: """Serializes the LoggedModelParameter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelParameter into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelParameter: """Deserializes the LoggedModelParameter from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) class LoggedModelStatus(Enum): """A LoggedModelStatus enum value represents the status of a logged model.""" - - LOGGED_MODEL_PENDING = 'LOGGED_MODEL_PENDING' - LOGGED_MODEL_READY = 'LOGGED_MODEL_READY' - LOGGED_MODEL_UPLOAD_FAILED = 'LOGGED_MODEL_UPLOAD_FAILED' + + LOGGED_MODEL_PENDING = "LOGGED_MODEL_PENDING" + LOGGED_MODEL_READY = "LOGGED_MODEL_READY" + LOGGED_MODEL_UPLOAD_FAILED = "LOGGED_MODEL_UPLOAD_FAILED" + @dataclass class LoggedModelTag: """Tag for a LoggedModel.""" - + key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the LoggedModelTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the LoggedModelTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LoggedModelTag: """Deserializes the LoggedModelTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class Metric: """Metric associated with a run, represented as a 
key-value pair.""" - + dataset_digest: Optional[str] = None """The dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that uniquely identifies it within datasets of the same name.""" - + dataset_name: Optional[str] = None """The name of the dataset associated with the metric. E.g. “my.uc.table@2” “nyc-taxi-dataset”, “fantastic-elk-3”""" - + key: Optional[str] = None """The key identifying the metric.""" - + model_id: Optional[str] = None """The ID of the logged model or registered model version associated with the metric, if applicable.""" - + run_id: Optional[str] = None """The ID of the run containing the metric.""" - + step: Optional[int] = None """The step at which the metric was logged.""" - + timestamp: Optional[int] = None """The timestamp at which the metric was recorded.""" - + value: Optional[float] = None """The value of the metric.""" - + def as_dict(self) -> dict: """Serializes the Metric into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.key is not None: body['key'] = self.key - if self.model_id is not None: body['model_id'] = self.model_id - if self.run_id is not None: body['run_id'] = self.run_id - if self.step is not None: body['step'] = self.step - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.value is not None: body['value'] = self.value + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.key is not None: + body["key"] = self.key + if self.model_id is not None: + body["model_id"] = self.model_id + if self.run_id is not None: + body["run_id"] = self.run_id + if self.step is not None: + body["step"] = self.step + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the Metric into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.key is not None: body['key'] = self.key - if self.model_id is not None: body['model_id'] = self.model_id - if self.run_id is not None: body['run_id'] = self.run_id - if self.step is not None: body['step'] = self.step - if self.timestamp is not None: body['timestamp'] = self.timestamp - if self.value is not None: body['value'] = self.value + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.key is not None: + body["key"] = self.key + if self.model_id is not None: + body["model_id"] = self.model_id + if self.run_id is not None: + body["run_id"] = self.run_id + if self.step is not None: + body["step"] = self.step + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Metric: """Deserializes the Metric from a dictionary.""" - return cls(dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None), key=d.get('key', None), model_id=d.get('model_id', None), run_id=d.get('run_id', None), 
step=d.get('step', None), timestamp=d.get('timestamp', None), value=d.get('value', None)) - - + return cls( + dataset_digest=d.get("dataset_digest", None), + dataset_name=d.get("dataset_name", None), + key=d.get("key", None), + model_id=d.get("model_id", None), + run_id=d.get("run_id", None), + step=d.get("step", None), + timestamp=d.get("timestamp", None), + value=d.get("value", None), + ) @dataclass class Model: creation_timestamp: Optional[int] = None """Timestamp recorded when this `registered_model` was created.""" - + description: Optional[str] = None """Description of this `registered_model`.""" - + last_updated_timestamp: Optional[int] = None """Timestamp recorded when metadata for this `registered_model` was last updated.""" - + latest_versions: Optional[List[ModelVersion]] = None """Collection of latest model versions for each stage. Only contains models with current `READY` status.""" - + name: Optional[str] = None """Unique name for the model.""" - + tags: Optional[List[ModelTag]] = None """Tags: Additional metadata key-value pairs for this `registered_model`.""" - + user_id: Optional[str] = None """User that created this `registered_model`""" - + def as_dict(self) -> dict: """Serializes the Model into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.description is not None: body['description'] = self.description - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.user_id is not None: body['user_id'] = self.user_id + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.description is not None: + body["description"] = self.description + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.latest_versions: + body["latest_versions"] = [v.as_dict() for v in self.latest_versions] + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the Model into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.description is not None: body['description'] = self.description - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.latest_versions: body['latest_versions'] = self.latest_versions - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = self.tags - if self.user_id is not None: body['user_id'] = self.user_id + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.description is not None: + body["description"] = self.description + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.latest_versions: + body["latest_versions"] = self.latest_versions + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = self.tags + if self.user_id is not None: + 
body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Model: """Deserializes the Model from a dictionary.""" - return cls(creation_timestamp=d.get('creation_timestamp', None), description=d.get('description', None), last_updated_timestamp=d.get('last_updated_timestamp', None), latest_versions=_repeated_dict(d, 'latest_versions', ModelVersion), name=d.get('name', None), tags=_repeated_dict(d, 'tags', ModelTag), user_id=d.get('user_id', None)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + description=d.get("description", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + latest_versions=_repeated_dict(d, "latest_versions", ModelVersion), + name=d.get("name", None), + tags=_repeated_dict(d, "tags", ModelTag), + user_id=d.get("user_id", None), + ) @dataclass class ModelDatabricks: creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + description: Optional[str] = None """User-specified description for the object.""" - + id: Optional[str] = None """Unique identifier for the object.""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + latest_versions: Optional[List[ModelVersion]] = None """Array of model versions, each the latest version for its stage.""" - + name: Optional[str] = None """Name of the model.""" - + permission_level: Optional[PermissionLevel] = None """Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..).""" - + tags: Optional[List[ModelTag]] = None """Array of tags associated with the model.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the ModelDatabricks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.description is not None: body['description'] = self.description - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.latest_versions: body['latest_versions'] = [v.as_dict() for v in self.latest_versions] - if self.name is not None: body['name'] = self.name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.user_id is not None: body['user_id'] = self.user_id + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.description is not None: + body["description"] = self.description + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.latest_versions: + body["latest_versions"] = [v.as_dict() for v in self.latest_versions] + if self.name is not None: + body["name"] = self.name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the ModelDatabricks into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is 
not None: body['creation_timestamp'] = self.creation_timestamp - if self.description is not None: body['description'] = self.description - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.latest_versions: body['latest_versions'] = self.latest_versions - if self.name is not None: body['name'] = self.name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.tags: body['tags'] = self.tags - if self.user_id is not None: body['user_id'] = self.user_id + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.description is not None: + body["description"] = self.description + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.latest_versions: + body["latest_versions"] = self.latest_versions + if self.name is not None: + body["name"] = self.name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.tags: + body["tags"] = self.tags + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelDatabricks: """Deserializes the ModelDatabricks from a dictionary.""" - return cls(creation_timestamp=d.get('creation_timestamp', None), description=d.get('description', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), latest_versions=_repeated_dict(d, 'latest_versions', ModelVersion), name=d.get('name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), tags=_repeated_dict(d, 'tags', ModelTag), user_id=d.get('user_id', None)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + description=d.get("description", None), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + latest_versions=_repeated_dict(d, "latest_versions", ModelVersion), + name=d.get("name", None), + permission_level=_enum(d, "permission_level", PermissionLevel), + tags=_repeated_dict(d, "tags", ModelTag), + user_id=d.get("user_id", None), + ) @dataclass class ModelInput: """Represents a LoggedModel or Registered Model Version input to a Run.""" - + model_id: str """The unique identifier of the model.""" - + def as_dict(self) -> dict: """Serializes the ModelInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id + if self.model_id is not None: + body["model_id"] = self.model_id return body def as_shallow_dict(self) -> dict: """Serializes the ModelInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id + if self.model_id is not None: + body["model_id"] = self.model_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelInput: """Deserializes the ModelInput from a dictionary.""" - return cls(model_id=d.get('model_id', None)) - - + return cls(model_id=d.get("model_id", None)) @dataclass class ModelOutput: """Represents a LoggedModel output of a Run.""" - + model_id: str """The unique identifier of the model.""" - + step: int """The step at which the model was produced.""" - + def as_dict(self) -> dict: """Serializes the ModelOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if 
self.model_id is not None: body['model_id'] = self.model_id - if self.step is not None: body['step'] = self.step + if self.model_id is not None: + body["model_id"] = self.model_id + if self.step is not None: + body["step"] = self.step return body def as_shallow_dict(self) -> dict: """Serializes the ModelOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id - if self.step is not None: body['step'] = self.step + if self.model_id is not None: + body["model_id"] = self.model_id + if self.step is not None: + body["step"] = self.step return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelOutput: """Deserializes the ModelOutput from a dictionary.""" - return cls(model_id=d.get('model_id', None), step=d.get('step', None)) - - + return cls(model_id=d.get("model_id", None), step=d.get("step", None)) @dataclass class ModelTag: key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the ModelTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ModelTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelTag: """Deserializes the ModelTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class ModelVersion: creation_timestamp: Optional[int] = None """Timestamp recorded when this `model_version` was created.""" - + current_stage: Optional[str] = None """Current stage for this `model_version`.""" - + description: Optional[str] = None """Description of this `model_version`.""" - + last_updated_timestamp: Optional[int] = None """Timestamp recorded when metadata for this `model_version` was last updated.""" - + name: Optional[str] = None """Unique name of the model""" - + run_id: Optional[str] = None """MLflow run ID used when creating `model_version`, if `source` was generated by an experiment run stored in MLflow tracking server.""" - + run_link: Optional[str] = None """Run Link: Direct link to the run that generated this version""" - + source: Optional[str] = None """URI indicating the location of the source model artifacts, used when creating `model_version`""" - + status: Optional[ModelVersionStatus] = None """Current status of `model_version`""" - + status_message: Optional[str] = None """Details on current `status`, if it is pending or failed.""" - + tags: Optional[List[ModelVersionTag]] = None """Tags: Additional metadata key-value pairs for this `model_version`.""" - + user_id: Optional[str] = None """User that created this `model_version`.""" - + version: Optional[str] = None """Model's version number.""" - + def as_dict(self) -> dict: """Serializes the ModelVersion into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: 
body['creation_timestamp'] = self.creation_timestamp - if self.current_stage is not None: body['current_stage'] = self.current_stage - if self.description is not None: body['description'] = self.description - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_link is not None: body['run_link'] = self.run_link - if self.source is not None: body['source'] = self.source - if self.status is not None: body['status'] = self.status.value - if self.status_message is not None: body['status_message'] = self.status_message - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.user_id is not None: body['user_id'] = self.user_id - if self.version is not None: body['version'] = self.version + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.current_stage is not None: + body["current_stage"] = self.current_stage + if self.description is not None: + body["description"] = self.description + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_link is not None: + body["run_link"] = self.run_link + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status.value + if self.status_message is not None: + body["status_message"] = self.status_message + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.user_id is not None: + body["user_id"] = self.user_id + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersion into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.current_stage is not None: body['current_stage'] = self.current_stage - if self.description is not None: body['description'] = self.description - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_link is not None: body['run_link'] = self.run_link - if self.source is not None: body['source'] = self.source - if self.status is not None: body['status'] = self.status - if self.status_message is not None: body['status_message'] = self.status_message - if self.tags: body['tags'] = self.tags - if self.user_id is not None: body['user_id'] = self.user_id - if self.version is not None: body['version'] = self.version + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.current_stage is not None: + body["current_stage"] = self.current_stage + if self.description is not None: + body["description"] = self.description + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_link is not None: + body["run_link"] = self.run_link + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status 
+ if self.status_message is not None: + body["status_message"] = self.status_message + if self.tags: + body["tags"] = self.tags + if self.user_id is not None: + body["user_id"] = self.user_id + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersion: """Deserializes the ModelVersion from a dictionary.""" - return cls(creation_timestamp=d.get('creation_timestamp', None), current_stage=d.get('current_stage', None), description=d.get('description', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), run_id=d.get('run_id', None), run_link=d.get('run_link', None), source=d.get('source', None), status=_enum(d, 'status', ModelVersionStatus), status_message=d.get('status_message', None), tags=_repeated_dict(d, 'tags', ModelVersionTag), user_id=d.get('user_id', None), version=d.get('version', None)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + current_stage=d.get("current_stage", None), + description=d.get("description", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + name=d.get("name", None), + run_id=d.get("run_id", None), + run_link=d.get("run_link", None), + source=d.get("source", None), + status=_enum(d, "status", ModelVersionStatus), + status_message=d.get("status_message", None), + tags=_repeated_dict(d, "tags", ModelVersionTag), + user_id=d.get("user_id", None), + version=d.get("version", None), + ) @dataclass class ModelVersionDatabricks: creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + current_stage: Optional[Stage] = None """Stage of the model version. Valid values are: @@ -3682,32 +4151,32 @@ class ModelVersionDatabricks: * `Production`: Production stage. * `Archived`: Archived stage.""" - + description: Optional[str] = None """User-specified description for the object.""" - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + name: Optional[str] = None """Name of the model.""" - + permission_level: Optional[PermissionLevel] = None """Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..).""" - + run_id: Optional[str] = None """Unique identifier for the MLflow tracking run associated with the source model artifacts.""" - + run_link: Optional[str] = None """URL of the run associated with the model artifacts. This field is set at model version creation time only for model versions whose source run is from a tracking server that is different from the registry server.""" - + source: Optional[str] = None """URI that indicates the location of the source model artifacts. This is used when creating the model version.""" - + status: Optional[Status] = None """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. @@ -3715,369 +4184,468 @@ class ModelVersionDatabricks: * `FAILED_REGISTRATION`: Request to register a new model version has failed. 
* `READY`: Model version is ready for use.""" - + status_message: Optional[str] = None """Details on the current status, for example why registration failed.""" - + tags: Optional[List[ModelVersionTag]] = None """Array of tags that are associated with the model version.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + version: Optional[str] = None """Version of the model.""" - + def as_dict(self) -> dict: """Serializes the ModelVersionDatabricks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.current_stage is not None: body['current_stage'] = self.current_stage.value - if self.description is not None: body['description'] = self.description - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_link is not None: body['run_link'] = self.run_link - if self.source is not None: body['source'] = self.source - if self.status is not None: body['status'] = self.status.value - if self.status_message is not None: body['status_message'] = self.status_message - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.user_id is not None: body['user_id'] = self.user_id - if self.version is not None: body['version'] = self.version + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.current_stage is not None: + body["current_stage"] = self.current_stage.value + if self.description is not None: + body["description"] = self.description + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_link is not None: + body["run_link"] = self.run_link + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status.value + if self.status_message is not None: + body["status_message"] = self.status_message + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.user_id is not None: + body["user_id"] = self.user_id + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersionDatabricks into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.current_stage is not None: body['current_stage'] = self.current_stage - if self.description is not None: body['description'] = self.description - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_link is not None: body['run_link'] = self.run_link - if self.source is not None: body['source'] = self.source - if self.status is not None: body['status'] = self.status - 
if self.status_message is not None: body['status_message'] = self.status_message - if self.tags: body['tags'] = self.tags - if self.user_id is not None: body['user_id'] = self.user_id - if self.version is not None: body['version'] = self.version + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.current_stage is not None: + body["current_stage"] = self.current_stage + if self.description is not None: + body["description"] = self.description + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_link is not None: + body["run_link"] = self.run_link + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status + if self.status_message is not None: + body["status_message"] = self.status_message + if self.tags: + body["tags"] = self.tags + if self.user_id is not None: + body["user_id"] = self.user_id + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersionDatabricks: """Deserializes the ModelVersionDatabricks from a dictionary.""" - return cls(creation_timestamp=d.get('creation_timestamp', None), current_stage=_enum(d, 'current_stage', Stage), description=d.get('description', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), run_id=d.get('run_id', None), run_link=d.get('run_link', None), source=d.get('source', None), status=_enum(d, 'status', Status), status_message=d.get('status_message', None), tags=_repeated_dict(d, 'tags', ModelVersionTag), user_id=d.get('user_id', None), version=d.get('version', None)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + current_stage=_enum(d, "current_stage", Stage), + description=d.get("description", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + name=d.get("name", None), + permission_level=_enum(d, "permission_level", PermissionLevel), + run_id=d.get("run_id", None), + run_link=d.get("run_link", None), + source=d.get("source", None), + status=_enum(d, "status", Status), + status_message=d.get("status_message", None), + tags=_repeated_dict(d, "tags", ModelVersionTag), + user_id=d.get("user_id", None), + version=d.get("version", None), + ) class ModelVersionStatus(Enum): """Current status of `model_version`""" - - FAILED_REGISTRATION = 'FAILED_REGISTRATION' - PENDING_REGISTRATION = 'PENDING_REGISTRATION' - READY = 'READY' + + FAILED_REGISTRATION = "FAILED_REGISTRATION" + PENDING_REGISTRATION = "PENDING_REGISTRATION" + READY = "READY" + @dataclass class ModelVersionTag: key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the ModelVersionTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the ModelVersionTag into a shallow dictionary of its immediate 
attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelVersionTag: """Deserializes the ModelVersionTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class Param: """Param associated with a run.""" - + key: Optional[str] = None """Key identifying this param.""" - + value: Optional[str] = None """Value associated with this param.""" - + def as_dict(self) -> dict: """Serializes the Param into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the Param into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Param: """Deserializes the Param from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) class PermissionLevel(Enum): """Permission level of the requesting user on the object. For what is allowed at each level, see [MLflow Model permissions](..).""" - - CAN_EDIT = 'CAN_EDIT' - CAN_MANAGE = 'CAN_MANAGE' - CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' - CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' - CAN_READ = 'CAN_READ' + + CAN_EDIT = "CAN_EDIT" + CAN_MANAGE = "CAN_MANAGE" + CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS" + CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS" + CAN_READ = "CAN_READ" + @dataclass class RegisteredModelAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[RegisteredModelPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelAccessControlRequest into a 
shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAccessControlRequest: """Deserializes the RegisteredModelAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class RegisteredModelAccessControlResponse: all_permissions: Optional[List[RegisteredModelPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = 
self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAccessControlResponse: """Deserializes the RegisteredModelAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', RegisteredModelPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", RegisteredModelPermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class RegisteredModelPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[RegisteredModelPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermission: """Deserializes the RegisteredModelPermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel), + ) class RegisteredModelPermissionLevel(Enum): """Permission level""" - - CAN_EDIT = 'CAN_EDIT' - CAN_MANAGE = 'CAN_MANAGE' - CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS' - CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS' - CAN_READ = 'CAN_READ' + + CAN_EDIT = "CAN_EDIT" + CAN_MANAGE = "CAN_MANAGE" + CAN_MANAGE_PRODUCTION_VERSIONS = "CAN_MANAGE_PRODUCTION_VERSIONS" + 
CAN_MANAGE_STAGING_VERSIONS = "CAN_MANAGE_STAGING_VERSIONS" + CAN_READ = "CAN_READ" + @dataclass class RegisteredModelPermissions: access_control_list: Optional[List[RegisteredModelAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissions: """Deserializes the RegisteredModelPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', RegisteredModelAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class RegisteredModelPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[RegisteredModelPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsDescription: """Deserializes the RegisteredModelPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), 
permission_level=_enum(d, 'permission_level', RegisteredModelPermissionLevel)) - - + return cls( + description=d.get("description", None), + permission_level=_enum(d, "permission_level", RegisteredModelPermissionLevel), + ) @dataclass class RegisteredModelPermissionsRequest: access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None - + registered_model_id: Optional[str] = None """The registered model for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.registered_model_id is not None: + body["registered_model_id"] = self.registered_model_id return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.registered_model_id is not None: + body["registered_model_id"] = self.registered_model_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsRequest: """Deserializes the RegisteredModelPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', RegisteredModelAccessControlRequest), registered_model_id=d.get('registered_model_id', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlRequest), + registered_model_id=d.get("registered_model_id", None), + ) @dataclass class RegistryWebhook: creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + description: Optional[str] = None """User-specified description for the webhook.""" - + events: Optional[List[RegistryWebhookEvent]] = None """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. @@ -4107,20 +4675,20 @@ class RegistryWebhook: to production. * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - + http_url_spec: Optional[HttpUrlSpecWithoutSecret] = None - + id: Optional[str] = None """Webhook ID""" - + job_spec: Optional[JobSpecWithoutSecret] = None - + last_updated_timestamp: Optional[int] = None """Time of the object at last update, as a Unix timestamp in milliseconds.""" - + model_name: Optional[str] = None """Name of the model whose events would trigger this webhook.""" - + status: Optional[RegistryWebhookStatus] = None """Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. 
@@ -4129,80 +4697,107 @@ class RegistryWebhook: * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event.""" - + def as_dict(self) -> dict: """Serializes the RegistryWebhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.description is not None: body['description'] = self.description - if self.events: body['events'] = [v.value for v in self.events] - if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict() - if self.id is not None: body['id'] = self.id - if self.job_spec: body['job_spec'] = self.job_spec.as_dict() - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.model_name is not None: body['model_name'] = self.model_name - if self.status is not None: body['status'] = self.status.value + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.description is not None: + body["description"] = self.description + if self.events: + body["events"] = [v.value for v in self.events] + if self.http_url_spec: + body["http_url_spec"] = self.http_url_spec.as_dict() + if self.id is not None: + body["id"] = self.id + if self.job_spec: + body["job_spec"] = self.job_spec.as_dict() + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.model_name is not None: + body["model_name"] = self.model_name + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the RegistryWebhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.description is not None: body['description'] = self.description - if self.events: body['events'] = self.events - if self.http_url_spec: body['http_url_spec'] = self.http_url_spec - if self.id is not None: body['id'] = self.id - if self.job_spec: body['job_spec'] = self.job_spec - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.model_name is not None: body['model_name'] = self.model_name - if self.status is not None: body['status'] = self.status + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.description is not None: + body["description"] = self.description + if self.events: + body["events"] = self.events + if self.http_url_spec: + body["http_url_spec"] = self.http_url_spec + if self.id is not None: + body["id"] = self.id + if self.job_spec: + body["job_spec"] = self.job_spec + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.model_name is not None: + body["model_name"] = self.model_name + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegistryWebhook: """Deserializes the RegistryWebhook from a dictionary.""" - return cls(creation_timestamp=d.get('creation_timestamp', None), description=d.get('description', None), events=_repeated_enum(d, 'events', RegistryWebhookEvent), http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpecWithoutSecret), id=d.get('id', None), job_spec=_from_dict(d, 'job_spec', JobSpecWithoutSecret), 
last_updated_timestamp=d.get('last_updated_timestamp', None), model_name=d.get('model_name', None), status=_enum(d, 'status', RegistryWebhookStatus)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + description=d.get("description", None), + events=_repeated_enum(d, "events", RegistryWebhookEvent), + http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpecWithoutSecret), + id=d.get("id", None), + job_spec=_from_dict(d, "job_spec", JobSpecWithoutSecret), + last_updated_timestamp=d.get("last_updated_timestamp", None), + model_name=d.get("model_name", None), + status=_enum(d, "status", RegistryWebhookStatus), + ) class RegistryWebhookEvent(Enum): - - - COMMENT_CREATED = 'COMMENT_CREATED' - MODEL_VERSION_CREATED = 'MODEL_VERSION_CREATED' - MODEL_VERSION_TAG_SET = 'MODEL_VERSION_TAG_SET' - MODEL_VERSION_TRANSITIONED_STAGE = 'MODEL_VERSION_TRANSITIONED_STAGE' - MODEL_VERSION_TRANSITIONED_TO_ARCHIVED = 'MODEL_VERSION_TRANSITIONED_TO_ARCHIVED' - MODEL_VERSION_TRANSITIONED_TO_PRODUCTION = 'MODEL_VERSION_TRANSITIONED_TO_PRODUCTION' - MODEL_VERSION_TRANSITIONED_TO_STAGING = 'MODEL_VERSION_TRANSITIONED_TO_STAGING' - REGISTERED_MODEL_CREATED = 'REGISTERED_MODEL_CREATED' - TRANSITION_REQUEST_CREATED = 'TRANSITION_REQUEST_CREATED' - TRANSITION_REQUEST_TO_ARCHIVED_CREATED = 'TRANSITION_REQUEST_TO_ARCHIVED_CREATED' - TRANSITION_REQUEST_TO_PRODUCTION_CREATED = 'TRANSITION_REQUEST_TO_PRODUCTION_CREATED' - TRANSITION_REQUEST_TO_STAGING_CREATED = 'TRANSITION_REQUEST_TO_STAGING_CREATED' + + COMMENT_CREATED = "COMMENT_CREATED" + MODEL_VERSION_CREATED = "MODEL_VERSION_CREATED" + MODEL_VERSION_TAG_SET = "MODEL_VERSION_TAG_SET" + MODEL_VERSION_TRANSITIONED_STAGE = "MODEL_VERSION_TRANSITIONED_STAGE" + MODEL_VERSION_TRANSITIONED_TO_ARCHIVED = "MODEL_VERSION_TRANSITIONED_TO_ARCHIVED" + MODEL_VERSION_TRANSITIONED_TO_PRODUCTION = "MODEL_VERSION_TRANSITIONED_TO_PRODUCTION" + MODEL_VERSION_TRANSITIONED_TO_STAGING = "MODEL_VERSION_TRANSITIONED_TO_STAGING" + REGISTERED_MODEL_CREATED = "REGISTERED_MODEL_CREATED" + TRANSITION_REQUEST_CREATED = "TRANSITION_REQUEST_CREATED" + TRANSITION_REQUEST_TO_ARCHIVED_CREATED = "TRANSITION_REQUEST_TO_ARCHIVED_CREATED" + TRANSITION_REQUEST_TO_PRODUCTION_CREATED = "TRANSITION_REQUEST_TO_PRODUCTION_CREATED" + TRANSITION_REQUEST_TO_STAGING_CREATED = "TRANSITION_REQUEST_TO_STAGING_CREATED" + class RegistryWebhookStatus(Enum): """Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event.""" - - ACTIVE = 'ACTIVE' - DISABLED = 'DISABLED' - TEST_MODE = 'TEST_MODE' + + ACTIVE = "ACTIVE" + DISABLED = "DISABLED" + TEST_MODE = "TEST_MODE" + @dataclass class RejectTransitionRequest: name: str """Name of the model.""" - + version: str """Version of the model.""" - + stage: Stage """Target stage of the transition. Valid values are: @@ -4213,138 +4808,151 @@ class RejectTransitionRequest: * `Production`: Production stage. 
* `Archived`: Archived stage.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the RejectTransitionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage.value - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage.value + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the RejectTransitionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage - if self.version is not None: body['version'] = self.version + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequest: """Deserializes the RejectTransitionRequest from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None)) - - + return cls( + comment=d.get("comment", None), + name=d.get("name", None), + stage=_enum(d, "stage", Stage), + version=d.get("version", None), + ) @dataclass class RejectTransitionRequestResponse: activity: Optional[Activity] = None """Activity recorded for the action.""" - + def as_dict(self) -> dict: """Serializes the RejectTransitionRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activity: body['activity'] = self.activity.as_dict() + if self.activity: + body["activity"] = self.activity.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RejectTransitionRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.activity: body['activity'] = self.activity + if self.activity: + body["activity"] = self.activity return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequestResponse: """Deserializes the RejectTransitionRequestResponse from a dictionary.""" - return cls(activity=_from_dict(d, 'activity', Activity)) - - + return cls(activity=_from_dict(d, "activity", Activity)) @dataclass class RenameModelRequest: name: str """Registered model unique name identifier.""" - + new_name: Optional[str] = None """If provided, updates the name for this `registered_model`.""" - + def as_dict(self) -> dict: """Serializes the RenameModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name return body def as_shallow_dict(self) -> dict: """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: 
body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RenameModelRequest: """Deserializes the RenameModelRequest from a dictionary.""" - return cls(name=d.get('name', None), new_name=d.get('new_name', None)) - - + return cls(name=d.get("name", None), new_name=d.get("new_name", None)) @dataclass class RenameModelResponse: registered_model: Optional[Model] = None - + def as_dict(self) -> dict: """Serializes the RenameModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.registered_model: body['registered_model'] = self.registered_model.as_dict() + if self.registered_model: + body["registered_model"] = self.registered_model.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the RenameModelResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.registered_model: body['registered_model'] = self.registered_model + if self.registered_model: + body["registered_model"] = self.registered_model return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RenameModelResponse: """Deserializes the RenameModelResponse from a dictionary.""" - return cls(registered_model=_from_dict(d, 'registered_model', Model)) - - + return cls(registered_model=_from_dict(d, "registered_model", Model)) @dataclass class RestoreExperiment: experiment_id: str """ID of the associated experiment.""" - + def as_dict(self) -> dict: """Serializes the RestoreExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body def as_shallow_dict(self) -> dict: """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreExperiment: """Deserializes the RestoreExperiment from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None)) - - + return cls(experiment_id=d.get("experiment_id", None)) @dataclass @@ -4363,33 +4971,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestoreExperimentResponse: """Deserializes the RestoreExperimentResponse from a dictionary.""" return cls() - - @dataclass class RestoreRun: run_id: str """ID of the run to restore.""" - + def as_dict(self) -> dict: """Serializes the RestoreRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body def as_shallow_dict(self) -> dict: """Serializes the RestoreRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_id is not None: body['run_id'] = self.run_id + if self.run_id is not None: + body["run_id"] = self.run_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreRun: """Deserializes the RestoreRun from a dictionary.""" - return cls(run_id=d.get('run_id', None)) - - + return cls(run_id=d.get("run_id", None)) @dataclass @@ -4408,446 +5014,536 @@ 
def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestoreRunResponse: """Deserializes the RestoreRunResponse from a dictionary.""" return cls() - - @dataclass class RestoreRuns: experiment_id: str """The ID of the experiment containing the runs to restore.""" - + min_timestamp_millis: int """The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only runs deleted no earlier than this timestamp are restored.""" - + max_runs: Optional[int] = None """An optional positive integer indicating the maximum number of runs to restore. The maximum allowed value for max_runs is 10000.""" - + def as_dict(self) -> dict: """Serializes the RestoreRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.max_runs is not None: body['max_runs'] = self.max_runs - if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.max_runs is not None: + body["max_runs"] = self.max_runs + if self.min_timestamp_millis is not None: + body["min_timestamp_millis"] = self.min_timestamp_millis return body def as_shallow_dict(self) -> dict: """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.max_runs is not None: body['max_runs'] = self.max_runs - if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.max_runs is not None: + body["max_runs"] = self.max_runs + if self.min_timestamp_millis is not None: + body["min_timestamp_millis"] = self.min_timestamp_millis return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreRuns: """Deserializes the RestoreRuns from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None), max_runs=d.get('max_runs', None), min_timestamp_millis=d.get('min_timestamp_millis', None)) - - + return cls( + experiment_id=d.get("experiment_id", None), + max_runs=d.get("max_runs", None), + min_timestamp_millis=d.get("min_timestamp_millis", None), + ) @dataclass class RestoreRunsResponse: runs_restored: Optional[int] = None """The number of runs restored.""" - + def as_dict(self) -> dict: """Serializes the RestoreRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.runs_restored is not None: body['runs_restored'] = self.runs_restored + if self.runs_restored is not None: + body["runs_restored"] = self.runs_restored return body def as_shallow_dict(self) -> dict: """Serializes the RestoreRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.runs_restored is not None: body['runs_restored'] = self.runs_restored + if self.runs_restored is not None: + body["runs_restored"] = self.runs_restored return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RestoreRunsResponse: """Deserializes the RestoreRunsResponse from a dictionary.""" - return cls(runs_restored=d.get('runs_restored', None)) - - + return cls(runs_restored=d.get("runs_restored", None)) @dataclass class Run: """A single run.""" - + data: Optional[RunData] = None """Run data.""" - + info: Optional[RunInfo] = None """Run metadata.""" - + inputs: Optional[RunInputs] = None """Run 
inputs.""" - + def as_dict(self) -> dict: """Serializes the Run into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data: body['data'] = self.data.as_dict() - if self.info: body['info'] = self.info.as_dict() - if self.inputs: body['inputs'] = self.inputs.as_dict() + if self.data: + body["data"] = self.data.as_dict() + if self.info: + body["info"] = self.info.as_dict() + if self.inputs: + body["inputs"] = self.inputs.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Run into a shallow dictionary of its immediate attributes.""" body = {} - if self.data: body['data'] = self.data - if self.info: body['info'] = self.info - if self.inputs: body['inputs'] = self.inputs + if self.data: + body["data"] = self.data + if self.info: + body["info"] = self.info + if self.inputs: + body["inputs"] = self.inputs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Run: """Deserializes the Run from a dictionary.""" - return cls(data=_from_dict(d, 'data', RunData), info=_from_dict(d, 'info', RunInfo), inputs=_from_dict(d, 'inputs', RunInputs)) - - + return cls( + data=_from_dict(d, "data", RunData), + info=_from_dict(d, "info", RunInfo), + inputs=_from_dict(d, "inputs", RunInputs), + ) @dataclass class RunData: """Run data (metrics, params, and tags).""" - + metrics: Optional[List[Metric]] = None """Run metrics.""" - + params: Optional[List[Param]] = None """Run parameters.""" - + tags: Optional[List[RunTag]] = None """Additional metadata key-value pairs.""" - + def as_dict(self) -> dict: """Serializes the RunData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metrics: body['metrics'] = [v.as_dict() for v in self.metrics] - if self.params: body['params'] = [v.as_dict() for v in self.params] - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.metrics: + body["metrics"] = [v.as_dict() for v in self.metrics] + if self.params: + body["params"] = [v.as_dict() for v in self.params] + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the RunData into a shallow dictionary of its immediate attributes.""" body = {} - if self.metrics: body['metrics'] = self.metrics - if self.params: body['params'] = self.params - if self.tags: body['tags'] = self.tags + if self.metrics: + body["metrics"] = self.metrics + if self.params: + body["params"] = self.params + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunData: """Deserializes the RunData from a dictionary.""" - return cls(metrics=_repeated_dict(d, 'metrics', Metric), params=_repeated_dict(d, 'params', Param), tags=_repeated_dict(d, 'tags', RunTag)) - - + return cls( + metrics=_repeated_dict(d, "metrics", Metric), + params=_repeated_dict(d, "params", Param), + tags=_repeated_dict(d, "tags", RunTag), + ) @dataclass class RunInfo: """Metadata of a single run.""" - + artifact_uri: Optional[str] = None """URI of the directory where artifacts should be uploaded. This can be a local path (starting with "/"), or a distributed file system (DFS) path, like ``s3://bucket/directory`` or ``dbfs:/my/directory``. 
If not set, the local ``./mlruns`` directory is chosen.""" - + end_time: Optional[int] = None """Unix timestamp of when the run ended in milliseconds.""" - + experiment_id: Optional[str] = None """The experiment ID.""" - + lifecycle_stage: Optional[str] = None """Current life cycle stage of the experiment : OneOf("active", "deleted")""" - + run_id: Optional[str] = None """Unique identifier for the run.""" - + run_name: Optional[str] = None """The name of the run.""" - + run_uuid: Optional[str] = None """[Deprecated, use run_id instead] Unique identifier for the run. This field will be removed in a future MLflow version.""" - + start_time: Optional[int] = None """Unix timestamp of when the run started in milliseconds.""" - + status: Optional[RunInfoStatus] = None """Current status of the run.""" - + user_id: Optional[str] = None """User who initiated the run. This field is deprecated as of MLflow 1.0, and will be removed in a future MLflow release. Use 'mlflow.user' tag instead.""" - + def as_dict(self) -> dict: """Serializes the RunInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri - if self.end_time is not None: body['end_time'] = self.end_time - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.start_time is not None: body['start_time'] = self.start_time - if self.status is not None: body['status'] = self.status.value - if self.user_id is not None: body['user_id'] = self.user_id + if self.artifact_uri is not None: + body["artifact_uri"] = self.artifact_uri + if self.end_time is not None: + body["end_time"] = self.end_time + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.lifecycle_stage is not None: + body["lifecycle_stage"] = self.lifecycle_stage + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.start_time is not None: + body["start_time"] = self.start_time + if self.status is not None: + body["status"] = self.status.value + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the RunInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri - if self.end_time is not None: body['end_time'] = self.end_time - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.start_time is not None: body['start_time'] = self.start_time - if self.status is not None: body['status'] = self.status - if self.user_id is not None: body['user_id'] = self.user_id + if self.artifact_uri is not None: + body["artifact_uri"] = self.artifact_uri + if self.end_time is not None: + body["end_time"] = self.end_time + if self.experiment_id is not None: + 
body["experiment_id"] = self.experiment_id + if self.lifecycle_stage is not None: + body["lifecycle_stage"] = self.lifecycle_stage + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.start_time is not None: + body["start_time"] = self.start_time + if self.status is not None: + body["status"] = self.status + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunInfo: """Deserializes the RunInfo from a dictionary.""" - return cls(artifact_uri=d.get('artifact_uri', None), end_time=d.get('end_time', None), experiment_id=d.get('experiment_id', None), lifecycle_stage=d.get('lifecycle_stage', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_uuid=d.get('run_uuid', None), start_time=d.get('start_time', None), status=_enum(d, 'status', RunInfoStatus), user_id=d.get('user_id', None)) - - + return cls( + artifact_uri=d.get("artifact_uri", None), + end_time=d.get("end_time", None), + experiment_id=d.get("experiment_id", None), + lifecycle_stage=d.get("lifecycle_stage", None), + run_id=d.get("run_id", None), + run_name=d.get("run_name", None), + run_uuid=d.get("run_uuid", None), + start_time=d.get("start_time", None), + status=_enum(d, "status", RunInfoStatus), + user_id=d.get("user_id", None), + ) class RunInfoStatus(Enum): """Status of a run.""" - - FAILED = 'FAILED' - FINISHED = 'FINISHED' - KILLED = 'KILLED' - RUNNING = 'RUNNING' - SCHEDULED = 'SCHEDULED' + + FAILED = "FAILED" + FINISHED = "FINISHED" + KILLED = "KILLED" + RUNNING = "RUNNING" + SCHEDULED = "SCHEDULED" + @dataclass class RunInputs: """Run inputs.""" - + dataset_inputs: Optional[List[DatasetInput]] = None """Run metrics.""" - + model_inputs: Optional[List[ModelInput]] = None """Model inputs to the Run.""" - + def as_dict(self) -> dict: """Serializes the RunInputs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset_inputs: body['dataset_inputs'] = [v.as_dict() for v in self.dataset_inputs] - if self.model_inputs: body['model_inputs'] = [v.as_dict() for v in self.model_inputs] + if self.dataset_inputs: + body["dataset_inputs"] = [v.as_dict() for v in self.dataset_inputs] + if self.model_inputs: + body["model_inputs"] = [v.as_dict() for v in self.model_inputs] return body def as_shallow_dict(self) -> dict: """Serializes the RunInputs into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset_inputs: body['dataset_inputs'] = self.dataset_inputs - if self.model_inputs: body['model_inputs'] = self.model_inputs + if self.dataset_inputs: + body["dataset_inputs"] = self.dataset_inputs + if self.model_inputs: + body["model_inputs"] = self.model_inputs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunInputs: """Deserializes the RunInputs from a dictionary.""" - return cls(dataset_inputs=_repeated_dict(d, 'dataset_inputs', DatasetInput), model_inputs=_repeated_dict(d, 'model_inputs', ModelInput)) - - + return cls( + dataset_inputs=_repeated_dict(d, "dataset_inputs", DatasetInput), + model_inputs=_repeated_dict(d, "model_inputs", ModelInput), + ) @dataclass class RunTag: """Tag for a run.""" - + key: Optional[str] = None """The tag key.""" - + value: Optional[str] = None """The tag value.""" - + def as_dict(self) -> dict: """Serializes the RunTag into a dictionary suitable for use as a JSON request body.""" body = 
{} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the RunTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RunTag: """Deserializes the RunTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class SearchExperiments: filter: Optional[str] = None """String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")""" - + max_results: Optional[int] = None """Maximum number of experiments desired. Max threshold is 3000.""" - + order_by: Optional[List[str]] = None """List of columns for ordering search results, which can include experiment name and last updated timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are done by experiment id DESC.""" - + page_token: Optional[str] = None """Token indicating the page of experiments to fetch""" - + view_type: Optional[ViewType] = None """Qualifier for type of experiments to be returned. If unspecified, return only active experiments.""" - + def as_dict(self) -> dict: """Serializes the SearchExperiments into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filter is not None: body['filter'] = self.filter - if self.max_results is not None: body['max_results'] = self.max_results - if self.order_by: body['order_by'] = [v for v in self.order_by] - if self.page_token is not None: body['page_token'] = self.page_token - if self.view_type is not None: body['view_type'] = self.view_type.value + if self.filter is not None: + body["filter"] = self.filter + if self.max_results is not None: + body["max_results"] = self.max_results + if self.order_by: + body["order_by"] = [v for v in self.order_by] + if self.page_token is not None: + body["page_token"] = self.page_token + if self.view_type is not None: + body["view_type"] = self.view_type.value return body def as_shallow_dict(self) -> dict: """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes.""" body = {} - if self.filter is not None: body['filter'] = self.filter - if self.max_results is not None: body['max_results'] = self.max_results - if self.order_by: body['order_by'] = self.order_by - if self.page_token is not None: body['page_token'] = self.page_token - if self.view_type is not None: body['view_type'] = self.view_type + if self.filter is not None: + body["filter"] = self.filter + if self.max_results is not None: + body["max_results"] = self.max_results + if self.order_by: + body["order_by"] = self.order_by + if self.page_token is not None: + body["page_token"] = self.page_token + if self.view_type is not None: + body["view_type"] = self.view_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchExperiments: """Deserializes the SearchExperiments from a dictionary.""" - return cls(filter=d.get('filter', None), max_results=d.get('max_results', None), order_by=d.get('order_by', None), page_token=d.get('page_token', None), 
view_type=_enum(d, 'view_type', ViewType)) - - + return cls( + filter=d.get("filter", None), + max_results=d.get("max_results", None), + order_by=d.get("order_by", None), + page_token=d.get("page_token", None), + view_type=_enum(d, "view_type", ViewType), + ) @dataclass class SearchExperimentsResponse: experiments: Optional[List[Experiment]] = None """Experiments that match the search criteria""" - + next_page_token: Optional[str] = None """Token that can be used to retrieve the next page of experiments. An empty token means that no more experiments are available for retrieval.""" - + def as_dict(self) -> dict: """Serializes the SearchExperimentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiments: body['experiments'] = [v.as_dict() for v in self.experiments] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.experiments: + body["experiments"] = [v.as_dict() for v in self.experiments] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchExperimentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiments: body['experiments'] = self.experiments - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.experiments: + body["experiments"] = self.experiments + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchExperimentsResponse: """Deserializes the SearchExperimentsResponse from a dictionary.""" - return cls(experiments=_repeated_dict(d, 'experiments', Experiment), next_page_token=d.get('next_page_token', None)) - - + return cls( + experiments=_repeated_dict(d, "experiments", Experiment), next_page_token=d.get("next_page_token", None) + ) @dataclass class SearchLoggedModelsDataset: dataset_name: str """The name of the dataset.""" - + dataset_digest: Optional[str] = None """The digest of the dataset.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsDataset into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsDataset into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsDataset: """Deserializes the SearchLoggedModelsDataset from a dictionary.""" - return cls(dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None)) - - + return cls(dataset_digest=d.get("dataset_digest", None), dataset_name=d.get("dataset_name", None)) @dataclass class SearchLoggedModelsOrderBy: field_name: str """The name of the field to order by, e.g. 
"metrics.accuracy".""" - + ascending: Optional[bool] = None """Whether the search results order is ascending or not.""" - + dataset_digest: Optional[str] = None """If ``field_name`` refers to a metric, this field specifies the digest of the dataset associated with the metric. Only metrics associated with the specified dataset name and digest will be considered for ordering. This field may only be set if ``dataset_name`` is also set.""" - + dataset_name: Optional[str] = None """If ``field_name`` refers to a metric, this field specifies the name of the dataset associated with the metric. Only metrics associated with the specified dataset name will be considered for ordering. This field may only be set if ``field_name`` refers to a metric.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsOrderBy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ascending is not None: body['ascending'] = self.ascending - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.field_name is not None: body['field_name'] = self.field_name + if self.ascending is not None: + body["ascending"] = self.ascending + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.field_name is not None: + body["field_name"] = self.field_name return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsOrderBy into a shallow dictionary of its immediate attributes.""" body = {} - if self.ascending is not None: body['ascending'] = self.ascending - if self.dataset_digest is not None: body['dataset_digest'] = self.dataset_digest - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.field_name is not None: body['field_name'] = self.field_name + if self.ascending is not None: + body["ascending"] = self.ascending + if self.dataset_digest is not None: + body["dataset_digest"] = self.dataset_digest + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.field_name is not None: + body["field_name"] = self.field_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsOrderBy: """Deserializes the SearchLoggedModelsOrderBy from a dictionary.""" - return cls(ascending=d.get('ascending', None), dataset_digest=d.get('dataset_digest', None), dataset_name=d.get('dataset_name', None), field_name=d.get('field_name', None)) - - + return cls( + ascending=d.get("ascending", None), + dataset_digest=d.get("dataset_digest", None), + dataset_name=d.get("dataset_name", None), + field_name=d.get("field_name", None), + ) @dataclass @@ -4858,156 +5554,179 @@ class SearchLoggedModelsRequest: logged models with accuracy > 0.9 on the test_dataset. Metric values from ANY dataset matching the criteria are considered. If no datasets are specified, then metrics across all datasets are considered in the filter.""" - + experiment_ids: Optional[List[str]] = None """The IDs of the experiments in which to search for logged models.""" - + filter: Optional[str] = None """A filter expression over logged model info and data that allows returning a subset of logged models. The syntax is a subset of SQL that supports AND'ing together binary operations. 
Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``.""" - + max_results: Optional[int] = None """The maximum number of Logged Models to return. The maximum limit is 50.""" - + order_by: Optional[List[SearchLoggedModelsOrderBy]] = None """The list of columns for ordering the results, with additional fields for sorting criteria.""" - + page_token: Optional[str] = None """The token indicating the page of logged models to fetch.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.datasets: body['datasets'] = [v.as_dict() for v in self.datasets] - if self.experiment_ids: body['experiment_ids'] = [v for v in self.experiment_ids] - if self.filter is not None: body['filter'] = self.filter - if self.max_results is not None: body['max_results'] = self.max_results - if self.order_by: body['order_by'] = [v.as_dict() for v in self.order_by] - if self.page_token is not None: body['page_token'] = self.page_token + if self.datasets: + body["datasets"] = [v.as_dict() for v in self.datasets] + if self.experiment_ids: + body["experiment_ids"] = [v for v in self.experiment_ids] + if self.filter is not None: + body["filter"] = self.filter + if self.max_results is not None: + body["max_results"] = self.max_results + if self.order_by: + body["order_by"] = [v.as_dict() for v in self.order_by] + if self.page_token is not None: + body["page_token"] = self.page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.datasets: body['datasets'] = self.datasets - if self.experiment_ids: body['experiment_ids'] = self.experiment_ids - if self.filter is not None: body['filter'] = self.filter - if self.max_results is not None: body['max_results'] = self.max_results - if self.order_by: body['order_by'] = self.order_by - if self.page_token is not None: body['page_token'] = self.page_token + if self.datasets: + body["datasets"] = self.datasets + if self.experiment_ids: + body["experiment_ids"] = self.experiment_ids + if self.filter is not None: + body["filter"] = self.filter + if self.max_results is not None: + body["max_results"] = self.max_results + if self.order_by: + body["order_by"] = self.order_by + if self.page_token is not None: + body["page_token"] = self.page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsRequest: """Deserializes the SearchLoggedModelsRequest from a dictionary.""" - return cls(datasets=_repeated_dict(d, 'datasets', SearchLoggedModelsDataset), experiment_ids=d.get('experiment_ids', None), filter=d.get('filter', None), max_results=d.get('max_results', None), order_by=_repeated_dict(d, 'order_by', SearchLoggedModelsOrderBy), page_token=d.get('page_token', None)) - - + return cls( + datasets=_repeated_dict(d, "datasets", SearchLoggedModelsDataset), + experiment_ids=d.get("experiment_ids", None), + filter=d.get("filter", None), + max_results=d.get("max_results", None), + order_by=_repeated_dict(d, "order_by", SearchLoggedModelsOrderBy), + page_token=d.get("page_token", None), + ) @dataclass class SearchLoggedModelsResponse: models: Optional[List[LoggedModel]] = None """Logged models that match the search criteria.""" - + next_page_token: Optional[str] = None """The token that can be used to retrieve the next page of logged models.""" - + def as_dict(self) -> dict: """Serializes the SearchLoggedModelsResponse into a dictionary 
suitable for use as a JSON request body.""" body = {} - if self.models: body['models'] = [v.as_dict() for v in self.models] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.models: + body["models"] = [v.as_dict() for v in self.models] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchLoggedModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.models: body['models'] = self.models - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.models: + body["models"] = self.models + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsResponse: """Deserializes the SearchLoggedModelsResponse from a dictionary.""" - return cls(models=_repeated_dict(d, 'models', LoggedModel), next_page_token=d.get('next_page_token', None)) - - - - - + return cls(models=_repeated_dict(d, "models", LoggedModel), next_page_token=d.get("next_page_token", None)) @dataclass class SearchModelVersionsResponse: model_versions: Optional[List[ModelVersion]] = None """Models that match the search criteria""" - + next_page_token: Optional[str] = None """Pagination token to request next page of models for the same search query.""" - + def as_dict(self) -> dict: """Serializes the SearchModelVersionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.model_versions: + body["model_versions"] = [v.as_dict() for v in self.model_versions] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the SearchModelVersionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_versions: body['model_versions'] = self.model_versions - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.model_versions: + body["model_versions"] = self.model_versions + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchModelVersionsResponse: """Deserializes the SearchModelVersionsResponse from a dictionary.""" - return cls(model_versions=_repeated_dict(d, 'model_versions', ModelVersion), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + model_versions=_repeated_dict(d, "model_versions", ModelVersion), + next_page_token=d.get("next_page_token", None), + ) @dataclass class SearchModelsResponse: next_page_token: Optional[str] = None """Pagination token to request the next page of models.""" - + registered_models: Optional[List[Model]] = None """Registered Models that match the search criteria.""" - + def as_dict(self) -> dict: """Serializes the SearchModelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + 
if self.registered_models: + body["registered_models"] = [v.as_dict() for v in self.registered_models] return body def as_shallow_dict(self) -> dict: """Serializes the SearchModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.registered_models: body['registered_models'] = self.registered_models + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.registered_models: + body["registered_models"] = self.registered_models return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchModelsResponse: """Deserializes the SearchModelsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), registered_models=_repeated_dict(d, 'registered_models', Model)) - - + return cls( + next_page_token=d.get("next_page_token", None), + registered_models=_repeated_dict(d, "registered_models", Model), + ) @dataclass class SearchRuns: experiment_ids: Optional[List[str]] = None """List of experiment IDs to search over.""" - + filter: Optional[str] = None """A filter expression over params, metrics, and tags, that allows returning a subset of runs. The syntax is a subset of SQL that supports ANDing together binary operations between a param, @@ -5019,116 +5738,139 @@ class SearchRuns: quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.""" - + max_results: Optional[int] = None """Maximum number of runs desired. Max threshold is 50000""" - + order_by: Optional[List[str]] = None """List of columns to be ordered by, including attributes, params, metrics, and tags with an optional `"DESC"` or `"ASC"` annotation, where `"ASC"` is the default. Example: `["params.input DESC", "metrics.alpha ASC", "metrics.rmse"]`. Tiebreaks are done by start_time `DESC` followed by `run_id` for runs with the same start time (and this is the default ordering criterion if order_by is not provided).""" - + page_token: Optional[str] = None """Token for the current page of runs.""" - + run_view_type: Optional[ViewType] = None """Whether to display only active, only deleted, or all runs. 
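The Search*Response classes above share one pagination contract: an empty next_page_token marks the last page. A minimal consumer sketch, with a hypothetical fetch_page callable standing in for the actual HTTP layer:

def all_logged_models(fetch_page):
    """Collect every LoggedModel by following next_page_token until it comes back empty."""
    models, token = [], None
    while True:
        # fetch_page is a stand-in that returns the raw response dict for one page
        resp = SearchLoggedModelsResponse.from_dict(fetch_page(page_token=token))
        models.extend(resp.models or [])
        token = resp.next_page_token
        if not token:
            return models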
Defaults to only active runs.""" - + def as_dict(self) -> dict: """Serializes the SearchRuns into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_ids: body['experiment_ids'] = [v for v in self.experiment_ids] - if self.filter is not None: body['filter'] = self.filter - if self.max_results is not None: body['max_results'] = self.max_results - if self.order_by: body['order_by'] = [v for v in self.order_by] - if self.page_token is not None: body['page_token'] = self.page_token - if self.run_view_type is not None: body['run_view_type'] = self.run_view_type.value + if self.experiment_ids: + body["experiment_ids"] = [v for v in self.experiment_ids] + if self.filter is not None: + body["filter"] = self.filter + if self.max_results is not None: + body["max_results"] = self.max_results + if self.order_by: + body["order_by"] = [v for v in self.order_by] + if self.page_token is not None: + body["page_token"] = self.page_token + if self.run_view_type is not None: + body["run_view_type"] = self.run_view_type.value return body def as_shallow_dict(self) -> dict: """Serializes the SearchRuns into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_ids: body['experiment_ids'] = self.experiment_ids - if self.filter is not None: body['filter'] = self.filter - if self.max_results is not None: body['max_results'] = self.max_results - if self.order_by: body['order_by'] = self.order_by - if self.page_token is not None: body['page_token'] = self.page_token - if self.run_view_type is not None: body['run_view_type'] = self.run_view_type + if self.experiment_ids: + body["experiment_ids"] = self.experiment_ids + if self.filter is not None: + body["filter"] = self.filter + if self.max_results is not None: + body["max_results"] = self.max_results + if self.order_by: + body["order_by"] = self.order_by + if self.page_token is not None: + body["page_token"] = self.page_token + if self.run_view_type is not None: + body["run_view_type"] = self.run_view_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchRuns: """Deserializes the SearchRuns from a dictionary.""" - return cls(experiment_ids=d.get('experiment_ids', None), filter=d.get('filter', None), max_results=d.get('max_results', None), order_by=d.get('order_by', None), page_token=d.get('page_token', None), run_view_type=_enum(d, 'run_view_type', ViewType)) - - + return cls( + experiment_ids=d.get("experiment_ids", None), + filter=d.get("filter", None), + max_results=d.get("max_results", None), + order_by=d.get("order_by", None), + page_token=d.get("page_token", None), + run_view_type=_enum(d, "run_view_type", ViewType), + ) @dataclass class SearchRunsResponse: next_page_token: Optional[str] = None """Token for the next page of runs.""" - + runs: Optional[List[Run]] = None """Runs that match the search criteria.""" - + def as_dict(self) -> dict: """Serializes the SearchRunsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.runs: body['runs'] = [v.as_dict() for v in self.runs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.runs: + body["runs"] = [v.as_dict() for v in self.runs] return body def as_shallow_dict(self) -> dict: """Serializes the SearchRunsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = 
self.next_page_token - if self.runs: body['runs'] = self.runs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.runs: + body["runs"] = self.runs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SearchRunsResponse: """Deserializes the SearchRunsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), runs=_repeated_dict(d, 'runs', Run)) - - + return cls(next_page_token=d.get("next_page_token", None), runs=_repeated_dict(d, "runs", Run)) @dataclass class SetExperimentTag: experiment_id: str """ID of the experiment under which to log the tag. Must be provided.""" - + key: str """Name of the tag. Keys up to 250 bytes in size are supported.""" - + value: str """String value of the tag being logged. Values up to 64KB in size are supported.""" - + def as_dict(self) -> dict: """Serializes the SetExperimentTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTag: """Deserializes the SetExperimentTag from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None), key=d.get('key', None), value=d.get('value', None)) - - + return cls(experiment_id=d.get("experiment_id", None), key=d.get("key", None), value=d.get("value", None)) @dataclass @@ -5147,38 +5889,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTagResponse: """Deserializes the SetExperimentTagResponse from a dictionary.""" return cls() - - @dataclass class SetLoggedModelTagsRequest: model_id: Optional[str] = None """The ID of the logged model to set the tags on.""" - + tags: Optional[List[LoggedModelTag]] = None """The tags to set on the logged model.""" - + def as_dict(self) -> dict: """Serializes the SetLoggedModelTagsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.model_id is not None: + body["model_id"] = self.model_id + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the SetLoggedModelTagsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_id is not None: body['model_id'] = self.model_id - if self.tags: body['tags'] = self.tags + if self.model_id is not None: + body["model_id"] = self.model_id + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
SetLoggedModelTagsRequest: """Deserializes the SetLoggedModelTagsRequest from a dictionary.""" - return cls(model_id=d.get('model_id', None), tags=_repeated_dict(d, 'tags', LoggedModelTag)) - - + return cls(model_id=d.get("model_id", None), tags=_repeated_dict(d, "tags", LoggedModelTag)) @dataclass @@ -5197,46 +5939,48 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetLoggedModelTagsResponse: """Deserializes the SetLoggedModelTagsResponse from a dictionary.""" return cls() - - @dataclass class SetModelTagRequest: name: str """Unique name of the model.""" - + key: str """Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists, its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed to support key values up to 250 bytes in size.""" - + value: str """String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size.""" - + def as_dict(self) -> dict: """Serializes the SetModelTagRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetModelTagRequest: """Deserializes the SetModelTagRequest from a dictionary.""" - return cls(key=d.get('key', None), name=d.get('name', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), name=d.get("name", None), value=d.get("value", None)) @dataclass @@ -5255,51 +5999,57 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetModelTagResponse: """Deserializes the SetModelTagResponse from a dictionary.""" return cls() - - @dataclass class SetModelVersionTagRequest: name: str """Unique name of the model.""" - + version: str """Model version number.""" - + key: str """Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists, its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed to support key values up to 250 bytes in size.""" - + value: str """String value of the tag being logged. Maximum size depends on storage backend. 
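A sketch of the SearchRuns filter and ordering syntax documented above (values invented; ViewType is again assumed to expose ACTIVE_ONLY):

req = SearchRuns(
    experiment_ids=["42"],
    filter="metrics.rmse < 1 and params.model_class = 'LinearRegression'",
    order_by=["metrics.rmse ASC", "start_time DESC"],
    max_results=1000,
    run_view_type=ViewType.ACTIVE_ONLY,  # assumption: ViewType.ACTIVE_ONLY exists
)
body = req.as_dict()  # run_view_type serializes to the plain string "ACTIVE_ONLY"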
All storage backends are guaranteed to support key values up to 5000 bytes in size.""" - + def as_dict(self) -> dict: """Serializes the SetModelVersionTagRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value - if self.version is not None: body['version'] = self.version + if self.key is not None: + body["key"] = self.key + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value - if self.version is not None: body['version'] = self.version + if self.key is not None: + body["key"] = self.key + if self.name is not None: + body["name"] = self.name + if self.value is not None: + body["value"] = self.value + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagRequest: """Deserializes the SetModelVersionTagRequest from a dictionary.""" - return cls(key=d.get('key', None), name=d.get('name', None), value=d.get('value', None), version=d.get('version', None)) - - + return cls( + key=d.get("key", None), name=d.get("name", None), value=d.get("value", None), version=d.get("version", None) + ) @dataclass @@ -5318,49 +6068,58 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagResponse: """Deserializes the SetModelVersionTagResponse from a dictionary.""" return cls() - - @dataclass class SetTag: key: str """Name of the tag. Keys up to 250 bytes in size are supported.""" - + value: str """String value of the tag being logged. Values up to 64KB in size are supported.""" - + run_id: Optional[str] = None """ID of the run under which to log the tag. Must be provided.""" - + run_uuid: Optional[str] = None """[Deprecated, use `run_id` instead] ID of the run under which to log the tag. 
This field will be removed in a future MLflow version.""" - + def as_dict(self) -> dict: """Serializes the SetTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the SetTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetTag: """Deserializes the SetTag from a dictionary.""" - return cls(key=d.get('key', None), run_id=d.get('run_id', None), run_uuid=d.get('run_uuid', None), value=d.get('value', None)) - - + return cls( + key=d.get("key", None), + run_id=d.get("run_id", None), + run_uuid=d.get("run_uuid", None), + value=d.get("value", None), + ) @dataclass @@ -5379,134 +6138,138 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetTagResponse: """Deserializes the SetTagResponse from a dictionary.""" return cls() - - class Stage(Enum): """Stage of the model version. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage.""" - - ARCHIVED = 'Archived' - NONE = 'None' - PRODUCTION = 'Production' - STAGING = 'Staging' + + ARCHIVED = "Archived" + NONE = "None" + PRODUCTION = "Production" + STAGING = "Staging" + class Status(Enum): """The status of the model version. Valid values are: * `PENDING_REGISTRATION`: Request to register a new model version is pending as server performs background tasks. - + * `FAILED_REGISTRATION`: Request to register a new model version has failed. 
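One easy-to-miss detail in the Stage enum above: the member values are capitalized strings, not the SCREAMING_CASE member names, which matters when building request bodies or filter strings by hand:

assert Stage.NONE.value == "None"
assert Stage("Production") is Stage.PRODUCTION  # standard Enum lookup by value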
- + * `READY`: Model version is ready for use.""" - - FAILED_REGISTRATION = 'FAILED_REGISTRATION' - PENDING_REGISTRATION = 'PENDING_REGISTRATION' - READY = 'READY' + + FAILED_REGISTRATION = "FAILED_REGISTRATION" + PENDING_REGISTRATION = "PENDING_REGISTRATION" + READY = "READY" + @dataclass class TestRegistryWebhook: """Test webhook response object.""" - + body: Optional[str] = None """Body of the response from the webhook URL""" - + status_code: Optional[int] = None """Status code returned by the webhook URL""" - + def as_dict(self) -> dict: """Serializes the TestRegistryWebhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.body is not None: body['body'] = self.body - if self.status_code is not None: body['status_code'] = self.status_code + if self.body is not None: + body["body"] = self.body + if self.status_code is not None: + body["status_code"] = self.status_code return body def as_shallow_dict(self) -> dict: """Serializes the TestRegistryWebhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.body is not None: body['body'] = self.body - if self.status_code is not None: body['status_code'] = self.status_code + if self.body is not None: + body["body"] = self.body + if self.status_code is not None: + body["status_code"] = self.status_code return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhook: """Deserializes the TestRegistryWebhook from a dictionary.""" - return cls(body=d.get('body', None), status_code=d.get('status_code', None)) - - + return cls(body=d.get("body", None), status_code=d.get("status_code", None)) @dataclass class TestRegistryWebhookRequest: id: str """Webhook ID""" - + event: Optional[RegistryWebhookEvent] = None """If `event` is specified, the test trigger uses the specified event. 
If `event` is not specified, the test trigger uses a randomly chosen event associated with the webhook.""" - + def as_dict(self) -> dict: """Serializes the TestRegistryWebhookRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.event is not None: body['event'] = self.event.value - if self.id is not None: body['id'] = self.id + if self.event is not None: + body["event"] = self.event.value + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.event is not None: body['event'] = self.event - if self.id is not None: body['id'] = self.id + if self.event is not None: + body["event"] = self.event + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookRequest: """Deserializes the TestRegistryWebhookRequest from a dictionary.""" - return cls(event=_enum(d, 'event', RegistryWebhookEvent), id=d.get('id', None)) - - + return cls(event=_enum(d, "event", RegistryWebhookEvent), id=d.get("id", None)) @dataclass class TestRegistryWebhookResponse: webhook: Optional[TestRegistryWebhook] = None """Test webhook response object.""" - + def as_dict(self) -> dict: """Serializes the TestRegistryWebhookResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.webhook: body['webhook'] = self.webhook.as_dict() + if self.webhook: + body["webhook"] = self.webhook.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.webhook: body['webhook'] = self.webhook + if self.webhook: + body["webhook"] = self.webhook return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: """Deserializes the TestRegistryWebhookResponse from a dictionary.""" - return cls(webhook=_from_dict(d, 'webhook', TestRegistryWebhook)) - - + return cls(webhook=_from_dict(d, "webhook", TestRegistryWebhook)) @dataclass class TransitionModelVersionStageDatabricks: name: str """Name of the model.""" - + version: str """Version of the model.""" - + stage: Stage """Target stage of the transition. Valid values are: @@ -5517,54 +6280,68 @@ class TransitionModelVersionStageDatabricks: * `Production`: Production stage. 
* `Archived`: Archived stage.""" - + archive_existing_versions: bool """Specifies whether to archive all current model versions in the target stage.""" - + comment: Optional[str] = None """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the TransitionModelVersionStageDatabricks into a dictionary suitable for use as a JSON request body.""" body = {} - if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage.value - if self.version is not None: body['version'] = self.version + if self.archive_existing_versions is not None: + body["archive_existing_versions"] = self.archive_existing_versions + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage.value + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes.""" body = {} - if self.archive_existing_versions is not None: body['archive_existing_versions'] = self.archive_existing_versions - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.stage is not None: body['stage'] = self.stage - if self.version is not None: body['version'] = self.version + if self.archive_existing_versions is not None: + body["archive_existing_versions"] = self.archive_existing_versions + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.stage is not None: + body["stage"] = self.stage + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransitionModelVersionStageDatabricks: """Deserializes the TransitionModelVersionStageDatabricks from a dictionary.""" - return cls(archive_existing_versions=d.get('archive_existing_versions', None), comment=d.get('comment', None), name=d.get('name', None), stage=_enum(d, 'stage', Stage), version=d.get('version', None)) - - + return cls( + archive_existing_versions=d.get("archive_existing_versions", None), + comment=d.get("comment", None), + name=d.get("name", None), + stage=_enum(d, "stage", Stage), + version=d.get("version", None), + ) @dataclass class TransitionRequest: """Transition request details.""" - + available_actions: Optional[List[ActivityAction]] = None """Array of actions on the activity allowed for the current viewer.""" - + comment: Optional[str] = None """User-provided comment associated with the transition request.""" - + creation_timestamp: Optional[int] = None """Creation time of the object, as a Unix timestamp in milliseconds.""" - + to_stage: Optional[Stage] = None """Target stage of the transition (if the activity is stage transition related). Valid values are: @@ -5575,145 +6352,163 @@ class TransitionRequest: * `Production`: Production stage. 
* `Archived`: Archived stage.""" - + user_id: Optional[str] = None """The username of the user that created the object.""" - + def as_dict(self) -> dict: """Serializes the TransitionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.available_actions: body['available_actions'] = [v.value for v in self.available_actions] - if self.comment is not None: body['comment'] = self.comment - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.to_stage is not None: body['to_stage'] = self.to_stage.value - if self.user_id is not None: body['user_id'] = self.user_id + if self.available_actions: + body["available_actions"] = [v.value for v in self.available_actions] + if self.comment is not None: + body["comment"] = self.comment + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.to_stage is not None: + body["to_stage"] = self.to_stage.value + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the TransitionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.available_actions: body['available_actions'] = self.available_actions - if self.comment is not None: body['comment'] = self.comment - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.to_stage is not None: body['to_stage'] = self.to_stage - if self.user_id is not None: body['user_id'] = self.user_id + if self.available_actions: + body["available_actions"] = self.available_actions + if self.comment is not None: + body["comment"] = self.comment + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.to_stage is not None: + body["to_stage"] = self.to_stage + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransitionRequest: """Deserializes the TransitionRequest from a dictionary.""" - return cls(available_actions=_repeated_enum(d, 'available_actions', ActivityAction), comment=d.get('comment', None), creation_timestamp=d.get('creation_timestamp', None), to_stage=_enum(d, 'to_stage', Stage), user_id=d.get('user_id', None)) - - + return cls( + available_actions=_repeated_enum(d, "available_actions", ActivityAction), + comment=d.get("comment", None), + creation_timestamp=d.get("creation_timestamp", None), + to_stage=_enum(d, "to_stage", Stage), + user_id=d.get("user_id", None), + ) @dataclass class TransitionStageResponse: model_version: Optional[ModelVersionDatabricks] = None - + def as_dict(self) -> dict: """Serializes the TransitionStageResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_version: body['model_version'] = self.model_version.as_dict() + if self.model_version: + body["model_version"] = self.model_version.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the TransitionStageResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_version: body['model_version'] = self.model_version + if self.model_version: + body["model_version"] = self.model_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransitionStageResponse: """Deserializes the TransitionStageResponse from a dictionary.""" - return cls(model_version=_from_dict(d, 'model_version', ModelVersionDatabricks)) - - + return 
cls(model_version=_from_dict(d, "model_version", ModelVersionDatabricks)) @dataclass class UpdateComment: id: str """Unique identifier of an activity""" - + comment: str """User-provided comment on the action.""" - + def as_dict(self) -> dict: """Serializes the UpdateComment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.id is not None: body['id'] = self.id + if self.comment is not None: + body["comment"] = self.comment + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateComment into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.id is not None: body['id'] = self.id + if self.comment is not None: + body["comment"] = self.comment + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateComment: """Deserializes the UpdateComment from a dictionary.""" - return cls(comment=d.get('comment', None), id=d.get('id', None)) - - + return cls(comment=d.get("comment", None), id=d.get("id", None)) @dataclass class UpdateCommentResponse: comment: Optional[CommentObject] = None """Comment details.""" - + def as_dict(self) -> dict: """Serializes the UpdateCommentResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment: body['comment'] = self.comment.as_dict() + if self.comment: + body["comment"] = self.comment.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateCommentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment: body['comment'] = self.comment + if self.comment: + body["comment"] = self.comment return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateCommentResponse: """Deserializes the UpdateCommentResponse from a dictionary.""" - return cls(comment=_from_dict(d, 'comment', CommentObject)) - - + return cls(comment=_from_dict(d, "comment", CommentObject)) @dataclass class UpdateExperiment: experiment_id: str """ID of the associated experiment.""" - + new_name: Optional[str] = None """If provided, the experiment's name is changed to the new name. 
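Putting the transition classes above together, as a sketch with an invented model name and version:

req = TransitionModelVersionStageDatabricks(
    name="my_model",
    version="3",
    stage=Stage.PRODUCTION,
    archive_existing_versions=True,
    comment="promoting after offline eval",
)
assert req.as_dict()["stage"] == "Production"  # Stage serializes via .value
assert req.as_dict()["archive_existing_versions"] is True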
The new name must be unique.""" - + def as_dict(self) -> dict: """Serializes the UpdateExperiment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.new_name is not None: body['new_name'] = self.new_name + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.new_name is not None: + body["new_name"] = self.new_name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes.""" body = {} - if self.experiment_id is not None: body['experiment_id'] = self.experiment_id - if self.new_name is not None: body['new_name'] = self.new_name + if self.experiment_id is not None: + body["experiment_id"] = self.experiment_id + if self.new_name is not None: + body["new_name"] = self.new_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateExperiment: """Deserializes the UpdateExperiment from a dictionary.""" - return cls(experiment_id=d.get('experiment_id', None), new_name=d.get('new_name', None)) - - + return cls(experiment_id=d.get("experiment_id", None), new_name=d.get("new_name", None)) @dataclass @@ -5732,38 +6527,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateExperimentResponse: """Deserializes the UpdateExperimentResponse from a dictionary.""" return cls() - - @dataclass class UpdateModelRequest: name: str """Registered model unique name identifier.""" - + description: Optional[str] = None """If provided, updates the description for this `registered_model`.""" - + def as_dict(self) -> dict: """Serializes the UpdateModelRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateModelRequest: """Deserializes the UpdateModelRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None)) - - + return cls(description=d.get("description", None), name=d.get("name", None)) @dataclass @@ -5782,43 +6577,45 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateModelResponse: """Deserializes the UpdateModelResponse from a dictionary.""" return cls() - - @dataclass class UpdateModelVersionRequest: name: str """Name of the registered model""" - + version: str """Model version number""" - + description: Optional[str] = None """If provided, updates the description for this `registered_model`.""" - + def as_dict(self) -> dict: """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.version is not None: body['version'] 
= self.version + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.version is not None: body['version'] = self.version + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: """Deserializes the UpdateModelVersionRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None), version=d.get('version', None)) - - + return cls(description=d.get("description", None), name=d.get("name", None), version=d.get("version", None)) @dataclass @@ -5837,18 +6634,16 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionResponse: """Deserializes the UpdateModelVersionResponse from a dictionary.""" return cls() - - @dataclass class UpdateRegistryWebhook: id: str """Webhook ID""" - + description: Optional[str] = None """User-specified description for the webhook.""" - + events: Optional[List[RegistryWebhookEvent]] = None """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. @@ -5878,11 +6673,11 @@ class UpdateRegistryWebhook: to production. * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - + http_url_spec: Optional[HttpUrlSpec] = None - + job_spec: Optional[JobSpec] = None - + status: Optional[RegistryWebhookStatus] = None """Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. 
@@ -5891,116 +6686,148 @@ class UpdateRegistryWebhook: * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event.""" - + def as_dict(self) -> dict: """Serializes the UpdateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.events: body['events'] = [v.value for v in self.events] - if self.http_url_spec: body['http_url_spec'] = self.http_url_spec.as_dict() - if self.id is not None: body['id'] = self.id - if self.job_spec: body['job_spec'] = self.job_spec.as_dict() - if self.status is not None: body['status'] = self.status.value + if self.description is not None: + body["description"] = self.description + if self.events: + body["events"] = [v.value for v in self.events] + if self.http_url_spec: + body["http_url_spec"] = self.http_url_spec.as_dict() + if self.id is not None: + body["id"] = self.id + if self.job_spec: + body["job_spec"] = self.job_spec.as_dict() + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.events: body['events'] = self.events - if self.http_url_spec: body['http_url_spec'] = self.http_url_spec - if self.id is not None: body['id'] = self.id - if self.job_spec: body['job_spec'] = self.job_spec - if self.status is not None: body['status'] = self.status + if self.description is not None: + body["description"] = self.description + if self.events: + body["events"] = self.events + if self.http_url_spec: + body["http_url_spec"] = self.http_url_spec + if self.id is not None: + body["id"] = self.id + if self.job_spec: + body["job_spec"] = self.job_spec + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRegistryWebhook: """Deserializes the UpdateRegistryWebhook from a dictionary.""" - return cls(description=d.get('description', None), events=_repeated_enum(d, 'events', RegistryWebhookEvent), http_url_spec=_from_dict(d, 'http_url_spec', HttpUrlSpec), id=d.get('id', None), job_spec=_from_dict(d, 'job_spec', JobSpec), status=_enum(d, 'status', RegistryWebhookStatus)) - - + return cls( + description=d.get("description", None), + events=_repeated_enum(d, "events", RegistryWebhookEvent), + http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec), + id=d.get("id", None), + job_spec=_from_dict(d, "job_spec", JobSpec), + status=_enum(d, "status", RegistryWebhookStatus), + ) @dataclass class UpdateRun: end_time: Optional[int] = None """Unix timestamp in milliseconds of when the run ended.""" - + run_id: Optional[str] = None """ID of the run to update. Must be provided.""" - + run_name: Optional[str] = None """Updated name of the run.""" - + run_uuid: Optional[str] = None """[Deprecated, use `run_id` instead] ID of the run to update. 
This field will be removed in a future MLflow version.""" - + status: Optional[UpdateRunStatus] = None """Updated status of the run.""" - + def as_dict(self) -> dict: """Serializes the UpdateRun into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time is not None: body['end_time'] = self.end_time - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.status is not None: body['status'] = self.status.value + if self.end_time is not None: + body["end_time"] = self.end_time + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRun into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time is not None: body['end_time'] = self.end_time - if self.run_id is not None: body['run_id'] = self.run_id - if self.run_name is not None: body['run_name'] = self.run_name - if self.run_uuid is not None: body['run_uuid'] = self.run_uuid - if self.status is not None: body['status'] = self.status + if self.end_time is not None: + body["end_time"] = self.end_time + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_name is not None: + body["run_name"] = self.run_name + if self.run_uuid is not None: + body["run_uuid"] = self.run_uuid + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRun: """Deserializes the UpdateRun from a dictionary.""" - return cls(end_time=d.get('end_time', None), run_id=d.get('run_id', None), run_name=d.get('run_name', None), run_uuid=d.get('run_uuid', None), status=_enum(d, 'status', UpdateRunStatus)) - - + return cls( + end_time=d.get("end_time", None), + run_id=d.get("run_id", None), + run_name=d.get("run_name", None), + run_uuid=d.get("run_uuid", None), + status=_enum(d, "status", UpdateRunStatus), + ) @dataclass class UpdateRunResponse: run_info: Optional[RunInfo] = None """Updated metadata of the run.""" - + def as_dict(self) -> dict: """Serializes the UpdateRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.run_info: body['run_info'] = self.run_info.as_dict() + if self.run_info: + body["run_info"] = self.run_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRunResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.run_info: body['run_info'] = self.run_info + if self.run_info: + body["run_info"] = self.run_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRunResponse: """Deserializes the UpdateRunResponse from a dictionary.""" - return cls(run_info=_from_dict(d, 'run_info', RunInfo)) - - + return cls(run_info=_from_dict(d, "run_info", RunInfo)) class UpdateRunStatus(Enum): """Status of a run.""" - - FAILED = 'FAILED' - FINISHED = 'FINISHED' - KILLED = 'KILLED' - RUNNING = 'RUNNING' - SCHEDULED = 'SCHEDULED' + + FAILED = "FAILED" + FINISHED = "FINISHED" + KILLED = "KILLED" + RUNNING = "RUNNING" + SCHEDULED = "SCHEDULED" + @dataclass class UpdateWebhookResponse: @@ -6018,50 +6845,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> 
UpdateWebhookResponse: """Deserializes the UpdateWebhookResponse from a dictionary.""" return cls() - - class ViewType(Enum): """Qualifier for the view type.""" - - ACTIVE_ONLY = 'ACTIVE_ONLY' - ALL = 'ALL' - DELETED_ONLY = 'DELETED_ONLY' + ACTIVE_ONLY = "ACTIVE_ONLY" + ALL = "ALL" + DELETED_ONLY = "DELETED_ONLY" class ExperimentsAPI: """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server. - + Experiments are located in the workspace file tree. You manage experiments using the same tools you use to manage other workspace objects such as folders, notebooks, and libraries.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_experiment(self - , name: str - , * - , artifact_location: Optional[str] = None, tags: Optional[List[ExperimentTag]] = None) -> CreateExperimentResponse: + def create_experiment( + self, name: str, *, artifact_location: Optional[str] = None, tags: Optional[List[ExperimentTag]] = None + ) -> CreateExperimentResponse: """Create experiment. - + Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that another experiment with the same name does not already exist and fails if another experiment with the same name already exists. - + Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists. - + :param name: str Experiment name. :param artifact_location: str (optional) @@ -6072,31 +6887,36 @@ def create_experiment(self depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to support up to 20 tags per request. - + :returns: :class:`CreateExperimentResponse` """ body = {} - if artifact_location is not None: body['artifact_location'] = artifact_location - if name is not None: body['name'] = name - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/experiments/create', body=body - - , headers=headers - ) + if artifact_location is not None: + body["artifact_location"] = artifact_location + if name is not None: + body["name"] = name + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/experiments/create", body=body, headers=headers) return CreateExperimentResponse.from_dict(res) - - - - - def create_logged_model(self - , experiment_id: str - , * - , model_type: Optional[str] = None, name: Optional[str] = None, params: Optional[List[LoggedModelParameter]] = None, source_run_id: Optional[str] = None, tags: Optional[List[LoggedModelTag]] = None) -> CreateLoggedModelResponse: + def create_logged_model( + self, + experiment_id: str, + *, + model_type: Optional[str] = None, + name: Optional[str] = None, + params: Optional[List[LoggedModelParameter]] = None, + source_run_id: Optional[str] = None, + tags: Optional[List[LoggedModelTag]] = None, + ) -> CreateLoggedModelResponse: """Create a logged model. - + :param experiment_id: str The ID of the experiment that owns the model. 
:param model_type: str (optional) @@ -6109,38 +6929,45 @@ def create_logged_model(self The ID of the run that created the model. :param tags: List[:class:`LoggedModelTag`] (optional) Tags attached to the model. - + :returns: :class:`CreateLoggedModelResponse` """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - if model_type is not None: body['model_type'] = model_type - if name is not None: body['name'] = name - if params is not None: body['params'] = [v.as_dict() for v in params] - if source_run_id is not None: body['source_run_id'] = source_run_id - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/logged-models', body=body - - , headers=headers - ) + if experiment_id is not None: + body["experiment_id"] = experiment_id + if model_type is not None: + body["model_type"] = model_type + if name is not None: + body["name"] = name + if params is not None: + body["params"] = [v.as_dict() for v in params] + if source_run_id is not None: + body["source_run_id"] = source_run_id + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/logged-models", body=body, headers=headers) return CreateLoggedModelResponse.from_dict(res) - - - - - def create_run(self - - , * - , experiment_id: Optional[str] = None, run_name: Optional[str] = None, start_time: Optional[int] = None, tags: Optional[List[RunTag]] = None, user_id: Optional[str] = None) -> CreateRunResponse: + def create_run( + self, + *, + experiment_id: Optional[str] = None, + run_name: Optional[str] = None, + start_time: Optional[int] = None, + tags: Optional[List[RunTag]] = None, + user_id: Optional[str] = None, + ) -> CreateRunResponse: """Create a run. - + Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and `mlflowRunTag` associated with a single execution. - + :param experiment_id: str (optional) ID of the associated experiment. :param run_name: str (optional) @@ -6152,138 +6979,110 @@ def create_run(self :param user_id: str (optional) ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in a future MLflow release. Use 'mlflow.user' tag instead. 
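        Example (an illustrative sketch only, not generated from the spec; assumes a configured
        `WorkspaceClient` and an existing experiment ID shown as a placeholder):

            import time

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            created = w.experiments.create_run(
                experiment_id="<experiment-id>",
                run_name="illustrative-run",
                start_time=int(time.time() * 1000),  # Unix timestamp in milliseconds
            )
            print(created.run.info.run_id)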
- + :returns: :class:`CreateRunResponse` """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - if run_name is not None: body['run_name'] = run_name - if start_time is not None: body['start_time'] = start_time - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - if user_id is not None: body['user_id'] = user_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/runs/create', body=body - - , headers=headers - ) + if experiment_id is not None: + body["experiment_id"] = experiment_id + if run_name is not None: + body["run_name"] = run_name + if start_time is not None: + body["start_time"] = start_time + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + if user_id is not None: + body["user_id"] = user_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/runs/create", body=body, headers=headers) return CreateRunResponse.from_dict(res) - - - - - def delete_experiment(self - , experiment_id: str - ): + def delete_experiment(self, experiment_id: str): """Delete an experiment. - + Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with the experiment are also deleted. - + :param experiment_id: str ID of the associated experiment. - - + + """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/experiments/delete', body=body - - , headers=headers - ) - + if experiment_id is not None: + body["experiment_id"] = experiment_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/experiments/delete", body=body, headers=headers) - def delete_logged_model(self - , model_id: str - ): + def delete_logged_model(self, model_id: str): """Delete a logged model. - + :param model_id: str The ID of the logged model to delete. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/mlflow/logged-models/{model_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def delete_logged_model_tag(self - , model_id: str, tag_key: str - ): + self._api.do("DELETE", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers) + + def delete_logged_model_tag(self, model_id: str, tag_key: str): """Delete a tag on a logged model. - + :param model_id: str The ID of the logged model to delete the tag from. :param tag_key: str The tag key. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/mlflow/logged-models/{model_id}/tags/{tag_key}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def delete_run(self - , run_id: str - ): + self._api.do("DELETE", f"/api/2.0/mlflow/logged-models/{model_id}/tags/{tag_key}", headers=headers) + + def delete_run(self, run_id: str): """Delete a run. - + Marks a run for deletion. - + :param run_id: str ID of the run to delete. 
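        Example (an illustrative sketch; assumes a configured `WorkspaceClient` and an existing
        run ID shown as a placeholder):

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            w.experiments.delete_run(run_id="<run-id>")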
- - + + """ body = {} - if run_id is not None: body['run_id'] = run_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/delete', body=body - - , headers=headers - ) - + if run_id is not None: + body["run_id"] = run_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/runs/delete", body=body, headers=headers) - def delete_runs(self - , experiment_id: str, max_timestamp_millis: int - , * - , max_runs: Optional[int] = None) -> DeleteRunsResponse: + def delete_runs( + self, experiment_id: str, max_timestamp_millis: int, *, max_runs: Optional[int] = None + ) -> DeleteRunsResponse: """Delete runs by creation time. - + Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to delete. :param max_timestamp_millis: int @@ -6292,150 +7091,132 @@ def delete_runs(self :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to delete. The maximum allowed value for max_runs is 10000. - + :returns: :class:`DeleteRunsResponse` """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - if max_runs is not None: body['max_runs'] = max_runs - if max_timestamp_millis is not None: body['max_timestamp_millis'] = max_timestamp_millis - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/databricks/runs/delete-runs', body=body - - , headers=headers - ) + if experiment_id is not None: + body["experiment_id"] = experiment_id + if max_runs is not None: + body["max_runs"] = max_runs + if max_timestamp_millis is not None: + body["max_timestamp_millis"] = max_timestamp_millis + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/databricks/runs/delete-runs", body=body, headers=headers) return DeleteRunsResponse.from_dict(res) - - - - - def delete_tag(self - , run_id: str, key: str - ): + def delete_tag(self, run_id: str, key: str): """Delete a tag on a run. - + Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. - + :param run_id: str ID of the run that the tag was logged under. Must be provided. :param key: str Name of the tag. Maximum size is 255 bytes. Must be provided. - - + + """ body = {} - if key is not None: body['key'] = key - if run_id is not None: body['run_id'] = run_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/delete-tag', body=body - - , headers=headers - ) - + if key is not None: + body["key"] = key + if run_id is not None: + body["run_id"] = run_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/runs/delete-tag", body=body, headers=headers) - def finalize_logged_model(self - , model_id: str, status: LoggedModelStatus - ) -> FinalizeLoggedModelResponse: + def finalize_logged_model(self, model_id: str, status: LoggedModelStatus) -> FinalizeLoggedModelResponse: """Finalize a logged model. - + :param model_id: str The ID of the logged model to finalize. 
:param status: :class:`LoggedModelStatus` Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that something went wrong when logging the model weights / agent code. - + :returns: :class:`FinalizeLoggedModelResponse` """ body = {} - if status is not None: body['status'] = status.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/mlflow/logged-models/{model_id}', body=body - - , headers=headers - ) - return FinalizeLoggedModelResponse.from_dict(res) + if status is not None: + body["status"] = status.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PATCH", f"/api/2.0/mlflow/logged-models/{model_id}", body=body, headers=headers) + return FinalizeLoggedModelResponse.from_dict(res) - def get_by_name(self - , experiment_name: str - ) -> GetExperimentByNameResponse: + def get_by_name(self, experiment_name: str) -> GetExperimentByNameResponse: """Get an experiment by name. - + Gets metadata for an experiment. - + This endpoint will return deleted experiments, but prefers the active experiment if an active and deleted experiment share the same name. If multiple deleted experiments share the same name, the API will return one of them. - + Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists. - + :param experiment_name: str Name of the associated experiment. - + :returns: :class:`GetExperimentByNameResponse` """ - + query = {} - if experiment_name is not None: query['experiment_name'] = experiment_name - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/mlflow/experiments/get-by-name', query=query - - , headers=headers - ) - return GetExperimentByNameResponse.from_dict(res) + if experiment_name is not None: + query["experiment_name"] = experiment_name + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/mlflow/experiments/get-by-name", query=query, headers=headers) + return GetExperimentByNameResponse.from_dict(res) - def get_experiment(self - , experiment_id: str - ) -> GetExperimentResponse: + def get_experiment(self, experiment_id: str) -> GetExperimentResponse: """Get an experiment. - + Gets metadata for an experiment. This method works on deleted experiments. - + :param experiment_id: str ID of the associated experiment. - + :returns: :class:`GetExperimentResponse` """ - + query = {} - if experiment_id is not None: query['experiment_id'] = experiment_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/mlflow/experiments/get', query=query - - , headers=headers - ) - return GetExperimentResponse.from_dict(res) + if experiment_id is not None: + query["experiment_id"] = experiment_id + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/mlflow/experiments/get", query=query, headers=headers) + return GetExperimentResponse.from_dict(res) - def get_history(self - , metric_key: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None, run_id: Optional[str] = None, run_uuid: Optional[str] = None) -> Iterator[Metric]: + def get_history( + self, + metric_key: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + run_id: Optional[str] = None, + run_uuid: Optional[str] = None, + ) -> Iterator[Metric]: """Get metric history for a run. - + Gets a list of all values for the specified metric for a given run. 
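        As an illustration (a sketch, not part of the generated reference), the returned iterator
        follows `next_page_token` transparently, so all values for a metric can be consumed in a
        single loop; the metric key and run ID below are placeholders:

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            for metric in w.experiments.get_history(metric_key="loss", run_id="<run-id>"):
                print(metric.step, metric.value, metric.timestamp)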
- + :param metric_key: str Name of the metric. :param max_results: int (optional) @@ -6448,157 +7229,131 @@ def get_history(self :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run from which to fetch metric values. This field will be removed in a future MLflow version. - + :returns: Iterator over :class:`Metric` """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if metric_key is not None: query['metric_key'] = metric_key - if page_token is not None: query['page_token'] = page_token - if run_id is not None: query['run_id'] = run_id - if run_uuid is not None: query['run_uuid'] = run_uuid - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/mlflow/metrics/get-history', query=query - - , headers=headers - ) - if 'metrics' in json: - for v in json['metrics']: - yield Metric.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if max_results is not None: + query["max_results"] = max_results + if metric_key is not None: + query["metric_key"] = metric_key + if page_token is not None: + query["page_token"] = page_token + if run_id is not None: + query["run_id"] = run_id + if run_uuid is not None: + query["run_uuid"] = run_uuid + headers = { + "Accept": "application/json", + } - def get_logged_model(self - , model_id: str - ) -> GetLoggedModelResponse: + while True: + json = self._api.do("GET", "/api/2.0/mlflow/metrics/get-history", query=query, headers=headers) + if "metrics" in json: + for v in json["metrics"]: + yield Metric.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def get_logged_model(self, model_id: str) -> GetLoggedModelResponse: """Get a logged model. - + :param model_id: str The ID of the logged model to retrieve. - + :returns: :class:`GetLoggedModelResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/mlflow/logged-models/{model_id}' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/mlflow/logged-models/{model_id}", headers=headers) return GetLoggedModelResponse.from_dict(res) - - - - - def get_permission_levels(self - , experiment_id: str - ) -> GetExperimentPermissionLevelsResponse: + def get_permission_levels(self, experiment_id: str) -> GetExperimentPermissionLevelsResponse: """Get experiment permission levels. - + Gets the permission levels that a user can have on an object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`GetExperimentPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/experiments/{experiment_id}/permissionLevels' - - , headers=headers - ) - return GetExperimentPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/experiments/{experiment_id}/permissionLevels", headers=headers) + return GetExperimentPermissionLevelsResponse.from_dict(res) - def get_permissions(self - , experiment_id: str - ) -> ExperimentPermissions: + def get_permissions(self, experiment_id: str) -> ExperimentPermissions: """Get experiment permissions. - + Gets the permissions of an experiment. 
Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`ExperimentPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/experiments/{experiment_id}' - - , headers=headers - ) - return ExperimentPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/experiments/{experiment_id}", headers=headers) + return ExperimentPermissions.from_dict(res) - def get_run(self - , run_id: str - , * - , run_uuid: Optional[str] = None) -> GetRunResponse: + def get_run(self, run_id: str, *, run_uuid: Optional[str] = None) -> GetRunResponse: """Get a run. - + Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. - + If there are multiple values with the latest timestamp, return the maximum of these values. - + :param run_id: str ID of the run to fetch. Must be provided. :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run to fetch. This field will be removed in a future MLflow version. - + :returns: :class:`GetRunResponse` """ - + query = {} - if run_id is not None: query['run_id'] = run_id - if run_uuid is not None: query['run_uuid'] = run_uuid - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/mlflow/runs/get', query=query - - , headers=headers - ) + if run_id is not None: + query["run_id"] = run_id + if run_uuid is not None: + query["run_uuid"] = run_uuid + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/mlflow/runs/get", query=query, headers=headers) return GetRunResponse.from_dict(res) - - - - - def list_artifacts(self - - , * - , page_token: Optional[str] = None, path: Optional[str] = None, run_id: Optional[str] = None, run_uuid: Optional[str] = None) -> Iterator[FileInfo]: + def list_artifacts( + self, + *, + page_token: Optional[str] = None, + path: Optional[str] = None, + run_id: Optional[str] = None, + run_uuid: Optional[str] = None, + ) -> Iterator[FileInfo]: """List artifacts. - + List artifacts for a run. Takes an optional `artifact_path` prefix; if specified, the response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). - + :param page_token: str (optional) The token indicating the page of artifact results to fetch. `page_token` is not supported when listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. @@ -6612,44 +7367,43 @@ def list_artifacts(self :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run whose artifacts to list. This field will be removed in a future MLflow version.
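        Example (an illustrative sketch; assumes a configured `WorkspaceClient` and a run with
        logged artifacts, its ID shown as a placeholder):

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            for file_info in w.experiments.list_artifacts(run_id="<run-id>"):
                print(file_info.path, file_info.is_dir)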
- + :returns: Iterator over :class:`FileInfo` """ - - query = {} - if page_token is not None: query['page_token'] = page_token - if path is not None: query['path'] = path - if run_id is not None: query['run_id'] = run_id - if run_uuid is not None: query['run_uuid'] = run_uuid - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/mlflow/artifacts/list', query=query - - , headers=headers - ) - if 'files' in json: - for v in json['files']: - yield FileInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_token is not None: + query["page_token"] = page_token + if path is not None: + query["path"] = path + if run_id is not None: + query["run_id"] = run_id + if run_uuid is not None: + query["run_uuid"] = run_uuid + headers = { + "Accept": "application/json", + } - def list_experiments(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None, view_type: Optional[ViewType] = None) -> Iterator[Experiment]: + while True: + json = self._api.do("GET", "/api/2.0/mlflow/artifacts/list", query=query, headers=headers) + if "files" in json: + for v in json["files"]: + yield FileInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_experiments( + self, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + view_type: Optional[ViewType] = None, + ) -> Iterator[Experiment]: """List experiments. - + Gets a list of all experiments. - + :param max_results: int (optional) Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are @@ -6658,82 +7412,81 @@ def list_experiments(self Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. 
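        Example (an illustrative sketch; assumes a configured `WorkspaceClient`). The iterator
        handles `next_page_token` internally, so no manual pagination is needed:

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import ml

            w = WorkspaceClient()
            for experiment in w.experiments.list_experiments(view_type=ml.ViewType.ACTIVE_ONLY,
                                                             max_results=100):
                print(experiment.experiment_id, experiment.name)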
- + :returns: Iterator over :class:`Experiment` """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if view_type is not None: query['view_type'] = view_type.value - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/mlflow/experiments/list', query=query - - , headers=headers - ) - if 'experiments' in json: - for v in json['experiments']: - yield Experiment.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if view_type is not None: + query["view_type"] = view_type.value + headers = { + "Accept": "application/json", + } - def log_batch(self - - , * - , metrics: Optional[List[Metric]] = None, params: Optional[List[Param]] = None, run_id: Optional[str] = None, tags: Optional[List[RunTag]] = None): + while True: + json = self._api.do("GET", "/api/2.0/mlflow/experiments/list", query=query, headers=headers) + if "experiments" in json: + for v in json["experiments"]: + yield Experiment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def log_batch( + self, + *, + metrics: Optional[List[Metric]] = None, + params: Optional[List[Param]] = None, + run_id: Optional[str] = None, + tags: Optional[List[RunTag]] = None, + ): """Log a batch of metrics/params/tags for a run. - + Logs a batch of metrics, params, and tags for a run. If any data fails to be persisted, the server will respond with an error (non-200 status code). - + In case of error (due to internal server error or an invalid request), partial data may be written. - + You can write metrics, params, and tags in interleaving fashion, but writes within a given entity type are guaranteed to follow the order specified in the request body. - + The overwrite behavior for metrics, params, and tags is as follows: - + * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to the set of values for the metric with the provided key. - + * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple tag values with the same key are provided in the same API request, the last-provided tag value is written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent. - + * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will result in an error). However, logging the same param (key, value) is permitted. Specifically, logging a param is idempotent. - + Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB in size and contain: - + * No more than 1000 metrics, params, and tags in total - + * Up to 1000 metrics - + * Up to 100 params - + * Up to 100 tags - + For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900 metrics, 50 params, and 51 tags is invalid. - + The following limits also apply to metric, param, and tag keys and values: - + * Metric keys, param keys, and tag keys can be up to 250 characters in length - + * Parameter and tag values can be up to 250 characters in length - + :param metrics: List[:class:`Metric`] (optional) Metrics to log.
A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and tags in total. @@ -6745,102 +7498,100 @@ def log_batch(self :param tags: List[:class:`RunTag`] (optional) Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags in total. - - - """ - body = {} - if metrics is not None: body['metrics'] = [v.as_dict() for v in metrics] - if params is not None: body['params'] = [v.as_dict() for v in params] - if run_id is not None: body['run_id'] = run_id - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/log-batch', body=body - - , headers=headers - ) - - - - - def log_inputs(self - , run_id: str - , * - , datasets: Optional[List[DatasetInput]] = None, models: Optional[List[ModelInput]] = None): + """ + body = {} + if metrics is not None: + body["metrics"] = [v.as_dict() for v in metrics] + if params is not None: + body["params"] = [v.as_dict() for v in params] + if run_id is not None: + body["run_id"] = run_id + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/mlflow/runs/log-batch", body=body, headers=headers) + + def log_inputs( + self, run_id: str, *, datasets: Optional[List[DatasetInput]] = None, models: Optional[List[ModelInput]] = None + ): """Log inputs to a run. - + **NOTE:** Experimental: This API may change or be removed in a future release without warning. - + Logs inputs, such as datasets and models, to an MLflow Run. - + :param run_id: str ID of the run to log under :param datasets: List[:class:`DatasetInput`] (optional) Dataset inputs :param models: List[:class:`ModelInput`] (optional) Model inputs - - + + """ body = {} - if datasets is not None: body['datasets'] = [v.as_dict() for v in datasets] - if models is not None: body['models'] = [v.as_dict() for v in models] - if run_id is not None: body['run_id'] = run_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/log-inputs', body=body - - , headers=headers - ) - + if datasets is not None: + body["datasets"] = [v.as_dict() for v in datasets] + if models is not None: + body["models"] = [v.as_dict() for v in models] + if run_id is not None: + body["run_id"] = run_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/runs/log-inputs", body=body, headers=headers) - def log_logged_model_params(self - , model_id: str - , * - , params: Optional[List[LoggedModelParameter]] = None): + def log_logged_model_params(self, model_id: str, *, params: Optional[List[LoggedModelParameter]] = None): """Log params for a logged model. - + Logs params for a logged model. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training. A param can be logged only once for a logged model, and attempting to overwrite an existing param with a different value will result in an error - + :param model_id: str The ID of the logged model to log params for. :param params: List[:class:`LoggedModelParameter`] (optional) Parameters to attach to the model. 
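        Example (an illustrative sketch; the `LoggedModelParameter` keyword arguments are an
        assumption based on the dataclasses in this module, and the model ID is a placeholder
        for one returned by `create_logged_model`):

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import ml

            w = WorkspaceClient()
            w.experiments.log_logged_model_params(
                model_id="<logged-model-id>",
                params=[ml.LoggedModelParameter(key="alpha", value="0.3")],
            )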
- - - """ - body = {} - if params is not None: body['params'] = [v.as_dict() for v in params] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST',f'/api/2.0/mlflow/logged-models/{model_id}/params', body=body - - , headers=headers - ) - - - - - def log_metric(self - , key: str, value: float, timestamp: int - , * - , dataset_digest: Optional[str] = None, dataset_name: Optional[str] = None, model_id: Optional[str] = None, run_id: Optional[str] = None, run_uuid: Optional[str] = None, step: Optional[int] = None): + """ + body = {} + if params is not None: + body["params"] = [v.as_dict() for v in params] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", f"/api/2.0/mlflow/logged-models/{model_id}/params", body=body, headers=headers) + + def log_metric( + self, + key: str, + value: float, + timestamp: int, + *, + dataset_digest: Optional[str] = None, + dataset_name: Optional[str] = None, + model_id: Optional[str] = None, + run_id: Optional[str] = None, + run_uuid: Optional[str] = None, + step: Optional[int] = None, + ): """Log a metric for a run. - + Log a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be logged multiple times. - + :param key: str Name of the metric. :param value: float @@ -6862,103 +7613,92 @@ def log_metric(self removed in a future MLflow version. :param step: int (optional) Step at which to log the metric - - - """ - body = {} - if dataset_digest is not None: body['dataset_digest'] = dataset_digest - if dataset_name is not None: body['dataset_name'] = dataset_name - if key is not None: body['key'] = key - if model_id is not None: body['model_id'] = model_id - if run_id is not None: body['run_id'] = run_id - if run_uuid is not None: body['run_uuid'] = run_uuid - if step is not None: body['step'] = step - if timestamp is not None: body['timestamp'] = timestamp - if value is not None: body['value'] = value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/log-metric', body=body - - , headers=headers - ) - - - - - def log_model(self - - , * - , model_json: Optional[str] = None, run_id: Optional[str] = None): + """ + body = {} + if dataset_digest is not None: + body["dataset_digest"] = dataset_digest + if dataset_name is not None: + body["dataset_name"] = dataset_name + if key is not None: + body["key"] = key + if model_id is not None: + body["model_id"] = model_id + if run_id is not None: + body["run_id"] = run_id + if run_uuid is not None: + body["run_uuid"] = run_uuid + if step is not None: + body["step"] = step + if timestamp is not None: + body["timestamp"] = timestamp + if value is not None: + body["value"] = value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/mlflow/runs/log-metric", body=body, headers=headers) + + def log_model(self, *, model_json: Optional[str] = None, run_id: Optional[str] = None): """Log a model. - + **NOTE:** Experimental: This API may change or be removed in a future release without warning. - + :param model_json: str (optional) MLmodel file in json format. 
:param run_id: str (optional) ID of the run to log under - - + + """ body = {} - if model_json is not None: body['model_json'] = model_json - if run_id is not None: body['run_id'] = run_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/log-model', body=body - - , headers=headers - ) - + if model_json is not None: + body["model_json"] = model_json + if run_id is not None: + body["run_id"] = run_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/runs/log-model", body=body, headers=headers) - def log_outputs(self - , run_id: str - , * - , models: Optional[List[ModelOutput]] = None): + def log_outputs(self, run_id: str, *, models: Optional[List[ModelOutput]] = None): """Log outputs from a run. - + **NOTE**: Experimental: This API may change or be removed in a future release without warning. - + Logs outputs, such as models, from an MLflow Run. - + :param run_id: str The ID of the Run from which to log outputs. :param models: List[:class:`ModelOutput`] (optional) The model outputs from the Run. - - + + """ body = {} - if models is not None: body['models'] = [v.as_dict() for v in models] - if run_id is not None: body['run_id'] = run_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/outputs', body=body - - , headers=headers - ) - + if models is not None: + body["models"] = [v.as_dict() for v in models] + if run_id is not None: + body["run_id"] = run_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/runs/outputs", body=body, headers=headers) - def log_param(self - , key: str, value: str - , * - , run_id: Optional[str] = None, run_uuid: Optional[str] = None): + def log_param(self, key: str, value: str, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None): """Log a param for a run. - + Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only once for a run. - + :param key: str Name of the param. Maximum size is 255 bytes. :param value: str @@ -6968,94 +7708,80 @@ def log_param(self :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the param. This field will be removed in a future MLflow version. - - - """ - body = {} - if key is not None: body['key'] = key - if run_id is not None: body['run_id'] = run_id - if run_uuid is not None: body['run_uuid'] = run_uuid - if value is not None: body['value'] = value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/log-parameter', body=body - - , headers=headers - ) - - - - - def restore_experiment(self - , experiment_id: str - ): + """ + body = {} + if key is not None: + body["key"] = key + if run_id is not None: + body["run_id"] = run_id + if run_uuid is not None: + body["run_uuid"] = run_uuid + if value is not None: + body["value"] = value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/mlflow/runs/log-parameter", body=body, headers=headers) + + def restore_experiment(self, experiment_id: str): """Restore an experiment. 
- + Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If the experiment uses FileStore, underlying artifacts associated with the experiment are also restored. - + Throws `RESOURCE_DOES_NOT_EXIST` if the experiment was never created or was permanently deleted. - + :param experiment_id: str ID of the associated experiment. - - + + """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/experiments/restore', body=body - - , headers=headers - ) - + if experiment_id is not None: + body["experiment_id"] = experiment_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/experiments/restore", body=body, headers=headers) - def restore_run(self - , run_id: str - ): + def restore_run(self, run_id: str): """Restore a run. - + Restores a deleted run. This also restores associated metadata, runs, metrics, params, and tags. - + Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. - + :param run_id: str ID of the run to restore. - - + + """ body = {} - if run_id is not None: body['run_id'] = run_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/restore', body=body - - , headers=headers - ) - + if run_id is not None: + body["run_id"] = run_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/runs/restore", body=body, headers=headers) - def restore_runs(self - , experiment_id: str, min_timestamp_millis: int - , * - , max_runs: Optional[int] = None) -> RestoreRunsResponse: + def restore_runs( + self, experiment_id: str, min_timestamp_millis: int, *, max_runs: Optional[int] = None + ) -> RestoreRunsResponse: """Restore runs by deletion time. - + Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to restore. :param min_timestamp_millis: int @@ -7064,33 +7790,37 @@ def restore_runs(self :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to restore. The maximum allowed value for max_runs is 10000.
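        Example (an illustrative sketch; assumes a configured `WorkspaceClient`, with placeholder
        values for the experiment ID and the deletion-time cutoff):

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            resp = w.experiments.restore_runs(
                experiment_id="<experiment-id>",
                min_timestamp_millis=1672531200000,  # restore runs deleted no earlier than this
                max_runs=100,
            )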
- + :returns: :class:`RestoreRunsResponse` """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - if max_runs is not None: body['max_runs'] = max_runs - if min_timestamp_millis is not None: body['min_timestamp_millis'] = min_timestamp_millis - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/databricks/runs/restore-runs', body=body - - , headers=headers - ) + if experiment_id is not None: + body["experiment_id"] = experiment_id + if max_runs is not None: + body["max_runs"] = max_runs + if min_timestamp_millis is not None: + body["min_timestamp_millis"] = min_timestamp_millis + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/databricks/runs/restore-runs", body=body, headers=headers) return RestoreRunsResponse.from_dict(res) - - - - - def search_experiments(self - - , * - , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None, view_type: Optional[ViewType] = None) -> Iterator[Experiment]: + def search_experiments( + self, + *, + filter: Optional[str] = None, + max_results: Optional[int] = None, + order_by: Optional[List[str]] = None, + page_token: Optional[str] = None, + view_type: Optional[ViewType] = None, + ) -> Iterator[Experiment]: """Search experiments. - + Searches for experiments that satisfy specified search criteria. - + :param filter: str (optional) String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'") :param max_results: int (optional) @@ -7103,44 +7833,48 @@ def search_experiments(self Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. 
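        Example (an illustrative sketch; assumes a configured `WorkspaceClient`). The iterator
        follows `next_page_token` internally; the filter string reuses the syntax shown above:

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            for experiment in w.experiments.search_experiments(filter="name ILIKE 'my-experiment%'"):
                print(experiment.name)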
- + :returns: Iterator over :class:`Experiment` """ body = {} - if filter is not None: body['filter'] = filter - if max_results is not None: body['max_results'] = max_results - if order_by is not None: body['order_by'] = [v for v in order_by] - if page_token is not None: body['page_token'] = page_token - if view_type is not None: body['view_type'] = view_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - - - while True: - json = self._api.do('POST','/api/2.0/mlflow/experiments/search', body=body - - , headers=headers - ) - if 'experiments' in json: - for v in json['experiments']: - yield Experiment.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - body['page_token'] = json['next_page_token'] - - - - - + if filter is not None: + body["filter"] = filter + if max_results is not None: + body["max_results"] = max_results + if order_by is not None: + body["order_by"] = [v for v in order_by] + if page_token is not None: + body["page_token"] = page_token + if view_type is not None: + body["view_type"] = view_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - def search_logged_models(self - - , * - , datasets: Optional[List[SearchLoggedModelsDataset]] = None, experiment_ids: Optional[List[str]] = None, filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[SearchLoggedModelsOrderBy]] = None, page_token: Optional[str] = None) -> SearchLoggedModelsResponse: + while True: + json = self._api.do("POST", "/api/2.0/mlflow/experiments/search", body=body, headers=headers) + if "experiments" in json: + for v in json["experiments"]: + yield Experiment.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + body["page_token"] = json["next_page_token"] + + def search_logged_models( + self, + *, + datasets: Optional[List[SearchLoggedModelsDataset]] = None, + experiment_ids: Optional[List[str]] = None, + filter: Optional[str] = None, + max_results: Optional[int] = None, + order_by: Optional[List[SearchLoggedModelsOrderBy]] = None, + page_token: Optional[str] = None, + ) -> SearchLoggedModelsResponse: """Search logged models. - + Search for Logged Models that satisfy specified search criteria. - + :param datasets: List[:class:`SearchLoggedModelsDataset`] (optional) List of datasets on which to apply the metrics filter clauses. For example, a filter with `metrics.accuracy > 0.9` and dataset info with name "test_dataset" means we will return all logged @@ -7152,7 +7886,7 @@ def search_logged_models(self :param filter: str (optional) A filter expression over logged model info and data that allows returning a subset of logged models. The syntax is a subset of SQL that supports AND'ing together binary operations. - + Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``. :param max_results: int (optional) The maximum number of Logged Models to return. The maximum limit is 50. @@ -7160,50 +7894,58 @@ def search_logged_models(self The list of columns for ordering the results, with additional fields for sorting criteria. :param page_token: str (optional) The token indicating the page of logged models to fetch. 
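        Example (an illustrative sketch; assumes a configured `WorkspaceClient`; the experiment ID
        is a placeholder and the filter reuses the syntax shown above). Note this method returns a
        single response page rather than an iterator, so paging via `page_token` is the caller's
        responsibility:

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            resp = w.experiments.search_logged_models(
                experiment_ids=["<experiment-id>"],
                filter="metrics.accuracy > 0.9",
                max_results=10,
            )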
- + :returns: :class:`SearchLoggedModelsResponse` """ body = {} - if datasets is not None: body['datasets'] = [v.as_dict() for v in datasets] - if experiment_ids is not None: body['experiment_ids'] = [v for v in experiment_ids] - if filter is not None: body['filter'] = filter - if max_results is not None: body['max_results'] = max_results - if order_by is not None: body['order_by'] = [v.as_dict() for v in order_by] - if page_token is not None: body['page_token'] = page_token - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/logged-models/search', body=body - - , headers=headers - ) + if datasets is not None: + body["datasets"] = [v.as_dict() for v in datasets] + if experiment_ids is not None: + body["experiment_ids"] = [v for v in experiment_ids] + if filter is not None: + body["filter"] = filter + if max_results is not None: + body["max_results"] = max_results + if order_by is not None: + body["order_by"] = [v.as_dict() for v in order_by] + if page_token is not None: + body["page_token"] = page_token + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/logged-models/search", body=body, headers=headers) return SearchLoggedModelsResponse.from_dict(res) - - - - - def search_runs(self - - , * - , experiment_ids: Optional[List[str]] = None, filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None, run_view_type: Optional[ViewType] = None) -> Iterator[Run]: + def search_runs( + self, + *, + experiment_ids: Optional[List[str]] = None, + filter: Optional[str] = None, + max_results: Optional[int] = None, + order_by: Optional[List[str]] = None, + page_token: Optional[str] = None, + run_view_type: Optional[ViewType] = None, + ) -> Iterator[Run]: """Search for runs. - + Searches for runs that satisfy expressions. - + Search expressions can use `mlflowMetric` and `mlflowParam` keys. - + :param experiment_ids: List[str] (optional) List of experiment IDs to search over. :param filter: str (optional) A filter expression over params, metrics, and tags, that allows returning a subset of runs. The syntax is a subset of SQL that supports ANDing together binary operations between a param, metric, or tag and a constant. - + Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'` - + You can select columns with special characters (hyphen, space, period, etc.) by using double quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` - + Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`. :param max_results: int (optional) Maximum number of runs desired. Max threshold is 50000 @@ -7217,133 +7959,114 @@ def search_runs(self Token for the current page of runs. :param run_view_type: :class:`ViewType` (optional) Whether to display only active, only deleted, or all runs. Defaults to only active runs. 
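        Example (an illustrative sketch; assumes a configured `WorkspaceClient` and a placeholder
        experiment ID; the filter reuses the syntax shown above):

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            for run in w.experiments.search_runs(
                    experiment_ids=["<experiment-id>"],
                    filter="metrics.rmse < 1 and params.model_class = 'LogisticRegression'"):
                print(run.info.run_id, run.info.status)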
- + :returns: Iterator over :class:`Run` """ body = {} - if experiment_ids is not None: body['experiment_ids'] = [v for v in experiment_ids] - if filter is not None: body['filter'] = filter - if max_results is not None: body['max_results'] = max_results - if order_by is not None: body['order_by'] = [v for v in order_by] - if page_token is not None: body['page_token'] = page_token - if run_view_type is not None: body['run_view_type'] = run_view_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - - - while True: - json = self._api.do('POST','/api/2.0/mlflow/runs/search', body=body - - , headers=headers - ) - if 'runs' in json: - for v in json['runs']: - yield Run.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - body['page_token'] = json['next_page_token'] - - - - - + if experiment_ids is not None: + body["experiment_ids"] = [v for v in experiment_ids] + if filter is not None: + body["filter"] = filter + if max_results is not None: + body["max_results"] = max_results + if order_by is not None: + body["order_by"] = [v for v in order_by] + if page_token is not None: + body["page_token"] = page_token + if run_view_type is not None: + body["run_view_type"] = run_view_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - def set_experiment_tag(self - , experiment_id: str, key: str, value: str - ): + while True: + json = self._api.do("POST", "/api/2.0/mlflow/runs/search", body=body, headers=headers) + if "runs" in json: + for v in json["runs"]: + yield Run.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + body["page_token"] = json["next_page_token"] + + def set_experiment_tag(self, experiment_id: str, key: str, value: str): """Set a tag for an experiment. - + Sets a tag on an experiment. Experiment tags are metadata that can be updated. - + :param experiment_id: str ID of the experiment under which to log the tag. Must be provided. :param key: str Name of the tag. Keys up to 250 bytes in size are supported. :param value: str String value of the tag being logged. Values up to 64KB in size are supported. - - + + """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - if key is not None: body['key'] = key - if value is not None: body['value'] = value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/experiments/set-experiment-tag', body=body - - , headers=headers - ) - + if experiment_id is not None: + body["experiment_id"] = experiment_id + if key is not None: + body["key"] = key + if value is not None: + body["value"] = value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/experiments/set-experiment-tag", body=body, headers=headers) - def set_logged_model_tags(self - , model_id: str - , * - , tags: Optional[List[LoggedModelTag]] = None): + def set_logged_model_tags(self, model_id: str, *, tags: Optional[List[LoggedModelTag]] = None): """Set a tag for a logged model. - + :param model_id: str The ID of the logged model to set the tags on. :param tags: List[:class:`LoggedModelTag`] (optional) The tags to set on the logged model. 
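# Sketch: search runs with the SQL-like filter grammar documented above, then
# tag the owning experiment. Assumes `w` (WorkspaceClient); the IDs, filter,
# and tag values are placeholders, and field access follows the Run dataclass.
for run in w.experiments.search_runs(
    experiment_ids=["<experiment-id>"],
    filter="metrics.rmse < 1 and params.model_class = 'LogisticRegression'",
):
    print(run.info.run_id)
w.experiments.set_experiment_tag(experiment_id="<experiment-id>", key="team", value="ml-platform")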
- - + + """ body = {} - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/mlflow/logged-models/{model_id}/tags', body=body - - , headers=headers - ) - + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("PATCH", f"/api/2.0/mlflow/logged-models/{model_id}/tags", body=body, headers=headers) - def set_permissions(self - , experiment_id: str - , * - , access_control_list: Optional[List[ExperimentAccessControlRequest]] = None) -> ExperimentPermissions: + def set_permissions( + self, experiment_id: str, *, access_control_list: Optional[List[ExperimentAccessControlRequest]] = None + ) -> ExperimentPermissions: """Set experiment permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/experiments/{experiment_id}', body=body - - , headers=headers - ) - return ExperimentPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", f"/api/2.0/permissions/experiments/{experiment_id}", body=body, headers=headers) + return ExperimentPermissions.from_dict(res) - def set_tag(self - , key: str, value: str - , * - , run_id: Optional[str] = None, run_uuid: Optional[str] = None): + def set_tag(self, key: str, value: str, *, run_id: Optional[str] = None, run_uuid: Optional[str] = None): """Set a tag for a run. - + Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. - + :param key: str Name of the tag. Keys up to 250 bytes in size are supported. :param value: str @@ -7353,92 +8076,86 @@ def set_tag(self :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the tag. This field will be removed in a future MLflow version. 
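# Sketch of the replace-all semantics of set_permissions: the PUT body carries
# the full direct ACL and omitted grants are dropped (update_permissions,
# below, PATCHes instead). Assumes `w`; the principal is a placeholder and the
# dataclass/enum names are taken from this module.
from databricks.sdk.service.ml import ExperimentAccessControlRequest, ExperimentPermissionLevel

w.experiments.set_permissions(
    experiment_id="<experiment-id>",
    access_control_list=[
        ExperimentAccessControlRequest(
            user_name="someone@example.com",
            permission_level=ExperimentPermissionLevel.CAN_MANAGE,
        )
    ],
)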
- - - """ - body = {} - if key is not None: body['key'] = key - if run_id is not None: body['run_id'] = run_id - if run_uuid is not None: body['run_uuid'] = run_uuid - if value is not None: body['value'] = value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/runs/set-tag', body=body - - , headers=headers - ) - - - - - def update_experiment(self - , experiment_id: str - , * - , new_name: Optional[str] = None): + """ + body = {} + if key is not None: + body["key"] = key + if run_id is not None: + body["run_id"] = run_id + if run_uuid is not None: + body["run_uuid"] = run_uuid + if value is not None: + body["value"] = value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/mlflow/runs/set-tag", body=body, headers=headers) + + def update_experiment(self, experiment_id: str, *, new_name: Optional[str] = None): """Update an experiment. - + Updates experiment metadata. - + :param experiment_id: str ID of the associated experiment. :param new_name: str (optional) If provided, the experiment's name is changed to the new name. The new name must be unique. - - + + """ body = {} - if experiment_id is not None: body['experiment_id'] = experiment_id - if new_name is not None: body['new_name'] = new_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/experiments/update', body=body - - , headers=headers - ) - + if experiment_id is not None: + body["experiment_id"] = experiment_id + if new_name is not None: + body["new_name"] = new_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/experiments/update", body=body, headers=headers) - def update_permissions(self - , experiment_id: str - , * - , access_control_list: Optional[List[ExperimentAccessControlRequest]] = None) -> ExperimentPermissions: + def update_permissions( + self, experiment_id: str, *, access_control_list: Optional[List[ExperimentAccessControlRequest]] = None + ) -> ExperimentPermissions: """Update experiment permissions. - + Updates the permissions on an experiment. Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. 
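# Sketch: set a run tag and rename an experiment, mirroring the signatures
# above. Assumes `w`; the IDs and values are placeholders.
w.experiments.set_tag(key="stage", value="prod", run_id="<run-id>")
w.experiments.update_experiment(experiment_id="<experiment-id>", new_name="demo-renamed")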
:param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/experiments/{experiment_id}', body=body - - , headers=headers - ) - return ExperimentPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PATCH", f"/api/2.0/permissions/experiments/{experiment_id}", body=body, headers=headers) + return ExperimentPermissions.from_dict(res) - def update_run(self - - , * - , end_time: Optional[int] = None, run_id: Optional[str] = None, run_name: Optional[str] = None, run_uuid: Optional[str] = None, status: Optional[UpdateRunStatus] = None) -> UpdateRunResponse: + def update_run( + self, + *, + end_time: Optional[int] = None, + run_id: Optional[str] = None, + run_name: Optional[str] = None, + run_uuid: Optional[str] = None, + status: Optional[UpdateRunStatus] = None, + ) -> UpdateRunResponse: """Update a run. - + Updates run metadata. - + :param end_time: int (optional) Unix timestamp in milliseconds of when the run ended. :param run_id: str (optional) @@ -7450,75 +8167,95 @@ def update_run(self MLflow version. :param status: :class:`UpdateRunStatus` (optional) Updated status of the run. - + :returns: :class:`UpdateRunResponse` """ body = {} - if end_time is not None: body['end_time'] = end_time - if run_id is not None: body['run_id'] = run_id - if run_name is not None: body['run_name'] = run_name - if run_uuid is not None: body['run_uuid'] = run_uuid - if status is not None: body['status'] = status.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/runs/update', body=body - - , headers=headers - ) + if end_time is not None: + body["end_time"] = end_time + if run_id is not None: + body["run_id"] = run_id + if run_name is not None: + body["run_name"] = run_name + if run_uuid is not None: + body["run_uuid"] = run_uuid + if status is not None: + body["status"] = status.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/runs/update", body=body, headers=headers) return UpdateRunResponse.from_dict(res) - - + class ForecastingAPI: """The Forecasting API allows you to create and get serverless forecasting experiments""" - + def __init__(self, api_client): self._api = api_client - - - - - def wait_get_experiment_forecasting_succeeded(self, experiment_id: str, - timeout=timedelta(minutes=120), callback: Optional[Callable[[ForecastingExperiment], None]] = None) -> ForecastingExperiment: - deadline = time.time() + timeout.total_seconds() - target_states = (ForecastingExperimentState.SUCCEEDED, ) - failure_states = (ForecastingExperimentState.FAILED, ForecastingExperimentState.CANCELLED, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get_experiment(experiment_id=experiment_id) - status = poll.state - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"experiment_id={experiment_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - - def create_experiment(self - , train_data_path: str, target_column: str, time_column: str, forecast_granularity: str, forecast_horizon: int - , * - , custom_weights_column: Optional[str] = None, experiment_path: Optional[str] = None, future_feature_data_path: Optional[str] = None, holiday_regions: Optional[List[str]] = None, include_features: Optional[List[str]] = None, max_runtime: Optional[int] = None, prediction_data_path: Optional[str] = None, primary_metric: Optional[str] = None, register_to: Optional[str] = None, split_column: Optional[str] = None, timeseries_identifier_columns: Optional[List[str]] = None, training_frameworks: Optional[List[str]] = None) -> Wait[ForecastingExperiment]: + def wait_get_experiment_forecasting_succeeded( + self, + experiment_id: str, + timeout=timedelta(minutes=120), + callback: Optional[Callable[[ForecastingExperiment], None]] = None, + ) -> ForecastingExperiment: + deadline = time.time() + timeout.total_seconds() + target_states = (ForecastingExperimentState.SUCCEEDED,) + failure_states = ( + ForecastingExperimentState.FAILED, + ForecastingExperimentState.CANCELLED, + ) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get_experiment(experiment_id=experiment_id) + status = poll.state + status_message = f"current status: {status}" + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach SUCCEEDED, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"experiment_id={experiment_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create_experiment( + self, + train_data_path: str, + target_column: str, + time_column: str, + forecast_granularity: str, + forecast_horizon: int, + *, + custom_weights_column: Optional[str] = None, + experiment_path: Optional[str] = None, + future_feature_data_path: Optional[str] = None, + holiday_regions: Optional[List[str]] = None, + include_features: Optional[List[str]] = None, + max_runtime: Optional[int] = None, + prediction_data_path: Optional[str] = None, + primary_metric: Optional[str] = None, + register_to: Optional[str] = None, + split_column: Optional[str] = None, + timeseries_identifier_columns: Optional[List[str]] = None, + training_frameworks: Optional[List[str]] = None, + ) -> Wait[ForecastingExperiment]: """Create a forecasting experiment. - + Creates a serverless forecasting experiment. Returns the experiment ID. 
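# Sketch of driving the waiter above: create_experiment returns a
# Wait[ForecastingExperiment], and .result() runs the linear-backoff poller
# (1s up to 10s per attempt, plus jitter) until SUCCEEDED, FAILED, or
# CANCELLED. The accessor name `w.forecasting` and all table paths, column
# names, and granularity values below are assumptions, not from this patch.
from datetime import timedelta

experiment = w.forecasting.create_experiment(
    train_data_path="catalog.schema.sales_train",
    target_column="units_sold",
    time_column="date",
    forecast_granularity="1 day",
    forecast_horizon=30,
).result(timeout=timedelta(minutes=60))
print(experiment.state)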
- + :param train_data_path: str The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used as training data for the forecasting model. @@ -7567,216 +8304,255 @@ def create_experiment(self :param training_frameworks: List[str] (optional) List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', 'DeepAR'. An empty list includes all supported frameworks. - + :returns: Long-running operation waiter for :class:`ForecastingExperiment`. See :method:wait_get_experiment_forecasting_succeeded for more details. """ body = {} - if custom_weights_column is not None: body['custom_weights_column'] = custom_weights_column - if experiment_path is not None: body['experiment_path'] = experiment_path - if forecast_granularity is not None: body['forecast_granularity'] = forecast_granularity - if forecast_horizon is not None: body['forecast_horizon'] = forecast_horizon - if future_feature_data_path is not None: body['future_feature_data_path'] = future_feature_data_path - if holiday_regions is not None: body['holiday_regions'] = [v for v in holiday_regions] - if include_features is not None: body['include_features'] = [v for v in include_features] - if max_runtime is not None: body['max_runtime'] = max_runtime - if prediction_data_path is not None: body['prediction_data_path'] = prediction_data_path - if primary_metric is not None: body['primary_metric'] = primary_metric - if register_to is not None: body['register_to'] = register_to - if split_column is not None: body['split_column'] = split_column - if target_column is not None: body['target_column'] = target_column - if time_column is not None: body['time_column'] = time_column - if timeseries_identifier_columns is not None: body['timeseries_identifier_columns'] = [v for v in timeseries_identifier_columns] - if train_data_path is not None: body['train_data_path'] = train_data_path - if training_frameworks is not None: body['training_frameworks'] = [v for v in training_frameworks] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.0/automl/create-forecasting-experiment', body=body - - , headers=headers - ) - return Wait(self.wait_get_experiment_forecasting_succeeded - , response = CreateForecastingExperimentResponse.from_dict(op_response) - , experiment_id=op_response['experiment_id']) - - - def create_experiment_and_wait(self - , train_data_path: str, target_column: str, time_column: str, forecast_granularity: str, forecast_horizon: int - , * - , custom_weights_column: Optional[str] = None, experiment_path: Optional[str] = None, future_feature_data_path: Optional[str] = None, holiday_regions: Optional[List[str]] = None, include_features: Optional[List[str]] = None, max_runtime: Optional[int] = None, prediction_data_path: Optional[str] = None, primary_metric: Optional[str] = None, register_to: Optional[str] = None, split_column: Optional[str] = None, timeseries_identifier_columns: Optional[List[str]] = None, training_frameworks: Optional[List[str]] = None, - timeout=timedelta(minutes=120)) -> ForecastingExperiment: - return self.create_experiment(custom_weights_column=custom_weights_column, experiment_path=experiment_path, forecast_granularity=forecast_granularity, forecast_horizon=forecast_horizon, future_feature_data_path=future_feature_data_path, holiday_regions=holiday_regions, include_features=include_features, max_runtime=max_runtime, prediction_data_path=prediction_data_path, 
primary_metric=primary_metric, register_to=register_to, split_column=split_column, target_column=target_column, time_column=time_column, timeseries_identifier_columns=timeseries_identifier_columns, train_data_path=train_data_path, training_frameworks=training_frameworks).result(timeout=timeout) - - - - - def get_experiment(self - , experiment_id: str - ) -> ForecastingExperiment: + if custom_weights_column is not None: + body["custom_weights_column"] = custom_weights_column + if experiment_path is not None: + body["experiment_path"] = experiment_path + if forecast_granularity is not None: + body["forecast_granularity"] = forecast_granularity + if forecast_horizon is not None: + body["forecast_horizon"] = forecast_horizon + if future_feature_data_path is not None: + body["future_feature_data_path"] = future_feature_data_path + if holiday_regions is not None: + body["holiday_regions"] = [v for v in holiday_regions] + if include_features is not None: + body["include_features"] = [v for v in include_features] + if max_runtime is not None: + body["max_runtime"] = max_runtime + if prediction_data_path is not None: + body["prediction_data_path"] = prediction_data_path + if primary_metric is not None: + body["primary_metric"] = primary_metric + if register_to is not None: + body["register_to"] = register_to + if split_column is not None: + body["split_column"] = split_column + if target_column is not None: + body["target_column"] = target_column + if time_column is not None: + body["time_column"] = time_column + if timeseries_identifier_columns is not None: + body["timeseries_identifier_columns"] = [v for v in timeseries_identifier_columns] + if train_data_path is not None: + body["train_data_path"] = train_data_path + if training_frameworks is not None: + body["training_frameworks"] = [v for v in training_frameworks] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.0/automl/create-forecasting-experiment", body=body, headers=headers) + return Wait( + self.wait_get_experiment_forecasting_succeeded, + response=CreateForecastingExperimentResponse.from_dict(op_response), + experiment_id=op_response["experiment_id"], + ) + + def create_experiment_and_wait( + self, + train_data_path: str, + target_column: str, + time_column: str, + forecast_granularity: str, + forecast_horizon: int, + *, + custom_weights_column: Optional[str] = None, + experiment_path: Optional[str] = None, + future_feature_data_path: Optional[str] = None, + holiday_regions: Optional[List[str]] = None, + include_features: Optional[List[str]] = None, + max_runtime: Optional[int] = None, + prediction_data_path: Optional[str] = None, + primary_metric: Optional[str] = None, + register_to: Optional[str] = None, + split_column: Optional[str] = None, + timeseries_identifier_columns: Optional[List[str]] = None, + training_frameworks: Optional[List[str]] = None, + timeout=timedelta(minutes=120), + ) -> ForecastingExperiment: + return self.create_experiment( + custom_weights_column=custom_weights_column, + experiment_path=experiment_path, + forecast_granularity=forecast_granularity, + forecast_horizon=forecast_horizon, + future_feature_data_path=future_feature_data_path, + holiday_regions=holiday_regions, + include_features=include_features, + max_runtime=max_runtime, + prediction_data_path=prediction_data_path, + primary_metric=primary_metric, + register_to=register_to, + split_column=split_column, + target_column=target_column, + time_column=time_column, + 
timeseries_identifier_columns=timeseries_identifier_columns, + train_data_path=train_data_path, + training_frameworks=training_frameworks, + ).result(timeout=timeout) + + def get_experiment(self, experiment_id: str) -> ForecastingExperiment: """Get a forecasting experiment. - + Public RPC to get forecasting experiment - + :param experiment_id: str The unique ID of a forecasting experiment - + :returns: :class:`ForecastingExperiment` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/automl/get-forecasting-experiment/{experiment_id}' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/automl/get-forecasting-experiment/{experiment_id}", headers=headers) return ForecastingExperiment.from_dict(res) - - + class ModelRegistryAPI: """Note: This API reference documents APIs for the Workspace Model Registry. Databricks recommends using [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry will be deprecated in the future. - + The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def approve_transition_request(self - , name: str, version: str, stage: Stage, archive_existing_versions: bool - , * - , comment: Optional[str] = None) -> ApproveTransitionRequestResponse: + def approve_transition_request( + self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None + ) -> ApproveTransitionRequestResponse: """Approve transition request. - + Approves a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action. 
- + :returns: :class:`ApproveTransitionRequestResponse` """ body = {} - if archive_existing_versions is not None: body['archive_existing_versions'] = archive_existing_versions - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if stage is not None: body['stage'] = stage.value - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/transition-requests/approve', body=body - - , headers=headers - ) + if archive_existing_versions is not None: + body["archive_existing_versions"] = archive_existing_versions + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if stage is not None: + body["stage"] = stage.value + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/transition-requests/approve", body=body, headers=headers) return ApproveTransitionRequestResponse.from_dict(res) - - - - - def create_comment(self - , name: str, version: str, comment: str - ) -> CreateCommentResponse: + def create_comment(self, name: str, version: str, comment: str) -> CreateCommentResponse: """Post a comment. - + Posts a comment on a model version. A comment can be submitted either by a user or programmatically to display relevant information about the model. For example, test results or deployment errors. - + :param name: str Name of the model. :param version: str Version of the model. :param comment: str User-provided comment on the action. - + :returns: :class:`CreateCommentResponse` """ body = {} - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/comments/create', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/comments/create", body=body, headers=headers) return CreateCommentResponse.from_dict(res) - - - - - def create_model(self - , name: str - , * - , description: Optional[str] = None, tags: Optional[List[ModelTag]] = None) -> CreateModelResponse: + def create_model( + self, name: str, *, description: Optional[str] = None, tags: Optional[List[ModelTag]] = None + ) -> CreateModelResponse: """Create a model. - + Creates a new registered model with the name specified in the request body. - + Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. - + :param name: str Register models under this name :param description: str (optional) Optional description for registered model. :param tags: List[:class:`ModelTag`] (optional) Additional metadata for registered model. 
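# Sketch: approve a pending stage transition, then record an audit comment.
# Assumes `w.model_registry` as the accessor; the model name, version, and
# comment text are placeholders. Stage comes from this module.
from databricks.sdk.service.ml import Stage

w.model_registry.approve_transition_request(
    name="my-model",
    version="3",
    stage=Stage.PRODUCTION,
    archive_existing_versions=True,
    comment="validated on holdout set",
)
w.model_registry.create_comment(name="my-model", version="3", comment="promoted by CI")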
- + :returns: :class:`CreateModelResponse` """ body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/registered-models/create', body=body - - , headers=headers - ) + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/registered-models/create", body=body, headers=headers) return CreateModelResponse.from_dict(res) - - - - - def create_model_version(self - , name: str, source: str - , * - , description: Optional[str] = None, run_id: Optional[str] = None, run_link: Optional[str] = None, tags: Optional[List[ModelVersionTag]] = None) -> CreateModelVersionResponse: + def create_model_version( + self, + name: str, + source: str, + *, + description: Optional[str] = None, + run_id: Optional[str] = None, + run_link: Optional[str] = None, + tags: Optional[List[ModelVersionTag]] = None, + ) -> CreateModelVersionResponse: """Create a model version. - + Creates a model version. - + :param name: str Register model under this name :param source: str @@ -7791,110 +8567,117 @@ def create_model_version(self hosted at another instance of MLflow. :param tags: List[:class:`ModelVersionTag`] (optional) Additional metadata for model version. - + :returns: :class:`CreateModelVersionResponse` """ body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if run_id is not None: body['run_id'] = run_id - if run_link is not None: body['run_link'] = run_link - if source is not None: body['source'] = source - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/model-versions/create', body=body - - , headers=headers - ) + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + if run_id is not None: + body["run_id"] = run_id + if run_link is not None: + body["run_link"] = run_link + if source is not None: + body["source"] = source + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/model-versions/create", body=body, headers=headers) return CreateModelVersionResponse.from_dict(res) - - - - - def create_transition_request(self - , name: str, version: str, stage: Stage - , * - , comment: Optional[str] = None) -> CreateTransitionRequestResponse: + def create_transition_request( + self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None + ) -> CreateTransitionRequestResponse: """Make a transition request. - + Creates a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. 
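# Sketch: register a model, then attach a version from a run's artifact URI.
# Assumes `w.model_registry`; the source path and run ID are placeholders
# (the elided segments stay elided), not real locations.
w.model_registry.create_model(name="my-model", description="demo model")
mv = w.model_registry.create_model_version(
    name="my-model",
    source="dbfs:/databricks/mlflow-tracking/<exp-id>/<run-id>/artifacts/model",
    run_id="<run-id>",
)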
:param comment: str (optional) User-provided comment on the action. - + :returns: :class:`CreateTransitionRequestResponse` """ body = {} - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if stage is not None: body['stage'] = stage.value - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/transition-requests/create', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if stage is not None: + body["stage"] = stage.value + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/transition-requests/create", body=body, headers=headers) return CreateTransitionRequestResponse.from_dict(res) - - - - - def create_webhook(self - , events: List[RegistryWebhookEvent] - , * - , description: Optional[str] = None, http_url_spec: Optional[HttpUrlSpec] = None, job_spec: Optional[JobSpec] = None, model_name: Optional[str] = None, status: Optional[RegistryWebhookStatus] = None) -> CreateWebhookResponse: + def create_webhook( + self, + events: List[RegistryWebhookEvent], + *, + description: Optional[str] = None, + http_url_spec: Optional[HttpUrlSpec] = None, + job_spec: Optional[JobSpec] = None, + model_name: Optional[str] = None, + status: Optional[RegistryWebhookStatus] = None, + ) -> CreateWebhookResponse: """Create a webhook. - + **NOTE**: This endpoint is in Public Preview. - + Creates a registry webhook. - + :param events: List[:class:`RegistryWebhookEvent`] Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. - + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param description: str (optional) User-specified description for the webhook. @@ -7906,155 +8689,129 @@ def create_webhook(self :param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. 
- + :returns: :class:`CreateWebhookResponse` """ body = {} - if description is not None: body['description'] = description - if events is not None: body['events'] = [v.value for v in events] - if http_url_spec is not None: body['http_url_spec'] = http_url_spec.as_dict() - if job_spec is not None: body['job_spec'] = job_spec.as_dict() - if model_name is not None: body['model_name'] = model_name - if status is not None: body['status'] = status.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/registry-webhooks/create', body=body - - , headers=headers - ) + if description is not None: + body["description"] = description + if events is not None: + body["events"] = [v.value for v in events] + if http_url_spec is not None: + body["http_url_spec"] = http_url_spec.as_dict() + if job_spec is not None: + body["job_spec"] = job_spec.as_dict() + if model_name is not None: + body["model_name"] = model_name + if status is not None: + body["status"] = status.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/registry-webhooks/create", body=body, headers=headers) return CreateWebhookResponse.from_dict(res) - - - - - def delete_comment(self - , id: str - ): + def delete_comment(self, id: str): """Delete a comment. - + Deletes a comment on a model version. - + :param id: str Unique identifier of an activity - - + + """ - + query = {} - if id is not None: query['id'] = id - headers = {'Accept': 'application/json',} - - self._api.do('DELETE','/api/2.0/mlflow/comments/delete', query=query - - , headers=headers - ) - + if id is not None: + query["id"] = id + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", "/api/2.0/mlflow/comments/delete", query=query, headers=headers) - def delete_model(self - , name: str - ): + def delete_model(self, name: str): """Delete a model. - + Deletes a registered model. - + :param name: str Registered model unique name identifier. - - + + """ - + query = {} - if name is not None: query['name'] = name - headers = {'Accept': 'application/json',} - - self._api.do('DELETE','/api/2.0/mlflow/registered-models/delete', query=query - - , headers=headers - ) - + if name is not None: + query["name"] = name + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", "/api/2.0/mlflow/registered-models/delete", query=query, headers=headers) - def delete_model_tag(self - , name: str, key: str - ): + def delete_model_tag(self, name: str, key: str): """Delete a model tag. - + Deletes the tag for a registered model. - + :param name: str Name of the registered model that the tag was logged under. :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. - - + + """ - + query = {} - if key is not None: query['key'] = key - if name is not None: query['name'] = name - headers = {'Accept': 'application/json',} - - self._api.do('DELETE','/api/2.0/mlflow/registered-models/delete-tag', query=query - - , headers=headers - ) - + if key is not None: + query["key"] = key + if name is not None: + query["name"] = name + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", "/api/2.0/mlflow/registered-models/delete-tag", query=query, headers=headers) - def delete_model_version(self - , name: str, version: str - ): + def delete_model_version(self, name: str, version: str): """Delete a model version. 
- + Deletes a model version. - + :param name: str Name of the registered model :param version: str Model version number - - + + """ - + query = {} - if name is not None: query['name'] = name - if version is not None: query['version'] = version - headers = {'Accept': 'application/json',} - - self._api.do('DELETE','/api/2.0/mlflow/model-versions/delete', query=query - - , headers=headers - ) - + if name is not None: + query["name"] = name + if version is not None: + query["version"] = version + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", "/api/2.0/mlflow/model-versions/delete", query=query, headers=headers) - def delete_model_version_tag(self - , name: str, version: str, key: str - ): + def delete_model_version_tag(self, name: str, version: str, key: str): """Delete a model version tag. - + Deletes a model version tag. - + :param name: str Name of the registered model that the tag was logged under. :param version: str @@ -8062,361 +8819,308 @@ def delete_model_version_tag(self :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. - - - """ - - query = {} - if key is not None: query['key'] = key - if name is not None: query['name'] = name - if version is not None: query['version'] = version - headers = {'Accept': 'application/json',} - - self._api.do('DELETE','/api/2.0/mlflow/model-versions/delete-tag', query=query - - , headers=headers - ) - - - - - def delete_transition_request(self - , name: str, version: str, stage: DeleteTransitionRequestStage, creator: str - , * - , comment: Optional[str] = None): + """ + + query = {} + if key is not None: + query["key"] = key + if name is not None: + query["name"] = name + if version is not None: + query["version"] = version + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", "/api/2.0/mlflow/model-versions/delete-tag", query=query, headers=headers) + + def delete_transition_request( + self, + name: str, + version: str, + stage: DeleteTransitionRequestStage, + creator: str, + *, + comment: Optional[str] = None, + ): """Delete a transition request. - + Cancels a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`DeleteTransitionRequestStage` Target stage of the transition request. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param creator: str Username of the user who created this request. Of the transition requests matching the specified details, only the one transition created by this user will be deleted. :param comment: str (optional) User-provided comment on the action. 
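# Sketch: register an HTTP webhook for new model versions, and clean up a
# version tag via the DELETE-by-query-parameter pattern used above. Assumes
# `w.model_registry`; the URL, model name, and tag key are placeholders.
from databricks.sdk.service.ml import HttpUrlSpec, RegistryWebhookEvent

w.model_registry.create_webhook(
    events=[RegistryWebhookEvent.MODEL_VERSION_CREATED],
    model_name="my-model",
    http_url_spec=HttpUrlSpec(url="https://example.com/registry-hook"),
)
w.model_registry.delete_model_version_tag(name="my-model", version="3", key="stale")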
- - - """ - - query = {} - if comment is not None: query['comment'] = comment - if creator is not None: query['creator'] = creator - if name is not None: query['name'] = name - if stage is not None: query['stage'] = stage.value - if version is not None: query['version'] = version - headers = {'Accept': 'application/json',} - - self._api.do('DELETE','/api/2.0/mlflow/transition-requests/delete', query=query - - , headers=headers - ) - - - - - def delete_webhook(self - - , * - , id: Optional[str] = None): + """ + + query = {} + if comment is not None: + query["comment"] = comment + if creator is not None: + query["creator"] = creator + if name is not None: + query["name"] = name + if stage is not None: + query["stage"] = stage.value + if version is not None: + query["version"] = version + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", "/api/2.0/mlflow/transition-requests/delete", query=query, headers=headers) + + def delete_webhook(self, *, id: Optional[str] = None): """Delete a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Deletes a registry webhook. - + :param id: str (optional) Webhook ID required to delete a registry webhook. - - + + """ - + query = {} - if id is not None: query['id'] = id - headers = {'Accept': 'application/json',} - - self._api.do('DELETE','/api/2.0/mlflow/registry-webhooks/delete', query=query - - , headers=headers - ) - + if id is not None: + query["id"] = id + headers = { + "Accept": "application/json", + } - - - + self._api.do("DELETE", "/api/2.0/mlflow/registry-webhooks/delete", query=query, headers=headers) - def get_latest_versions(self - , name: str - , * - , stages: Optional[List[str]] = None) -> Iterator[ModelVersion]: + def get_latest_versions(self, name: str, *, stages: Optional[List[str]] = None) -> Iterator[ModelVersion]: """Get the latest version. - + Gets the latest version of a registered model. - + :param name: str Registered model unique name identifier. :param stages: List[str] (optional) List of stages. - + :returns: Iterator over :class:`ModelVersion` """ body = {} - if name is not None: body['name'] = name - if stages is not None: body['stages'] = [v for v in stages] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - json = self._api.do('POST','/api/2.0/mlflow/registered-models/get-latest-versions', body=body - - , headers=headers - ) + if name is not None: + body["name"] = name + if stages is not None: + body["stages"] = [v for v in stages] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + json = self._api.do("POST", "/api/2.0/mlflow/registered-models/get-latest-versions", body=body, headers=headers) parsed = GetLatestVersionsResponse.from_dict(json).model_versions return parsed if parsed is not None else [] - - - - - - def get_model(self - , name: str - ) -> GetModelResponse: + def get_model(self, name: str) -> GetModelResponse: """Get model. - + Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel - + :param name: str Registered model unique name identifier. 
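# Sketch: fetch the newest version per requested stage; note this call POSTs a
# body but returns a plain list rather than a paginated iterator. Assumes
# `w.model_registry`, with field access following the ModelVersion dataclass.
for mv in w.model_registry.get_latest_versions(name="my-model", stages=["Staging", "Production"]):
    print(mv.version, mv.current_stage)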
- + :returns: :class:`GetModelResponse` """ - + query = {} - if name is not None: query['name'] = name - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/mlflow/databricks/registered-models/get', query=query - - , headers=headers - ) - return GetModelResponse.from_dict(res) + if name is not None: + query["name"] = name + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/mlflow/databricks/registered-models/get", query=query, headers=headers) + return GetModelResponse.from_dict(res) - def get_model_version(self - , name: str, version: str - ) -> GetModelVersionResponse: + def get_model_version(self, name: str, version: str) -> GetModelVersionResponse: """Get a model version. - + Get a model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionResponse` """ - + query = {} - if name is not None: query['name'] = name - if version is not None: query['version'] = version - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/mlflow/model-versions/get', query=query - - , headers=headers - ) + if name is not None: + query["name"] = name + if version is not None: + query["version"] = version + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/mlflow/model-versions/get", query=query, headers=headers) return GetModelVersionResponse.from_dict(res) - - - - - def get_model_version_download_uri(self - , name: str, version: str - ) -> GetModelVersionDownloadUriResponse: + def get_model_version_download_uri(self, name: str, version: str) -> GetModelVersionDownloadUriResponse: """Get a model version URI. - + Gets a URI to download the model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionDownloadUriResponse` """ - + query = {} - if name is not None: query['name'] = name - if version is not None: query['version'] = version - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/mlflow/model-versions/get-download-uri', query=query - - , headers=headers - ) + if name is not None: + query["name"] = name + if version is not None: + query["version"] = version + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/mlflow/model-versions/get-download-uri", query=query, headers=headers) return GetModelVersionDownloadUriResponse.from_dict(res) - - - - - def get_permission_levels(self - , registered_model_id: str - ) -> GetRegisteredModelPermissionLevelsResponse: + def get_permission_levels(self, registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse: """Get registered model permission levels. - + Gets the permission levels that a user can have on an object. - + :param registered_model_id: str The registered model for which to get or manage permissions. 
- + :returns: :class:`GetRegisteredModelPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/registered-models/{registered_model_id}/permissionLevels' - - , headers=headers - ) - return GetRegisteredModelPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/permissions/registered-models/{registered_model_id}/permissionLevels", headers=headers + ) + return GetRegisteredModelPermissionLevelsResponse.from_dict(res) - def get_permissions(self - , registered_model_id: str - ) -> RegisteredModelPermissions: + def get_permissions(self, registered_model_id: str) -> RegisteredModelPermissions: """Get registered model permissions. - + Gets the permissions of a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. - + :returns: :class:`RegisteredModelPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/registered-models/{registered_model_id}' - - , headers=headers - ) - return RegisteredModelPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list_models(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Model]: + res = self._api.do("GET", f"/api/2.0/permissions/registered-models/{registered_model_id}", headers=headers) + return RegisteredModelPermissions.from_dict(res) + + def list_models(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Model]: """List models. - + Lists all available registered models, up to the limit specified in __max_results__. - + :param max_results: int (optional) Maximum number of registered models desired. Max threshold is 1000. :param page_token: str (optional) Pagination token to go to the next page based on a previous query. - + :returns: Iterator over :class:`Model` """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/mlflow/registered-models/list', query=query - - , headers=headers - ) - if 'registered_models' in json: - for v in json['registered_models']: - yield Model.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def list_transition_requests(self - , name: str, version: str - ) -> Iterator[Activity]: + while True: + json = self._api.do("GET", "/api/2.0/mlflow/registered-models/list", query=query, headers=headers) + if "registered_models" in json: + for v in json["registered_models"]: + yield Model.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_transition_requests(self, name: str, version: str) -> Iterator[Activity]: """List transition requests. - + Gets a list of all open stage transition requests for the model version. - + :param name: str Name of the model. :param version: str Version of the model. 
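# Sketch: registered-model listing pages via GET query parameters (query, not
# body) but yields the same flattened iterator shape as the search calls.
# Assumes `w.model_registry`.
for model in w.model_registry.list_models(max_results=100):
    print(model.name)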
- + :returns: Iterator over :class:`Activity` """ - + query = {} - if name is not None: query['name'] = name - if version is not None: query['version'] = version - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/mlflow/transition-requests/list', query=query - - , headers=headers - ) + if name is not None: + query["name"] = name + if version is not None: + query["version"] = version + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/mlflow/transition-requests/list", query=query, headers=headers) parsed = ListTransitionRequestsResponse.from_dict(json).requests return parsed if parsed is not None else [] - - - - - - def list_webhooks(self - - , * - , events: Optional[List[RegistryWebhookEvent]] = None, model_name: Optional[str] = None, page_token: Optional[str] = None) -> Iterator[RegistryWebhook]: + def list_webhooks( + self, + *, + events: Optional[List[RegistryWebhookEvent]] = None, + model_name: Optional[str] = None, + page_token: Optional[str] = None, + ) -> Iterator[RegistryWebhook]: """List registry webhooks. - + **NOTE:** This endpoint is in Public Preview. - + Lists all registry webhooks. - + :param events: List[:class:`RegistryWebhookEvent`] (optional) If `events` is specified, any webhook with one or more of the specified trigger events is included in the output. If `events` is not specified, webhooks of all event types are included in the output. @@ -8425,117 +9129,110 @@ def list_webhooks(self associated model. :param page_token: str (optional) Token indicating the page of artifact results to fetch - + :returns: Iterator over :class:`RegistryWebhook` """ - - query = {} - if events is not None: query['events'] = [v.value for v in events] - if model_name is not None: query['model_name'] = model_name - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/mlflow/registry-webhooks/list', query=query - - , headers=headers - ) - if 'webhooks' in json: - for v in json['webhooks']: - yield RegistryWebhook.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if events is not None: + query["events"] = [v.value for v in events] + if model_name is not None: + query["model_name"] = model_name + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def reject_transition_request(self - , name: str, version: str, stage: Stage - , * - , comment: Optional[str] = None) -> RejectTransitionRequestResponse: + while True: + json = self._api.do("GET", "/api/2.0/mlflow/registry-webhooks/list", query=query, headers=headers) + if "webhooks" in json: + for v in json["webhooks"]: + yield RegistryWebhook.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def reject_transition_request( + self, name: str, version: str, stage: Stage, *, comment: Optional[str] = None + ) -> RejectTransitionRequestResponse: """Reject a transition request. - + Rejects a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. 
- + * `Production`: Production stage. - + * `Archived`: Archived stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`RejectTransitionRequestResponse` """ body = {} - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if stage is not None: body['stage'] = stage.value - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/transition-requests/reject', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if stage is not None: + body["stage"] = stage.value + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/mlflow/transition-requests/reject", body=body, headers=headers) return RejectTransitionRequestResponse.from_dict(res) - - - - - def rename_model(self - , name: str - , * - , new_name: Optional[str] = None) -> RenameModelResponse: + def rename_model(self, name: str, *, new_name: Optional[str] = None) -> RenameModelResponse: """Rename a model. - + Renames a registered model. - + :param name: str Registered model unique name identifier. :param new_name: str (optional) If provided, updates the name for this `registered_model`. - + :returns: :class:`RenameModelResponse` """ body = {} - if name is not None: body['name'] = name - if new_name is not None: body['new_name'] = new_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/registered-models/rename', body=body - - , headers=headers - ) - return RenameModelResponse.from_dict(res) + if name is not None: + body["name"] = name + if new_name is not None: + body["new_name"] = new_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/mlflow/registered-models/rename", body=body, headers=headers) + return RenameModelResponse.from_dict(res) - def search_model_versions(self - - , * - , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[ModelVersion]: + def search_model_versions( + self, + *, + filter: Optional[str] = None, + max_results: Optional[int] = None, + order_by: Optional[List[str]] = None, + page_token: Optional[str] = None, + ) -> Iterator[ModelVersion]: """Searches model versions. - + Searches for specific model versions based on the supplied __filter__. - + :param filter: str (optional) String filter condition, like "name='my-model-name'". Must be a single boolean condition, with string values wrapped in single quotes. @@ -8547,44 +9244,44 @@ def search_model_versions(self timestamp, followed by name ASC, followed by version DESC. :param page_token: str (optional) Pagination token to go to next page based on previous search query. 
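# Sketch: reject a transition with a reason, or rename a registered model.
# Assumes `w.model_registry`; all names and comment text are placeholders.
from databricks.sdk.service.ml import Stage

w.model_registry.reject_transition_request(
    name="my-model", version="3", stage=Stage.PRODUCTION, comment="metrics regressed"
)
w.model_registry.rename_model(name="my-model", new_name="my-model-v2")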
- + :returns: Iterator over :class:`ModelVersion` """ - - query = {} - if filter is not None: query['filter'] = filter - if max_results is not None: query['max_results'] = max_results - if order_by is not None: query['order_by'] = [v for v in order_by] - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/mlflow/model-versions/search', query=query - - , headers=headers - ) - if 'model_versions' in json: - for v in json['model_versions']: - yield ModelVersion.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if filter is not None: + query["filter"] = filter + if max_results is not None: + query["max_results"] = max_results + if order_by is not None: + query["order_by"] = [v for v in order_by] + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def search_models(self - - , * - , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[Model]: + while True: + json = self._api.do("GET", "/api/2.0/mlflow/model-versions/search", query=query, headers=headers) + if "model_versions" in json: + for v in json["model_versions"]: + yield ModelVersion.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def search_models( + self, + *, + filter: Optional[str] = None, + max_results: Optional[int] = None, + order_by: Optional[List[str]] = None, + page_token: Optional[str] = None, + ) -> Iterator[Model]: """Search models. - + Search for registered models based on the specified __filter__. - + :param filter: str (optional) String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single @@ -8597,43 +9294,37 @@ def search_models(self name ASC. :param page_token: str (optional) Pagination token to go to the next page based on a previous search query. 
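A short sketch of the paginated search above, assuming an authenticated WorkspaceClient `w`; the filter string is a placeholder. The generated iterator follows `next_page_token` internally, so callers never pass `page_token` themselves:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Yields ModelVersion objects across pages until next_page_token runs out.
for mv in w.model_registry.search_model_versions(
    filter="name='my-model'",
    max_results=100,
):
    print(mv.name, mv.version, mv.current_stage)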
- + :returns: Iterator over :class:`Model` """ - - query = {} - if filter is not None: query['filter'] = filter - if max_results is not None: query['max_results'] = max_results - if order_by is not None: query['order_by'] = [v for v in order_by] - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/mlflow/registered-models/search', query=query - - , headers=headers - ) - if 'registered_models' in json: - for v in json['registered_models']: - yield Model.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if filter is not None: + query["filter"] = filter + if max_results is not None: + query["max_results"] = max_results + if order_by is not None: + query["order_by"] = [v for v in order_by] + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def set_model_tag(self - , name: str, key: str, value: str - ): + while True: + json = self._api.do("GET", "/api/2.0/mlflow/registered-models/search", query=query, headers=headers) + if "registered_models" in json: + for v in json["registered_models"]: + yield Model.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def set_model_tag(self, name: str, key: str, value: str): """Set a tag. - + Sets a tag on a registered model. - + :param name: str Unique name of the model. :param key: str @@ -8643,32 +9334,28 @@ def set_model_tag(self :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. - - + + """ body = {} - if key is not None: body['key'] = key - if name is not None: body['name'] = name - if value is not None: body['value'] = value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/registered-models/set-tag', body=body - - , headers=headers - ) - + if key is not None: + body["key"] = key + if name is not None: + body["name"] = name + if value is not None: + body["value"] = value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/mlflow/registered-models/set-tag", body=body, headers=headers) - def set_model_version_tag(self - , name: str, version: str, key: str, value: str - ): + def set_model_version_tag(self, name: str, version: str, key: str, value: str): """Set a version tag. - + Sets a model version tag. - + :param name: str Unique name of the model. :param version: str @@ -8680,269 +9367,260 @@ def set_model_version_tag(self :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. 
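A quick sketch of tagging at both granularities, assuming an authenticated WorkspaceClient `w` and placeholder names; tag keys and values are subject to the size limits described in the docstrings above:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Tag the registered model itself...
w.model_registry.set_model_tag(name="my-model", key="owner", value="ml-platform")

# ...and tag one specific version of it.
w.model_registry.set_model_version_tag(
    name="my-model", version="1", key="validated", value="true"
)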
- - - """ - body = {} - if key is not None: body['key'] = key - if name is not None: body['name'] = name - if value is not None: body['value'] = value - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/mlflow/model-versions/set-tag', body=body - - , headers=headers - ) - - - - - def set_permissions(self - , registered_model_id: str - , * - , access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None) -> RegisteredModelPermissions: + """ + body = {} + if key is not None: + body["key"] = key + if name is not None: + body["name"] = name + if value is not None: + body["value"] = value + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/mlflow/model-versions/set-tag", body=body, headers=headers) + + def set_permissions( + self, + registered_model_id: str, + *, + access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None, + ) -> RegisteredModelPermissions: """Set registered model permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/registered-models/{registered_model_id}', body=body - - , headers=headers - ) - return RegisteredModelPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PUT", f"/api/2.0/permissions/registered-models/{registered_model_id}", body=body, headers=headers + ) + return RegisteredModelPermissions.from_dict(res) - def test_registry_webhook(self - , id: str - , * - , event: Optional[RegistryWebhookEvent] = None) -> TestRegistryWebhookResponse: + def test_registry_webhook( + self, id: str, *, event: Optional[RegistryWebhookEvent] = None + ) -> TestRegistryWebhookResponse: """Test a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Tests a registry webhook. - + :param id: str Webhook ID :param event: :class:`RegistryWebhookEvent` (optional) If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the test trigger uses a randomly chosen event associated with the webhook. 
- + :returns: :class:`TestRegistryWebhookResponse` """ body = {} - if event is not None: body['event'] = event.value - if id is not None: body['id'] = id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/registry-webhooks/test', body=body - - , headers=headers - ) - return TestRegistryWebhookResponse.from_dict(res) + if event is not None: + body["event"] = event.value + if id is not None: + body["id"] = id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/mlflow/registry-webhooks/test", body=body, headers=headers) + return TestRegistryWebhookResponse.from_dict(res) - def transition_stage(self - , name: str, version: str, stage: Stage, archive_existing_versions: bool - , * - , comment: Optional[str] = None) -> TransitionStageResponse: + def transition_stage( + self, name: str, version: str, stage: Stage, archive_existing_versions: bool, *, comment: Optional[str] = None + ) -> TransitionStageResponse: """Transition a stage. - + Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded.", - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`TransitionStageResponse` """ body = {} - if archive_existing_versions is not None: body['archive_existing_versions'] = archive_existing_versions - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if stage is not None: body['stage'] = stage.value - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/mlflow/databricks/model-versions/transition-stage', body=body - - , headers=headers - ) + if archive_existing_versions is not None: + body["archive_existing_versions"] = archive_existing_versions + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if stage is not None: + body["stage"] = stage.value + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", "/api/2.0/mlflow/databricks/model-versions/transition-stage", body=body, headers=headers + ) return TransitionStageResponse.from_dict(res) - - - - - def update_comment(self - , id: str, comment: str - ) -> UpdateCommentResponse: + def update_comment(self, id: str, comment: str) -> UpdateCommentResponse: """Update a comment. - + Post an edit to a comment on a model version. - + :param id: str Unique identifier of an activity :param comment: str User-provided comment on the action. 
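A minimal sketch of the Databricks-flavoured stage transition above, assuming an authenticated WorkspaceClient `w` and placeholder model coordinates:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import Stage

w = WorkspaceClient()

# Promote version 1 to Production and archive whatever currently holds
# that stage; the comment is recorded with the transition.
w.model_registry.transition_stage(
    name="my-model",
    version="1",
    stage=Stage.PRODUCTION,
    archive_existing_versions=True,
    comment="Promoting after a successful canary run.",
)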
- + :returns: :class:`UpdateCommentResponse` """ body = {} - if comment is not None: body['comment'] = comment - if id is not None: body['id'] = id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/mlflow/comments/update', body=body - - , headers=headers - ) - return UpdateCommentResponse.from_dict(res) + if comment is not None: + body["comment"] = comment + if id is not None: + body["id"] = id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PATCH", "/api/2.0/mlflow/comments/update", body=body, headers=headers) + return UpdateCommentResponse.from_dict(res) - def update_model(self - , name: str - , * - , description: Optional[str] = None): + def update_model(self, name: str, *, description: Optional[str] = None): """Update model. - + Updates a registered model. - + :param name: str Registered model unique name identifier. :param description: str (optional) If provided, updates the description for this `registered_model`. - - + + """ body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH','/api/2.0/mlflow/registered-models/update', body=body - - , headers=headers - ) - + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("PATCH", "/api/2.0/mlflow/registered-models/update", body=body, headers=headers) - def update_model_version(self - , name: str, version: str - , * - , description: Optional[str] = None): + def update_model_version(self, name: str, version: str, *, description: Optional[str] = None): """Update model version. - + Updates the model version. - + :param name: str Name of the registered model :param version: str Model version number :param description: str (optional) If provided, updates the description for this `registered_model`. - - - """ - body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if version is not None: body['version'] = version - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH','/api/2.0/mlflow/model-versions/update', body=body - - , headers=headers - ) - - - - - def update_permissions(self - , registered_model_id: str - , * - , access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None) -> RegisteredModelPermissions: + """ + body = {} + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + if version is not None: + body["version"] = version + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", "/api/2.0/mlflow/model-versions/update", body=body, headers=headers) + + def update_permissions( + self, + registered_model_id: str, + *, + access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None, + ) -> RegisteredModelPermissions: """Update registered model permissions. - + Updates the permissions on a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. 
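A small sketch of the description updates above, assuming an authenticated WorkspaceClient `w`; both calls return nothing and simply PATCH the registry objects:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

w.model_registry.update_model(
    name="my-model",
    description="Weekly-retrained churn model.",
)
w.model_registry.update_model_version(
    name="my-model",
    version="1",
    description="Baseline trained on 2024 data.",
)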
:param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/registered-models/{registered_model_id}', body=body - - , headers=headers - ) - return RegisteredModelPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PATCH", f"/api/2.0/permissions/registered-models/{registered_model_id}", body=body, headers=headers + ) + return RegisteredModelPermissions.from_dict(res) - def update_webhook(self - , id: str - , * - , description: Optional[str] = None, events: Optional[List[RegistryWebhookEvent]] = None, http_url_spec: Optional[HttpUrlSpec] = None, job_spec: Optional[JobSpec] = None, status: Optional[RegistryWebhookStatus] = None): + def update_webhook( + self, + id: str, + *, + description: Optional[str] = None, + events: Optional[List[RegistryWebhookEvent]] = None, + http_url_spec: Optional[HttpUrlSpec] = None, + job_spec: Optional[JobSpec] = None, + status: Optional[RegistryWebhookStatus] = None, + ): """Update a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Updates a registry webhook. - + :param id: str Webhook ID :param description: str (optional) @@ -8950,59 +9628,61 @@ def update_webhook(self :param events: List[:class:`RegistryWebhookEvent`] (optional) Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. - + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. 
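A hedged sketch of the webhook update described above, assuming an authenticated WorkspaceClient `w`; the webhook ID and URL are placeholders, and only `url` is set on `HttpUrlSpec` (its remaining fields are optional):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.ml import (
    HttpUrlSpec,
    RegistryWebhookEvent,
    RegistryWebhookStatus,
)

w = WorkspaceClient()

# Narrow the trigger list, repoint the URL, and park the webhook in
# TEST_MODE so it only fires through the test endpoint.
w.model_registry.update_webhook(
    id="<webhook-id>",
    description="Notify CI on stage transitions",
    events=[RegistryWebhookEvent.MODEL_VERSION_TRANSITIONED_STAGE],
    http_url_spec=HttpUrlSpec(url="https://ci.example.com/hooks/mlflow"),
    status=RegistryWebhookStatus.TEST_MODE,
)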
- - + + """ body = {} - if description is not None: body['description'] = description - if events is not None: body['events'] = [v.value for v in events] - if http_url_spec is not None: body['http_url_spec'] = http_url_spec.as_dict() - if id is not None: body['id'] = id - if job_spec is not None: body['job_spec'] = job_spec.as_dict() - if status is not None: body['status'] = status.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH','/api/2.0/mlflow/registry-webhooks/update', body=body - - , headers=headers - ) - - - - \ No newline at end of file + if description is not None: + body["description"] = description + if events is not None: + body["events"] = [v.value for v in events] + if http_url_spec is not None: + body["http_url_spec"] = http_url_spec.as_dict() + if id is not None: + body["id"] = id + if job_spec is not None: + body["job_spec"] = job_spec.as_dict() + if status is not None: + body["status"] = status.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", "/api/2.0/mlflow/registry-webhooks/update", body=body, headers=headers) diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 0edf98fed..030633eb8 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -1,173 +1,189 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations -from dataclasses import dataclass -from datetime import timedelta -from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +import logging +from dataclasses import dataclass +from typing import Any, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ._internal import _from_dict, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order - - @dataclass class CreateCustomAppIntegration: confidential: Optional[bool] = None """This field indicates whether an OAuth client secret is required to authenticate this client.""" - + name: Optional[str] = None """Name of the custom OAuth app""" - + redirect_urls: Optional[List[str]] = None """List of OAuth redirect urls""" - + scopes: Optional[List[str]] = None """OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid, profile, email.""" - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + user_authorized_scopes: Optional[List[str]] = None """Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. 
Must be a subset of scopes.""" - + def as_dict(self) -> dict: """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.confidential is not None: body['confidential'] = self.confidential - if self.name is not None: body['name'] = self.name - if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] - if self.scopes: body['scopes'] = [v for v in self.scopes] - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] + if self.confidential is not None: + body["confidential"] = self.confidential + if self.name is not None: + body["name"] = self.name + if self.redirect_urls: + body["redirect_urls"] = [v for v in self.redirect_urls] + if self.scopes: + body["scopes"] = [v for v in self.scopes] + if self.token_access_policy: + body["token_access_policy"] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: + body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] return body def as_shallow_dict(self) -> dict: """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" body = {} - if self.confidential is not None: body['confidential'] = self.confidential - if self.name is not None: body['name'] = self.name - if self.redirect_urls: body['redirect_urls'] = self.redirect_urls - if self.scopes: body['scopes'] = self.scopes - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy - if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes + if self.confidential is not None: + body["confidential"] = self.confidential + if self.name is not None: + body["name"] = self.name + if self.redirect_urls: + body["redirect_urls"] = self.redirect_urls + if self.scopes: + body["scopes"] = self.scopes + if self.token_access_policy: + body["token_access_policy"] = self.token_access_policy + if self.user_authorized_scopes: + body["user_authorized_scopes"] = self.user_authorized_scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegration: """Deserializes the CreateCustomAppIntegration from a dictionary.""" - return cls(confidential=d.get('confidential', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), user_authorized_scopes=d.get('user_authorized_scopes', None)) - - + return cls( + confidential=d.get("confidential", None), + name=d.get("name", None), + redirect_urls=d.get("redirect_urls", None), + scopes=d.get("scopes", None), + token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), + user_authorized_scopes=d.get("user_authorized_scopes", None), + ) @dataclass class CreateCustomAppIntegrationOutput: client_id: Optional[str] = None """OAuth client-id generated by the Databricks""" - + client_secret: Optional[str] = None """OAuth client-secret generated by the Databricks. 
If this is a confidential OAuth app client-secret will be generated.""" - + integration_id: Optional[str] = None """Unique integration id for the custom OAuth app""" - + def as_dict(self) -> dict: """Serializes the CreateCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.client_id is not None: body['client_id'] = self.client_id - if self.client_secret is not None: body['client_secret'] = self.client_secret - if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.client_id is not None: + body["client_id"] = self.client_id + if self.client_secret is not None: + body["client_secret"] = self.client_secret + if self.integration_id is not None: + body["integration_id"] = self.integration_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.client_id is not None: body['client_id'] = self.client_id - if self.client_secret is not None: body['client_secret'] = self.client_secret - if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.client_id is not None: + body["client_id"] = self.client_id + if self.client_secret is not None: + body["client_secret"] = self.client_secret + if self.integration_id is not None: + body["integration_id"] = self.integration_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegrationOutput: """Deserializes the CreateCustomAppIntegrationOutput from a dictionary.""" - return cls(client_id=d.get('client_id', None), client_secret=d.get('client_secret', None), integration_id=d.get('integration_id', None)) - - + return cls( + client_id=d.get("client_id", None), + client_secret=d.get("client_secret", None), + integration_id=d.get("integration_id", None), + ) @dataclass class CreatePublishedAppIntegration: app_id: Optional[str] = None """App id of the OAuth published app integration. 
For example power-bi, tableau-deskop""" - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + def as_dict(self) -> dict: """Serializes the CreatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_id is not None: body['app_id'] = self.app_id - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.app_id is not None: + body["app_id"] = self.app_id + if self.token_access_policy: + body["token_access_policy"] = self.token_access_policy.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_id is not None: body['app_id'] = self.app_id - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + if self.app_id is not None: + body["app_id"] = self.app_id + if self.token_access_policy: + body["token_access_policy"] = self.token_access_policy return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegration: """Deserializes the CreatePublishedAppIntegration from a dictionary.""" - return cls(app_id=d.get('app_id', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) - - + return cls( + app_id=d.get("app_id", None), token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy) + ) @dataclass class CreatePublishedAppIntegrationOutput: integration_id: Optional[str] = None """Unique integration id for the published OAuth app""" - + def as_dict(self) -> dict: """Serializes the CreatePublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.integration_id is not None: + body["integration_id"] = self.integration_id return body def as_shallow_dict(self) -> dict: """Serializes the CreatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.integration_id is not None: body['integration_id'] = self.integration_id + if self.integration_id is not None: + body["integration_id"] = self.integration_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegrationOutput: """Deserializes the CreatePublishedAppIntegrationOutput from a dictionary.""" - return cls(integration_id=d.get('integration_id', None)) - - - - - + return cls(integration_id=d.get("integration_id", None)) @dataclass @@ -175,88 +191,107 @@ class CreateServicePrincipalSecretRequest: lifetime: Optional[str] = None """The lifetime of the secret in seconds. 
If this parameter is not provided, the secret will have a default lifetime of 730 days (63072000s).""" - + service_principal_id: Optional[int] = None """The service principal ID.""" - + def as_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.lifetime is not None: body['lifetime'] = self.lifetime - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id + if self.lifetime is not None: + body["lifetime"] = self.lifetime + if self.service_principal_id is not None: + body["service_principal_id"] = self.service_principal_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.lifetime is not None: body['lifetime'] = self.lifetime - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id + if self.lifetime is not None: + body["lifetime"] = self.lifetime + if self.service_principal_id is not None: + body["service_principal_id"] = self.service_principal_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateServicePrincipalSecretRequest: """Deserializes the CreateServicePrincipalSecretRequest from a dictionary.""" - return cls(lifetime=d.get('lifetime', None), service_principal_id=d.get('service_principal_id', None)) - - + return cls(lifetime=d.get("lifetime", None), service_principal_id=d.get("service_principal_id", None)) @dataclass class CreateServicePrincipalSecretResponse: create_time: Optional[str] = None """UTC time when the secret was created""" - + expire_time: Optional[str] = None """UTC time when the secret will expire. 
If the field is not present, the secret does not expire.""" - + id: Optional[str] = None """ID of the secret""" - + secret: Optional[str] = None """Secret Value""" - + secret_hash: Optional[str] = None """Secret Hash""" - + status: Optional[str] = None """Status of the secret""" - + update_time: Optional[str] = None """UTC time when the secret was updated""" - + def as_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.expire_time is not None: body['expire_time'] = self.expire_time - if self.id is not None: body['id'] = self.id - if self.secret is not None: body['secret'] = self.secret - if self.secret_hash is not None: body['secret_hash'] = self.secret_hash - if self.status is not None: body['status'] = self.status - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.expire_time is not None: + body["expire_time"] = self.expire_time + if self.id is not None: + body["id"] = self.id + if self.secret is not None: + body["secret"] = self.secret + if self.secret_hash is not None: + body["secret_hash"] = self.secret_hash + if self.status is not None: + body["status"] = self.status + if self.update_time is not None: + body["update_time"] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the CreateServicePrincipalSecretResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.expire_time is not None: body['expire_time'] = self.expire_time - if self.id is not None: body['id'] = self.id - if self.secret is not None: body['secret'] = self.secret - if self.secret_hash is not None: body['secret_hash'] = self.secret_hash - if self.status is not None: body['status'] = self.status - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.expire_time is not None: + body["expire_time"] = self.expire_time + if self.id is not None: + body["id"] = self.id + if self.secret is not None: + body["secret"] = self.secret + if self.secret_hash is not None: + body["secret_hash"] = self.secret_hash + if self.status is not None: + body["status"] = self.status + if self.update_time is not None: + body["update_time"] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateServicePrincipalSecretResponse: """Deserializes the CreateServicePrincipalSecretResponse from a dictionary.""" - return cls(create_time=d.get('create_time', None), expire_time=d.get('expire_time', None), id=d.get('id', None), secret=d.get('secret', None), secret_hash=d.get('secret_hash', None), status=d.get('status', None), update_time=d.get('update_time', None)) - - - - - + return cls( + create_time=d.get("create_time", None), + expire_time=d.get("expire_time", None), + id=d.get("id", None), + secret=d.get("secret", None), + secret_hash=d.get("secret_hash", None), + status=d.get("status", None), + update_time=d.get("update_time", None), + ) @dataclass @@ -275,11 +310,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCustomAppIntegrationOutput: """Deserializes the DeleteCustomAppIntegrationOutput from a dictionary.""" return cls() - - - - - @dataclass @@ -298,11 +328,6 @@ 
def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeletePublishedAppIntegrationOutput: """Deserializes the DeletePublishedAppIntegrationOutput from a dictionary.""" return cls() - - - - - @dataclass @@ -321,24 +346,16 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - - - - @dataclass class FederationPolicy: create_time: Optional[str] = None """Creation time of the federation policy.""" - + description: Optional[str] = None """Description of the federation policy.""" - + name: Optional[str] = None """Resource name for the federation policy. Example values include `accounts//federationPolicies/my-federation-policy` for Account Federation Policies, @@ -347,603 +364,733 @@ class FederationPolicy: for Service Principal Federation Policies. Typically an output parameter, which does not need to be specified in create or update requests. If specified in a request, must match the value in the request URL.""" - + oidc_policy: Optional[OidcFederationPolicy] = None """Specifies the policy to use for validating OIDC claims in your federated tokens.""" - + policy_id: Optional[str] = None """The ID of the federation policy.""" - + service_principal_id: Optional[int] = None """The service principal ID that this federation policy applies to. Only set for service principal federation policies.""" - + uid: Optional[str] = None """Unique, immutable id of the federation policy.""" - + update_time: Optional[str] = None """Last update time of the federation policy.""" - + def as_dict(self) -> dict: """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict() - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id - if self.uid is not None: body['uid'] = self.uid - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.oidc_policy: + body["oidc_policy"] = self.oidc_policy.as_dict() + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.service_principal_id is not None: + body["service_principal_id"] = self.service_principal_id + if self.uid is not None: + body["uid"] = self.uid + if self.update_time is not None: + body["update_time"] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.oidc_policy: body['oidc_policy'] = self.oidc_policy - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id - if self.uid is not None: body['uid'] = self.uid - if 
self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.oidc_policy: + body["oidc_policy"] = self.oidc_policy + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.service_principal_id is not None: + body["service_principal_id"] = self.service_principal_id + if self.uid is not None: + body["uid"] = self.uid + if self.update_time is not None: + body["update_time"] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FederationPolicy: """Deserializes the FederationPolicy from a dictionary.""" - return cls(create_time=d.get('create_time', None), description=d.get('description', None), name=d.get('name', None), oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy), policy_id=d.get('policy_id', None), service_principal_id=d.get('service_principal_id', None), uid=d.get('uid', None), update_time=d.get('update_time', None)) - - - - - + return cls( + create_time=d.get("create_time", None), + description=d.get("description", None), + name=d.get("name", None), + oidc_policy=_from_dict(d, "oidc_policy", OidcFederationPolicy), + policy_id=d.get("policy_id", None), + service_principal_id=d.get("service_principal_id", None), + uid=d.get("uid", None), + update_time=d.get("update_time", None), + ) @dataclass class GetCustomAppIntegrationOutput: client_id: Optional[str] = None """The client id of the custom OAuth app""" - + confidential: Optional[bool] = None """This field indicates whether an OAuth client secret is required to authenticate this client.""" - + create_time: Optional[str] = None - + created_by: Optional[int] = None - + creator_username: Optional[str] = None - + integration_id: Optional[str] = None """ID of this custom app""" - + name: Optional[str] = None """The display name of the custom OAuth app""" - + redirect_urls: Optional[List[str]] = None """List of OAuth redirect urls""" - + scopes: Optional[List[str]] = None - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + user_authorized_scopes: Optional[List[str]] = None """Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. 
Must be a subset of scopes.""" - + def as_dict(self) -> dict: """Serializes the GetCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.client_id is not None: body['client_id'] = self.client_id - if self.confidential is not None: body['confidential'] = self.confidential - if self.create_time is not None: body['create_time'] = self.create_time - if self.created_by is not None: body['created_by'] = self.created_by - if self.creator_username is not None: body['creator_username'] = self.creator_username - if self.integration_id is not None: body['integration_id'] = self.integration_id - if self.name is not None: body['name'] = self.name - if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] - if self.scopes: body['scopes'] = [v for v in self.scopes] - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes] + if self.client_id is not None: + body["client_id"] = self.client_id + if self.confidential is not None: + body["confidential"] = self.confidential + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.creator_username is not None: + body["creator_username"] = self.creator_username + if self.integration_id is not None: + body["integration_id"] = self.integration_id + if self.name is not None: + body["name"] = self.name + if self.redirect_urls: + body["redirect_urls"] = [v for v in self.redirect_urls] + if self.scopes: + body["scopes"] = [v for v in self.scopes] + if self.token_access_policy: + body["token_access_policy"] = self.token_access_policy.as_dict() + if self.user_authorized_scopes: + body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] return body def as_shallow_dict(self) -> dict: """Serializes the GetCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.client_id is not None: body['client_id'] = self.client_id - if self.confidential is not None: body['confidential'] = self.confidential - if self.create_time is not None: body['create_time'] = self.create_time - if self.created_by is not None: body['created_by'] = self.created_by - if self.creator_username is not None: body['creator_username'] = self.creator_username - if self.integration_id is not None: body['integration_id'] = self.integration_id - if self.name is not None: body['name'] = self.name - if self.redirect_urls: body['redirect_urls'] = self.redirect_urls - if self.scopes: body['scopes'] = self.scopes - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy - if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes + if self.client_id is not None: + body["client_id"] = self.client_id + if self.confidential is not None: + body["confidential"] = self.confidential + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.creator_username is not None: + body["creator_username"] = self.creator_username + if self.integration_id is not None: + body["integration_id"] = self.integration_id + if self.name is not None: + body["name"] = self.name + if self.redirect_urls: + body["redirect_urls"] = self.redirect_urls + if self.scopes: + body["scopes"] = self.scopes + if 
self.token_access_policy: + body["token_access_policy"] = self.token_access_policy + if self.user_authorized_scopes: + body["user_authorized_scopes"] = self.user_authorized_scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetCustomAppIntegrationOutput: """Deserializes the GetCustomAppIntegrationOutput from a dictionary.""" - return cls(client_id=d.get('client_id', None), confidential=d.get('confidential', None), create_time=d.get('create_time', None), created_by=d.get('created_by', None), creator_username=d.get('creator_username', None), integration_id=d.get('integration_id', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), user_authorized_scopes=d.get('user_authorized_scopes', None)) - - - - - + return cls( + client_id=d.get("client_id", None), + confidential=d.get("confidential", None), + create_time=d.get("create_time", None), + created_by=d.get("created_by", None), + creator_username=d.get("creator_username", None), + integration_id=d.get("integration_id", None), + name=d.get("name", None), + redirect_urls=d.get("redirect_urls", None), + scopes=d.get("scopes", None), + token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), + user_authorized_scopes=d.get("user_authorized_scopes", None), + ) @dataclass class GetCustomAppIntegrationsOutput: apps: Optional[List[GetCustomAppIntegrationOutput]] = None """List of Custom OAuth App Integrations defined for the account.""" - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the GetCustomAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: body['apps'] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetCustomAppIntegrationsOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: body['apps'] = self.apps - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = self.apps + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetCustomAppIntegrationsOutput: """Deserializes the GetCustomAppIntegrationsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput), next_page_token=d.get('next_page_token', None)) - - + return cls( + apps=_repeated_dict(d, "apps", GetCustomAppIntegrationOutput), + next_page_token=d.get("next_page_token", None), + ) @dataclass class GetPublishedAppIntegrationOutput: app_id: Optional[str] = None """App-id of the published app integration""" - + create_time: Optional[str] = None - + created_by: Optional[int] = None - + integration_id: Optional[str] = None """Unique integration id for the published OAuth app""" - + name: Optional[str] = None """Display name of the published OAuth app""" - + token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" - + def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_id 
is not None: body['app_id'] = self.app_id - if self.create_time is not None: body['create_time'] = self.create_time - if self.created_by is not None: body['created_by'] = self.created_by - if self.integration_id is not None: body['integration_id'] = self.integration_id - if self.name is not None: body['name'] = self.name - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() + if self.app_id is not None: + body["app_id"] = self.app_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.integration_id is not None: + body["integration_id"] = self.integration_id + if self.name is not None: + body["name"] = self.name + if self.token_access_policy: + body["token_access_policy"] = self.token_access_policy.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_id is not None: body['app_id'] = self.app_id - if self.create_time is not None: body['create_time'] = self.create_time - if self.created_by is not None: body['created_by'] = self.created_by - if self.integration_id is not None: body['integration_id'] = self.integration_id - if self.name is not None: body['name'] = self.name - if self.token_access_policy: body['token_access_policy'] = self.token_access_policy + if self.app_id is not None: + body["app_id"] = self.app_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.integration_id is not None: + body["integration_id"] = self.integration_id + if self.name is not None: + body["name"] = self.name + if self.token_access_policy: + body["token_access_policy"] = self.token_access_policy return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedAppIntegrationOutput: """Deserializes the GetPublishedAppIntegrationOutput from a dictionary.""" - return cls(app_id=d.get('app_id', None), create_time=d.get('create_time', None), created_by=d.get('created_by', None), integration_id=d.get('integration_id', None), name=d.get('name', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) - - - - - + return cls( + app_id=d.get("app_id", None), + create_time=d.get("create_time", None), + created_by=d.get("created_by", None), + integration_id=d.get("integration_id", None), + name=d.get("name", None), + token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), + ) @dataclass class GetPublishedAppIntegrationsOutput: apps: Optional[List[GetPublishedAppIntegrationOutput]] = None """List of Published OAuth App Integrations defined for the account.""" - + next_page_token: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: body['apps'] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationsOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: body['apps'] = self.apps - if 
self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = self.apps + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedAppIntegrationsOutput: """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput), next_page_token=d.get('next_page_token', None)) - - + return cls( + apps=_repeated_dict(d, "apps", GetPublishedAppIntegrationOutput), + next_page_token=d.get("next_page_token", None), + ) @dataclass class GetPublishedAppsOutput: apps: Optional[List[PublishedAppOutput]] = None """List of Published OAuth Apps.""" - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. If not present, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the GetPublishedAppsOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apps: body['apps'] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the GetPublishedAppsOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.apps: body['apps'] = self.apps - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.apps: + body["apps"] = self.apps + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPublishedAppsOutput: """Deserializes the GetPublishedAppsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', PublishedAppOutput), next_page_token=d.get('next_page_token', None)) - - - - - - - - - - - + return cls(apps=_repeated_dict(d, "apps", PublishedAppOutput), next_page_token=d.get("next_page_token", None)) @dataclass class ListFederationPoliciesResponse: next_page_token: Optional[str] = None - + policies: Optional[List[FederationPolicy]] = None - + def as_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policies: body['policies'] = [v.as_dict() for v in self.policies] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] return body def as_shallow_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policies: body['policies'] = self.policies + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListFederationPoliciesResponse: """Deserializes the ListFederationPoliciesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), policies=_repeated_dict(d, 'policies', 
FederationPolicy)) - - - - - - - - - - - - - - + return cls( + next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", FederationPolicy) + ) @dataclass class ListServicePrincipalSecretsResponse: next_page_token: Optional[str] = None """A token, which can be sent as `page_token` to retrieve the next page.""" - + secrets: Optional[List[SecretInfo]] = None """List of the secrets""" - + def as_dict(self) -> dict: """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.secrets: + body["secrets"] = [v.as_dict() for v in self.secrets] return body def as_shallow_dict(self) -> dict: """Serializes the ListServicePrincipalSecretsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.secrets: body['secrets'] = self.secrets + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.secrets: + body["secrets"] = self.secrets return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListServicePrincipalSecretsResponse: """Deserializes the ListServicePrincipalSecretsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), secrets=_repeated_dict(d, 'secrets', SecretInfo)) - - + return cls(next_page_token=d.get("next_page_token", None), secrets=_repeated_dict(d, "secrets", SecretInfo)) @dataclass class OidcFederationPolicy: """Specifies the policy to use for validating OIDC claims in your federated tokens.""" - + audiences: Optional[List[str]] = None """The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience identifier is intended to represent the recipient of the token. Can be any non-empty string value. As long as the audience in the token matches at least one audience in the policy, the token is considered a match. If audiences is unspecified, defaults to your Databricks account id.""" - + issuer: Optional[str] = None """The required token issuer, as specified in the 'iss' claim of federated tokens.""" - + jwks_json: Optional[str] = None """The public keys used to validate the signature of federated tokens, in JWKS format. Most use cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys.""" - + jwks_uri: Optional[str] = None """URL of the public keys used to validate the signature of federated tokens, in JWKS format. Most use cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys.""" - + subject: Optional[str] = None """The required token subject, as specified in the subject claim of federated tokens. Must be specified for service principal federation policies. 
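A self-contained sketch of the OIDC policy described above; the issuer, subject, and audience values are illustrative (GitHub Actions-style claims). Leaving `jwks_uri`/`jwks_json` unset follows the recommendation that Databricks discover signing keys from the issuer's well-known endpoint:

from databricks.sdk.service.oauth2 import OidcFederationPolicy

policy = OidcFederationPolicy(
    issuer="https://token.actions.githubusercontent.com",
    subject="repo:my-org/my-repo:environment:prod",
    audiences=["<databricks-account-id>"],
)

# The generated dataclasses round-trip cleanly through plain dicts:
# as_dict() drops unset fields and from_dict() defaults them back to None.
assert OidcFederationPolicy.from_dict(policy.as_dict()) == policy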
Must not be specified for account federation policies.""" - + subject_claim: Optional[str] = None """The claim that contains the subject of the token. If unspecified, the default value is 'sub'.""" - + def as_dict(self) -> dict: """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.audiences: body['audiences'] = [v for v in self.audiences] - if self.issuer is not None: body['issuer'] = self.issuer - if self.jwks_json is not None: body['jwks_json'] = self.jwks_json - if self.jwks_uri is not None: body['jwks_uri'] = self.jwks_uri - if self.subject is not None: body['subject'] = self.subject - if self.subject_claim is not None: body['subject_claim'] = self.subject_claim + if self.audiences: + body["audiences"] = [v for v in self.audiences] + if self.issuer is not None: + body["issuer"] = self.issuer + if self.jwks_json is not None: + body["jwks_json"] = self.jwks_json + if self.jwks_uri is not None: + body["jwks_uri"] = self.jwks_uri + if self.subject is not None: + body["subject"] = self.subject + if self.subject_claim is not None: + body["subject_claim"] = self.subject_claim return body def as_shallow_dict(self) -> dict: """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.audiences: body['audiences'] = self.audiences - if self.issuer is not None: body['issuer'] = self.issuer - if self.jwks_json is not None: body['jwks_json'] = self.jwks_json - if self.jwks_uri is not None: body['jwks_uri'] = self.jwks_uri - if self.subject is not None: body['subject'] = self.subject - if self.subject_claim is not None: body['subject_claim'] = self.subject_claim + if self.audiences: + body["audiences"] = self.audiences + if self.issuer is not None: + body["issuer"] = self.issuer + if self.jwks_json is not None: + body["jwks_json"] = self.jwks_json + if self.jwks_uri is not None: + body["jwks_uri"] = self.jwks_uri + if self.subject is not None: + body["subject"] = self.subject + if self.subject_claim is not None: + body["subject_claim"] = self.subject_claim return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OidcFederationPolicy: """Deserializes the OidcFederationPolicy from a dictionary.""" - return cls(audiences=d.get('audiences', None), issuer=d.get('issuer', None), jwks_json=d.get('jwks_json', None), jwks_uri=d.get('jwks_uri', None), subject=d.get('subject', None), subject_claim=d.get('subject_claim', None)) - - + return cls( + audiences=d.get("audiences", None), + issuer=d.get("issuer", None), + jwks_json=d.get("jwks_json", None), + jwks_uri=d.get("jwks_uri", None), + subject=d.get("subject", None), + subject_claim=d.get("subject_claim", None), + ) @dataclass class PublishedAppOutput: app_id: Optional[str] = None """Unique ID of the published OAuth app.""" - + client_id: Optional[str] = None """Client ID of the published OAuth app. It is the client_id in the OAuth flow""" - + description: Optional[str] = None """Description of the published OAuth app.""" - + is_confidential_client: Optional[bool] = None """Whether the published OAuth app is a confidential client. 
It is always false for published OAuth apps.""" - + name: Optional[str] = None """The display name of the published OAuth app.""" - + redirect_urls: Optional[List[str]] = None """Redirect URLs of the published OAuth app.""" - + scopes: Optional[List[str]] = None """Required scopes for the published OAuth app.""" - + def as_dict(self) -> dict: """Serializes the PublishedAppOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.app_id is not None: body['app_id'] = self.app_id - if self.client_id is not None: body['client_id'] = self.client_id - if self.description is not None: body['description'] = self.description - if self.is_confidential_client is not None: body['is_confidential_client'] = self.is_confidential_client - if self.name is not None: body['name'] = self.name - if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] - if self.scopes: body['scopes'] = [v for v in self.scopes] + if self.app_id is not None: + body["app_id"] = self.app_id + if self.client_id is not None: + body["client_id"] = self.client_id + if self.description is not None: + body["description"] = self.description + if self.is_confidential_client is not None: + body["is_confidential_client"] = self.is_confidential_client + if self.name is not None: + body["name"] = self.name + if self.redirect_urls: + body["redirect_urls"] = [v for v in self.redirect_urls] + if self.scopes: + body["scopes"] = [v for v in self.scopes] return body def as_shallow_dict(self) -> dict: """Serializes the PublishedAppOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.app_id is not None: body['app_id'] = self.app_id - if self.client_id is not None: body['client_id'] = self.client_id - if self.description is not None: body['description'] = self.description - if self.is_confidential_client is not None: body['is_confidential_client'] = self.is_confidential_client - if self.name is not None: body['name'] = self.name - if self.redirect_urls: body['redirect_urls'] = self.redirect_urls - if self.scopes: body['scopes'] = self.scopes + if self.app_id is not None: + body["app_id"] = self.app_id + if self.client_id is not None: + body["client_id"] = self.client_id + if self.description is not None: + body["description"] = self.description + if self.is_confidential_client is not None: + body["is_confidential_client"] = self.is_confidential_client + if self.name is not None: + body["name"] = self.name + if self.redirect_urls: + body["redirect_urls"] = self.redirect_urls + if self.scopes: + body["scopes"] = self.scopes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PublishedAppOutput: """Deserializes the PublishedAppOutput from a dictionary.""" - return cls(app_id=d.get('app_id', None), client_id=d.get('client_id', None), description=d.get('description', None), is_confidential_client=d.get('is_confidential_client', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None)) - - + return cls( + app_id=d.get("app_id", None), + client_id=d.get("client_id", None), + description=d.get("description", None), + is_confidential_client=d.get("is_confidential_client", None), + name=d.get("name", None), + redirect_urls=d.get("redirect_urls", None), + scopes=d.get("scopes", None), + ) @dataclass class SecretInfo: create_time: Optional[str] = None """UTC time when the secret was created""" - + expire_time: Optional[str] = None """UTC time when the secret will expire. 
If the field is not present, the secret does not expire."""
-
+
     id: Optional[str] = None
     """ID of the secret"""
-
+
     secret_hash: Optional[str] = None
     """Secret Hash"""
-
+
     status: Optional[str] = None
     """Status of the secret"""
-
+
     update_time: Optional[str] = None
     """UTC time when the secret was updated"""
-
+
     def as_dict(self) -> dict:
         """Serializes the SecretInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.expire_time is not None: body['expire_time'] = self.expire_time
-        if self.id is not None: body['id'] = self.id
-        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
-        if self.status is not None: body['status'] = self.status
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.expire_time is not None:
+            body["expire_time"] = self.expire_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.secret_hash is not None:
+            body["secret_hash"] = self.secret_hash
+        if self.status is not None:
+            body["status"] = self.status
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SecretInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.expire_time is not None: body['expire_time'] = self.expire_time
-        if self.id is not None: body['id'] = self.id
-        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
-        if self.status is not None: body['status'] = self.status
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.expire_time is not None:
+            body["expire_time"] = self.expire_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.secret_hash is not None:
+            body["secret_hash"] = self.secret_hash
+        if self.status is not None:
+            body["status"] = self.status
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SecretInfo:
         """Deserializes the SecretInfo from a dictionary."""
-        return cls(create_time=d.get('create_time', None), expire_time=d.get('expire_time', None), id=d.get('id', None), secret_hash=d.get('secret_hash', None), status=d.get('status', None), update_time=d.get('update_time', None))
-
-
+        return cls(
+            create_time=d.get("create_time", None),
+            expire_time=d.get("expire_time", None),
+            id=d.get("id", None),
+            secret_hash=d.get("secret_hash", None),
+            status=d.get("status", None),
+            update_time=d.get("update_time", None),
+        )

 @dataclass
 class TokenAccessPolicy:
     access_token_ttl_in_minutes: Optional[int] = None
     """access token time to live in minutes"""
-
+
     refresh_token_ttl_in_minutes: Optional[int] = None
     """refresh token time to live in minutes"""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenAccessPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_token_ttl_in_minutes is not None: body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes
-        if self.refresh_token_ttl_in_minutes is not None: body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
+        if self.access_token_ttl_in_minutes is not None:
+            body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes
+        if self.refresh_token_ttl_in_minutes is not None:
+            body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_token_ttl_in_minutes is not None: body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes
-        if self.refresh_token_ttl_in_minutes is not None: body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
+        if self.access_token_ttl_in_minutes is not None:
+            body["access_token_ttl_in_minutes"] = self.access_token_ttl_in_minutes
+        if self.refresh_token_ttl_in_minutes is not None:
+            body["refresh_token_ttl_in_minutes"] = self.refresh_token_ttl_in_minutes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenAccessPolicy:
         """Deserializes the TokenAccessPolicy from a dictionary."""
-        return cls(access_token_ttl_in_minutes=d.get('access_token_ttl_in_minutes', None), refresh_token_ttl_in_minutes=d.get('refresh_token_ttl_in_minutes', None))
-
-
-
-
-
+        return cls(
+            access_token_ttl_in_minutes=d.get("access_token_ttl_in_minutes", None),
+            refresh_token_ttl_in_minutes=d.get("refresh_token_ttl_in_minutes", None),
+        )

 @dataclass
 class UpdateCustomAppIntegration:
     integration_id: Optional[str] = None
-
+
     redirect_urls: Optional[List[str]] = None
     """List of OAuth redirect urls to be updated in the custom OAuth app integration"""
-
+
     scopes: Optional[List[str]] = None
     """List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs this will fully replace the existing values instead of appending"""
-
+
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy to be updated in the custom OAuth app integration"""
-
+
     user_authorized_scopes: Optional[List[str]] = None
     """Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted.
Must be a subset of scopes."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
-        if self.scopes: body['scopes'] = [v for v in self.scopes]
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
-        if self.user_authorized_scopes: body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.redirect_urls:
+            body["redirect_urls"] = [v for v in self.redirect_urls]
+        if self.scopes:
+            body["scopes"] = [v for v in self.scopes]
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
+        if self.user_authorized_scopes:
+            body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
-        if self.scopes: body['scopes'] = self.scopes
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
-        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.redirect_urls:
+            body["redirect_urls"] = self.redirect_urls
+        if self.scopes:
+            body["scopes"] = self.scopes
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
+        if self.user_authorized_scopes:
+            body["user_authorized_scopes"] = self.user_authorized_scopes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegration:
         """Deserializes the UpdateCustomAppIntegration from a dictionary."""
-        return cls(integration_id=d.get('integration_id', None), redirect_urls=d.get('redirect_urls', None), scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy), user_authorized_scopes=d.get('user_authorized_scopes', None))
-
-
+        return cls(
+            integration_id=d.get("integration_id", None),
+            redirect_urls=d.get("redirect_urls", None),
+            scopes=d.get("scopes", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+            user_authorized_scopes=d.get("user_authorized_scopes", None),
+        )

 @dataclass
@@ -962,37 +1109,40 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegrationOutput:
         """Deserializes the UpdateCustomAppIntegrationOutput from a dictionary."""
         return cls()
-
-

 @dataclass
 class UpdatePublishedAppIntegration:
     integration_id: Optional[str] = None
-
+
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy to be updated in the published OAuth app integration"""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.integration_id is not None: body['integration_id'] = self.integration_id
-        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.integration_id is not None:
+            body["integration_id"] = self.integration_id
+        if self.token_access_policy:
+            body["token_access_policy"] = self.token_access_policy
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdatePublishedAppIntegration:
         """Deserializes the UpdatePublishedAppIntegration from a dictionary."""
-        return cls(integration_id=d.get('integration_id', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
-
-
+        return cls(
+            integration_id=d.get("integration_id", None),
+            token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy),
+        )

 @dataclass
@@ -1011,31 +1161,24 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdatePublishedAppIntegrationOutput:
         """Deserializes the UpdatePublishedAppIntegrationOutput from a dictionary."""
         return cls()
-
-
-
-
-
-
-

 class AccountFederationPolicyAPI:
     """These APIs manage account federation policies.
-
+
     Account federation policies allow users and service principals in your Databricks account to securely access Databricks APIs using tokens from your trusted identity providers (IdPs).
-
+
     With token federation, your users and service principals can exchange tokens from your IdP for Databricks OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage Databricks secrets, and allows you to centralize management of token issuance policies in your IdP. Databricks token federation is typically used in combination with [SCIM], so users in your IdP are synchronized into your Databricks account.
-
+
     Token federation is configured in your Databricks account using an account federation policy. An account federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from * how to determine which Databricks user, or subject, a token is issued for
-
+
     To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
@@ -1046,149 +1189,122 @@ class AccountFederationPolicyAPI:
     public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
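    For illustration only, a hypothetical creation call through this SDK might look as follows (the `federation_policy` accessor on :class:`AccountClient` is assumed here for the sketch; the policy values are placeholders):

    ```
    from databricks.sdk import AccountClient
    from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

    a = AccountClient()
    a.federation_policy.create(
        policy=FederationPolicy(
            oidc_policy=OidcFederationPolicy(
                issuer="https://idp.mycompany.com/oidc",
                audiences=["databricks"],
                subject_claim="sub",
            )
        )
    )
    ```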
- + An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"] subject_claim: "sub" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks as user `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub": "username@mycompany.com" } ``` - + You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if your users do not already have the ability to generate tokens that are compatible with your federation policy. - + You do not need to configure an OAuth application in Databricks to use token federation. - + [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , policy: FederationPolicy - , * - , policy_id: Optional[str] = None) -> FederationPolicy: + def create(self, policy: FederationPolicy, *, policy_id: Optional[str] = None) -> FederationPolicy: """Create account federation policy. - + :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if policy_id is not None: query['policy_id'] = policy_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies', query=query, body=body - - , headers=headers - ) + if policy_id is not None: + query["policy_id"] = policy_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/federationPolicies", + query=query, + body=body, + headers=headers, + ) return FederationPolicy.from_dict(res) - - - - - def delete(self - , policy_id: str - ): + def delete(self, policy_id: str): """Delete account federation policy. - + :param policy_id: str The identifier for the federation policy. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}' - - , headers=headers - ) - - - - - - - def get(self - , policy_id: str - ) -> FederationPolicy: + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}", headers=headers + ) + + def get(self, policy_id: str) -> FederationPolicy: """Get account federation policy. - + :param policy_id: str The identifier for the federation policy. 
- + :returns: :class:`FederationPolicy` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}' - - , headers=headers - ) - return FederationPolicy.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}", headers=headers + ) + return FederationPolicy.from_dict(res) + + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: """List account federation policies. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies', query=query - - , headers=headers - ) - if 'policies' in json: - for v in json['policies']: - yield FederationPolicy.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , policy_id: str, policy: FederationPolicy - , * - , update_mask: Optional[str] = None) -> FederationPolicy: + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/federationPolicies", query=query, headers=headers + ) + if "policies" in json: + for v in json["policies"]: + yield FederationPolicy.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, policy_id: str, policy: FederationPolicy, *, update_mask: Optional[str] = None + ) -> FederationPolicy: """Update account federation policy. - + :param policy_id: str The identifier for the federation policy. :param policy: :class:`FederationPolicy` @@ -1198,47 +1314,51 @@ def update(self should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. 
- + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if update_mask is not None: query['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}', query=query, body=body - - , headers=headers - ) + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}", + query=query, + body=body, + headers=headers, + ) return FederationPolicy.from_dict(res) - - + class CustomAppIntegrationAPI: """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , confidential: Optional[bool] = None, name: Optional[str] = None, redirect_urls: Optional[List[str]] = None, scopes: Optional[List[str]] = None, token_access_policy: Optional[TokenAccessPolicy] = None, user_authorized_scopes: Optional[List[str]] = None) -> CreateCustomAppIntegrationOutput: + def create( + self, + *, + confidential: Optional[bool] = None, + name: Optional[str] = None, + redirect_urls: Optional[List[str]] = None, + scopes: Optional[List[str]] = None, + token_access_policy: Optional[TokenAccessPolicy] = None, + user_authorized_scopes: Optional[List[str]] = None, + ) -> CreateCustomAppIntegrationOutput: """Create Custom OAuth App Integration. - + Create Custom OAuth App Integration. - + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param confidential: bool (optional) This field indicates whether an OAuth client secret is required to authenticate this client. :param name: str (optional) @@ -1253,127 +1373,135 @@ def create(self :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. Must be a subset of scopes. 
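        A hypothetical invocation with placeholder values (`a` is an assumed, already-configured :class:`AccountClient`; `TokenAccessPolicy` is the dataclass defined in this module):

        ```
        created = a.custom_app_integration.create(
            name="my-tableau-integration",
            confidential=True,
            redirect_urls=["https://example.com/callback"],
            token_access_policy=TokenAccessPolicy(access_token_ttl_in_minutes=60),
        )
        ```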
- + :returns: :class:`CreateCustomAppIntegrationOutput` """ body = {} - if confidential is not None: body['confidential'] = confidential - if name is not None: body['name'] = name - if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls] - if scopes is not None: body['scopes'] = [v for v in scopes] - if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() - if user_authorized_scopes is not None: body['user_authorized_scopes'] = [v for v in user_authorized_scopes] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', body=body - - , headers=headers - ) + if confidential is not None: + body["confidential"] = confidential + if name is not None: + body["name"] = name + if redirect_urls is not None: + body["redirect_urls"] = [v for v in redirect_urls] + if scopes is not None: + body["scopes"] = [v for v in scopes] + if token_access_policy is not None: + body["token_access_policy"] = token_access_policy.as_dict() + if user_authorized_scopes is not None: + body["user_authorized_scopes"] = [v for v in user_authorized_scopes] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations", + body=body, + headers=headers, + ) return CreateCustomAppIntegrationOutput.from_dict(res) - - - - - def delete(self - , integration_id: str - ): + def delete(self, integration_id: str): """Delete Custom OAuth App Integration. - + Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param integration_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}' - - , headers=headers - ) - - - - - - - def get(self - , integration_id: str - ) -> GetCustomAppIntegrationOutput: + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}", + headers=headers, + ) + + def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: """Get OAuth Custom App Integration. - + Gets the Custom OAuth App Integration for the given integration id. - + :param integration_id: str The OAuth app integration ID. - + :returns: :class:`GetCustomAppIntegrationOutput` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}' - - , headers=headers - ) - return GetCustomAppIntegrationOutput.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , include_creator_username: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[GetCustomAppIntegrationOutput]: + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}", + headers=headers, + ) + return GetCustomAppIntegrationOutput.from_dict(res) + + def list( + self, + *, + include_creator_username: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[GetCustomAppIntegrationOutput]: """Get custom oauth app integrations. 
- + Get the list of custom OAuth app integrations for the specified Databricks account - + :param include_creator_username: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetCustomAppIntegrationOutput` """ - + query = {} - if include_creator_username is not None: query['include_creator_username'] = include_creator_username - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if include_creator_username is not None: + query["include_creator_username"] = include_creator_username + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', query=query - - , headers=headers - ) - if 'apps' in json: - for v in json['apps']: - yield GetCustomAppIntegrationOutput.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , integration_id: str - , * - , redirect_urls: Optional[List[str]] = None, scopes: Optional[List[str]] = None, token_access_policy: Optional[TokenAccessPolicy] = None, user_authorized_scopes: Optional[List[str]] = None): + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations", + query=query, + headers=headers, + ) + if "apps" in json: + for v in json["apps"]: + yield GetCustomAppIntegrationOutput.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + integration_id: str, + *, + redirect_urls: Optional[List[str]] = None, + scopes: Optional[List[str]] = None, + token_access_policy: Optional[TokenAccessPolicy] = None, + user_authorized_scopes: Optional[List[str]] = None, + ): """Updates Custom OAuth App Integration. - + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param integration_id: str :param redirect_urls: List[str] (optional) List of OAuth redirect urls to be updated in the custom OAuth app integration @@ -1385,257 +1513,240 @@ def update(self :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. Must be a subset of scopes. 
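        For example, a sketch that fully replaces the redirect URLs of an existing integration (placeholder ID; `a` is an assumed :class:`AccountClient`):

        ```
        a.custom_app_integration.update(
            integration_id="<integration-id>",
            redirect_urls=["https://example.com/new-callback"],
        )
        ```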
- - + + """ body = {} - if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls] - if scopes is not None: body['scopes'] = [v for v in scopes] - if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() - if user_authorized_scopes is not None: body['user_authorized_scopes'] = [v for v in user_authorized_scopes] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}', body=body - - , headers=headers - ) - - - - + if redirect_urls is not None: + body["redirect_urls"] = [v for v in redirect_urls] + if scopes is not None: + body["scopes"] = [v for v in scopes] + if token_access_policy is not None: + body["token_access_policy"] = token_access_policy.as_dict() + if user_authorized_scopes is not None: + body["user_authorized_scopes"] = [v for v in user_authorized_scopes] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations/{integration_id}", + body=body, + headers=headers, + ) + + class OAuthPublishedAppsAPI: """These APIs enable administrators to view all the available published OAuth applications in Databricks. Administrators can add the published OAuth applications to their account through the OAuth Published App Integration APIs.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[PublishedAppOutput]: + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[PublishedAppOutput]: """Get all the published OAuth apps. - + Get all the available published OAuth apps in Databricks. - + :param page_size: int (optional) The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. 
- + :returns: Iterator over :class:`PublishedAppOutput` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps', query=query - - , headers=headers - ) - if 'apps' in json: - for v in json['apps']: - yield PublishedAppOutput.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps", query=query, headers=headers + ) + if "apps" in json: + for v in json["apps"]: + yield PublishedAppOutput.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + class PublishedAppIntegrationAPI: """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , app_id: Optional[str] = None, token_access_policy: Optional[TokenAccessPolicy] = None) -> CreatePublishedAppIntegrationOutput: + def create( + self, *, app_id: Optional[str] = None, token_access_policy: Optional[TokenAccessPolicy] = None + ) -> CreatePublishedAppIntegrationOutput: """Create Published OAuth App Integration. - + Create Published OAuth App Integration. - + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. - + :param app_id: str (optional) App id of the OAuth published app integration. For example power-bi, tableau-deskop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy - + :returns: :class:`CreatePublishedAppIntegrationOutput` """ body = {} - if app_id is not None: body['app_id'] = app_id - if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', body=body - - , headers=headers - ) + if app_id is not None: + body["app_id"] = app_id + if token_access_policy is not None: + body["token_access_policy"] = token_access_policy.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations", + body=body, + headers=headers, + ) return CreatePublishedAppIntegrationOutput.from_dict(res) - - - - - def delete(self - , integration_id: str - ): + def delete(self, integration_id: str): """Delete Published OAuth App Integration. - + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. 
- + :param integration_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}' - - , headers=headers - ) - - - - - - - def get(self - , integration_id: str - ) -> GetPublishedAppIntegrationOutput: + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}", + headers=headers, + ) + + def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: """Get OAuth Published App Integration. - + Gets the Published OAuth App Integration for the given integration id. - + :param integration_id: str - + :returns: :class:`GetPublishedAppIntegrationOutput` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}' - - , headers=headers - ) - return GetPublishedAppIntegrationOutput.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}", + headers=headers, + ) + return GetPublishedAppIntegrationOutput.from_dict(res) - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[GetPublishedAppIntegrationOutput]: + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[GetPublishedAppIntegrationOutput]: """Get published oauth app integrations. - + Get the list of published OAuth app integrations for the specified Databricks account - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', query=query - - , headers=headers - ) - if 'apps' in json: - for v in json['apps']: - yield GetPublishedAppIntegrationOutput.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , integration_id: str - , * - , token_access_policy: Optional[TokenAccessPolicy] = None): + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations", + query=query, + headers=headers, + ) + if "apps" in json: + for v in json["apps"]: + yield GetPublishedAppIntegrationOutput.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update(self, integration_id: str, *, token_access_policy: Optional[TokenAccessPolicy] = None): """Updates Published OAuth App Integration. - + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. 
- + :param integration_id: str :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy to be updated in the published OAuth app integration - - + + """ body = {} - if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}', body=body - - , headers=headers - ) - - - - + if token_access_policy is not None: + body["token_access_policy"] = token_access_policy.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations/{integration_id}", + body=body, + headers=headers, + ) + + class ServicePrincipalFederationPolicyAPI: """These APIs manage service principal federation policies. - + Service principal federation, also known as Workload Identity Federation, allows your automated workloads running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets. With Workload Identity Federation, your application (or workload) authenticates to Databricks as a Databricks service principal, using tokens provided by the workload runtime. - + Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever possible. Workload Identity Federation is supported by many popular services, including Github Actions, Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others. - + Workload identity federation is configured in your Databricks account using a service principal federation policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the Databricks service principal - + To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the workload identity provider. * The required token __subject__, as specified in the “sub” claim of @@ -1647,154 +1758,138 @@ class ServicePrincipalFederationPolicyAPI: of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on the issuer’s well known endpoint for discovering public keys. - + An example service principal federation policy, for a Github Actions workload, is: ``` issuer: "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject: "repo:my-github-org/my-repo:environment:prod" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ``` { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub": "repo:my-github-org/my-repo:environment:prod" } ``` - + You may also need to configure the workload runtime to generate tokens for your workloads. 
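    A hypothetical sketch that registers the GitHub Actions policy above via this SDK (the `service_principal_federation_policy` accessor name and the service principal id are illustrative assumptions):

    ```
    a.service_principal_federation_policy.create(
        service_principal_id=12345,
        policy=FederationPolicy(
            oidc_policy=OidcFederationPolicy(
                issuer="https://token.actions.githubusercontent.com",
                audiences=["https://github.com/my-github-org"],
                subject="repo:my-github-org/my-repo:environment:prod",
            )
        ),
    )
    ```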
- + You do not need to configure an OAuth application in Databricks to use token federation.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , service_principal_id: int, policy: FederationPolicy - , * - , policy_id: Optional[str] = None) -> FederationPolicy: + def create( + self, service_principal_id: int, policy: FederationPolicy, *, policy_id: Optional[str] = None + ) -> FederationPolicy: """Create service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if policy_id is not None: query['policy_id'] = policy_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies', query=query, body=body - - , headers=headers - ) + if policy_id is not None: + query["policy_id"] = policy_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies", + query=query, + body=body, + headers=headers, + ) return FederationPolicy.from_dict(res) - - - - - def delete(self - , service_principal_id: int, policy_id: str - ): + def delete(self, service_principal_id: int, policy_id: str): """Delete service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}' - - , headers=headers - ) - - - - - - - def get(self - , service_principal_id: int, policy_id: str - ) -> FederationPolicy: + + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}", + headers=headers, + ) + + def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy: """Get service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. 
- + :returns: :class:`FederationPolicy` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}' - - , headers=headers - ) - return FederationPolicy.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - , service_principal_id: int - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}", + headers=headers, + ) + return FederationPolicy.from_dict(res) + + def list( + self, service_principal_id: int, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[FederationPolicy]: """List service principal federation policies. - + :param service_principal_id: int The service principal id for the federation policy. :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies', query=query - - , headers=headers - ) - if 'policies' in json: - for v in json['policies']: - yield FederationPolicy.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , service_principal_id: int, policy_id: str, policy: FederationPolicy - , * - , update_mask: Optional[str] = None) -> FederationPolicy: + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies", + query=query, + headers=headers, + ) + if "policies" in json: + for v in json["policies"]: + yield FederationPolicy.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, service_principal_id: int, policy_id: str, policy: FederationPolicy, *, update_mask: Optional[str] = None + ) -> FederationPolicy: """Update service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str @@ -1806,112 +1901,103 @@ def update(self should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. 
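            For instance, a sketch that replaces only the allowed audiences while leaving the rest of the policy untouched (placeholder identifiers; `a` is an assumed :class:`AccountClient`):

            ```
            a.service_principal_federation_policy.update(
                service_principal_id=12345,
                policy_id="<policy-id>",
                policy=FederationPolicy(oidc_policy=OidcFederationPolicy(audiences=["new-audience"])),
                update_mask="oidc_policy.audiences",
            )
            ```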
- + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if update_mask is not None: query['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}', query=query, body=body - - , headers=headers - ) + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}", + query=query, + body=body, + headers=headers, + ) return FederationPolicy.from_dict(res) - - + class ServicePrincipalSecretsAPI: """These APIs enable administrators to manage service principal secrets. - + You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using OAuth tokens for service principals], - + In addition, the generated secrets can be used to configure the Databricks Terraform Provider to authenticate with the service principal. For more information, see [Databricks Terraform Provider]. - + [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html - [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal""" - + [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal + """ + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , service_principal_id: int - , * - , lifetime: Optional[str] = None) -> CreateServicePrincipalSecretResponse: + def create( + self, service_principal_id: int, *, lifetime: Optional[str] = None + ) -> CreateServicePrincipalSecretResponse: """Create service principal secret. - + Create a secret for the given service principal. - + :param service_principal_id: int The service principal ID. :param lifetime: str (optional) The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a default lifetime of 730 days (63072000s). - + :returns: :class:`CreateServicePrincipalSecretResponse` """ body = {} - if lifetime is not None: body['lifetime'] = lifetime - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets', body=body - - , headers=headers - ) + if lifetime is not None: + body["lifetime"] = lifetime + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets", + body=body, + headers=headers, + ) return CreateServicePrincipalSecretResponse.from_dict(res) - - - - - def delete(self - , service_principal_id: int, secret_id: str - ): + def delete(self, service_principal_id: int, secret_id: str): """Delete service principal secret. - + Delete a secret from the given service principal. - + :param service_principal_id: int The service principal ID. 
:param secret_id: str The secret ID. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}' - - , headers=headers - ) - - - - - - - def list(self - , service_principal_id: int - , * - , page_token: Optional[str] = None) -> Iterator[SecretInfo]: + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}", + headers=headers, + ) + + def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]: """List service principal secrets. - + List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the secret values. - + :param service_principal_id: int The service principal ID. :param page_token: str (optional) @@ -1921,28 +2007,27 @@ def list(self previous request. To list all of the secrets for a service principal, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete. - + :returns: Iterator over :class:`SecretInfo` """ - + query = {} - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets', query=query - - , headers=headers - ) - if 'secrets' in json: - for v in json['secrets']: - yield SecretInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - \ No newline at end of file + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets", + query=query, + headers=headers, + ) + if "secrets" in json: + for v in json["secrets"]: + yield SecretInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index ad1af2456..ef4363bb8 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -1,105 +1,99 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
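# Illustrative sketch (an editor's assumption, not generated code): the dataclasses below back
# calls such as PipelinesAPI.create. Assuming a configured WorkspaceClient `w` and a placeholder
# notebook path, a minimal pipeline creation could look like:
#
#   from databricks.sdk import WorkspaceClient
#
#   w = WorkspaceClient()
#   w.pipelines.create(
#       name="my-pipeline",
#       serverless=True,
#       catalog="main",
#       schema="default",
#       libraries=[PipelineLibrary(notebook=NotebookLibrary(path="/Repos/me/my_pipeline"))],
#   )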
from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, Callable, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum -_LOG = logging.getLogger('databricks.sdk') +_LOG = logging.getLogger("databricks.sdk") -from databricks.sdk.service import compute -from databricks.sdk.service import compute -from databricks.sdk.service import compute -from databricks.sdk.service import compute -from databricks.sdk.service import compute from databricks.sdk.service import compute # all definitions in this file are in alphabetical order + @dataclass class CreatePipeline: allow_duplicate_names: Optional[bool] = None """If false, deployment will fail if name conflicts with that of another pipeline.""" - + budget_policy_id: Optional[str] = None """Budget policy of this pipeline.""" - + catalog: Optional[str] = None """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.""" - + channel: Optional[str] = None """DLT Release Channel that specifies which version to use.""" - + clusters: Optional[List[PipelineCluster]] = None """Cluster settings for this pipeline deployment.""" - - configuration: Optional[Dict[str,str]] = None + + configuration: Optional[Dict[str, str]] = None """String-String configuration for this pipeline execution.""" - + continuous: Optional[bool] = None """Whether the pipeline is continuous or triggered. This replaces `trigger`.""" - + deployment: Optional[PipelineDeployment] = None """Deployment type of this pipeline.""" - + development: Optional[bool] = None """Whether the pipeline is in Development mode. Defaults to false.""" - + dry_run: Optional[bool] = None - + edition: Optional[str] = None """Pipeline product edition.""" - + event_log: Optional[EventLogSpec] = None """Event log configuration for this pipeline""" - + filters: Optional[Filters] = None """Filters on which Pipeline packages to include in the deployed graph.""" - + gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None """The definition of a gateway pipeline to support change data capture.""" - + id: Optional[str] = None """Unique identifier for this pipeline.""" - + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. 
These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.""" - + libraries: Optional[List[PipelineLibrary]] = None """Libraries or code needed by this deployment.""" - + name: Optional[str] = None """Friendly identifier for this pipeline.""" - + notifications: Optional[List[Notifications]] = None """List of notification settings for this pipeline.""" - + photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" - + restart_window: Optional[RestartWindow] = None """Restart window of this pipeline.""" - + root_path: Optional[str] = None """Root path for this pipeline. This is used as the root directory when editing the pipeline in the Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution.""" - + run_as: Optional[RunAs] = None """Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created @@ -107,206 +101,298 @@ class CreatePipeline: Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.""" - + schema: Optional[str] = None """The default schema (database) where tables are read from or published to.""" - + serverless: Optional[bool] = None """Whether serverless compute is enabled for this pipeline.""" - + storage: Optional[str] = None """DBFS root directory for storing checkpoints and tables.""" - - tags: Optional[Dict[str,str]] = None + + tags: Optional[Dict[str, str]] = None """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline.""" - + target: Optional[str] = None """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.""" - + trigger: Optional[PipelineTrigger] = None """Which pipeline trigger to use. 
Deprecated: Use `continuous` instead.""" - + def as_dict(self) -> dict: """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.catalog is not None: body['catalog'] = self.catalog - if self.channel is not None: body['channel'] = self.channel - if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] - if self.configuration: body['configuration'] = self.configuration - if self.continuous is not None: body['continuous'] = self.continuous - if self.deployment: body['deployment'] = self.deployment.as_dict() - if self.development is not None: body['development'] = self.development - if self.dry_run is not None: body['dry_run'] = self.dry_run - if self.edition is not None: body['edition'] = self.edition - if self.event_log: body['event_log'] = self.event_log.as_dict() - if self.filters: body['filters'] = self.filters.as_dict() - if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict() - if self.id is not None: body['id'] = self.id - if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict() - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.name is not None: body['name'] = self.name - if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] - if self.photon is not None: body['photon'] = self.photon - if self.restart_window: body['restart_window'] = self.restart_window.as_dict() - if self.root_path is not None: body['root_path'] = self.root_path - if self.run_as: body['run_as'] = self.run_as.as_dict() - if self.schema is not None: body['schema'] = self.schema - if self.serverless is not None: body['serverless'] = self.serverless - if self.storage is not None: body['storage'] = self.storage - if self.tags: body['tags'] = self.tags - if self.target is not None: body['target'] = self.target - if self.trigger: body['trigger'] = self.trigger.as_dict() + if self.allow_duplicate_names is not None: + body["allow_duplicate_names"] = self.allow_duplicate_names + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.catalog is not None: + body["catalog"] = self.catalog + if self.channel is not None: + body["channel"] = self.channel + if self.clusters: + body["clusters"] = [v.as_dict() for v in self.clusters] + if self.configuration: + body["configuration"] = self.configuration + if self.continuous is not None: + body["continuous"] = self.continuous + if self.deployment: + body["deployment"] = self.deployment.as_dict() + if self.development is not None: + body["development"] = self.development + if self.dry_run is not None: + body["dry_run"] = self.dry_run + if self.edition is not None: + body["edition"] = self.edition + if self.event_log: + body["event_log"] = self.event_log.as_dict() + if self.filters: + body["filters"] = self.filters.as_dict() + if self.gateway_definition: + body["gateway_definition"] = self.gateway_definition.as_dict() + if self.id is not None: + body["id"] = self.id + if self.ingestion_definition: + body["ingestion_definition"] = self.ingestion_definition.as_dict() + if self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.name is not None: + body["name"] = self.name + if self.notifications: + 
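+            # (Editor's note, added for clarity: as_dict() builds the JSON
+            # request body recursively -- nested dataclasses are serialized via
+            # their own as_dict(), repeated fields element by element, and
+            # unset/None fields are omitted rather than sent as nulls.)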
body["notifications"] = [v.as_dict() for v in self.notifications] + if self.photon is not None: + body["photon"] = self.photon + if self.restart_window: + body["restart_window"] = self.restart_window.as_dict() + if self.root_path is not None: + body["root_path"] = self.root_path + if self.run_as: + body["run_as"] = self.run_as.as_dict() + if self.schema is not None: + body["schema"] = self.schema + if self.serverless is not None: + body["serverless"] = self.serverless + if self.storage is not None: + body["storage"] = self.storage + if self.tags: + body["tags"] = self.tags + if self.target is not None: + body["target"] = self.target + if self.trigger: + body["trigger"] = self.trigger.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.catalog is not None: body['catalog'] = self.catalog - if self.channel is not None: body['channel'] = self.channel - if self.clusters: body['clusters'] = self.clusters - if self.configuration: body['configuration'] = self.configuration - if self.continuous is not None: body['continuous'] = self.continuous - if self.deployment: body['deployment'] = self.deployment - if self.development is not None: body['development'] = self.development - if self.dry_run is not None: body['dry_run'] = self.dry_run - if self.edition is not None: body['edition'] = self.edition - if self.event_log: body['event_log'] = self.event_log - if self.filters: body['filters'] = self.filters - if self.gateway_definition: body['gateway_definition'] = self.gateway_definition - if self.id is not None: body['id'] = self.id - if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition - if self.libraries: body['libraries'] = self.libraries - if self.name is not None: body['name'] = self.name - if self.notifications: body['notifications'] = self.notifications - if self.photon is not None: body['photon'] = self.photon - if self.restart_window: body['restart_window'] = self.restart_window - if self.root_path is not None: body['root_path'] = self.root_path - if self.run_as: body['run_as'] = self.run_as - if self.schema is not None: body['schema'] = self.schema - if self.serverless is not None: body['serverless'] = self.serverless - if self.storage is not None: body['storage'] = self.storage - if self.tags: body['tags'] = self.tags - if self.target is not None: body['target'] = self.target - if self.trigger: body['trigger'] = self.trigger + if self.allow_duplicate_names is not None: + body["allow_duplicate_names"] = self.allow_duplicate_names + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.catalog is not None: + body["catalog"] = self.catalog + if self.channel is not None: + body["channel"] = self.channel + if self.clusters: + body["clusters"] = self.clusters + if self.configuration: + body["configuration"] = self.configuration + if self.continuous is not None: + body["continuous"] = self.continuous + if self.deployment: + body["deployment"] = self.deployment + if self.development is not None: + body["development"] = self.development + if self.dry_run is not None: + body["dry_run"] = self.dry_run + if self.edition is not None: + body["edition"] = self.edition + if self.event_log: + body["event_log"] = self.event_log + 
if self.filters: + body["filters"] = self.filters + if self.gateway_definition: + body["gateway_definition"] = self.gateway_definition + if self.id is not None: + body["id"] = self.id + if self.ingestion_definition: + body["ingestion_definition"] = self.ingestion_definition + if self.libraries: + body["libraries"] = self.libraries + if self.name is not None: + body["name"] = self.name + if self.notifications: + body["notifications"] = self.notifications + if self.photon is not None: + body["photon"] = self.photon + if self.restart_window: + body["restart_window"] = self.restart_window + if self.root_path is not None: + body["root_path"] = self.root_path + if self.run_as: + body["run_as"] = self.run_as + if self.schema is not None: + body["schema"] = self.schema + if self.serverless is not None: + body["serverless"] = self.serverless + if self.storage is not None: + body["storage"] = self.storage + if self.tags: + body["tags"] = self.tags + if self.target is not None: + body["target"] = self.target + if self.trigger: + body["trigger"] = self.trigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePipeline: """Deserializes the CreatePipeline from a dictionary.""" - return cls(allow_duplicate_names=d.get('allow_duplicate_names', None), budget_policy_id=d.get('budget_policy_id', None), catalog=d.get('catalog', None), channel=d.get('channel', None), clusters=_repeated_dict(d, 'clusters', PipelineCluster), configuration=d.get('configuration', None), continuous=d.get('continuous', None), deployment=_from_dict(d, 'deployment', PipelineDeployment), development=d.get('development', None), dry_run=d.get('dry_run', None), edition=d.get('edition', None), event_log=_from_dict(d, 'event_log', EventLogSpec), filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), restart_window=_from_dict(d, 'restart_window', RestartWindow), root_path=d.get('root_path', None), run_as=_from_dict(d, 'run_as', RunAs), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), tags=d.get('tags', None), target=d.get('target', None), trigger=_from_dict(d, 'trigger', PipelineTrigger)) - - + return cls( + allow_duplicate_names=d.get("allow_duplicate_names", None), + budget_policy_id=d.get("budget_policy_id", None), + catalog=d.get("catalog", None), + channel=d.get("channel", None), + clusters=_repeated_dict(d, "clusters", PipelineCluster), + configuration=d.get("configuration", None), + continuous=d.get("continuous", None), + deployment=_from_dict(d, "deployment", PipelineDeployment), + development=d.get("development", None), + dry_run=d.get("dry_run", None), + edition=d.get("edition", None), + event_log=_from_dict(d, "event_log", EventLogSpec), + filters=_from_dict(d, "filters", Filters), + gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), + id=d.get("id", None), + ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition), + libraries=_repeated_dict(d, "libraries", PipelineLibrary), + name=d.get("name", None), + notifications=_repeated_dict(d, "notifications", Notifications), + photon=d.get("photon", None), + 
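+            # (Editor's note: scalar fields are read with plain d.get(...),
+            # while typed fields below go through the _from_dict/_repeated_dict
+            # helpers, which return None when the key is absent.)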
restart_window=_from_dict(d, "restart_window", RestartWindow), + root_path=d.get("root_path", None), + run_as=_from_dict(d, "run_as", RunAs), + schema=d.get("schema", None), + serverless=d.get("serverless", None), + storage=d.get("storage", None), + tags=d.get("tags", None), + target=d.get("target", None), + trigger=_from_dict(d, "trigger", PipelineTrigger), + ) @dataclass class CreatePipelineResponse: effective_settings: Optional[PipelineSpec] = None """Only returned when dry_run is true.""" - + pipeline_id: Optional[str] = None """The unique identifier for the newly created pipeline. Only returned when dry_run is false.""" - + def as_dict(self) -> dict: """Serializes the CreatePipelineResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.effective_settings: body['effective_settings'] = self.effective_settings.as_dict() - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.effective_settings: + body["effective_settings"] = self.effective_settings.as_dict() + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id return body def as_shallow_dict(self) -> dict: """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.effective_settings: body['effective_settings'] = self.effective_settings - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.effective_settings: + body["effective_settings"] = self.effective_settings + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePipelineResponse: """Deserializes the CreatePipelineResponse from a dictionary.""" - return cls(effective_settings=_from_dict(d, 'effective_settings', PipelineSpec), pipeline_id=d.get('pipeline_id', None)) - - + return cls( + effective_settings=_from_dict(d, "effective_settings", PipelineSpec), pipeline_id=d.get("pipeline_id", None) + ) @dataclass class CronTrigger: quartz_cron_schedule: Optional[str] = None - + timezone_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the CronTrigger into a dictionary suitable for use as a JSON request body.""" body = {} - if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.quartz_cron_schedule is not None: + body["quartz_cron_schedule"] = self.quartz_cron_schedule + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the CronTrigger into a shallow dictionary of its immediate attributes.""" body = {} - if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.quartz_cron_schedule is not None: + body["quartz_cron_schedule"] = self.quartz_cron_schedule + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CronTrigger: """Deserializes the CronTrigger from a dictionary.""" - return cls(quartz_cron_schedule=d.get('quartz_cron_schedule', None), timezone_id=d.get('timezone_id', None)) - - + return cls(quartz_cron_schedule=d.get("quartz_cron_schedule", None), timezone_id=d.get("timezone_id", None)) @dataclass class DataPlaneId: instance: Optional[str] = None """The instance name of the data 
    plane emitting an event."""
-
+
    seq_no: Optional[int] = None
    """A sequence number, unique and increasing within the data plane instance."""
-
+
    def as_dict(self) -> dict:
        """Serializes the DataPlaneId into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.instance is not None: body['instance'] = self.instance
-        if self.seq_no is not None: body['seq_no'] = self.seq_no
+        if self.instance is not None:
+            body["instance"] = self.instance
+        if self.seq_no is not None:
+            body["seq_no"] = self.seq_no
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.instance is not None: body['instance'] = self.instance
-        if self.seq_no is not None: body['seq_no'] = self.seq_no
+        if self.instance is not None:
+            body["instance"] = self.instance
+        if self.seq_no is not None:
+            body["seq_no"] = self.seq_no
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId:
        """Deserializes the DataPlaneId from a dictionary."""
-        return cls(instance=d.get('instance', None), seq_no=d.get('seq_no', None))
-
-
+        return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None))


class DayOfWeek(Enum):
    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
    start_hour). If not specified, all days of the week will be used."""
-
-    FRIDAY = 'FRIDAY'
-    MONDAY = 'MONDAY'
-    SATURDAY = 'SATURDAY'
-    SUNDAY = 'SUNDAY'
-    THURSDAY = 'THURSDAY'
-    TUESDAY = 'TUESDAY'
-    WEDNESDAY = 'WEDNESDAY'
-
+    FRIDAY = "FRIDAY"
+    MONDAY = "MONDAY"
+    SATURDAY = "SATURDAY"
+    SUNDAY = "SUNDAY"
+    THURSDAY = "THURSDAY"
+    TUESDAY = "TUESDAY"
+    WEDNESDAY = "WEDNESDAY"


@dataclass
@@ -325,94 +411,93 @@ def as_shallow_dict(self) -> dict:
    def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse:
        """Deserializes the DeletePipelineResponse from a dictionary."""
        return cls()
-
-
class DeploymentKind(Enum):
    """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a
    Databricks Asset Bundle."""
-
-    BUNDLE = 'BUNDLE'
+
+    BUNDLE = "BUNDLE"
+

@dataclass
class EditPipeline:
    allow_duplicate_names: Optional[bool] = None
    """If false, deployment will fail if the name has changed and conflicts with the name of another
    pipeline."""
-
+
    budget_policy_id: Optional[str] = None
    """Budget policy of this pipeline."""
-
+
    catalog: Optional[str] = None
    """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
    tables in this pipeline are published to a `target` schema inside `catalog` (for example,
    `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity
    Catalog."""
-
+
    channel: Optional[str] = None
    """DLT Release Channel that specifies which version to use."""
-
+
    clusters: Optional[List[PipelineCluster]] = None
    """Cluster settings for this pipeline deployment."""
-
-    configuration: Optional[Dict[str,str]] = None
+
+    configuration: Optional[Dict[str, str]] = None
    """String-String configuration for this pipeline execution."""
-
+
    continuous: Optional[bool] = None
    """Whether the pipeline is continuous or triggered. This replaces `trigger`."""
-
+
    deployment: Optional[PipelineDeployment] = None
    """Deployment type of this pipeline."""
-
+
    development: Optional[bool] = None
    """Whether the pipeline is in Development mode.
Defaults to false.""" - + edition: Optional[str] = None """Pipeline product edition.""" - + event_log: Optional[EventLogSpec] = None """Event log configuration for this pipeline""" - + expected_last_modified: Optional[int] = None """If present, the last-modified time of the pipeline settings before the edit. If the settings were modified after that time, then the request will fail with a conflict.""" - + filters: Optional[Filters] = None """Filters on which Pipeline packages to include in the deployed graph.""" - + gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None """The definition of a gateway pipeline to support change data capture.""" - + id: Optional[str] = None """Unique identifier for this pipeline.""" - + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'schema', 'target', or 'catalog' settings.""" - + libraries: Optional[List[PipelineLibrary]] = None """Libraries or code needed by this deployment.""" - + name: Optional[str] = None """Friendly identifier for this pipeline.""" - + notifications: Optional[List[Notifications]] = None """List of notification settings for this pipeline.""" - + photon: Optional[bool] = None """Whether Photon is enabled for this pipeline.""" - + pipeline_id: Optional[str] = None """Unique identifier for this pipeline.""" - + restart_window: Optional[RestartWindow] = None """Restart window of this pipeline.""" - + root_path: Optional[str] = None """Root path for this pipeline. This is used as the root directory when editing the pipeline in the Databricks user interface and it is added to sys.path when executing Python sources during pipeline execution.""" - + run_as: Optional[RunAs] = None """Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created @@ -420,105 +505,194 @@ class EditPipeline: Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.""" - + schema: Optional[str] = None """The default schema (database) where tables are read from or published to.""" - + serverless: Optional[bool] = None """Whether serverless compute is enabled for this pipeline.""" - + storage: Optional[str] = None """DBFS root directory for storing checkpoints and tables.""" - - tags: Optional[Dict[str,str]] = None + + tags: Optional[Dict[str, str]] = None """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, and are therefore subject to the same limitations. A maximum of 25 tags can be added to the pipeline.""" - + target: Optional[str] = None """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.""" - + trigger: Optional[PipelineTrigger] = None """Which pipeline trigger to use. 
Deprecated: Use `continuous` instead.""" - + def as_dict(self) -> dict: """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.catalog is not None: body['catalog'] = self.catalog - if self.channel is not None: body['channel'] = self.channel - if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] - if self.configuration: body['configuration'] = self.configuration - if self.continuous is not None: body['continuous'] = self.continuous - if self.deployment: body['deployment'] = self.deployment.as_dict() - if self.development is not None: body['development'] = self.development - if self.edition is not None: body['edition'] = self.edition - if self.event_log: body['event_log'] = self.event_log.as_dict() - if self.expected_last_modified is not None: body['expected_last_modified'] = self.expected_last_modified - if self.filters: body['filters'] = self.filters.as_dict() - if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict() - if self.id is not None: body['id'] = self.id - if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict() - if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries] - if self.name is not None: body['name'] = self.name - if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications] - if self.photon is not None: body['photon'] = self.photon - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.restart_window: body['restart_window'] = self.restart_window.as_dict() - if self.root_path is not None: body['root_path'] = self.root_path - if self.run_as: body['run_as'] = self.run_as.as_dict() - if self.schema is not None: body['schema'] = self.schema - if self.serverless is not None: body['serverless'] = self.serverless - if self.storage is not None: body['storage'] = self.storage - if self.tags: body['tags'] = self.tags - if self.target is not None: body['target'] = self.target - if self.trigger: body['trigger'] = self.trigger.as_dict() + if self.allow_duplicate_names is not None: + body["allow_duplicate_names"] = self.allow_duplicate_names + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.catalog is not None: + body["catalog"] = self.catalog + if self.channel is not None: + body["channel"] = self.channel + if self.clusters: + body["clusters"] = [v.as_dict() for v in self.clusters] + if self.configuration: + body["configuration"] = self.configuration + if self.continuous is not None: + body["continuous"] = self.continuous + if self.deployment: + body["deployment"] = self.deployment.as_dict() + if self.development is not None: + body["development"] = self.development + if self.edition is not None: + body["edition"] = self.edition + if self.event_log: + body["event_log"] = self.event_log.as_dict() + if self.expected_last_modified is not None: + body["expected_last_modified"] = self.expected_last_modified + if self.filters: + body["filters"] = self.filters.as_dict() + if self.gateway_definition: + body["gateway_definition"] = self.gateway_definition.as_dict() + if self.id is not None: + body["id"] = self.id + if self.ingestion_definition: + body["ingestion_definition"] = self.ingestion_definition.as_dict() + if 
self.libraries: + body["libraries"] = [v.as_dict() for v in self.libraries] + if self.name is not None: + body["name"] = self.name + if self.notifications: + body["notifications"] = [v.as_dict() for v in self.notifications] + if self.photon is not None: + body["photon"] = self.photon + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.restart_window: + body["restart_window"] = self.restart_window.as_dict() + if self.root_path is not None: + body["root_path"] = self.root_path + if self.run_as: + body["run_as"] = self.run_as.as_dict() + if self.schema is not None: + body["schema"] = self.schema + if self.serverless is not None: + body["serverless"] = self.serverless + if self.storage is not None: + body["storage"] = self.storage + if self.tags: + body["tags"] = self.tags + if self.target is not None: + body["target"] = self.target + if self.trigger: + body["trigger"] = self.trigger.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EditPipeline into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.catalog is not None: body['catalog'] = self.catalog - if self.channel is not None: body['channel'] = self.channel - if self.clusters: body['clusters'] = self.clusters - if self.configuration: body['configuration'] = self.configuration - if self.continuous is not None: body['continuous'] = self.continuous - if self.deployment: body['deployment'] = self.deployment - if self.development is not None: body['development'] = self.development - if self.edition is not None: body['edition'] = self.edition - if self.event_log: body['event_log'] = self.event_log - if self.expected_last_modified is not None: body['expected_last_modified'] = self.expected_last_modified - if self.filters: body['filters'] = self.filters - if self.gateway_definition: body['gateway_definition'] = self.gateway_definition - if self.id is not None: body['id'] = self.id - if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition - if self.libraries: body['libraries'] = self.libraries - if self.name is not None: body['name'] = self.name - if self.notifications: body['notifications'] = self.notifications - if self.photon is not None: body['photon'] = self.photon - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.restart_window: body['restart_window'] = self.restart_window - if self.root_path is not None: body['root_path'] = self.root_path - if self.run_as: body['run_as'] = self.run_as - if self.schema is not None: body['schema'] = self.schema - if self.serverless is not None: body['serverless'] = self.serverless - if self.storage is not None: body['storage'] = self.storage - if self.tags: body['tags'] = self.tags - if self.target is not None: body['target'] = self.target - if self.trigger: body['trigger'] = self.trigger + if self.allow_duplicate_names is not None: + body["allow_duplicate_names"] = self.allow_duplicate_names + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.catalog is not None: + body["catalog"] = self.catalog + if self.channel is not None: + body["channel"] = self.channel + if self.clusters: + body["clusters"] = self.clusters + if self.configuration: + body["configuration"] = self.configuration + if self.continuous is not None: + 
body["continuous"] = self.continuous + if self.deployment: + body["deployment"] = self.deployment + if self.development is not None: + body["development"] = self.development + if self.edition is not None: + body["edition"] = self.edition + if self.event_log: + body["event_log"] = self.event_log + if self.expected_last_modified is not None: + body["expected_last_modified"] = self.expected_last_modified + if self.filters: + body["filters"] = self.filters + if self.gateway_definition: + body["gateway_definition"] = self.gateway_definition + if self.id is not None: + body["id"] = self.id + if self.ingestion_definition: + body["ingestion_definition"] = self.ingestion_definition + if self.libraries: + body["libraries"] = self.libraries + if self.name is not None: + body["name"] = self.name + if self.notifications: + body["notifications"] = self.notifications + if self.photon is not None: + body["photon"] = self.photon + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.restart_window: + body["restart_window"] = self.restart_window + if self.root_path is not None: + body["root_path"] = self.root_path + if self.run_as: + body["run_as"] = self.run_as + if self.schema is not None: + body["schema"] = self.schema + if self.serverless is not None: + body["serverless"] = self.serverless + if self.storage is not None: + body["storage"] = self.storage + if self.tags: + body["tags"] = self.tags + if self.target is not None: + body["target"] = self.target + if self.trigger: + body["trigger"] = self.trigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditPipeline: """Deserializes the EditPipeline from a dictionary.""" - return cls(allow_duplicate_names=d.get('allow_duplicate_names', None), budget_policy_id=d.get('budget_policy_id', None), catalog=d.get('catalog', None), channel=d.get('channel', None), clusters=_repeated_dict(d, 'clusters', PipelineCluster), configuration=d.get('configuration', None), continuous=d.get('continuous', None), deployment=_from_dict(d, 'deployment', PipelineDeployment), development=d.get('development', None), edition=d.get('edition', None), event_log=_from_dict(d, 'event_log', EventLogSpec), expected_last_modified=d.get('expected_last_modified', None), filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), pipeline_id=d.get('pipeline_id', None), restart_window=_from_dict(d, 'restart_window', RestartWindow), root_path=d.get('root_path', None), run_as=_from_dict(d, 'run_as', RunAs), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), tags=d.get('tags', None), target=d.get('target', None), trigger=_from_dict(d, 'trigger', PipelineTrigger)) - - + return cls( + allow_duplicate_names=d.get("allow_duplicate_names", None), + budget_policy_id=d.get("budget_policy_id", None), + catalog=d.get("catalog", None), + channel=d.get("channel", None), + clusters=_repeated_dict(d, "clusters", PipelineCluster), + configuration=d.get("configuration", None), + continuous=d.get("continuous", None), + deployment=_from_dict(d, "deployment", PipelineDeployment), + development=d.get("development", None), + edition=d.get("edition", None), + 
event_log=_from_dict(d, "event_log", EventLogSpec), + expected_last_modified=d.get("expected_last_modified", None), + filters=_from_dict(d, "filters", Filters), + gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), + id=d.get("id", None), + ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition), + libraries=_repeated_dict(d, "libraries", PipelineLibrary), + name=d.get("name", None), + notifications=_repeated_dict(d, "notifications", Notifications), + photon=d.get("photon", None), + pipeline_id=d.get("pipeline_id", None), + restart_window=_from_dict(d, "restart_window", RestartWindow), + root_path=d.get("root_path", None), + run_as=_from_dict(d, "run_as", RunAs), + schema=d.get("schema", None), + serverless=d.get("serverless", None), + storage=d.get("storage", None), + tags=d.get("tags", None), + target=d.get("target", None), + trigger=_from_dict(d, "trigger", PipelineTrigger), + ) @dataclass @@ -537,321 +711,360 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditPipelineResponse: """Deserializes the EditPipelineResponse from a dictionary.""" return cls() - - @dataclass class ErrorDetail: exceptions: Optional[List[SerializedException]] = None """The exception thrown for this error, with its chain of cause.""" - + fatal: Optional[bool] = None """Whether this error is considered fatal, that is, unrecoverable.""" - + def as_dict(self) -> dict: """Serializes the ErrorDetail into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exceptions: body['exceptions'] = [v.as_dict() for v in self.exceptions] - if self.fatal is not None: body['fatal'] = self.fatal + if self.exceptions: + body["exceptions"] = [v.as_dict() for v in self.exceptions] + if self.fatal is not None: + body["fatal"] = self.fatal return body def as_shallow_dict(self) -> dict: """Serializes the ErrorDetail into a shallow dictionary of its immediate attributes.""" body = {} - if self.exceptions: body['exceptions'] = self.exceptions - if self.fatal is not None: body['fatal'] = self.fatal + if self.exceptions: + body["exceptions"] = self.exceptions + if self.fatal is not None: + body["fatal"] = self.fatal return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ErrorDetail: """Deserializes the ErrorDetail from a dictionary.""" - return cls(exceptions=_repeated_dict(d, 'exceptions', SerializedException), fatal=d.get('fatal', None)) - - + return cls(exceptions=_repeated_dict(d, "exceptions", SerializedException), fatal=d.get("fatal", None)) class EventLevel(Enum): """The severity level of the event.""" - - ERROR = 'ERROR' - INFO = 'INFO' - METRICS = 'METRICS' - WARN = 'WARN' + + ERROR = "ERROR" + INFO = "INFO" + METRICS = "METRICS" + WARN = "WARN" + @dataclass class EventLogSpec: """Configurable event log parameters.""" - + catalog: Optional[str] = None """The UC catalog the event log is published under.""" - + name: Optional[str] = None """The name the event log is published to in UC.""" - + schema: Optional[str] = None """The UC schema the event log is published under.""" - + def as_dict(self) -> dict: """Serializes the EventLogSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog is not None: body['catalog'] = self.catalog - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema + if self.catalog is not None: + body["catalog"] = self.catalog + if self.name is not None: + body["name"] = self.name + if 
self.schema is not None: + body["schema"] = self.schema return body def as_shallow_dict(self) -> dict: """Serializes the EventLogSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog is not None: body['catalog'] = self.catalog - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema + if self.catalog is not None: + body["catalog"] = self.catalog + if self.name is not None: + body["name"] = self.name + if self.schema is not None: + body["schema"] = self.schema return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EventLogSpec: """Deserializes the EventLogSpec from a dictionary.""" - return cls(catalog=d.get('catalog', None), name=d.get('name', None), schema=d.get('schema', None)) - - + return cls(catalog=d.get("catalog", None), name=d.get("name", None), schema=d.get("schema", None)) @dataclass class FileLibrary: path: Optional[str] = None """The absolute path of the source code.""" - + def as_dict(self) -> dict: """Serializes the FileLibrary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the FileLibrary into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FileLibrary: """Deserializes the FileLibrary from a dictionary.""" - return cls(path=d.get('path', None)) - - + return cls(path=d.get("path", None)) @dataclass class Filters: exclude: Optional[List[str]] = None """Paths to exclude.""" - + include: Optional[List[str]] = None """Paths to include.""" - + def as_dict(self) -> dict: """Serializes the Filters into a dictionary suitable for use as a JSON request body.""" body = {} - if self.exclude: body['exclude'] = [v for v in self.exclude] - if self.include: body['include'] = [v for v in self.include] + if self.exclude: + body["exclude"] = [v for v in self.exclude] + if self.include: + body["include"] = [v for v in self.include] return body def as_shallow_dict(self) -> dict: """Serializes the Filters into a shallow dictionary of its immediate attributes.""" body = {} - if self.exclude: body['exclude'] = self.exclude - if self.include: body['include'] = self.include + if self.exclude: + body["exclude"] = self.exclude + if self.include: + body["include"] = self.include return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Filters: """Deserializes the Filters from a dictionary.""" - return cls(exclude=d.get('exclude', None), include=d.get('include', None)) - - - - - + return cls(exclude=d.get("exclude", None), include=d.get("include", None)) @dataclass class GetPipelinePermissionLevelsResponse: permission_levels: Optional[List[PipelinePermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetPipelinePermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetPipelinePermissionLevelsResponse into a shallow dictionary of its immediate 
attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPipelinePermissionLevelsResponse: """Deserializes the GetPipelinePermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', PipelinePermissionsDescription)) - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", PipelinePermissionsDescription)) @dataclass class GetPipelineResponse: cause: Optional[str] = None """An optional message detailing the cause of the pipeline state.""" - + cluster_id: Optional[str] = None """The ID of the cluster that the pipeline is running on.""" - + creator_user_name: Optional[str] = None """The username of the pipeline creator.""" - + effective_budget_policy_id: Optional[str] = None """Serverless budget policy ID of this pipeline.""" - + health: Optional[GetPipelineResponseHealth] = None """The health of a pipeline.""" - + last_modified: Optional[int] = None """The last time the pipeline settings were modified or created.""" - + latest_updates: Optional[List[UpdateStateInfo]] = None """Status of the latest updates for the pipeline. Ordered with the newest update first.""" - + name: Optional[str] = None """A human friendly identifier for the pipeline, taken from the `spec`.""" - + pipeline_id: Optional[str] = None """The ID of the pipeline.""" - + run_as_user_name: Optional[str] = None """Username of the user that the pipeline will run on behalf of.""" - + spec: Optional[PipelineSpec] = None """The pipeline specification. This field is not returned when called by `ListPipelines`.""" - + state: Optional[PipelineState] = None """The pipeline state.""" - + def as_dict(self) -> dict: """Serializes the GetPipelineResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cause is not None: body['cause'] = self.cause - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.health is not None: body['health'] = self.health.value - if self.last_modified is not None: body['last_modified'] = self.last_modified - if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates] - if self.name is not None: body['name'] = self.name - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name - if self.spec: body['spec'] = self.spec.as_dict() - if self.state is not None: body['state'] = self.state.value + if self.cause is not None: + body["cause"] = self.cause + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.health is not None: + body["health"] = self.health.value + if self.last_modified is not None: + body["last_modified"] = self.last_modified + if self.latest_updates: + body["latest_updates"] = [v.as_dict() for v in self.latest_updates] + if self.name is not None: + body["name"] = self.name + if 
self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.spec: + body["spec"] = self.spec.as_dict() + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the GetPipelineResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.cause is not None: body['cause'] = self.cause - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.health is not None: body['health'] = self.health - if self.last_modified is not None: body['last_modified'] = self.last_modified - if self.latest_updates: body['latest_updates'] = self.latest_updates - if self.name is not None: body['name'] = self.name - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name - if self.spec: body['spec'] = self.spec - if self.state is not None: body['state'] = self.state + if self.cause is not None: + body["cause"] = self.cause + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.health is not None: + body["health"] = self.health + if self.last_modified is not None: + body["last_modified"] = self.last_modified + if self.latest_updates: + body["latest_updates"] = self.latest_updates + if self.name is not None: + body["name"] = self.name + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.spec: + body["spec"] = self.spec + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetPipelineResponse: """Deserializes the GetPipelineResponse from a dictionary.""" - return cls(cause=d.get('cause', None), cluster_id=d.get('cluster_id', None), creator_user_name=d.get('creator_user_name', None), effective_budget_policy_id=d.get('effective_budget_policy_id', None), health=_enum(d, 'health', GetPipelineResponseHealth), last_modified=d.get('last_modified', None), latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo), name=d.get('name', None), pipeline_id=d.get('pipeline_id', None), run_as_user_name=d.get('run_as_user_name', None), spec=_from_dict(d, 'spec', PipelineSpec), state=_enum(d, 'state', PipelineState)) - - + return cls( + cause=d.get("cause", None), + cluster_id=d.get("cluster_id", None), + creator_user_name=d.get("creator_user_name", None), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + health=_enum(d, "health", GetPipelineResponseHealth), + last_modified=d.get("last_modified", None), + latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo), + name=d.get("name", None), + pipeline_id=d.get("pipeline_id", None), + run_as_user_name=d.get("run_as_user_name", None), + spec=_from_dict(d, "spec", PipelineSpec), + state=_enum(d, "state", PipelineState), + ) class GetPipelineResponseHealth(Enum): """The health of a 
pipeline.""" - - HEALTHY = 'HEALTHY' - UNHEALTHY = 'UNHEALTHY' - + HEALTHY = "HEALTHY" + UNHEALTHY = "UNHEALTHY" @dataclass class GetUpdateResponse: update: Optional[UpdateInfo] = None """The current update info.""" - + def as_dict(self) -> dict: """Serializes the GetUpdateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.update: body['update'] = self.update.as_dict() + if self.update: + body["update"] = self.update.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetUpdateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.update: body['update'] = self.update + if self.update: + body["update"] = self.update return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetUpdateResponse: """Deserializes the GetUpdateResponse from a dictionary.""" - return cls(update=_from_dict(d, 'update', UpdateInfo)) - - + return cls(update=_from_dict(d, "update", UpdateInfo)) @dataclass class IngestionConfig: report: Optional[ReportSpec] = None """Select a specific source report.""" - + schema: Optional[SchemaSpec] = None """Select all tables from a specific source schema.""" - + table: Optional[TableSpec] = None """Select a specific source table.""" - + def as_dict(self) -> dict: """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.report: body['report'] = self.report.as_dict() - if self.schema: body['schema'] = self.schema.as_dict() - if self.table: body['table'] = self.table.as_dict() + if self.report: + body["report"] = self.report.as_dict() + if self.schema: + body["schema"] = self.schema.as_dict() + if self.table: + body["table"] = self.table.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the IngestionConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.report: body['report'] = self.report - if self.schema: body['schema'] = self.schema - if self.table: body['table'] = self.table + if self.report: + body["report"] = self.report + if self.schema: + body["schema"] = self.schema + if self.table: + body["table"] = self.table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> IngestionConfig: """Deserializes the IngestionConfig from a dictionary.""" - return cls(report=_from_dict(d, 'report', ReportSpec), schema=_from_dict(d, 'schema', SchemaSpec), table=_from_dict(d, 'table', TableSpec)) - - + return cls( + report=_from_dict(d, "report", ReportSpec), + schema=_from_dict(d, "schema", SchemaSpec), + table=_from_dict(d, "table", TableSpec), + ) @dataclass @@ -859,48 +1072,62 @@ class IngestionGatewayPipelineDefinition: connection_name: str """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" - + gateway_storage_catalog: str """Required, Immutable. The name of the catalog for the gateway pipeline's storage location.""" - + gateway_storage_schema: str """Required, Immutable. The name of the schema for the gateway pipelines's storage location.""" - + connection_id: Optional[str] = None """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.""" - + gateway_storage_name: Optional[str] = None """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. 
Delta Live Tables system will automatically create the storage location under the catalog and schema.""" - + def as_dict(self) -> dict: """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_id is not None: body['connection_id'] = self.connection_id - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.gateway_storage_catalog is not None: body['gateway_storage_catalog'] = self.gateway_storage_catalog - if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name - if self.gateway_storage_schema is not None: body['gateway_storage_schema'] = self.gateway_storage_schema + if self.connection_id is not None: + body["connection_id"] = self.connection_id + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.gateway_storage_catalog is not None: + body["gateway_storage_catalog"] = self.gateway_storage_catalog + if self.gateway_storage_name is not None: + body["gateway_storage_name"] = self.gateway_storage_name + if self.gateway_storage_schema is not None: + body["gateway_storage_schema"] = self.gateway_storage_schema return body def as_shallow_dict(self) -> dict: """Serializes the IngestionGatewayPipelineDefinition into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_id is not None: body['connection_id'] = self.connection_id - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.gateway_storage_catalog is not None: body['gateway_storage_catalog'] = self.gateway_storage_catalog - if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name - if self.gateway_storage_schema is not None: body['gateway_storage_schema'] = self.gateway_storage_schema + if self.connection_id is not None: + body["connection_id"] = self.connection_id + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.gateway_storage_catalog is not None: + body["gateway_storage_catalog"] = self.gateway_storage_catalog + if self.gateway_storage_name is not None: + body["gateway_storage_name"] = self.gateway_storage_name + if self.gateway_storage_schema is not None: + body["gateway_storage_schema"] = self.gateway_storage_schema return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> IngestionGatewayPipelineDefinition: """Deserializes the IngestionGatewayPipelineDefinition from a dictionary.""" - return cls(connection_id=d.get('connection_id', None), connection_name=d.get('connection_name', None), gateway_storage_catalog=d.get('gateway_storage_catalog', None), gateway_storage_name=d.get('gateway_storage_name', None), gateway_storage_schema=d.get('gateway_storage_schema', None)) - - + return cls( + connection_id=d.get("connection_id", None), + connection_name=d.get("connection_name", None), + gateway_storage_catalog=d.get("gateway_storage_catalog", None), + gateway_storage_name=d.get("gateway_storage_name", None), + gateway_storage_schema=d.get("gateway_storage_schema", None), + ) @dataclass @@ -908,139 +1135,156 @@ class IngestionPipelineDefinition: connection_name: Optional[str] = None """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with the source. This is used with connectors for applications like Salesforce, Workday, and so on.""" - + ingestion_gateway_id: Optional[str] = None """Immutable. 
Identifier for the gateway that is used by this ingestion pipeline to communicate with the source database. This is used with connectors to databases like SQL Server.""" - + objects: Optional[List[IngestionConfig]] = None """Required. Settings specifying tables to replicate and the destination for the replicated tables.""" - + source_type: Optional[IngestionSourceType] = None """The type of the foreign source. The source type will be inferred from the source connection or ingestion gateway. This field is output only and will be ignored if provided.""" - + table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. These settings are applied to all tables in the pipeline.""" - + def as_dict(self) -> dict: """Serializes the IngestionPipelineDefinition into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id - if self.objects: body['objects'] = [v.as_dict() for v in self.objects] - if self.source_type is not None: body['source_type'] = self.source_type.value - if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.ingestion_gateway_id is not None: + body["ingestion_gateway_id"] = self.ingestion_gateway_id + if self.objects: + body["objects"] = [v.as_dict() for v in self.objects] + if self.source_type is not None: + body["source_type"] = self.source_type.value + if self.table_configuration: + body["table_configuration"] = self.table_configuration.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the IngestionPipelineDefinition into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id - if self.objects: body['objects'] = self.objects - if self.source_type is not None: body['source_type'] = self.source_type - if self.table_configuration: body['table_configuration'] = self.table_configuration + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.ingestion_gateway_id is not None: + body["ingestion_gateway_id"] = self.ingestion_gateway_id + if self.objects: + body["objects"] = self.objects + if self.source_type is not None: + body["source_type"] = self.source_type + if self.table_configuration: + body["table_configuration"] = self.table_configuration return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: """Deserializes the IngestionPipelineDefinition from a dictionary.""" - return cls(connection_name=d.get('connection_name', None), ingestion_gateway_id=d.get('ingestion_gateway_id', None), objects=_repeated_dict(d, 'objects', IngestionConfig), source_type=_enum(d, 'source_type', IngestionSourceType), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig)) - - + return cls( + connection_name=d.get("connection_name", None), + ingestion_gateway_id=d.get("ingestion_gateway_id", None), + objects=_repeated_dict(d, "objects", IngestionConfig), + source_type=_enum(d, "source_type", IngestionSourceType), + table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig), + ) class 
IngestionSourceType(Enum): - - - DYNAMICS365 = 'DYNAMICS365' - GA4_RAW_DATA = 'GA4_RAW_DATA' - MANAGED_POSTGRESQL = 'MANAGED_POSTGRESQL' - MYSQL = 'MYSQL' - NETSUITE = 'NETSUITE' - ORACLE = 'ORACLE' - POSTGRESQL = 'POSTGRESQL' - SALESFORCE = 'SALESFORCE' - SERVICENOW = 'SERVICENOW' - SHAREPOINT = 'SHAREPOINT' - SQLSERVER = 'SQLSERVER' - TERADATA = 'TERADATA' - WORKDAY_RAAS = 'WORKDAY_RAAS' - + DYNAMICS365 = "DYNAMICS365" + GA4_RAW_DATA = "GA4_RAW_DATA" + MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL" + MYSQL = "MYSQL" + NETSUITE = "NETSUITE" + ORACLE = "ORACLE" + POSTGRESQL = "POSTGRESQL" + SALESFORCE = "SALESFORCE" + SERVICENOW = "SERVICENOW" + SHAREPOINT = "SHAREPOINT" + SQLSERVER = "SQLSERVER" + TERADATA = "TERADATA" + WORKDAY_RAAS = "WORKDAY_RAAS" @dataclass class ListPipelineEventsResponse: events: Optional[List[PipelineEvent]] = None """The list of events matching the request criteria.""" - + next_page_token: Optional[str] = None """If present, a token to fetch the next page of events.""" - + prev_page_token: Optional[str] = None """If present, a token to fetch the previous page of events.""" - + def as_dict(self) -> dict: """Serializes the ListPipelineEventsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.events: body['events'] = [v.as_dict() for v in self.events] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.events: + body["events"] = [v.as_dict() for v in self.events] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListPipelineEventsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.events: body['events'] = self.events - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + if self.events: + body["events"] = self.events + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListPipelineEventsResponse: """Deserializes the ListPipelineEventsResponse from a dictionary.""" - return cls(events=_repeated_dict(d, 'events', PipelineEvent), next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None)) - - - - - + return cls( + events=_repeated_dict(d, "events", PipelineEvent), + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + ) @dataclass class ListPipelinesResponse: next_page_token: Optional[str] = None """If present, a token to fetch the next page of pipelines.""" - + statuses: Optional[List[PipelineStateInfo]] = None """The list of pipeline states matching the request criteria.""" - + def as_dict(self) -> dict: """Serializes the ListPipelinesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.statuses: + body["statuses"] = [v.as_dict()
for v in self.statuses] return body def as_shallow_dict(self) -> dict: """Serializes the ListPipelinesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.statuses: body['statuses'] = self.statuses + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.statuses: + body["statuses"] = self.statuses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListPipelinesResponse: """Deserializes the ListPipelinesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), statuses=_repeated_dict(d, 'statuses', PipelineStateInfo)) - - - - - + return cls( + next_page_token=d.get("next_page_token", None), statuses=_repeated_dict(d, "statuses", PipelineStateInfo) + ) @dataclass @@ -1048,34 +1292,42 @@ class ListUpdatesResponse: next_page_token: Optional[str] = None """If present, then there are more results, and this is a token to be used in a subsequent request to fetch the next page.""" - + prev_page_token: Optional[str] = None """If present, then this token can be used in a subsequent request to fetch the previous page.""" - + updates: Optional[List[UpdateInfo]] = None - + def as_dict(self) -> dict: """Serializes the ListUpdatesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token - if self.updates: body['updates'] = [v.as_dict() for v in self.updates] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token + if self.updates: + body["updates"] = [v.as_dict() for v in self.updates] return body def as_shallow_dict(self) -> dict: """Serializes the ListUpdatesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token - if self.updates: body['updates'] = self.updates + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.prev_page_token is not None: + body["prev_page_token"] = self.prev_page_token + if self.updates: + body["updates"] = self.updates return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListUpdatesResponse: """Deserializes the ListUpdatesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), prev_page_token=d.get('prev_page_token', None), updates=_repeated_dict(d, 'updates', UpdateInfo)) - - + return cls( + next_page_token=d.get("next_page_token", None), + prev_page_token=d.get("prev_page_token", None), + updates=_repeated_dict(d, "updates", UpdateInfo), + ) @dataclass @@ -1094,40 +1346,39 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> ManualTrigger: """Deserializes the ManualTrigger from a dictionary.""" return cls() - - class MaturityLevel(Enum): """Maturity level for EventDetails.""" - - DEPRECATED = 'DEPRECATED' - EVOLVING = 'EVOLVING' - STABLE = 'STABLE' + + DEPRECATED = "DEPRECATED" + EVOLVING = "EVOLVING" + STABLE = "STABLE" + @dataclass class NotebookLibrary: path: Optional[str] = None """The absolute path of the source code.""" - + def as_dict(self) -> dict:
"""Serializes the NotebookLibrary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the NotebookLibrary into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NotebookLibrary: """Deserializes the NotebookLibrary from a dictionary.""" - return cls(path=d.get('path', None)) - - + return cls(path=d.get("path", None)) @dataclass @@ -1139,273 +1390,350 @@ class Notifications: * `on-update-success`: A pipeline update completes successfully. * `on-update-failure`: Each time a pipeline update fails. * `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error. * `on-flow-failure`: A single data flow fails.""" - + email_recipients: Optional[List[str]] = None """A list of email addresses notified when a configured alert is triggered.""" - + def as_dict(self) -> dict: """Serializes the Notifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alerts: body['alerts'] = [v for v in self.alerts] - if self.email_recipients: body['email_recipients'] = [v for v in self.email_recipients] + if self.alerts: + body["alerts"] = [v for v in self.alerts] + if self.email_recipients: + body["email_recipients"] = [v for v in self.email_recipients] return body def as_shallow_dict(self) -> dict: """Serializes the Notifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.alerts: body['alerts'] = self.alerts - if self.email_recipients: body['email_recipients'] = self.email_recipients + if self.alerts: + body["alerts"] = self.alerts + if self.email_recipients: + body["email_recipients"] = self.email_recipients return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Notifications: """Deserializes the Notifications from a dictionary.""" - return cls(alerts=d.get('alerts', None), email_recipients=d.get('email_recipients', None)) - - + return cls(alerts=d.get("alerts", None), email_recipients=d.get("email_recipients", None)) @dataclass class Origin: batch_id: Optional[int] = None """The id of a batch. Unique within a flow.""" - + cloud: Optional[str] = None """The cloud provider, e.g., AWS or Azure.""" - + cluster_id: Optional[str] = None """The id of the cluster where an execution happens. Unique within a region.""" - + dataset_name: Optional[str] = None """The name of a dataset. Unique within a pipeline.""" - + flow_id: Optional[str] = None """The id of the flow. Globally unique. Incremental queries will generally reuse the same id while complete queries will have a new id per update.""" - + flow_name: Optional[str] = None """The name of the flow. Not unique.""" - + host: Optional[str] = None """The optional host name where the event was triggered""" - + maintenance_id: Optional[str] = None """The id of a maintenance run. Globally unique.""" - + materialization_name: Optional[str] = None """Materialization name.""" - + org_id: Optional[int] = None """The org id of the user. Unique within a cloud.""" - + pipeline_id: Optional[str] = None """The id of the pipeline. Globally unique.""" - + pipeline_name: Optional[str] = None """The name of the pipeline. 
Not unique.""" - + region: Optional[str] = None """The cloud region.""" - + request_id: Optional[str] = None """The id of the request that caused an update.""" - + table_id: Optional[str] = None """The id of a (delta) table. Globally unique.""" - + uc_resource_id: Optional[str] = None """The Unity Catalog id of the MV or ST being updated.""" - + update_id: Optional[str] = None """The id of an execution. Globally unique.""" - + def as_dict(self) -> dict: """Serializes the Origin into a dictionary suitable for use as a JSON request body.""" body = {} - if self.batch_id is not None: body['batch_id'] = self.batch_id - if self.cloud is not None: body['cloud'] = self.cloud - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.flow_id is not None: body['flow_id'] = self.flow_id - if self.flow_name is not None: body['flow_name'] = self.flow_name - if self.host is not None: body['host'] = self.host - if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id - if self.materialization_name is not None: body['materialization_name'] = self.materialization_name - if self.org_id is not None: body['org_id'] = self.org_id - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name - if self.region is not None: body['region'] = self.region - if self.request_id is not None: body['request_id'] = self.request_id - if self.table_id is not None: body['table_id'] = self.table_id - if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id - if self.update_id is not None: body['update_id'] = self.update_id + if self.batch_id is not None: + body["batch_id"] = self.batch_id + if self.cloud is not None: + body["cloud"] = self.cloud + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.flow_id is not None: + body["flow_id"] = self.flow_id + if self.flow_name is not None: + body["flow_name"] = self.flow_name + if self.host is not None: + body["host"] = self.host + if self.maintenance_id is not None: + body["maintenance_id"] = self.maintenance_id + if self.materialization_name is not None: + body["materialization_name"] = self.materialization_name + if self.org_id is not None: + body["org_id"] = self.org_id + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.pipeline_name is not None: + body["pipeline_name"] = self.pipeline_name + if self.region is not None: + body["region"] = self.region + if self.request_id is not None: + body["request_id"] = self.request_id + if self.table_id is not None: + body["table_id"] = self.table_id + if self.uc_resource_id is not None: + body["uc_resource_id"] = self.uc_resource_id + if self.update_id is not None: + body["update_id"] = self.update_id return body def as_shallow_dict(self) -> dict: """Serializes the Origin into a shallow dictionary of its immediate attributes.""" body = {} - if self.batch_id is not None: body['batch_id'] = self.batch_id - if self.cloud is not None: body['cloud'] = self.cloud - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.dataset_name is not None: body['dataset_name'] = self.dataset_name - if self.flow_id is not None: body['flow_id'] = self.flow_id - if self.flow_name is not None: body['flow_name'] = self.flow_name - if self.host is not None: 
body['host'] = self.host - if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id - if self.materialization_name is not None: body['materialization_name'] = self.materialization_name - if self.org_id is not None: body['org_id'] = self.org_id - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name - if self.region is not None: body['region'] = self.region - if self.request_id is not None: body['request_id'] = self.request_id - if self.table_id is not None: body['table_id'] = self.table_id - if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id - if self.update_id is not None: body['update_id'] = self.update_id + if self.batch_id is not None: + body["batch_id"] = self.batch_id + if self.cloud is not None: + body["cloud"] = self.cloud + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.dataset_name is not None: + body["dataset_name"] = self.dataset_name + if self.flow_id is not None: + body["flow_id"] = self.flow_id + if self.flow_name is not None: + body["flow_name"] = self.flow_name + if self.host is not None: + body["host"] = self.host + if self.maintenance_id is not None: + body["maintenance_id"] = self.maintenance_id + if self.materialization_name is not None: + body["materialization_name"] = self.materialization_name + if self.org_id is not None: + body["org_id"] = self.org_id + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.pipeline_name is not None: + body["pipeline_name"] = self.pipeline_name + if self.region is not None: + body["region"] = self.region + if self.request_id is not None: + body["request_id"] = self.request_id + if self.table_id is not None: + body["table_id"] = self.table_id + if self.uc_resource_id is not None: + body["uc_resource_id"] = self.uc_resource_id + if self.update_id is not None: + body["update_id"] = self.update_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Origin: """Deserializes the Origin from a dictionary.""" - return cls(batch_id=d.get('batch_id', None), cloud=d.get('cloud', None), cluster_id=d.get('cluster_id', None), dataset_name=d.get('dataset_name', None), flow_id=d.get('flow_id', None), flow_name=d.get('flow_name', None), host=d.get('host', None), maintenance_id=d.get('maintenance_id', None), materialization_name=d.get('materialization_name', None), org_id=d.get('org_id', None), pipeline_id=d.get('pipeline_id', None), pipeline_name=d.get('pipeline_name', None), region=d.get('region', None), request_id=d.get('request_id', None), table_id=d.get('table_id', None), uc_resource_id=d.get('uc_resource_id', None), update_id=d.get('update_id', None)) - - + return cls( + batch_id=d.get("batch_id", None), + cloud=d.get("cloud", None), + cluster_id=d.get("cluster_id", None), + dataset_name=d.get("dataset_name", None), + flow_id=d.get("flow_id", None), + flow_name=d.get("flow_name", None), + host=d.get("host", None), + maintenance_id=d.get("maintenance_id", None), + materialization_name=d.get("materialization_name", None), + org_id=d.get("org_id", None), + pipeline_id=d.get("pipeline_id", None), + pipeline_name=d.get("pipeline_name", None), + region=d.get("region", None), + request_id=d.get("request_id", None), + table_id=d.get("table_id", None), + uc_resource_id=d.get("uc_resource_id", None), + update_id=d.get("update_id", None), + ) @dataclass class PathPattern: include: Optional[str] = None """The source code to include 
for pipelines""" - + def as_dict(self) -> dict: """Serializes the PathPattern into a dictionary suitable for use as a JSON request body.""" body = {} - if self.include is not None: body['include'] = self.include + if self.include is not None: + body["include"] = self.include return body def as_shallow_dict(self) -> dict: """Serializes the PathPattern into a shallow dictionary of its immediate attributes.""" body = {} - if self.include is not None: body['include'] = self.include + if self.include is not None: + body["include"] = self.include return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PathPattern: """Deserializes the PathPattern from a dictionary.""" - return cls(include=d.get('include', None)) - - + return cls(include=d.get("include", None)) @dataclass class PipelineAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[PipelinePermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the PipelineAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PipelineAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineAccessControlRequest: """Deserializes the PipelineAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PipelinePermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", PipelinePermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class PipelineAccessControlResponse: all_permissions: Optional[List[PipelinePermission]] = None """All permissions.""" - 
+ display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the PipelineAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the PipelineAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineAccessControlResponse: """Deserializes the PipelineAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', PipelinePermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", PipelinePermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class PipelineCluster: apply_policy_default_values: Optional[bool] = None """Note: This field won't be persisted. Only API users will check this field.""" - + autoscale: Optional[PipelineClusterAutoscale] = None """Parameters needed in order to automatically scale clusters up and down based on load. Note: autoscaling works best with DB runtime versions 3.0 or later.""" - + aws_attributes: Optional[compute.AwsAttributes] = None """Attributes related to clusters running on Amazon Web Services. 
If not specified at cluster creation, a set of default values will be used.""" - + azure_attributes: Optional[compute.AzureAttributes] = None """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a set of default values will be used.""" - + cluster_log_conf: Optional[compute.ClusterLogConf] = None """The configuration for delivering Spark logs to a long-term storage destination. Only dbfs destinations are supported. Only one destination can be specified for one cluster. If the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is `$destination/$clusterId/executor`.""" - - custom_tags: Optional[Dict[str,str]] = None + + custom_tags: Optional[Dict[str, str]] = None """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: @@ -1413,41 +1741,41 @@ class PipelineCluster: - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags""" - + driver_instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster's driver belongs. The driver uses the cluster's instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - + driver_node_type_id: Optional[str] = None """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above.""" - + enable_local_disk_encryption: Optional[bool] = None """Whether to enable local disk encryption for the cluster.""" - + gcp_attributes: Optional[compute.GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" - + init_scripts: Optional[List[compute.InitScriptInfo]] = None """The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `//init_scripts`.""" - + instance_pool_id: Optional[str] = None """The optional ID of the instance pool to which the cluster belongs.""" - + label: Optional[str] = None """A label for the cluster specification, either `default` to configure the default cluster, or `maintenance` to configure the maintenance cluster. This field is optional. The default value is `default`.""" - + node_type_id: Optional[str] = None """This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute intensive workloads. A list of available node types can be retrieved by using the :method:clusters/listNodeTypes API call.""" - + num_workers: Optional[int] = None """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
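For reviewers: every dataclass in this file follows the same as_dict/from_dict pattern that the reformatting above makes easier to scan. A minimal sketch of a round trip through PipelineCluster, assuming the SDK's usual import path; the field values are illustrative:

    from databricks.sdk.service.pipelines import PipelineCluster

    cluster = PipelineCluster(
        label="default",  # configures the default cluster
        num_workers=2,  # fixed-size cluster; `autoscale` is the alternative
        custom_tags={"team": "data-eng"},  # illustrative tag
    )

    # as_dict() emits only the fields that are set:
    assert cluster.as_dict() == {"custom_tags": {"team": "data-eng"}, "label": "default", "num_workers": 2}

    # from_dict() accepts the same JSON-shaped payload back:
    assert PipelineCluster.from_dict(cluster.as_dict()).num_workers == 2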
@@ -1457,15 +1785,15 @@ class PipelineCluster: from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned.""" - + policy_id: Optional[str] = None """The ID of the cluster policy used to create the cluster if applicable.""" - - spark_conf: Optional[Dict[str,str]] = None + + spark_conf: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified Spark configuration key-value pairs. See :method:clusters/create for more details.""" - - spark_env_vars: Optional[Dict[str,str]] = None + + spark_env_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. @@ -1477,66 +1805,122 @@ class PipelineCluster: Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}`""" - + ssh_public_keys: Optional[List[str]] = None """SSH public key contents that will be added to each Spark node in this cluster. The corresponding private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be specified.""" - + def as_dict(self) -> dict: """Serializes the PipelineCluster into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values - if self.autoscale: body['autoscale'] = self.autoscale.as_dict() - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() - if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.label is not None: body['label'] = self.label - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.apply_policy_default_values is not None: + body["apply_policy_default_values"] = self.apply_policy_default_values + if self.autoscale: + body["autoscale"] = self.autoscale.as_dict() + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes.as_dict() + if self.azure_attributes: + body["azure_attributes"] = 
self.azure_attributes.as_dict() + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf.as_dict() + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes.as_dict() + if self.init_scripts: + body["init_scripts"] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.label is not None: + body["label"] = self.label + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.ssh_public_keys: + body["ssh_public_keys"] = [v for v in self.ssh_public_keys] return body def as_shallow_dict(self) -> dict: """Serializes the PipelineCluster into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_policy_default_values is not None: body['apply_policy_default_values'] = self.apply_policy_default_values - if self.autoscale: body['autoscale'] = self.autoscale - if self.aws_attributes: body['aws_attributes'] = self.aws_attributes - if self.azure_attributes: body['azure_attributes'] = self.azure_attributes - if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id - if self.enable_local_disk_encryption is not None: body['enable_local_disk_encryption'] = self.enable_local_disk_encryption - if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes - if self.init_scripts: body['init_scripts'] = self.init_scripts - if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id - if self.label is not None: body['label'] = self.label - if self.node_type_id is not None: body['node_type_id'] = self.node_type_id - if self.num_workers is not None: body['num_workers'] = self.num_workers - if self.policy_id is not None: body['policy_id'] = self.policy_id - if self.spark_conf: body['spark_conf'] = self.spark_conf - if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars - if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys + if self.apply_policy_default_values is not None: + body["apply_policy_default_values"] = self.apply_policy_default_values + if self.autoscale: + body["autoscale"] = self.autoscale + if self.aws_attributes: + body["aws_attributes"] = self.aws_attributes + if self.azure_attributes: + body["azure_attributes"] = self.azure_attributes + if self.cluster_log_conf: + body["cluster_log_conf"] = self.cluster_log_conf + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.driver_instance_pool_id is not None: + body["driver_instance_pool_id"] = self.driver_instance_pool_id + if 
self.driver_node_type_id is not None: + body["driver_node_type_id"] = self.driver_node_type_id + if self.enable_local_disk_encryption is not None: + body["enable_local_disk_encryption"] = self.enable_local_disk_encryption + if self.gcp_attributes: + body["gcp_attributes"] = self.gcp_attributes + if self.init_scripts: + body["init_scripts"] = self.init_scripts + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + if self.label is not None: + body["label"] = self.label + if self.node_type_id is not None: + body["node_type_id"] = self.node_type_id + if self.num_workers is not None: + body["num_workers"] = self.num_workers + if self.policy_id is not None: + body["policy_id"] = self.policy_id + if self.spark_conf: + body["spark_conf"] = self.spark_conf + if self.spark_env_vars: + body["spark_env_vars"] = self.spark_env_vars + if self.ssh_public_keys: + body["ssh_public_keys"] = self.ssh_public_keys return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineCluster: """Deserializes the PipelineCluster from a dictionary.""" - return cls(apply_policy_default_values=d.get('apply_policy_default_values', None), autoscale=_from_dict(d, 'autoscale', PipelineClusterAutoscale), aws_attributes=_from_dict(d, 'aws_attributes', compute.AwsAttributes), azure_attributes=_from_dict(d, 'azure_attributes', compute.AzureAttributes), cluster_log_conf=_from_dict(d, 'cluster_log_conf', compute.ClusterLogConf), custom_tags=d.get('custom_tags', None), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', compute.GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', compute.InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), label=d.get('label', None), node_type_id=d.get('node_type_id', None), num_workers=d.get('num_workers', None), policy_id=d.get('policy_id', None), spark_conf=d.get('spark_conf', None), spark_env_vars=d.get('spark_env_vars', None), ssh_public_keys=d.get('ssh_public_keys', None)) - - + return cls( + apply_policy_default_values=d.get("apply_policy_default_values", None), + autoscale=_from_dict(d, "autoscale", PipelineClusterAutoscale), + aws_attributes=_from_dict(d, "aws_attributes", compute.AwsAttributes), + azure_attributes=_from_dict(d, "azure_attributes", compute.AzureAttributes), + cluster_log_conf=_from_dict(d, "cluster_log_conf", compute.ClusterLogConf), + custom_tags=d.get("custom_tags", None), + driver_instance_pool_id=d.get("driver_instance_pool_id", None), + driver_node_type_id=d.get("driver_node_type_id", None), + enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), + gcp_attributes=_from_dict(d, "gcp_attributes", compute.GcpAttributes), + init_scripts=_repeated_dict(d, "init_scripts", compute.InitScriptInfo), + instance_pool_id=d.get("instance_pool_id", None), + label=d.get("label", None), + node_type_id=d.get("node_type_id", None), + num_workers=d.get("num_workers", None), + policy_id=d.get("policy_id", None), + spark_conf=d.get("spark_conf", None), + spark_env_vars=d.get("spark_env_vars", None), + ssh_public_keys=d.get("ssh_public_keys", None), + ) @dataclass @@ -1544,39 +1928,47 @@ class PipelineClusterAutoscale: min_workers: int """The minimum number of workers the cluster can scale down to when underutilized. 
It is also the initial number of workers the cluster will have after creation.""" - + max_workers: int """The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.""" - + mode: Optional[PipelineClusterAutoscaleMode] = None """Databricks Enhanced Autoscaling optimizes cluster utilization by automatically allocating cluster resources based on workload volume, with minimal impact to the data processing latency of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. The legacy autoscaling feature is used for `maintenance` clusters.""" - + def as_dict(self) -> dict: """Serializes the PipelineClusterAutoscale into a dictionary suitable for use as a JSON request body.""" body = {} - if self.max_workers is not None: body['max_workers'] = self.max_workers - if self.min_workers is not None: body['min_workers'] = self.min_workers - if self.mode is not None: body['mode'] = self.mode.value + if self.max_workers is not None: + body["max_workers"] = self.max_workers + if self.min_workers is not None: + body["min_workers"] = self.min_workers + if self.mode is not None: + body["mode"] = self.mode.value return body def as_shallow_dict(self) -> dict: """Serializes the PipelineClusterAutoscale into a shallow dictionary of its immediate attributes.""" body = {} - if self.max_workers is not None: body['max_workers'] = self.max_workers - if self.min_workers is not None: body['min_workers'] = self.min_workers - if self.mode is not None: body['mode'] = self.mode + if self.max_workers is not None: + body["max_workers"] = self.max_workers + if self.min_workers is not None: + body["min_workers"] = self.min_workers + if self.mode is not None: + body["mode"] = self.mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineClusterAutoscale: """Deserializes the PipelineClusterAutoscale from a dictionary.""" - return cls(max_workers=d.get('max_workers', None), min_workers=d.get('min_workers', None), mode=_enum(d, 'mode', PipelineClusterAutoscaleMode)) - - + return cls( + max_workers=d.get("max_workers", None), + min_workers=d.get("min_workers", None), + mode=_enum(d, "mode", PipelineClusterAutoscaleMode), + ) class PipelineClusterAutoscaleMode(Enum): @@ -1584,599 +1976,790 @@ class PipelineClusterAutoscaleMode(Enum): cluster resources based on workload volume, with minimal impact to the data processing latency of your pipelines. Enhanced Autoscaling is available for `updates` clusters only. 
The legacy autoscaling feature is used for `maintenance` clusters.""" - - ENHANCED = 'ENHANCED' - LEGACY = 'LEGACY' + + ENHANCED = "ENHANCED" + LEGACY = "LEGACY" + @dataclass class PipelineDeployment: kind: DeploymentKind """The deployment method that manages the pipeline.""" - + metadata_file_path: Optional[str] = None """The path to the file containing metadata about the deployment.""" - + def as_dict(self) -> dict: """Serializes the PipelineDeployment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.kind is not None: body['kind'] = self.kind.value - if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path + if self.kind is not None: + body["kind"] = self.kind.value + if self.metadata_file_path is not None: + body["metadata_file_path"] = self.metadata_file_path return body def as_shallow_dict(self) -> dict: """Serializes the PipelineDeployment into a shallow dictionary of its immediate attributes.""" body = {} - if self.kind is not None: body['kind'] = self.kind - if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path + if self.kind is not None: + body["kind"] = self.kind + if self.metadata_file_path is not None: + body["metadata_file_path"] = self.metadata_file_path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineDeployment: """Deserializes the PipelineDeployment from a dictionary.""" - return cls(kind=_enum(d, 'kind', DeploymentKind), metadata_file_path=d.get('metadata_file_path', None)) - - + return cls(kind=_enum(d, "kind", DeploymentKind), metadata_file_path=d.get("metadata_file_path", None)) @dataclass class PipelineEvent: error: Optional[ErrorDetail] = None """Information about an error captured by the event.""" - + event_type: Optional[str] = None """The event type. 
Should always correspond to the details""" - + id: Optional[str] = None """A time-based, globally unique id.""" - + level: Optional[EventLevel] = None """The severity level of the event.""" - + maturity_level: Optional[MaturityLevel] = None """Maturity level for event_type.""" - + message: Optional[str] = None """The display message associated with the event.""" - + origin: Optional[Origin] = None """Describes where the event originates from.""" - + sequence: Optional[Sequencing] = None """A sequencing object to identify and order events.""" - + timestamp: Optional[str] = None """The time of the event.""" - + def as_dict(self) -> dict: """Serializes the PipelineEvent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error: body['error'] = self.error.as_dict() - if self.event_type is not None: body['event_type'] = self.event_type - if self.id is not None: body['id'] = self.id - if self.level is not None: body['level'] = self.level.value - if self.maturity_level is not None: body['maturity_level'] = self.maturity_level.value - if self.message is not None: body['message'] = self.message - if self.origin: body['origin'] = self.origin.as_dict() - if self.sequence: body['sequence'] = self.sequence.as_dict() - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.error: + body["error"] = self.error.as_dict() + if self.event_type is not None: + body["event_type"] = self.event_type + if self.id is not None: + body["id"] = self.id + if self.level is not None: + body["level"] = self.level.value + if self.maturity_level is not None: + body["maturity_level"] = self.maturity_level.value + if self.message is not None: + body["message"] = self.message + if self.origin: + body["origin"] = self.origin.as_dict() + if self.sequence: + body["sequence"] = self.sequence.as_dict() + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: """Serializes the PipelineEvent into a shallow dictionary of its immediate attributes.""" body = {} - if self.error: body['error'] = self.error - if self.event_type is not None: body['event_type'] = self.event_type - if self.id is not None: body['id'] = self.id - if self.level is not None: body['level'] = self.level - if self.maturity_level is not None: body['maturity_level'] = self.maturity_level - if self.message is not None: body['message'] = self.message - if self.origin: body['origin'] = self.origin - if self.sequence: body['sequence'] = self.sequence - if self.timestamp is not None: body['timestamp'] = self.timestamp + if self.error: + body["error"] = self.error + if self.event_type is not None: + body["event_type"] = self.event_type + if self.id is not None: + body["id"] = self.id + if self.level is not None: + body["level"] = self.level + if self.maturity_level is not None: + body["maturity_level"] = self.maturity_level + if self.message is not None: + body["message"] = self.message + if self.origin: + body["origin"] = self.origin + if self.sequence: + body["sequence"] = self.sequence + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineEvent: """Deserializes the PipelineEvent from a dictionary.""" - return cls(error=_from_dict(d, 'error', ErrorDetail), event_type=d.get('event_type', None), id=d.get('id', None), level=_enum(d, 'level', EventLevel), maturity_level=_enum(d, 'maturity_level', MaturityLevel), message=d.get('message', None), origin=_from_dict(d, 'origin', 
Origin), sequence=_from_dict(d, 'sequence', Sequencing), timestamp=d.get('timestamp', None)) - - + return cls( + error=_from_dict(d, "error", ErrorDetail), + event_type=d.get("event_type", None), + id=d.get("id", None), + level=_enum(d, "level", EventLevel), + maturity_level=_enum(d, "maturity_level", MaturityLevel), + message=d.get("message", None), + origin=_from_dict(d, "origin", Origin), + sequence=_from_dict(d, "sequence", Sequencing), + timestamp=d.get("timestamp", None), + ) @dataclass class PipelineLibrary: file: Optional[FileLibrary] = None """The path to a file that defines a pipeline and is stored in the Databricks Repos.""" - + glob: Optional[PathPattern] = None """The unified field to include source codes. Each entry can be a notebook path, a file path, or a folder path that ends `/**`. This field cannot be used together with `notebook` or `file`.""" - + jar: Optional[str] = None """URI of the jar to be installed. Currently only DBFS is supported.""" - + maven: Optional[compute.MavenLibrary] = None """Specification of a maven library to be installed.""" - + notebook: Optional[NotebookLibrary] = None """The path to a notebook that defines a pipeline and is stored in the Databricks workspace.""" - + whl: Optional[str] = None """URI of the whl to be installed.""" - + def as_dict(self) -> dict: """Serializes the PipelineLibrary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.file: body['file'] = self.file.as_dict() - if self.glob: body['glob'] = self.glob.as_dict() - if self.jar is not None: body['jar'] = self.jar - if self.maven: body['maven'] = self.maven.as_dict() - if self.notebook: body['notebook'] = self.notebook.as_dict() - if self.whl is not None: body['whl'] = self.whl + if self.file: + body["file"] = self.file.as_dict() + if self.glob: + body["glob"] = self.glob.as_dict() + if self.jar is not None: + body["jar"] = self.jar + if self.maven: + body["maven"] = self.maven.as_dict() + if self.notebook: + body["notebook"] = self.notebook.as_dict() + if self.whl is not None: + body["whl"] = self.whl return body def as_shallow_dict(self) -> dict: """Serializes the PipelineLibrary into a shallow dictionary of its immediate attributes.""" body = {} - if self.file: body['file'] = self.file - if self.glob: body['glob'] = self.glob - if self.jar is not None: body['jar'] = self.jar - if self.maven: body['maven'] = self.maven - if self.notebook: body['notebook'] = self.notebook - if self.whl is not None: body['whl'] = self.whl + if self.file: + body["file"] = self.file + if self.glob: + body["glob"] = self.glob + if self.jar is not None: + body["jar"] = self.jar + if self.maven: + body["maven"] = self.maven + if self.notebook: + body["notebook"] = self.notebook + if self.whl is not None: + body["whl"] = self.whl return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineLibrary: """Deserializes the PipelineLibrary from a dictionary.""" - return cls(file=_from_dict(d, 'file', FileLibrary), glob=_from_dict(d, 'glob', PathPattern), jar=d.get('jar', None), maven=_from_dict(d, 'maven', compute.MavenLibrary), notebook=_from_dict(d, 'notebook', NotebookLibrary), whl=d.get('whl', None)) - - + return cls( + file=_from_dict(d, "file", FileLibrary), + glob=_from_dict(d, "glob", PathPattern), + jar=d.get("jar", None), + maven=_from_dict(d, "maven", compute.MavenLibrary), + notebook=_from_dict(d, "notebook", NotebookLibrary), + whl=d.get("whl", None), + ) @dataclass class PipelinePermission: inherited: Optional[bool] = None - + inherited_from_object: 
Optional[List[str]] = None - + permission_level: Optional[PipelinePermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the PipelinePermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the PipelinePermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelinePermission: """Deserializes the PipelinePermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', PipelinePermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", PipelinePermissionLevel), + ) class PipelinePermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = 'CAN_MANAGE' - CAN_RUN = 'CAN_RUN' - CAN_VIEW = 'CAN_VIEW' - IS_OWNER = 'IS_OWNER' + + CAN_MANAGE = "CAN_MANAGE" + CAN_RUN = "CAN_RUN" + CAN_VIEW = "CAN_VIEW" + IS_OWNER = "IS_OWNER" + @dataclass class PipelinePermissions: access_control_list: Optional[List[PipelineAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the PipelinePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the PipelinePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if 

@dataclass
class PipelinePermissions:
    access_control_list: Optional[List[PipelineAccessControlResponse]] = None
-
+
    object_id: Optional[str] = None
-
+
    object_type: Optional[str] = None
-
+
    def as_dict(self) -> dict:
        """Serializes the PipelinePermissions into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the PipelinePermissions into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissions:
        """Deserializes the PipelinePermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', PipelineAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )


@dataclass
class PipelinePermissionsDescription:
    description: Optional[str] = None
-
+
    permission_level: Optional[PipelinePermissionLevel] = None
    """Permission level"""
-
+
    def as_dict(self) -> dict:
        """Serializes the PipelinePermissionsDescription into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the PipelinePermissionsDescription into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsDescription:
        """Deserializes the PipelinePermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', PipelinePermissionLevel))
-
-
+        return cls(
+            description=d.get("description", None),
+            permission_level=_enum(d, "permission_level", PipelinePermissionLevel),
+        )
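A minimal sketch of the deserialization direction these classes exist for: feeding an API-style permissions payload through `from_dict`. The payload shape below is illustrative only and assumes `PipelineAccessControlResponse` carries `user_name` and `all_permissions` fields, as defined earlier in this module.

# Sketch only; payload contents are illustrative.
from databricks.sdk.service.pipelines import PipelinePermissions

payload = {
    "object_id": "/pipelines/abc-123",
    "object_type": "pipelines",
    "access_control_list": [
        {"user_name": "eng@example.com", "all_permissions": [{"permission_level": "CAN_MANAGE"}]}
    ],
}
perms = PipelinePermissions.from_dict(payload)
print(perms.object_id, perms.access_control_list[0].user_name)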

@dataclass
class PipelinePermissionsRequest:
    access_control_list: Optional[List[PipelineAccessControlRequest]] = None
-
+
    pipeline_id: Optional[str] = None
    """The pipeline for which to get or manage permissions."""
-
+
    def as_dict(self) -> dict:
        """Serializes the PipelinePermissionsRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsRequest:
        """Deserializes the PipelinePermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', PipelineAccessControlRequest), pipeline_id=d.get('pipeline_id', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlRequest),
+            pipeline_id=d.get("pipeline_id", None),
+        )


@dataclass
class PipelineSpec:
    budget_policy_id: Optional[str] = None
    """Budget policy of this pipeline."""
-
+
    catalog: Optional[str] = None
    """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
    tables in this pipeline are published to a `target` schema inside `catalog` (for example,
    `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog."""
-
+
    channel: Optional[str] = None
    """DLT Release Channel that specifies which version to use."""
-
+
    clusters: Optional[List[PipelineCluster]] = None
    """Cluster settings for this pipeline deployment."""
-
-    configuration: Optional[Dict[str,str]] = None
+
+    configuration: Optional[Dict[str, str]] = None
    """String-String configuration for this pipeline execution."""
-
+
    continuous: Optional[bool] = None
    """Whether the pipeline is continuous or triggered. This replaces `trigger`."""
-
+
    deployment: Optional[PipelineDeployment] = None
    """Deployment type of this pipeline."""
-
+
    development: Optional[bool] = None
    """Whether the pipeline is in Development mode. Defaults to false."""
-
+
    edition: Optional[str] = None
    """Pipeline product edition."""
-
+
    event_log: Optional[EventLogSpec] = None
    """Event log configuration for this pipeline"""
-
+
    filters: Optional[Filters] = None
    """Filters on which Pipeline packages to include in the deployed graph."""
-
+
    gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
    """The definition of a gateway pipeline to support change data capture."""
-
+
    id: Optional[str] = None
    """Unique identifier for this pipeline."""
-
+
    ingestion_definition: Optional[IngestionPipelineDefinition] = None
    """The configuration for a managed ingestion pipeline. These settings cannot be used with the
    'libraries', 'schema', 'target', or 'catalog' settings."""
-
+
    libraries: Optional[List[PipelineLibrary]] = None
    """Libraries or code needed by this deployment."""
-
+
    name: Optional[str] = None
    """Friendly identifier for this pipeline."""
-
+
    notifications: Optional[List[Notifications]] = None
    """List of notification settings for this pipeline."""
-
+
    photon: Optional[bool] = None
    """Whether Photon is enabled for this pipeline."""
-
+
    restart_window: Optional[RestartWindow] = None
    """Restart window of this pipeline."""
-
+
    root_path: Optional[str] = None
    """Root path for this pipeline. This is used as the root directory when editing the pipeline in
    the Databricks user interface and it is added to sys.path when executing Python sources during
    pipeline execution."""
-
+
    schema: Optional[str] = None
    """The default schema (database) where tables are read from or published to."""
-
+
    serverless: Optional[bool] = None
    """Whether serverless compute is enabled for this pipeline."""
-
+
    storage: Optional[str] = None
    """DBFS root directory for storing checkpoints and tables."""
-
-    tags: Optional[Dict[str,str]] = None
+
+    tags: Optional[Dict[str, str]] = None
    """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster
    tags, and are therefore subject to the same limitations.
    A maximum of 25 tags can be added to the pipeline."""
-
+
    target: Optional[str] = None
    """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or
    `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy
    field is deprecated for pipeline creation in favor of the `schema` field."""
-
+
    trigger: Optional[PipelineTrigger] = None
    """Which pipeline trigger to use. Deprecated: Use `continuous` instead."""
-
+
    def as_dict(self) -> dict:
        """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment.as_dict()
-        if self.development is not None: body['development'] = self.development
-        if self.edition is not None: body['edition'] = self.edition
-        if self.event_log: body['event_log'] = self.event_log.as_dict()
-        if self.filters: body['filters'] = self.filters.as_dict()
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition.as_dict()
-        if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
-        if self.photon is not None: body['photon'] = self.photon
-        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
-        if self.root_path is not None: body['root_path'] = self.root_path
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.tags: body['tags'] = self.tags
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger.as_dict()
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = [v.as_dict() for v in self.clusters]
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment.as_dict()
+        if self.development is not None:
+            body["development"] = self.development
+        if self.edition is not None:
+            body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log.as_dict()
+        if self.filters:
+            body["filters"] = self.filters.as_dict()
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition.as_dict()
+        if self.libraries:
+            body["libraries"] = [v.as_dict() for v in self.libraries]
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
+            body["notifications"] = [v.as_dict() for v in self.notifications]
+        if self.photon is not None:
+            body["photon"] = self.photon
+        if self.restart_window:
+            body["restart_window"] = self.restart_window.as_dict()
+        if self.root_path is not None:
+            body["root_path"] = self.root_path
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.serverless is not None:
+            body["serverless"] = self.serverless
+        if self.storage is not None:
+            body["storage"] = self.storage
+        if self.tags:
+            body["tags"] = self.tags
+        if self.target is not None:
+            body["target"] = self.target
+        if self.trigger:
+            body["trigger"] = self.trigger.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the PipelineSpec into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.channel is not None: body['channel'] = self.channel
-        if self.clusters: body['clusters'] = self.clusters
-        if self.configuration: body['configuration'] = self.configuration
-        if self.continuous is not None: body['continuous'] = self.continuous
-        if self.deployment: body['deployment'] = self.deployment
-        if self.development is not None: body['development'] = self.development
-        if self.edition is not None: body['edition'] = self.edition
-        if self.event_log: body['event_log'] = self.event_log
-        if self.filters: body['filters'] = self.filters
-        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
-        if self.id is not None: body['id'] = self.id
-        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
-        if self.libraries: body['libraries'] = self.libraries
-        if self.name is not None: body['name'] = self.name
-        if self.notifications: body['notifications'] = self.notifications
-        if self.photon is not None: body['photon'] = self.photon
-        if self.restart_window: body['restart_window'] = self.restart_window
-        if self.root_path is not None: body['root_path'] = self.root_path
-        if self.schema is not None: body['schema'] = self.schema
-        if self.serverless is not None: body['serverless'] = self.serverless
-        if self.storage is not None: body['storage'] = self.storage
-        if self.tags: body['tags'] = self.tags
-        if self.target is not None: body['target'] = self.target
-        if self.trigger: body['trigger'] = self.trigger
+        if self.budget_policy_id is not None:
+            body["budget_policy_id"] = self.budget_policy_id
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.channel is not None:
+            body["channel"] = self.channel
+        if self.clusters:
+            body["clusters"] = self.clusters
+        if self.configuration:
+            body["configuration"] = self.configuration
+        if self.continuous is not None:
+            body["continuous"] = self.continuous
+        if self.deployment:
+            body["deployment"] = self.deployment
+        if self.development is not None:
+            body["development"] = self.development
+        if self.edition is not None:
+            body["edition"] = self.edition
+        if self.event_log:
+            body["event_log"] = self.event_log
+        if self.filters:
+            body["filters"] = self.filters
+        if self.gateway_definition:
+            body["gateway_definition"] = self.gateway_definition
+        if self.id is not None:
+            body["id"] = self.id
+        if self.ingestion_definition:
+            body["ingestion_definition"] = self.ingestion_definition
+        if self.libraries:
+            body["libraries"] = self.libraries
+        if self.name is not None:
+            body["name"] = self.name
+        if self.notifications:
body["notifications"] = self.notifications + if self.photon is not None: + body["photon"] = self.photon + if self.restart_window: + body["restart_window"] = self.restart_window + if self.root_path is not None: + body["root_path"] = self.root_path + if self.schema is not None: + body["schema"] = self.schema + if self.serverless is not None: + body["serverless"] = self.serverless + if self.storage is not None: + body["storage"] = self.storage + if self.tags: + body["tags"] = self.tags + if self.target is not None: + body["target"] = self.target + if self.trigger: + body["trigger"] = self.trigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineSpec: """Deserializes the PipelineSpec from a dictionary.""" - return cls(budget_policy_id=d.get('budget_policy_id', None), catalog=d.get('catalog', None), channel=d.get('channel', None), clusters=_repeated_dict(d, 'clusters', PipelineCluster), configuration=d.get('configuration', None), continuous=d.get('continuous', None), deployment=_from_dict(d, 'deployment', PipelineDeployment), development=d.get('development', None), edition=d.get('edition', None), event_log=_from_dict(d, 'event_log', EventLogSpec), filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), photon=d.get('photon', None), restart_window=_from_dict(d, 'restart_window', RestartWindow), root_path=d.get('root_path', None), schema=d.get('schema', None), serverless=d.get('serverless', None), storage=d.get('storage', None), tags=d.get('tags', None), target=d.get('target', None), trigger=_from_dict(d, 'trigger', PipelineTrigger)) - - + return cls( + budget_policy_id=d.get("budget_policy_id", None), + catalog=d.get("catalog", None), + channel=d.get("channel", None), + clusters=_repeated_dict(d, "clusters", PipelineCluster), + configuration=d.get("configuration", None), + continuous=d.get("continuous", None), + deployment=_from_dict(d, "deployment", PipelineDeployment), + development=d.get("development", None), + edition=d.get("edition", None), + event_log=_from_dict(d, "event_log", EventLogSpec), + filters=_from_dict(d, "filters", Filters), + gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), + id=d.get("id", None), + ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition), + libraries=_repeated_dict(d, "libraries", PipelineLibrary), + name=d.get("name", None), + notifications=_repeated_dict(d, "notifications", Notifications), + photon=d.get("photon", None), + restart_window=_from_dict(d, "restart_window", RestartWindow), + root_path=d.get("root_path", None), + schema=d.get("schema", None), + serverless=d.get("serverless", None), + storage=d.get("storage", None), + tags=d.get("tags", None), + target=d.get("target", None), + trigger=_from_dict(d, "trigger", PipelineTrigger), + ) class PipelineState(Enum): """The pipeline state.""" - - DELETED = 'DELETED' - DEPLOYING = 'DEPLOYING' - FAILED = 'FAILED' - IDLE = 'IDLE' - RECOVERING = 'RECOVERING' - RESETTING = 'RESETTING' - RUNNING = 'RUNNING' - STARTING = 'STARTING' - STOPPING = 'STOPPING' + + DELETED = "DELETED" + DEPLOYING = "DEPLOYING" + FAILED = "FAILED" + IDLE = "IDLE" + RECOVERING = "RECOVERING" + RESETTING = 
"RESETTING" + RUNNING = "RUNNING" + STARTING = "STARTING" + STOPPING = "STOPPING" + @dataclass class PipelineStateInfo: cluster_id: Optional[str] = None """The unique identifier of the cluster running the pipeline.""" - + creator_user_name: Optional[str] = None """The username of the pipeline creator.""" - + health: Optional[PipelineStateInfoHealth] = None """The health of a pipeline.""" - + latest_updates: Optional[List[UpdateStateInfo]] = None """Status of the latest updates for the pipeline. Ordered with the newest update first.""" - + name: Optional[str] = None """The user-friendly name of the pipeline.""" - + pipeline_id: Optional[str] = None """The unique identifier of the pipeline.""" - + run_as_user_name: Optional[str] = None """The username that the pipeline runs as. This is a read only value derived from the pipeline owner.""" - + state: Optional[PipelineState] = None """The pipeline state.""" - + def as_dict(self) -> dict: """Serializes the PipelineStateInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.health is not None: body['health'] = self.health.value - if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates] - if self.name is not None: body['name'] = self.name - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name - if self.state is not None: body['state'] = self.state.value + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.health is not None: + body["health"] = self.health.value + if self.latest_updates: + body["latest_updates"] = [v.as_dict() for v in self.latest_updates] + if self.name is not None: + body["name"] = self.name + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the PipelineStateInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: body['cluster_id'] = self.cluster_id - if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name - if self.health is not None: body['health'] = self.health - if self.latest_updates: body['latest_updates'] = self.latest_updates - if self.name is not None: body['name'] = self.name - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name - if self.state is not None: body['state'] = self.state + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + if self.creator_user_name is not None: + body["creator_user_name"] = self.creator_user_name + if self.health is not None: + body["health"] = self.health + if self.latest_updates: + body["latest_updates"] = self.latest_updates + if self.name is not None: + body["name"] = self.name + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.state is not None: + 
body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineStateInfo: """Deserializes the PipelineStateInfo from a dictionary.""" - return cls(cluster_id=d.get('cluster_id', None), creator_user_name=d.get('creator_user_name', None), health=_enum(d, 'health', PipelineStateInfoHealth), latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo), name=d.get('name', None), pipeline_id=d.get('pipeline_id', None), run_as_user_name=d.get('run_as_user_name', None), state=_enum(d, 'state', PipelineState)) - - + return cls( + cluster_id=d.get("cluster_id", None), + creator_user_name=d.get("creator_user_name", None), + health=_enum(d, "health", PipelineStateInfoHealth), + latest_updates=_repeated_dict(d, "latest_updates", UpdateStateInfo), + name=d.get("name", None), + pipeline_id=d.get("pipeline_id", None), + run_as_user_name=d.get("run_as_user_name", None), + state=_enum(d, "state", PipelineState), + ) class PipelineStateInfoHealth(Enum): """The health of a pipeline.""" - - HEALTHY = 'HEALTHY' - UNHEALTHY = 'UNHEALTHY' + + HEALTHY = "HEALTHY" + UNHEALTHY = "UNHEALTHY" + @dataclass class PipelineTrigger: cron: Optional[CronTrigger] = None - + manual: Optional[ManualTrigger] = None - + def as_dict(self) -> dict: """Serializes the PipelineTrigger into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cron: body['cron'] = self.cron.as_dict() - if self.manual: body['manual'] = self.manual.as_dict() + if self.cron: + body["cron"] = self.cron.as_dict() + if self.manual: + body["manual"] = self.manual.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PipelineTrigger into a shallow dictionary of its immediate attributes.""" body = {} - if self.cron: body['cron'] = self.cron - if self.manual: body['manual'] = self.manual + if self.cron: + body["cron"] = self.cron + if self.manual: + body["manual"] = self.manual return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PipelineTrigger: """Deserializes the PipelineTrigger from a dictionary.""" - return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger)) - - + return cls(cron=_from_dict(d, "cron", CronTrigger), manual=_from_dict(d, "manual", ManualTrigger)) @dataclass class ReportSpec: source_url: str """Required. Report URL in the source system.""" - + destination_catalog: str """Required. Destination catalog to store table.""" - + destination_schema: str """Required. Destination schema to store table.""" - + destination_table: Optional[str] = None """Required. Destination table name. The pipeline fails if a table with that name already exists.""" - + table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. 


@dataclass
class ReportSpec:
    source_url: str
    """Required. Report URL in the source system."""
-
+
    destination_catalog: str
    """Required. Destination catalog to store table."""
-
+
    destination_schema: str
    """Required. Destination schema to store table."""
-
+
    destination_table: Optional[str] = None
    """Required. Destination table name. The pipeline fails if a table with that name already exists."""
-
+
    table_configuration: Optional[TableSpecificConfig] = None
    """Configuration settings to control the ingestion of tables. These settings override the
    table_configuration defined in the IngestionPipelineDefinition object."""
-
+
    def as_dict(self) -> dict:
        """Serializes the ReportSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_url is not None: body['source_url'] = self.source_url
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_url is not None:
+            body["source_url"] = self.source_url
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the ReportSpec into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_url is not None: body['source_url'] = self.source_url
-        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_url is not None:
+            body["source_url"] = self.source_url
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> ReportSpec:
        """Deserializes the ReportSpec from a dictionary."""
-        return cls(destination_catalog=d.get('destination_catalog', None), destination_schema=d.get('destination_schema', None), destination_table=d.get('destination_table', None), source_url=d.get('source_url', None), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
-
-
+        return cls(
+            destination_catalog=d.get("destination_catalog", None),
+            destination_schema=d.get("destination_schema", None),
+            destination_table=d.get("destination_table", None),
+            source_url=d.get("source_url", None),
+            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
+        )
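A minimal sketch of a report ingestion entry, mapping one source-system report URL to a destination table (URL and names below are illustrative):

# Sketch only; URL and names are illustrative.
from databricks.sdk.service.pipelines import ReportSpec

report = ReportSpec(
    source_url="https://example.invalid/reports/quarterly",
    destination_catalog="main",
    destination_schema="ingest",
    destination_table="quarterly_report",
)
print(report.as_dict())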

@dataclass
@@ -2184,38 +2767,46 @@ class RestartWindow:
    start_hour: int
    """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
    Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
-
+
    days_of_week: Optional[List[DayOfWeek]] = None
    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
    start_hour). If not specified, all days of the week will be used."""
-
+
    time_zone_id: Optional[str] = None
    """Time zone id of restart window. See
    https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
    for details. If not specified, UTC will be used."""
-
+
    def as_dict(self) -> dict:
        """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.days_of_week: body['days_of_week'] = [v.value for v in self.days_of_week]
-        if self.start_hour is not None: body['start_hour'] = self.start_hour
-        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        if self.days_of_week:
+            body["days_of_week"] = [v.value for v in self.days_of_week]
+        if self.start_hour is not None:
+            body["start_hour"] = self.start_hour
+        if self.time_zone_id is not None:
+            body["time_zone_id"] = self.time_zone_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the RestartWindow into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.days_of_week: body['days_of_week'] = self.days_of_week
-        if self.start_hour is not None: body['start_hour'] = self.start_hour
-        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        if self.days_of_week:
+            body["days_of_week"] = self.days_of_week
+        if self.start_hour is not None:
+            body["start_hour"] = self.start_hour
+        if self.time_zone_id is not None:
+            body["time_zone_id"] = self.time_zone_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> RestartWindow:
        """Deserializes the RestartWindow from a dictionary."""
-        return cls(days_of_week=_repeated_enum(d, 'days_of_week', DayOfWeek), start_hour=d.get('start_hour', None), time_zone_id=d.get('time_zone_id', None))
-
-
+        return cls(
+            days_of_week=_repeated_enum(d, "days_of_week", DayOfWeek),
+            start_hour=d.get("start_hour", None),
+            time_zone_id=d.get("time_zone_id", None),
+        )
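A minimal sketch against the semantics above: restarts allowed only in the five-hour window starting at 02:00 on weekends, evaluated in the given time zone. It assumes the `DayOfWeek` enum defined elsewhere in this module.

# Sketch only; values are illustrative.
from databricks.sdk.service.pipelines import DayOfWeek, RestartWindow

window = RestartWindow(
    start_hour=2,
    days_of_week=[DayOfWeek.SATURDAY, DayOfWeek.SUNDAY],
    time_zone_id="America/Los_Angeles",
)
print(window.as_dict())  # enum members serialize via .value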

@dataclass
@@ -2223,280 +2814,338 @@ class RunAs:
    """Write-only setting, available only in Create/Update calls. Specifies the user or service
    principal that the pipeline runs as. If not specified, the pipeline runs as the user who
    created the pipeline.
-
+
    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
    is thrown."""
-
+
    service_principal_name: Optional[str] = None
    """Application ID of an active service principal. Setting this field requires the
    `servicePrincipal/user` role."""
-
+
    user_name: Optional[str] = None
    """The email of an active workspace user. Users can only set this field to their own email."""
-
+
    def as_dict(self) -> dict:
        """Serializes the RunAs into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the RunAs into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> RunAs:
        """Deserializes the RunAs from a dictionary."""
-        return cls(service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(service_principal_name=d.get("service_principal_name", None), user_name=d.get("user_name", None))
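A minimal sketch of running a pipeline as a service principal; per the docstring above, setting both fields is an error, so exactly one is populated. The application ID is illustrative.

# Sketch only; the application ID is illustrative.
from databricks.sdk.service.pipelines import RunAs

run_as = RunAs(service_principal_name="6b25f1e2-0000-0000-0000-abcdef012345")
print(run_as.as_dict())  # only the field that was set appears in the body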


@dataclass
class SchemaSpec:
    source_schema: str
    """Required. Schema name in the source database."""
-
+
    destination_catalog: str
    """Required. Destination catalog to store tables."""
-
+
    destination_schema: str
    """Required. Destination schema to store tables in. Tables with the same name as the source
    tables are created in this destination schema. The pipeline fails if a table with the same name
    already exists."""
-
+
    source_catalog: Optional[str] = None
    """The source catalog name. Might be optional depending on the type of source."""
-
+
    table_configuration: Optional[TableSpecificConfig] = None
    """Configuration settings to control the ingestion of tables. These settings are applied to all
    tables in this schema and override the table_configuration defined in the
    IngestionPipelineDefinition object."""
-
+
    def as_dict(self) -> dict:
        """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SchemaSpec into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SchemaSpec:
        """Deserializes the SchemaSpec from a dictionary."""
-        return cls(destination_catalog=d.get('destination_catalog', None), destination_schema=d.get('destination_schema', None), source_catalog=d.get('source_catalog', None), source_schema=d.get('source_schema', None), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
-
-
+        return cls(
+            destination_catalog=d.get("destination_catalog", None),
+            destination_schema=d.get("destination_schema", None),
+            source_catalog=d.get("source_catalog", None),
+            source_schema=d.get("source_schema", None),
+            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
+        )
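A minimal sketch of replicating every table of one source schema into a Unity Catalog schema; the per-schema `table_configuration` overrides the pipeline-level default, as the docstring notes. Names are illustrative.

# Sketch only; names are illustrative.
from databricks.sdk.service.pipelines import SchemaSpec, TableSpecificConfig, TableSpecificConfigScdType

schema_spec = SchemaSpec(
    source_catalog="salesforce",
    source_schema="objects",
    destination_catalog="main",
    destination_schema="salesforce_bronze",
    table_configuration=TableSpecificConfig(scd_type=TableSpecificConfigScdType.SCD_TYPE_1),
)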
+ body["control_plane_seq_no"] = self.control_plane_seq_no + if self.data_plane_id: + body["data_plane_id"] = self.data_plane_id.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Sequencing into a shallow dictionary of its immediate attributes.""" body = {} - if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no - if self.data_plane_id: body['data_plane_id'] = self.data_plane_id + if self.control_plane_seq_no is not None: + body["control_plane_seq_no"] = self.control_plane_seq_no + if self.data_plane_id: + body["data_plane_id"] = self.data_plane_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Sequencing: """Deserializes the Sequencing from a dictionary.""" - return cls(control_plane_seq_no=d.get('control_plane_seq_no', None), data_plane_id=_from_dict(d, 'data_plane_id', DataPlaneId)) - - + return cls( + control_plane_seq_no=d.get("control_plane_seq_no", None), + data_plane_id=_from_dict(d, "data_plane_id", DataPlaneId), + ) @dataclass class SerializedException: class_name: Optional[str] = None """Runtime class of the exception""" - + message: Optional[str] = None """Exception message""" - + stack: Optional[List[StackFrame]] = None """Stack trace consisting of a list of stack frames""" - + def as_dict(self) -> dict: """Serializes the SerializedException into a dictionary suitable for use as a JSON request body.""" body = {} - if self.class_name is not None: body['class_name'] = self.class_name - if self.message is not None: body['message'] = self.message - if self.stack: body['stack'] = [v.as_dict() for v in self.stack] + if self.class_name is not None: + body["class_name"] = self.class_name + if self.message is not None: + body["message"] = self.message + if self.stack: + body["stack"] = [v.as_dict() for v in self.stack] return body def as_shallow_dict(self) -> dict: """Serializes the SerializedException into a shallow dictionary of its immediate attributes.""" body = {} - if self.class_name is not None: body['class_name'] = self.class_name - if self.message is not None: body['message'] = self.message - if self.stack: body['stack'] = self.stack + if self.class_name is not None: + body["class_name"] = self.class_name + if self.message is not None: + body["message"] = self.message + if self.stack: + body["stack"] = self.stack return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SerializedException: """Deserializes the SerializedException from a dictionary.""" - return cls(class_name=d.get('class_name', None), message=d.get('message', None), stack=_repeated_dict(d, 'stack', StackFrame)) - - + return cls( + class_name=d.get("class_name", None), + message=d.get("message", None), + stack=_repeated_dict(d, "stack", StackFrame), + ) @dataclass class StackFrame: declaring_class: Optional[str] = None """Class from which the method call originated""" - + file_name: Optional[str] = None """File where the method is defined""" - + line_number: Optional[int] = None """Line from which the method was called""" - + method_name: Optional[str] = None """Name of the method which was called""" - + def as_dict(self) -> dict: """Serializes the StackFrame into a dictionary suitable for use as a JSON request body.""" body = {} - if self.declaring_class is not None: body['declaring_class'] = self.declaring_class - if self.file_name is not None: body['file_name'] = self.file_name - if self.line_number is not None: body['line_number'] = self.line_number - if self.method_name is not None: body['method_name'] = 


@dataclass
class StackFrame:
    declaring_class: Optional[str] = None
    """Class from which the method call originated"""
-
+
    file_name: Optional[str] = None
    """File where the method is defined"""
-
+
    line_number: Optional[int] = None
    """Line from which the method was called"""
-
+
    method_name: Optional[str] = None
    """Name of the method which was called"""
-
+
    def as_dict(self) -> dict:
        """Serializes the StackFrame into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
-        if self.file_name is not None: body['file_name'] = self.file_name
-        if self.line_number is not None: body['line_number'] = self.line_number
-        if self.method_name is not None: body['method_name'] = self.method_name
+        if self.declaring_class is not None:
+            body["declaring_class"] = self.declaring_class
+        if self.file_name is not None:
+            body["file_name"] = self.file_name
+        if self.line_number is not None:
+            body["line_number"] = self.line_number
+        if self.method_name is not None:
+            body["method_name"] = self.method_name
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the StackFrame into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
-        if self.file_name is not None: body['file_name'] = self.file_name
-        if self.line_number is not None: body['line_number'] = self.line_number
-        if self.method_name is not None: body['method_name'] = self.method_name
+        if self.declaring_class is not None:
+            body["declaring_class"] = self.declaring_class
+        if self.file_name is not None:
+            body["file_name"] = self.file_name
+        if self.line_number is not None:
+            body["line_number"] = self.line_number
+        if self.method_name is not None:
+            body["method_name"] = self.method_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> StackFrame:
        """Deserializes the StackFrame from a dictionary."""
-        return cls(declaring_class=d.get('declaring_class', None), file_name=d.get('file_name', None), line_number=d.get('line_number', None), method_name=d.get('method_name', None))
-
-
+        return cls(
+            declaring_class=d.get("declaring_class", None),
+            file_name=d.get("file_name", None),
+            line_number=d.get("line_number", None),
+            method_name=d.get("method_name", None),
+        )


@dataclass
class StartUpdate:
    cause: Optional[StartUpdateCause] = None
    """What triggered this update."""
-
+
    full_refresh: Optional[bool] = None
    """If true, this update will reset all tables before running."""
-
+
    full_refresh_selection: Optional[List[str]] = None
    """A list of tables to update with fullRefresh. If both refresh_selection and
    full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
    that the states of the table will be reset before the refresh."""
-
+
    pipeline_id: Optional[str] = None
-
+
    refresh_selection: Optional[List[str]] = None
    """A list of tables to update without fullRefresh. If both refresh_selection and
    full_refresh_selection are empty, this is a full graph update.
    Full Refresh on a table means that the states of the table will be reset before the refresh."""
-
+
    validate_only: Optional[bool] = None
    """If true, this update only validates the correctness of pipeline source code but does not
    materialize or publish any datasets."""
-
+
    def as_dict(self) -> dict:
        """Serializes the StartUpdate into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.cause is not None: body['cause'] = self.cause.value
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.full_refresh_selection: body['full_refresh_selection'] = [v for v in self.full_refresh_selection]
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = [v for v in self.refresh_selection]
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cause is not None:
+            body["cause"] = self.cause.value
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.full_refresh_selection:
+            body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.refresh_selection:
+            body["refresh_selection"] = [v for v in self.refresh_selection]
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the StartUpdate into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.full_refresh_selection:
+            body["full_refresh_selection"] = self.full_refresh_selection
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.refresh_selection:
+            body["refresh_selection"] = self.refresh_selection
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> StartUpdate:
        """Deserializes the StartUpdate from a dictionary."""
-        return cls(cause=_enum(d, 'cause', StartUpdateCause), full_refresh=d.get('full_refresh', None), full_refresh_selection=d.get('full_refresh_selection', None), pipeline_id=d.get('pipeline_id', None), refresh_selection=d.get('refresh_selection', None), validate_only=d.get('validate_only', None))
-
-
+        return cls(
+            cause=_enum(d, "cause", StartUpdateCause),
+            full_refresh=d.get("full_refresh", None),
+            full_refresh_selection=d.get("full_refresh_selection", None),
+            pipeline_id=d.get("pipeline_id", None),
+            refresh_selection=d.get("refresh_selection", None),
+            validate_only=d.get("validate_only", None),
+        )
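A minimal sketch of the refresh semantics documented above: one table is fully refreshed (its state reset first), another is refreshed incrementally, and leaving both selections empty would instead request a full-graph update. Table names are illustrative.

# Sketch only; IDs and table names are illustrative.
from databricks.sdk.service.pipelines import StartUpdate

req = StartUpdate(
    pipeline_id="abc-123",
    full_refresh_selection=["sales.orders_bronze"],
    refresh_selection=["sales.orders_silver"],
)
print(req.as_dict())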
API_CALL = "API_CALL" + INFRASTRUCTURE_MAINTENANCE = "INFRASTRUCTURE_MAINTENANCE" + JOB_TASK = "JOB_TASK" + RETRY_ON_FAILURE = "RETRY_ON_FAILURE" + SCHEMA_CHANGE = "SCHEMA_CHANGE" + SERVICE_UPGRADE = "SERVICE_UPGRADE" + USER_ACTION = "USER_ACTION" + @dataclass class StartUpdateResponse: update_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the StartUpdateResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.update_id is not None: body['update_id'] = self.update_id + if self.update_id is not None: + body["update_id"] = self.update_id return body def as_shallow_dict(self) -> dict: """Serializes the StartUpdateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.update_id is not None: body['update_id'] = self.update_id + if self.update_id is not None: + body["update_id"] = self.update_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StartUpdateResponse: """Deserializes the StartUpdateResponse from a dictionary.""" - return cls(update_id=d.get('update_id', None)) - - + return cls(update_id=d.get("update_id", None)) @dataclass @@ -2515,68 +3164,83 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> StopPipelineResponse: """Deserializes the StopPipelineResponse from a dictionary.""" return cls() - - - - - @dataclass class TableSpec: source_table: str """Required. Table name in the source database.""" - + destination_catalog: str """Required. Destination catalog to store table.""" - + destination_schema: str """Required. Destination schema to store table.""" - + destination_table: Optional[str] = None """Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used.""" - + source_catalog: Optional[str] = None """Source catalog name. Might be optional depending on the type of source.""" - + source_schema: Optional[str] = None """Schema name in the source database. Might be optional depending on the type of source.""" - + table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. 
    These settings override the table_configuration defined in the IngestionPipelineDefinition
    object and the SchemaSpec."""
-
+
    def as_dict(self) -> dict:
        """Serializes the TableSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.source_table is not None: body['source_table'] = self.source_table
-        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TableSpec into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
-        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
-        if self.destination_table is not None: body['destination_table'] = self.destination_table
-        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
-        if self.source_schema is not None: body['source_schema'] = self.source_schema
-        if self.source_table is not None: body['source_table'] = self.source_table
-        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        if self.destination_catalog is not None:
+            body["destination_catalog"] = self.destination_catalog
+        if self.destination_schema is not None:
+            body["destination_schema"] = self.destination_schema
+        if self.destination_table is not None:
+            body["destination_table"] = self.destination_table
+        if self.source_catalog is not None:
+            body["source_catalog"] = self.source_catalog
+        if self.source_schema is not None:
+            body["source_schema"] = self.source_schema
+        if self.source_table is not None:
+            body["source_table"] = self.source_table
+        if self.table_configuration:
+            body["table_configuration"] = self.table_configuration
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TableSpec:
        """Deserializes the TableSpec from a dictionary."""
-        return cls(destination_catalog=d.get('destination_catalog', None), destination_schema=d.get('destination_schema', None), destination_table=d.get('destination_table', None), source_catalog=d.get('source_catalog', None), source_schema=d.get('source_schema', None), source_table=d.get('source_table', None), table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
-
-
+        return cls(
+            destination_catalog=d.get("destination_catalog", None),
+            destination_schema=d.get("destination_schema", None),
+            destination_table=d.get("destination_table", None),
+            source_catalog=d.get("source_catalog", None),
+            source_schema=d.get("source_schema", None),
+            source_table=d.get("source_table", None),
+            table_configuration=_from_dict(d, "table_configuration", TableSpecificConfig),
+        )
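A minimal sketch of a single-table ingestion entry, renaming the table at the destination; if `destination_table` were omitted, the source name would be reused, per the docstring. Names are illustrative.

# Sketch only; names are illustrative.
from databricks.sdk.service.pipelines import TableSpec

table = TableSpec(
    source_catalog="pg",
    source_schema="public",
    source_table="orders",
    destination_catalog="main",
    destination_schema="bronze",
    destination_table="pg_orders",  # optional; defaults to the source table name
)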


@dataclass
@@ -2586,280 +3250,364 @@ class TableSpecificConfig:
    fully controls what columns to be ingested. When specified, all other columns including future
    ones will be automatically included for ingestion. This field is mutually exclusive with
    `include_columns`."""
-
+
    include_columns: Optional[List[str]] = None
    """A list of column names to be included for the ingestion. When not specified, all columns
    except ones in exclude_columns will be included. Future columns will be automatically included.
    When specified, all other future columns will be automatically excluded from ingestion. This
    field is mutually exclusive with `exclude_columns`."""
-
+
    primary_keys: Optional[List[str]] = None
    """The primary key of the table used to apply changes."""
-
+
    salesforce_include_formula_fields: Optional[bool] = None
    """If true, formula fields defined in the table are included in the ingestion. This setting is
    only valid for the Salesforce connector"""
-
+
    scd_type: Optional[TableSpecificConfigScdType] = None
    """The SCD type to use to ingest the table."""
-
+
    sequence_by: Optional[List[str]] = None
    """The column names specifying the logical order of events in the source data. Delta Live Tables
    uses this sequencing to handle change events that arrive out of order."""
-
+
    def as_dict(self) -> dict:
        """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.exclude_columns: body['exclude_columns'] = [v for v in self.exclude_columns]
-        if self.include_columns: body['include_columns'] = [v for v in self.include_columns]
-        if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys]
-        if self.salesforce_include_formula_fields is not None: body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
-        if self.scd_type is not None: body['scd_type'] = self.scd_type.value
-        if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
+        if self.exclude_columns:
+            body["exclude_columns"] = [v for v in self.exclude_columns]
+        if self.include_columns:
+            body["include_columns"] = [v for v in self.include_columns]
+        if self.primary_keys:
+            body["primary_keys"] = [v for v in self.primary_keys]
+        if self.salesforce_include_formula_fields is not None:
+            body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
+        if self.scd_type is not None:
+            body["scd_type"] = self.scd_type.value
+        if self.sequence_by:
+            body["sequence_by"] = [v for v in self.sequence_by]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.exclude_columns: body['exclude_columns'] = self.exclude_columns
-        if self.include_columns: body['include_columns'] = self.include_columns
-        if self.primary_keys: body['primary_keys'] = self.primary_keys
-        if self.salesforce_include_formula_fields is not None: body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
-        if self.scd_type is not None: body['scd_type'] = self.scd_type
-        if self.sequence_by: body['sequence_by'] = self.sequence_by
+        if self.exclude_columns:
+            body["exclude_columns"] = self.exclude_columns
+        if self.include_columns:
+            body["include_columns"] = self.include_columns
+        if self.primary_keys:
+            body["primary_keys"] = self.primary_keys
+        if self.salesforce_include_formula_fields is not None:
+            body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields
+        if self.scd_type is not None:
+            body["scd_type"] = self.scd_type
+        if self.sequence_by:
+            body["sequence_by"] = self.sequence_by
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig:
        """Deserializes the TableSpecificConfig from a dictionary."""
-        return cls(exclude_columns=d.get('exclude_columns', None), include_columns=d.get('include_columns', None), primary_keys=d.get('primary_keys', None), salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None), scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType), sequence_by=d.get('sequence_by', None))
-
-
+        return cls(
+            exclude_columns=d.get("exclude_columns", None),
+            include_columns=d.get("include_columns", None),
+            primary_keys=d.get("primary_keys", None),
+            salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None),
+            scd_type=_enum(d, "scd_type", TableSpecificConfigScdType),
+            sequence_by=d.get("sequence_by", None),
+        )


class TableSpecificConfigScdType(Enum):
    """The SCD type to use to ingest the table."""
-
-    SCD_TYPE_1 = 'SCD_TYPE_1'
-    SCD_TYPE_2 = 'SCD_TYPE_2'
+
+    SCD_TYPE_1 = "SCD_TYPE_1"
+    SCD_TYPE_2 = "SCD_TYPE_2"
+
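A minimal sketch of SCD type 2 history tracking as described by the fields above: changes are keyed on the primary key and ordered by a change-sequence column so out-of-order events are applied correctly. Column names are illustrative.

# Sketch only; column names are illustrative.
from databricks.sdk.service.pipelines import TableSpecificConfig, TableSpecificConfigScdType

cfg = TableSpecificConfig(
    primary_keys=["id"],
    sequence_by=["modified_at"],
    scd_type=TableSpecificConfigScdType.SCD_TYPE_2,
)
print(cfg.as_dict())  # scd_type serializes via .value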


@dataclass
class UpdateInfo:
    cause: Optional[UpdateInfoCause] = None
    """What triggered this update."""
-
+
    cluster_id: Optional[str] = None
    """The ID of the cluster that the update is running on."""
-
+
    config: Optional[PipelineSpec] = None
    """The pipeline configuration with system defaults applied where unspecified by the user. Not
    returned by ListUpdates."""
-
+
    creation_time: Optional[int] = None
    """The time when this update was created."""
-
+
    full_refresh: Optional[bool] = None
    """If true, this update will reset all tables before running."""
-
+
    full_refresh_selection: Optional[List[str]] = None
    """A list of tables to update with fullRefresh. If both refresh_selection and
    full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
    that the states of the table will be reset before the refresh."""
-
+
    pipeline_id: Optional[str] = None
    """The ID of the pipeline."""
-
+
    refresh_selection: Optional[List[str]] = None
    """A list of tables to update without fullRefresh. If both refresh_selection and
    full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means
    that the states of the table will be reset before the refresh."""
-
+
    state: Optional[UpdateInfoState] = None
    """The update state."""
-
+
    update_id: Optional[str] = None
    """The ID of this update."""
-
+
    validate_only: Optional[bool] = None
    """If true, this update only validates the correctness of pipeline source code but does not
    materialize or publish any datasets."""
-
+
    def as_dict(self) -> dict:
        """Serializes the UpdateInfo into a dictionary suitable for use as a JSON request body."""
        body = {}
-        if self.cause is not None: body['cause'] = self.cause.value
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.config: body['config'] = self.config.as_dict()
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.full_refresh_selection: body['full_refresh_selection'] = [v for v in self.full_refresh_selection]
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = [v for v in self.refresh_selection]
-        if self.state is not None: body['state'] = self.state.value
-        if self.update_id is not None: body['update_id'] = self.update_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cause is not None:
+            body["cause"] = self.cause.value
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.config:
+            body["config"] = self.config.as_dict()
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.full_refresh_selection:
+            body["full_refresh_selection"] = [v for v in self.full_refresh_selection]
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.refresh_selection:
+            body["refresh_selection"] = [v for v in self.refresh_selection]
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.update_id is not None:
+            body["update_id"] = self.update_id
+        if self.validate_only is not None:
+            body["validate_only"] = self.validate_only
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the UpdateInfo into a shallow dictionary of its immediate attributes."""
        body = {}
-        if self.cause is not None: body['cause'] = self.cause
-        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
-        if self.config: body['config'] = self.config
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
-        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
-        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
-        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
-        if self.state is not None: body['state'] = self.state
-        if self.update_id is not None: body['update_id'] = self.update_id
-        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        if self.cause is not None:
+            body["cause"] = self.cause
+        if self.cluster_id is not None:
+            body["cluster_id"] = self.cluster_id
+        if self.config:
+            body["config"] = self.config
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.full_refresh is not None:
+            body["full_refresh"] = self.full_refresh
+        if self.full_refresh_selection:
body["full_refresh_selection"] = self.full_refresh_selection + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.refresh_selection: + body["refresh_selection"] = self.refresh_selection + if self.state is not None: + body["state"] = self.state + if self.update_id is not None: + body["update_id"] = self.update_id + if self.validate_only is not None: + body["validate_only"] = self.validate_only return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateInfo: """Deserializes the UpdateInfo from a dictionary.""" - return cls(cause=_enum(d, 'cause', UpdateInfoCause), cluster_id=d.get('cluster_id', None), config=_from_dict(d, 'config', PipelineSpec), creation_time=d.get('creation_time', None), full_refresh=d.get('full_refresh', None), full_refresh_selection=d.get('full_refresh_selection', None), pipeline_id=d.get('pipeline_id', None), refresh_selection=d.get('refresh_selection', None), state=_enum(d, 'state', UpdateInfoState), update_id=d.get('update_id', None), validate_only=d.get('validate_only', None)) - - + return cls( + cause=_enum(d, "cause", UpdateInfoCause), + cluster_id=d.get("cluster_id", None), + config=_from_dict(d, "config", PipelineSpec), + creation_time=d.get("creation_time", None), + full_refresh=d.get("full_refresh", None), + full_refresh_selection=d.get("full_refresh_selection", None), + pipeline_id=d.get("pipeline_id", None), + refresh_selection=d.get("refresh_selection", None), + state=_enum(d, "state", UpdateInfoState), + update_id=d.get("update_id", None), + validate_only=d.get("validate_only", None), + ) class UpdateInfoCause(Enum): """What triggered this update.""" - - API_CALL = 'API_CALL' - INFRASTRUCTURE_MAINTENANCE = 'INFRASTRUCTURE_MAINTENANCE' - JOB_TASK = 'JOB_TASK' - RETRY_ON_FAILURE = 'RETRY_ON_FAILURE' - SCHEMA_CHANGE = 'SCHEMA_CHANGE' - SERVICE_UPGRADE = 'SERVICE_UPGRADE' - USER_ACTION = 'USER_ACTION' + + API_CALL = "API_CALL" + INFRASTRUCTURE_MAINTENANCE = "INFRASTRUCTURE_MAINTENANCE" + JOB_TASK = "JOB_TASK" + RETRY_ON_FAILURE = "RETRY_ON_FAILURE" + SCHEMA_CHANGE = "SCHEMA_CHANGE" + SERVICE_UPGRADE = "SERVICE_UPGRADE" + USER_ACTION = "USER_ACTION" + class UpdateInfoState(Enum): """The update state.""" - - CANCELED = 'CANCELED' - COMPLETED = 'COMPLETED' - CREATED = 'CREATED' - FAILED = 'FAILED' - INITIALIZING = 'INITIALIZING' - QUEUED = 'QUEUED' - RESETTING = 'RESETTING' - RUNNING = 'RUNNING' - SETTING_UP_TABLES = 'SETTING_UP_TABLES' - STOPPING = 'STOPPING' - WAITING_FOR_RESOURCES = 'WAITING_FOR_RESOURCES' + + CANCELED = "CANCELED" + COMPLETED = "COMPLETED" + CREATED = "CREATED" + FAILED = "FAILED" + INITIALIZING = "INITIALIZING" + QUEUED = "QUEUED" + RESETTING = "RESETTING" + RUNNING = "RUNNING" + SETTING_UP_TABLES = "SETTING_UP_TABLES" + STOPPING = "STOPPING" + WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES" + @dataclass class UpdateStateInfo: creation_time: Optional[str] = None - + state: Optional[UpdateStateInfoState] = None """The update state.""" - + update_id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateStateInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.state is not None: body['state'] = self.state.value - if self.update_id is not None: body['update_id'] = self.update_id + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.state is not None: + body["state"] = self.state.value + if self.update_id is not None: + 
body["update_id"] = self.update_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateStateInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.state is not None: body['state'] = self.state - if self.update_id is not None: body['update_id'] = self.update_id + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.state is not None: + body["state"] = self.state + if self.update_id is not None: + body["update_id"] = self.update_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateStateInfo: """Deserializes the UpdateStateInfo from a dictionary.""" - return cls(creation_time=d.get('creation_time', None), state=_enum(d, 'state', UpdateStateInfoState), update_id=d.get('update_id', None)) - - + return cls( + creation_time=d.get("creation_time", None), + state=_enum(d, "state", UpdateStateInfoState), + update_id=d.get("update_id", None), + ) class UpdateStateInfoState(Enum): """The update state.""" - - CANCELED = 'CANCELED' - COMPLETED = 'COMPLETED' - CREATED = 'CREATED' - FAILED = 'FAILED' - INITIALIZING = 'INITIALIZING' - QUEUED = 'QUEUED' - RESETTING = 'RESETTING' - RUNNING = 'RUNNING' - SETTING_UP_TABLES = 'SETTING_UP_TABLES' - STOPPING = 'STOPPING' - WAITING_FOR_RESOURCES = 'WAITING_FOR_RESOURCES' + CANCELED = "CANCELED" + COMPLETED = "COMPLETED" + CREATED = "CREATED" + FAILED = "FAILED" + INITIALIZING = "INITIALIZING" + QUEUED = "QUEUED" + RESETTING = "RESETTING" + RUNNING = "RUNNING" + SETTING_UP_TABLES = "SETTING_UP_TABLES" + STOPPING = "STOPPING" + WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES" class PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines. - + Delta Live Tables is a framework for building reliable, maintainable, and testable data processing pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task orchestration, cluster management, monitoring, data quality, and error handling. - + Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables manages how your data is transformed based on a target schema you define for each processing step. You can also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_pipeline_idle(self, pipeline_id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (PipelineState.IDLE, ) - failure_states = (PipelineState.FAILED, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get(pipeline_id=pipeline_id) - status = poll.state - status_message = poll.cause - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach IDLE, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"pipeline_id={pipeline_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - def create(self - - , * - , allow_duplicate_names: Optional[bool] = None, budget_policy_id: Optional[str] = None, catalog: Optional[str] = None, channel: Optional[str] = None, clusters: Optional[List[PipelineCluster]] = None, configuration: Optional[Dict[str,str]] = None, continuous: Optional[bool] = None, deployment: Optional[PipelineDeployment] = None, development: Optional[bool] = None, dry_run: Optional[bool] = None, edition: Optional[str] = None, event_log: Optional[EventLogSpec] = None, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, restart_window: Optional[RestartWindow] = None, root_path: Optional[str] = None, run_as: Optional[RunAs] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, tags: Optional[Dict[str,str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None) -> CreatePipelineResponse: + def wait_get_pipeline_idle( + self, + pipeline_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[GetPipelineResponse], None]] = None, + ) -> GetPipelineResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (PipelineState.IDLE,) + failure_states = (PipelineState.FAILED,) + status_message = "polling..." 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(pipeline_id=pipeline_id) + status = poll.state + status_message = poll.cause + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach IDLE, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"pipeline_id={pipeline_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create( + self, + *, + allow_duplicate_names: Optional[bool] = None, + budget_policy_id: Optional[str] = None, + catalog: Optional[str] = None, + channel: Optional[str] = None, + clusters: Optional[List[PipelineCluster]] = None, + configuration: Optional[Dict[str, str]] = None, + continuous: Optional[bool] = None, + deployment: Optional[PipelineDeployment] = None, + development: Optional[bool] = None, + dry_run: Optional[bool] = None, + edition: Optional[str] = None, + event_log: Optional[EventLogSpec] = None, + filters: Optional[Filters] = None, + gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, + id: Optional[str] = None, + ingestion_definition: Optional[IngestionPipelineDefinition] = None, + libraries: Optional[List[PipelineLibrary]] = None, + name: Optional[str] = None, + notifications: Optional[List[Notifications]] = None, + photon: Optional[bool] = None, + restart_window: Optional[RestartWindow] = None, + root_path: Optional[str] = None, + run_as: Optional[RunAs] = None, + schema: Optional[str] = None, + serverless: Optional[bool] = None, + storage: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + target: Optional[str] = None, + trigger: Optional[PipelineTrigger] = None, + ) -> CreatePipelineResponse: """Create a pipeline. - + Creates a new data processing pipeline based on the requested configuration. If successful, this method returns the ID of the new pipeline. - + :param allow_duplicate_names: bool (optional) If false, deployment will fail if name conflicts with that of another pipeline. :param budget_policy_id: str (optional) @@ -2911,7 +3659,7 @@ def create(self :param run_as: :class:`RunAs` (optional) Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. :param schema: str (optional) @@ -2929,190 +3677,184 @@ def create(self for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. 
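A minimal usage sketch for `create`, assuming a configured `WorkspaceClient`; the pipeline name, catalog, schema, and notebook path below are illustrative, not part of this patch:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import pipelines

w = WorkspaceClient()
created = w.pipelines.create(
    name="my-pipeline",  # illustrative name
    serverless=True,
    catalog="main",
    schema="default",
    libraries=[pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(path="/Users/me/dlt_pipeline"))],
)
print(created.pipeline_id)  # create returns the new pipeline's ID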
- + :returns: :class:`CreatePipelineResponse` """ body = {} - if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - if catalog is not None: body['catalog'] = catalog - if channel is not None: body['channel'] = channel - if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters] - if configuration is not None: body['configuration'] = configuration - if continuous is not None: body['continuous'] = continuous - if deployment is not None: body['deployment'] = deployment.as_dict() - if development is not None: body['development'] = development - if dry_run is not None: body['dry_run'] = dry_run - if edition is not None: body['edition'] = edition - if event_log is not None: body['event_log'] = event_log.as_dict() - if filters is not None: body['filters'] = filters.as_dict() - if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict() - if id is not None: body['id'] = id - if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict() - if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - if name is not None: body['name'] = name - if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] - if photon is not None: body['photon'] = photon - if restart_window is not None: body['restart_window'] = restart_window.as_dict() - if root_path is not None: body['root_path'] = root_path - if run_as is not None: body['run_as'] = run_as.as_dict() - if schema is not None: body['schema'] = schema - if serverless is not None: body['serverless'] = serverless - if storage is not None: body['storage'] = storage - if tags is not None: body['tags'] = tags - if target is not None: body['target'] = target - if trigger is not None: body['trigger'] = trigger.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/pipelines', body=body - - , headers=headers - ) + if allow_duplicate_names is not None: + body["allow_duplicate_names"] = allow_duplicate_names + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + if catalog is not None: + body["catalog"] = catalog + if channel is not None: + body["channel"] = channel + if clusters is not None: + body["clusters"] = [v.as_dict() for v in clusters] + if configuration is not None: + body["configuration"] = configuration + if continuous is not None: + body["continuous"] = continuous + if deployment is not None: + body["deployment"] = deployment.as_dict() + if development is not None: + body["development"] = development + if dry_run is not None: + body["dry_run"] = dry_run + if edition is not None: + body["edition"] = edition + if event_log is not None: + body["event_log"] = event_log.as_dict() + if filters is not None: + body["filters"] = filters.as_dict() + if gateway_definition is not None: + body["gateway_definition"] = gateway_definition.as_dict() + if id is not None: + body["id"] = id + if ingestion_definition is not None: + body["ingestion_definition"] = ingestion_definition.as_dict() + if libraries is not None: + body["libraries"] = [v.as_dict() for v in libraries] + if name is not None: + body["name"] = name + if notifications is not None: + body["notifications"] = [v.as_dict() for v in notifications] + if photon is not None: + body["photon"] = photon + if restart_window is not None: + body["restart_window"] = 
restart_window.as_dict() + if root_path is not None: + body["root_path"] = root_path + if run_as is not None: + body["run_as"] = run_as.as_dict() + if schema is not None: + body["schema"] = schema + if serverless is not None: + body["serverless"] = serverless + if storage is not None: + body["storage"] = storage + if tags is not None: + body["tags"] = tags + if target is not None: + body["target"] = target + if trigger is not None: + body["trigger"] = trigger.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/pipelines", body=body, headers=headers) return CreatePipelineResponse.from_dict(res) - - - - - def delete(self - , pipeline_id: str - ): + def delete(self, pipeline_id: str): """Delete a pipeline. - + Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and its tables. You cannot undo this action. - + :param pipeline_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/pipelines/{pipeline_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/pipelines/{pipeline_id}", headers=headers) - def get(self - , pipeline_id: str - ) -> GetPipelineResponse: + def get(self, pipeline_id: str) -> GetPipelineResponse: """Get a pipeline. - + :param pipeline_id: str - + :returns: :class:`GetPipelineResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}' - - , headers=headers - ) - return GetPipelineResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}", headers=headers) + return GetPipelineResponse.from_dict(res) - def get_permission_levels(self - , pipeline_id: str - ) -> GetPipelinePermissionLevelsResponse: + def get_permission_levels(self, pipeline_id: str) -> GetPipelinePermissionLevelsResponse: """Get pipeline permission levels. - + Gets the permission levels that a user can have on an object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. - + :returns: :class:`GetPipelinePermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/pipelines/{pipeline_id}/permissionLevels' - - , headers=headers - ) - return GetPipelinePermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/pipelines/{pipeline_id}/permissionLevels", headers=headers) + return GetPipelinePermissionLevelsResponse.from_dict(res) - def get_permissions(self - , pipeline_id: str - ) -> PipelinePermissions: + def get_permissions(self, pipeline_id: str) -> PipelinePermissions: """Get pipeline permissions. - + Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. 
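Continuing the sketch above: `get` returns the pipeline with system defaults applied, and `delete` is permanent (it stops the pipeline and removes its tables):

p = w.pipelines.get(pipeline_id=created.pipeline_id)
print(p.state)  # current PipelineState
w.pipelines.delete(pipeline_id=created.pipeline_id)  # permanent; cannot be undone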
- + :returns: :class:`PipelinePermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/pipelines/{pipeline_id}' - - , headers=headers - ) - return PipelinePermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/pipelines/{pipeline_id}", headers=headers) + return PipelinePermissions.from_dict(res) - def get_update(self - , pipeline_id: str, update_id: str - ) -> GetUpdateResponse: + def get_update(self, pipeline_id: str, update_id: str) -> GetUpdateResponse: """Get a pipeline update. - + Gets an update from an active pipeline. - + :param pipeline_id: str The ID of the pipeline. :param update_id: str The ID of the update. - + :returns: :class:`GetUpdateResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}/updates/{update_id}' - - , headers=headers - ) - return GetUpdateResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates/{update_id}", headers=headers) + return GetUpdateResponse.from_dict(res) - def list_pipeline_events(self - , pipeline_id: str - , * - , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[PipelineEvent]: + def list_pipeline_events( + self, + pipeline_id: str, + *, + filter: Optional[str] = None, + max_results: Optional[int] = None, + order_by: Optional[List[str]] = None, + page_token: Optional[str] = None, + ) -> Iterator[PipelineEvent]: """List pipeline events. - + Retrieves events for a pipeline. - + :param pipeline_id: str The pipeline to return events for. :param filter: str (optional) Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp > 'TIMESTAMP' (or >=,<,<=,=) - + Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp> '2021-07-22T06:37:33.083Z' :param max_results: int (optional) @@ -3126,51 +3868,51 @@ def list_pipeline_events(self Page token returned by previous call. This field is mutually exclusive with all fields in this request except max_results. An error is returned if any fields other than max_results are set when this field is set. 
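The SQL-like filter grammar above maps directly onto the iterator; a sketch that streams only warnings and errors, with `pipeline_id` assumed to be bound:

for event in w.pipelines.list_pipeline_events(
    pipeline_id=pipeline_id,
    filter="level in ('ERROR', 'WARN')",
    max_results=25,
):
    print(event.timestamp, event.message)  # pagination is handled by the generator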
- + :returns: Iterator over :class:`PipelineEvent` """ - - query = {} - if filter is not None: query['filter'] = filter - if max_results is not None: query['max_results'] = max_results - if order_by is not None: query['order_by'] = [v for v in order_by] - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}/events', query=query - - , headers=headers - ) - if 'events' in json: - for v in json['events']: - yield PipelineEvent.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if filter is not None: + query["filter"] = filter + if max_results is not None: + query["max_results"] = max_results + if order_by is not None: + query["order_by"] = [v for v in order_by] + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def list_pipelines(self - - , * - , filter: Optional[str] = None, max_results: Optional[int] = None, order_by: Optional[List[str]] = None, page_token: Optional[str] = None) -> Iterator[PipelineStateInfo]: + while True: + json = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/events", query=query, headers=headers) + if "events" in json: + for v in json["events"]: + yield PipelineEvent.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_pipelines( + self, + *, + filter: Optional[str] = None, + max_results: Optional[int] = None, + order_by: Optional[List[str]] = None, + page_token: Optional[str] = None, + ) -> Iterator[PipelineStateInfo]: """List pipelines. - + Lists pipelines defined in the Delta Live Tables system. - + :param filter: str (optional) Select a subset of results based on the specified criteria. The supported filters are: - + * `notebook=''` to select pipelines that reference the provided notebook path. * `name LIKE '[pattern]'` to select pipelines with a name that matches pattern. Wildcards are supported, for example: `name LIKE '%shopping%'` - + Composite filters are not supported. This field is optional. :param max_results: int (optional) The maximum number of entries to return in a single page. The system may return fewer than @@ -3182,44 +3924,44 @@ def list_pipelines(self default is id asc. This field is optional. 
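A sketch of the `name LIKE` filter described above; the pattern is illustrative:

for status in w.pipelines.list_pipelines(filter="name LIKE '%shopping%'"):
    print(status.pipeline_id, status.name, status.state)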
:param page_token: str (optional) Page token returned by previous call - + :returns: Iterator over :class:`PipelineStateInfo` """ - - query = {} - if filter is not None: query['filter'] = filter - if max_results is not None: query['max_results'] = max_results - if order_by is not None: query['order_by'] = [v for v in order_by] - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/pipelines', query=query - - , headers=headers - ) - if 'statuses' in json: - for v in json['statuses']: - yield PipelineStateInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if filter is not None: + query["filter"] = filter + if max_results is not None: + query["max_results"] = max_results + if order_by is not None: + query["order_by"] = [v for v in order_by] + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def list_updates(self - , pipeline_id: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None, until_update_id: Optional[str] = None) -> ListUpdatesResponse: + while True: + json = self._api.do("GET", "/api/2.0/pipelines", query=query, headers=headers) + if "statuses" in json: + for v in json["statuses"]: + yield PipelineStateInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_updates( + self, + pipeline_id: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + until_update_id: Optional[str] = None, + ) -> ListUpdatesResponse: """List pipeline updates. - + List updates for an active pipeline. - + :param pipeline_id: str The pipeline to return updates for. :param max_results: int (optional) @@ -3228,64 +3970,64 @@ def list_updates(self Page token returned by previous call :param until_update_id: str (optional) If present, returns updates until and including this update_id. - + :returns: :class:`ListUpdatesResponse` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - if until_update_id is not None: query['until_update_id'] = until_update_id - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/pipelines/{pipeline_id}/updates', query=query - - , headers=headers - ) + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + if until_update_id is not None: + query["until_update_id"] = until_update_id + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/pipelines/{pipeline_id}/updates", query=query, headers=headers) return ListUpdatesResponse.from_dict(res) - - - - - def set_permissions(self - , pipeline_id: str - , * - , access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions: + def set_permissions( + self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None + ) -> PipelinePermissions: """Set pipeline permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. 
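Because `set_permissions` replaces all direct grants, callers pass the complete desired list in one call; a sketch with an illustrative principal:

from databricks.sdk.service import pipelines

w.pipelines.set_permissions(
    pipeline_id=pipeline_id,
    access_control_list=[
        pipelines.PipelineAccessControlRequest(
            user_name="jane@example.com",  # illustrative principal
            permission_level=pipelines.PipelinePermissionLevel.CAN_MANAGE,
        )
    ],
)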
- + :param pipeline_id: str The pipeline for which to get or manage permissions. :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional) - + :returns: :class:`PipelinePermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/pipelines/{pipeline_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/permissions/pipelines/{pipeline_id}", body=body, headers=headers) return PipelinePermissions.from_dict(res) - - - - - def start_update(self - , pipeline_id: str - , * - , cause: Optional[StartUpdateCause] = None, full_refresh: Optional[bool] = None, full_refresh_selection: Optional[List[str]] = None, refresh_selection: Optional[List[str]] = None, validate_only: Optional[bool] = None) -> StartUpdateResponse: + def start_update( + self, + pipeline_id: str, + *, + cause: Optional[StartUpdateCause] = None, + full_refresh: Optional[bool] = None, + full_refresh_selection: Optional[List[str]] = None, + refresh_selection: Optional[List[str]] = None, + validate_only: Optional[bool] = None, + ) -> StartUpdateResponse: """Start a pipeline. - + Starts a new update for the pipeline. If there is already an active update for the pipeline, the request will fail and the active update will remain running. - + :param pipeline_id: str :param cause: :class:`StartUpdateCause` (optional) What triggered this update. @@ -3302,70 +4044,91 @@ def start_update(self :param validate_only: bool (optional) If true, this update only validates the correctness of pipeline source code but does not materialize or publish any datasets. - + :returns: :class:`StartUpdateResponse` """ body = {} - if cause is not None: body['cause'] = cause.value - if full_refresh is not None: body['full_refresh'] = full_refresh - if full_refresh_selection is not None: body['full_refresh_selection'] = [v for v in full_refresh_selection] - if refresh_selection is not None: body['refresh_selection'] = [v for v in refresh_selection] - if validate_only is not None: body['validate_only'] = validate_only - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/pipelines/{pipeline_id}/updates', body=body - - , headers=headers - ) + if cause is not None: + body["cause"] = cause.value + if full_refresh is not None: + body["full_refresh"] = full_refresh + if full_refresh_selection is not None: + body["full_refresh_selection"] = [v for v in full_refresh_selection] + if refresh_selection is not None: + body["refresh_selection"] = [v for v in refresh_selection] + if validate_only is not None: + body["validate_only"] = validate_only + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/updates", body=body, headers=headers) return StartUpdateResponse.from_dict(res) - - - - - def stop(self - , pipeline_id: str - ) -> Wait[GetPipelineResponse]: + def stop(self, pipeline_id: str) -> Wait[GetPipelineResponse]: """Stop a pipeline. - + Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this request is a no-op. 
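A sketch of `start_update` with a selective full refresh; the table names are illustrative, and the call fails if an update is already active:

update = w.pipelines.start_update(
    pipeline_id=pipeline_id,
    full_refresh_selection=["sales.orders", "sales.customers"],
)
print(update.update_id)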
- + :param pipeline_id: str - + :returns: Long-running operation waiter for :class:`GetPipelineResponse`. See :method:wait_get_pipeline_idle for more details. """ - - headers = {'Accept': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/pipelines/{pipeline_id}/stop' - - , headers=headers - ) - return Wait(self.wait_get_pipeline_idle - , response = StopPipelineResponse.from_dict(op_response) - , pipeline_id=pipeline_id) - - def stop_and_wait(self - , pipeline_id: str - , - timeout=timedelta(minutes=20)) -> GetPipelineResponse: + headers = { + "Accept": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/pipelines/{pipeline_id}/stop", headers=headers) + return Wait( + self.wait_get_pipeline_idle, response=StopPipelineResponse.from_dict(op_response), pipeline_id=pipeline_id + ) + + def stop_and_wait(self, pipeline_id: str, timeout=timedelta(minutes=20)) -> GetPipelineResponse: return self.stop(pipeline_id=pipeline_id).result(timeout=timeout) - - - - def update(self - , pipeline_id: str - , * - , allow_duplicate_names: Optional[bool] = None, budget_policy_id: Optional[str] = None, catalog: Optional[str] = None, channel: Optional[str] = None, clusters: Optional[List[PipelineCluster]] = None, configuration: Optional[Dict[str,str]] = None, continuous: Optional[bool] = None, deployment: Optional[PipelineDeployment] = None, development: Optional[bool] = None, edition: Optional[str] = None, event_log: Optional[EventLogSpec] = None, expected_last_modified: Optional[int] = None, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, photon: Optional[bool] = None, restart_window: Optional[RestartWindow] = None, root_path: Optional[str] = None, run_as: Optional[RunAs] = None, schema: Optional[str] = None, serverless: Optional[bool] = None, storage: Optional[str] = None, tags: Optional[Dict[str,str]] = None, target: Optional[str] = None, trigger: Optional[PipelineTrigger] = None): + def update( + self, + pipeline_id: str, + *, + allow_duplicate_names: Optional[bool] = None, + budget_policy_id: Optional[str] = None, + catalog: Optional[str] = None, + channel: Optional[str] = None, + clusters: Optional[List[PipelineCluster]] = None, + configuration: Optional[Dict[str, str]] = None, + continuous: Optional[bool] = None, + deployment: Optional[PipelineDeployment] = None, + development: Optional[bool] = None, + edition: Optional[str] = None, + event_log: Optional[EventLogSpec] = None, + expected_last_modified: Optional[int] = None, + filters: Optional[Filters] = None, + gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, + id: Optional[str] = None, + ingestion_definition: Optional[IngestionPipelineDefinition] = None, + libraries: Optional[List[PipelineLibrary]] = None, + name: Optional[str] = None, + notifications: Optional[List[Notifications]] = None, + photon: Optional[bool] = None, + restart_window: Optional[RestartWindow] = None, + root_path: Optional[str] = None, + run_as: Optional[RunAs] = None, + schema: Optional[str] = None, + serverless: Optional[bool] = None, + storage: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + target: Optional[str] = None, + trigger: Optional[PipelineTrigger] = None, + ): """Edit a pipeline. 
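`stop` returns a `Wait` wrapper around `wait_get_pipeline_idle`, so callers can block until the pipeline reaches IDLE; a sketch:

from datetime import timedelta

idle = w.pipelines.stop(pipeline_id=pipeline_id).result(timeout=timedelta(minutes=10))
# or, equivalently:
idle = w.pipelines.stop_and_wait(pipeline_id=pipeline_id, timeout=timedelta(minutes=10))
print(idle.state)  # PipelineState.IDLE on success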
- + Updates a pipeline with the supplied configuration. - + :param pipeline_id: str Unique identifier for this pipeline. :param allow_duplicate_names: bool (optional) @@ -3421,7 +4184,7 @@ def update(self :param run_as: :class:`RunAs` (optional) Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. :param schema: str (optional) @@ -3439,74 +4202,95 @@ def update(self for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. - - - """ - body = {} - if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - if catalog is not None: body['catalog'] = catalog - if channel is not None: body['channel'] = channel - if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters] - if configuration is not None: body['configuration'] = configuration - if continuous is not None: body['continuous'] = continuous - if deployment is not None: body['deployment'] = deployment.as_dict() - if development is not None: body['development'] = development - if edition is not None: body['edition'] = edition - if event_log is not None: body['event_log'] = event_log.as_dict() - if expected_last_modified is not None: body['expected_last_modified'] = expected_last_modified - if filters is not None: body['filters'] = filters.as_dict() - if gateway_definition is not None: body['gateway_definition'] = gateway_definition.as_dict() - if id is not None: body['id'] = id - if ingestion_definition is not None: body['ingestion_definition'] = ingestion_definition.as_dict() - if libraries is not None: body['libraries'] = [v.as_dict() for v in libraries] - if name is not None: body['name'] = name - if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications] - if photon is not None: body['photon'] = photon - if restart_window is not None: body['restart_window'] = restart_window.as_dict() - if root_path is not None: body['root_path'] = root_path - if run_as is not None: body['run_as'] = run_as.as_dict() - if schema is not None: body['schema'] = schema - if serverless is not None: body['serverless'] = serverless - if storage is not None: body['storage'] = storage - if tags is not None: body['tags'] = tags - if target is not None: body['target'] = target - if trigger is not None: body['trigger'] = trigger.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/pipelines/{pipeline_id}', body=body - - , headers=headers - ) - - - - - def update_permissions(self - , pipeline_id: str - , * - , access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions: + """ + body = {} + if allow_duplicate_names is not None: + body["allow_duplicate_names"] = allow_duplicate_names + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + if catalog is not None: + body["catalog"] = catalog + if channel is not None: + body["channel"] = channel + if clusters is not None: + body["clusters"] = [v.as_dict() for v in clusters] + if configuration is not None: + body["configuration"] = configuration + if continuous is 
not None: + body["continuous"] = continuous + if deployment is not None: + body["deployment"] = deployment.as_dict() + if development is not None: + body["development"] = development + if edition is not None: + body["edition"] = edition + if event_log is not None: + body["event_log"] = event_log.as_dict() + if expected_last_modified is not None: + body["expected_last_modified"] = expected_last_modified + if filters is not None: + body["filters"] = filters.as_dict() + if gateway_definition is not None: + body["gateway_definition"] = gateway_definition.as_dict() + if id is not None: + body["id"] = id + if ingestion_definition is not None: + body["ingestion_definition"] = ingestion_definition.as_dict() + if libraries is not None: + body["libraries"] = [v.as_dict() for v in libraries] + if name is not None: + body["name"] = name + if notifications is not None: + body["notifications"] = [v.as_dict() for v in notifications] + if photon is not None: + body["photon"] = photon + if restart_window is not None: + body["restart_window"] = restart_window.as_dict() + if root_path is not None: + body["root_path"] = root_path + if run_as is not None: + body["run_as"] = run_as.as_dict() + if schema is not None: + body["schema"] = schema + if serverless is not None: + body["serverless"] = serverless + if storage is not None: + body["storage"] = storage + if tags is not None: + body["tags"] = tags + if target is not None: + body["target"] = target + if trigger is not None: + body["trigger"] = trigger.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/pipelines/{pipeline_id}", body=body, headers=headers) + + def update_permissions( + self, pipeline_id: str, *, access_control_list: Optional[List[PipelineAccessControlRequest]] = None + ) -> PipelinePermissions: """Update pipeline permissions. - + Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional) - + :returns: :class:`PipelinePermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/pipelines/{pipeline_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/permissions/pipelines/{pipeline_id}", body=body, headers=headers) return PipelinePermissions.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index 91063317c..e56c0c382 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -1,147 +1,158 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Callable, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AwsCredentials: sts_role: Optional[StsRole] = None - + def as_dict(self) -> dict: """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sts_role: body['sts_role'] = self.sts_role.as_dict() + if self.sts_role: + body["sts_role"] = self.sts_role.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.sts_role: body['sts_role'] = self.sts_role + if self.sts_role: + body["sts_role"] = self.sts_role return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsCredentials: """Deserializes the AwsCredentials from a dictionary.""" - return cls(sts_role=_from_dict(d, 'sts_role', StsRole)) - - + return cls(sts_role=_from_dict(d, "sts_role", StsRole)) @dataclass class AwsKeyInfo: key_arn: str """The AWS KMS key's Amazon Resource Name (ARN).""" - + key_region: str """The AWS KMS key region.""" - + key_alias: Optional[str] = None """The AWS KMS key alias.""" - + reuse_key_for_cluster_volumes: Optional[bool] = None """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to `true` or omitted, the key is also used to encrypt cluster EBS volumes. 
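Every dataclass in this file follows the same `as_dict`/`as_shallow_dict`/`from_dict` contract; a round-trip sketch with an illustrative key ARN:

from databricks.sdk.service import provisioning

key = provisioning.AwsKeyInfo(
    key_arn="arn:aws:kms:us-west-2:111122223333:key/example",  # illustrative ARN
    key_region="us-west-2",
)
assert provisioning.AwsKeyInfo.from_dict(key.as_dict()).key_arn == key.key_arn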
If you do not want to use this key for encrypting EBS volumes, set to `false`.""" - + def as_dict(self) -> dict: """Serializes the AwsKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key_alias is not None: body['key_alias'] = self.key_alias - if self.key_arn is not None: body['key_arn'] = self.key_arn - if self.key_region is not None: body['key_region'] = self.key_region - if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: + body["key_alias"] = self.key_alias + if self.key_arn is not None: + body["key_arn"] = self.key_arn + if self.key_region is not None: + body["key_region"] = self.key_region + if self.reuse_key_for_cluster_volumes is not None: + body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body def as_shallow_dict(self) -> dict: """Serializes the AwsKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.key_alias is not None: body['key_alias'] = self.key_alias - if self.key_arn is not None: body['key_arn'] = self.key_arn - if self.key_region is not None: body['key_region'] = self.key_region - if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: + body["key_alias"] = self.key_alias + if self.key_arn is not None: + body["key_arn"] = self.key_arn + if self.key_region is not None: + body["key_region"] = self.key_region + if self.reuse_key_for_cluster_volumes is not None: + body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AwsKeyInfo: """Deserializes the AwsKeyInfo from a dictionary.""" - return cls(key_alias=d.get('key_alias', None), key_arn=d.get('key_arn', None), key_region=d.get('key_region', None), reuse_key_for_cluster_volumes=d.get('reuse_key_for_cluster_volumes', None)) - - + return cls( + key_alias=d.get("key_alias", None), + key_arn=d.get("key_arn", None), + key_region=d.get("key_region", None), + reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None), + ) @dataclass class AzureWorkspaceInfo: resource_group: Optional[str] = None """Azure Resource Group name""" - + subscription_id: Optional[str] = None """Azure Subscription ID""" - + def as_dict(self) -> dict: """Serializes the AzureWorkspaceInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.resource_group is not None: body['resource_group'] = self.resource_group - if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + if self.resource_group is not None: + body["resource_group"] = self.resource_group + if self.subscription_id is not None: + body["subscription_id"] = self.subscription_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureWorkspaceInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.resource_group is not None: body['resource_group'] = self.resource_group - if self.subscription_id is not None: body['subscription_id'] = self.subscription_id + if self.resource_group is not None: + body["resource_group"] = self.resource_group + if self.subscription_id is not None: + body["subscription_id"] = self.subscription_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureWorkspaceInfo: """Deserializes the AzureWorkspaceInfo from a dictionary.""" - return 
cls(resource_group=d.get('resource_group', None), subscription_id=d.get('subscription_id', None)) - - + return cls(resource_group=d.get("resource_group", None), subscription_id=d.get("subscription_id", None)) @dataclass class CloudResourceContainer: """The general workspace configurations that are specific to cloud providers.""" - + gcp: Optional[CustomerFacingGcpCloudResourceContainer] = None """The general workspace configurations that are specific to Google Cloud.""" - + def as_dict(self) -> dict: """Serializes the CloudResourceContainer into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gcp: body['gcp'] = self.gcp.as_dict() + if self.gcp: + body["gcp"] = self.gcp.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CloudResourceContainer into a shallow dictionary of its immediate attributes.""" body = {} - if self.gcp: body['gcp'] = self.gcp + if self.gcp: + body["gcp"] = self.gcp return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CloudResourceContainer: """Deserializes the CloudResourceContainer from a dictionary.""" - return cls(gcp=_from_dict(d, 'gcp', CustomerFacingGcpCloudResourceContainer)) - - + return cls(gcp=_from_dict(d, "gcp", CustomerFacingGcpCloudResourceContainer)) @dataclass @@ -149,325 +160,379 @@ class CreateAwsKeyInfo: key_arn: str """The AWS KMS key's Amazon Resource Name (ARN). Note that the key's AWS region is inferred from the ARN.""" - + key_alias: Optional[str] = None """The AWS KMS key alias.""" - + reuse_key_for_cluster_volumes: Optional[bool] = None """This field applies only if the `use_cases` property includes `STORAGE`. If this is set to `true` or omitted, the key is also used to encrypt cluster EBS volumes. If you do not want to use this key for encrypting EBS volumes, set this to `false`.""" - + def as_dict(self) -> dict: """Serializes the CreateAwsKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key_alias is not None: body['key_alias'] = self.key_alias - if self.key_arn is not None: body['key_arn'] = self.key_arn - if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: + body["key_alias"] = self.key_alias + if self.key_arn is not None: + body["key_arn"] = self.key_arn + if self.reuse_key_for_cluster_volumes is not None: + body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body def as_shallow_dict(self) -> dict: """Serializes the CreateAwsKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.key_alias is not None: body['key_alias'] = self.key_alias - if self.key_arn is not None: body['key_arn'] = self.key_arn - if self.reuse_key_for_cluster_volumes is not None: body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes + if self.key_alias is not None: + body["key_alias"] = self.key_alias + if self.key_arn is not None: + body["key_arn"] = self.key_arn + if self.reuse_key_for_cluster_volumes is not None: + body["reuse_key_for_cluster_volumes"] = self.reuse_key_for_cluster_volumes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAwsKeyInfo: """Deserializes the CreateAwsKeyInfo from a dictionary.""" - return cls(key_alias=d.get('key_alias', None), key_arn=d.get('key_arn', None), reuse_key_for_cluster_volumes=d.get('reuse_key_for_cluster_volumes', None)) - - + return cls( + key_alias=d.get("key_alias", None), + key_arn=d.get("key_arn", None), +
reuse_key_for_cluster_volumes=d.get("reuse_key_for_cluster_volumes", None), + ) @dataclass class CreateCredentialAwsCredentials: sts_role: Optional[CreateCredentialStsRole] = None - + def as_dict(self) -> dict: """Serializes the CreateCredentialAwsCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sts_role: body['sts_role'] = self.sts_role.as_dict() + if self.sts_role: + body["sts_role"] = self.sts_role.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialAwsCredentials into a shallow dictionary of its immediate attributes.""" body = {} - if self.sts_role: body['sts_role'] = self.sts_role + if self.sts_role: + body["sts_role"] = self.sts_role return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialAwsCredentials: """Deserializes the CreateCredentialAwsCredentials from a dictionary.""" - return cls(sts_role=_from_dict(d, 'sts_role', CreateCredentialStsRole)) - - + return cls(sts_role=_from_dict(d, "sts_role", CreateCredentialStsRole)) @dataclass class CreateCredentialRequest: credentials_name: str """The human-readable name of the credential configuration object.""" - + aws_credentials: CreateCredentialAwsCredentials - + def as_dict(self) -> dict: """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_credentials: body['aws_credentials'] = self.aws_credentials.as_dict() - if self.credentials_name is not None: body['credentials_name'] = self.credentials_name + if self.aws_credentials: + body["aws_credentials"] = self.aws_credentials.as_dict() + if self.credentials_name is not None: + body["credentials_name"] = self.credentials_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_credentials: body['aws_credentials'] = self.aws_credentials - if self.credentials_name is not None: body['credentials_name'] = self.credentials_name + if self.aws_credentials: + body["aws_credentials"] = self.aws_credentials + if self.credentials_name is not None: + body["credentials_name"] = self.credentials_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest: """Deserializes the CreateCredentialRequest from a dictionary.""" - return cls(aws_credentials=_from_dict(d, 'aws_credentials', CreateCredentialAwsCredentials), credentials_name=d.get('credentials_name', None)) - - + return cls( + aws_credentials=_from_dict(d, "aws_credentials", CreateCredentialAwsCredentials), + credentials_name=d.get("credentials_name", None), + ) @dataclass class CreateCredentialStsRole: role_arn: Optional[str] = None """The Amazon Resource Name (ARN) of the cross account role.""" - + def as_dict(self) -> dict: """Serializes the CreateCredentialStsRole into a dictionary suitable for use as a JSON request body.""" body = {} - if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.role_arn is not None: + body["role_arn"] = self.role_arn return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialStsRole into a shallow dictionary of its immediate attributes.""" body = {} - if self.role_arn is not None: body['role_arn'] = self.role_arn + if self.role_arn is not None: + body["role_arn"] = self.role_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialStsRole: """Deserializes the CreateCredentialStsRole from a dictionary.""" - return 
cls(role_arn=d.get('role_arn', None)) - - + return cls(role_arn=d.get("role_arn", None)) @dataclass class CreateCustomerManagedKeyRequest: use_cases: List[KeyUseCase] """The cases that the key can be used for.""" - + aws_key_info: Optional[CreateAwsKeyInfo] = None - + gcp_key_info: Optional[CreateGcpKeyInfo] = None - + def as_dict(self) -> dict: """Serializes the CreateCustomerManagedKeyRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_key_info: body['aws_key_info'] = self.aws_key_info.as_dict() - if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info.as_dict() - if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases] + if self.aws_key_info: + body["aws_key_info"] = self.aws_key_info.as_dict() + if self.gcp_key_info: + body["gcp_key_info"] = self.gcp_key_info.as_dict() + if self.use_cases: + body["use_cases"] = [v.value for v in self.use_cases] return body def as_shallow_dict(self) -> dict: """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_key_info: body['aws_key_info'] = self.aws_key_info - if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info - if self.use_cases: body['use_cases'] = self.use_cases + if self.aws_key_info: + body["aws_key_info"] = self.aws_key_info + if self.gcp_key_info: + body["gcp_key_info"] = self.gcp_key_info + if self.use_cases: + body["use_cases"] = self.use_cases return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCustomerManagedKeyRequest: """Deserializes the CreateCustomerManagedKeyRequest from a dictionary.""" - return cls(aws_key_info=_from_dict(d, 'aws_key_info', CreateAwsKeyInfo), gcp_key_info=_from_dict(d, 'gcp_key_info', CreateGcpKeyInfo), use_cases=_repeated_enum(d, 'use_cases', KeyUseCase)) - - + return cls( + aws_key_info=_from_dict(d, "aws_key_info", CreateAwsKeyInfo), + gcp_key_info=_from_dict(d, "gcp_key_info", CreateGcpKeyInfo), + use_cases=_repeated_enum(d, "use_cases", KeyUseCase), + ) @dataclass class CreateGcpKeyInfo: kms_key_id: str """The GCP KMS key's resource name""" - + def as_dict(self) -> dict: """Serializes the CreateGcpKeyInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id + if self.kms_key_id is not None: + body["kms_key_id"] = self.kms_key_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id + if self.kms_key_id is not None: + body["kms_key_id"] = self.kms_key_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateGcpKeyInfo: """Deserializes the CreateGcpKeyInfo from a dictionary.""" - return cls(kms_key_id=d.get('kms_key_id', None)) - - + return cls(kms_key_id=d.get("kms_key_id", None)) @dataclass class CreateNetworkRequest: network_name: str """The human-readable name of the network configuration.""" - + gcp_network_info: Optional[GcpNetworkInfo] = None """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and secondary IP ranges).""" - + security_group_ids: Optional[List[str]] = None """IDs of one to five security groups associated with this network. Security group IDs **cannot** be used in multiple network configurations.""" - + subnet_ids: Optional[List[str]] = None """IDs of at least two subnets associated with this network. 
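A sketch of how `CreateAwsKeyInfo` and `KeyUseCase` come together in the account-level encryption keys API, assuming account credentials are configured; the ARN is illustrative:

from databricks.sdk import AccountClient
from databricks.sdk.service import provisioning

a = AccountClient()
key = a.encryption_keys.create(
    aws_key_info=provisioning.CreateAwsKeyInfo(key_arn="arn:aws:kms:us-west-2:111122223333:key/example"),
    use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES],
)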
Subnet IDs **cannot** be used in multiple network configurations.""" - + vpc_endpoints: Optional[NetworkVpcEndpoints] = None """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - + vpc_id: Optional[str] = None """The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations.""" - + def as_dict(self) -> dict: """Serializes the CreateNetworkRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info.as_dict() - if self.network_name is not None: body['network_name'] = self.network_name - if self.security_group_ids: body['security_group_ids'] = [v for v in self.security_group_ids] - if self.subnet_ids: body['subnet_ids'] = [v for v in self.subnet_ids] - if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints.as_dict() - if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + if self.gcp_network_info: + body["gcp_network_info"] = self.gcp_network_info.as_dict() + if self.network_name is not None: + body["network_name"] = self.network_name + if self.security_group_ids: + body["security_group_ids"] = [v for v in self.security_group_ids] + if self.subnet_ids: + body["subnet_ids"] = [v for v in self.subnet_ids] + if self.vpc_endpoints: + body["vpc_endpoints"] = self.vpc_endpoints.as_dict() + if self.vpc_id is not None: + body["vpc_id"] = self.vpc_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info - if self.network_name is not None: body['network_name'] = self.network_name - if self.security_group_ids: body['security_group_ids'] = self.security_group_ids - if self.subnet_ids: body['subnet_ids'] = self.subnet_ids - if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints - if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + if self.gcp_network_info: + body["gcp_network_info"] = self.gcp_network_info + if self.network_name is not None: + body["network_name"] = self.network_name + if self.security_group_ids: + body["security_group_ids"] = self.security_group_ids + if self.subnet_ids: + body["subnet_ids"] = self.subnet_ids + if self.vpc_endpoints: + body["vpc_endpoints"] = self.vpc_endpoints + if self.vpc_id is not None: + body["vpc_id"] = self.vpc_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkRequest: """Deserializes the CreateNetworkRequest from a dictionary.""" - return cls(gcp_network_info=_from_dict(d, 'gcp_network_info', GcpNetworkInfo), network_name=d.get('network_name', None), security_group_ids=d.get('security_group_ids', None), subnet_ids=d.get('subnet_ids', None), vpc_endpoints=_from_dict(d, 'vpc_endpoints', NetworkVpcEndpoints), vpc_id=d.get('vpc_id', None)) - - + return cls( + gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo), + network_name=d.get("network_name", None), + security_group_ids=d.get("security_group_ids", None), + subnet_ids=d.get("subnet_ids", None), + vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints), + vpc_id=d.get("vpc_id", None), + ) @dataclass class CreateStorageConfigurationRequest: storage_configuration_name: str """The human-readable name of the storage configuration.""" - + root_bucket_info: RootBucketInfo """Root S3 bucket 
information.""" - + def as_dict(self) -> dict: """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info.as_dict() - if self.storage_configuration_name is not None: body['storage_configuration_name'] = self.storage_configuration_name + if self.root_bucket_info: + body["root_bucket_info"] = self.root_bucket_info.as_dict() + if self.storage_configuration_name is not None: + body["storage_configuration_name"] = self.storage_configuration_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info - if self.storage_configuration_name is not None: body['storage_configuration_name'] = self.storage_configuration_name + if self.root_bucket_info: + body["root_bucket_info"] = self.root_bucket_info + if self.storage_configuration_name is not None: + body["storage_configuration_name"] = self.storage_configuration_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateStorageConfigurationRequest: """Deserializes the CreateStorageConfigurationRequest from a dictionary.""" - return cls(root_bucket_info=_from_dict(d, 'root_bucket_info', RootBucketInfo), storage_configuration_name=d.get('storage_configuration_name', None)) - - + return cls( + root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo), + storage_configuration_name=d.get("storage_configuration_name", None), + ) @dataclass class CreateVpcEndpointRequest: vpc_endpoint_name: str """The human-readable name of the storage configuration.""" - + aws_vpc_endpoint_id: Optional[str] = None """The ID of the VPC endpoint object in AWS.""" - + gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None """The Google Cloud specific information for this Private Service Connect endpoint.""" - + region: Optional[str] = None """The AWS region in which this VPC endpoint object exists.""" - + def as_dict(self) -> dict: """Serializes the CreateVpcEndpointRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict() - if self.region is not None: body['region'] = self.region - if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name + if self.aws_vpc_endpoint_id is not None: + body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id + if self.gcp_vpc_endpoint_info: + body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict() + if self.region is not None: + body["region"] = self.region + if self.vpc_endpoint_name is not None: + body["vpc_endpoint_name"] = self.vpc_endpoint_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info - if self.region is not None: body['region'] = self.region - if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name + if self.aws_vpc_endpoint_id is not None: + body["aws_vpc_endpoint_id"] = 


 @dataclass
 class CreateVpcEndpointRequest:
     vpc_endpoint_name: str
     """The human-readable name of the VPC endpoint configuration."""
-
+
     aws_vpc_endpoint_id: Optional[str] = None
     """The ID of the VPC endpoint object in AWS."""
-
+
     gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None
     """The Google Cloud specific information for this Private Service Connect endpoint."""
-
+
     region: Optional[str] = None
     """The AWS region in which this VPC endpoint object exists."""
-
+
     def as_dict(self) -> dict:
         """Serializes the CreateVpcEndpointRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict()
-        if self.region is not None: body['region'] = self.region
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict()
+        if self.region is not None:
+            body["region"] = self.region
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
-        if self.region is not None: body['region'] = self.region
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info
+        if self.region is not None:
+            body["region"] = self.region
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> CreateVpcEndpointRequest:
         """Deserializes the CreateVpcEndpointRequest from a dictionary."""
-        return cls(aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None), gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo), region=d.get('region', None), vpc_endpoint_name=d.get('vpc_endpoint_name', None))
-
-
+        return cls(
+            aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None),
+            gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo),
+            region=d.get("region", None),
+            vpc_endpoint_name=d.get("vpc_endpoint_name", None),
+        )


 @dataclass
 class CreateWorkspaceRequest:
     workspace_name: str
     """The workspace's human-readable name."""
-
+
     aws_region: Optional[str] = None
     """The AWS region of the workspace's data plane."""
-
+
     cloud: Optional[str] = None
     """The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field
     to `gcp`."""
-
+
     cloud_resource_container: Optional[CloudResourceContainer] = None
     """The general workspace configurations that are specific to cloud providers."""
-
+
     credentials_id: Optional[str] = None
     """ID of the workspace's credential configuration object."""
-
-    custom_tags: Optional[Dict[str,str]] = None
+
+    custom_tags: Optional[Dict[str, str]] = None
     """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a
     string of utf-8 characters. The value can be an empty string, with maximum length of 255
     characters. The key can be of maximum length of 127 characters, and cannot be empty."""
-
+
     deployment_name: Optional[str] = None
     """The deployment name defines part of the subdomain for the workspace. The workspace URL for the
     web application and REST APIs is `<deployment-name>.cloud.databricks.com`. For
@@ -492,7 +557,7 @@ class CreateWorkspaceRequest:

     If a new workspace omits this property, the server generates a unique deployment name for you
     with the pattern `dbc-xxxxxxxx-xxxx`."""
-
+
     gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
     """The network settings for the workspace. The configurations are only for Databricks-managed
     VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP
@@ -512,30 +577,30 @@ class CreateWorkspaceRequest:
     Excel spreadsheet. See [calculate subnet sizes for a new workspace].

     [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
-
+
     gke_config: Optional[GkeConfig] = None
     """The configurations for the GKE cluster of a Databricks workspace."""
-
+
     is_no_public_ip_enabled: Optional[bool] = None
     """Whether no public IP is enabled for the workspace."""
-
+
     location: Optional[str] = None
     """The Google Cloud region of the workspace data plane in your Google account. For example,
     `us-east4`."""
-
+
     managed_services_customer_managed_key_id: Optional[str] = None
     """The ID of the workspace's managed services encryption key configuration object. This is used to
     help protect and control access to the workspace's notebooks, secrets, Databricks SQL queries,
     and query history.
The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.""" - + network_id: Optional[str] = None - + pricing_tier: Optional[PricingTier] = None """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. [AWS Pricing]: https://databricks.com/product/aws-pricing""" - + private_access_settings_id: Optional[str] = None """ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace @@ -545,197 +610,266 @@ class CreateWorkspaceRequest: [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" - + storage_configuration_id: Optional[str] = None """The ID of the workspace's storage configuration object.""" - + storage_customer_managed_key_id: Optional[str] = None """The ID of the workspace's storage encryption key configuration object. This is used to encrypt the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The provided key configuration object property `use_cases` must contain `STORAGE`.""" - + def as_dict(self) -> dict: """Serializes the CreateWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_region is not None: body['aws_region'] = self.aws_region - if self.cloud is not None: body['cloud'] = self.cloud - if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container.as_dict() - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.deployment_name is not None: body['deployment_name'] = self.deployment_name - if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict() - if self.gke_config: body['gke_config'] = self.gke_config.as_dict() - if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled - if self.location is not None: body['location'] = self.location - if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id - if self.network_id is not None: body['network_id'] = self.network_id - if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier.value - if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id - if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id - if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + if self.aws_region is not None: + body["aws_region"] = self.aws_region + if self.cloud is not None: + body["cloud"] = self.cloud + if self.cloud_resource_container: + body["cloud_resource_container"] = self.cloud_resource_container.as_dict() + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.deployment_name is not None: + body["deployment_name"] = self.deployment_name + if self.gcp_managed_network_config: + 
body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict() + if self.gke_config: + body["gke_config"] = self.gke_config.as_dict() + if self.is_no_public_ip_enabled is not None: + body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled + if self.location is not None: + body["location"] = self.location + if self.managed_services_customer_managed_key_id is not None: + body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id + if self.network_id is not None: + body["network_id"] = self.network_id + if self.pricing_tier is not None: + body["pricing_tier"] = self.pricing_tier.value + if self.private_access_settings_id is not None: + body["private_access_settings_id"] = self.private_access_settings_id + if self.storage_configuration_id is not None: + body["storage_configuration_id"] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: + body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id + if self.workspace_name is not None: + body["workspace_name"] = self.workspace_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_region is not None: body['aws_region'] = self.aws_region - if self.cloud is not None: body['cloud'] = self.cloud - if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.deployment_name is not None: body['deployment_name'] = self.deployment_name - if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config - if self.gke_config: body['gke_config'] = self.gke_config - if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled - if self.location is not None: body['location'] = self.location - if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id - if self.network_id is not None: body['network_id'] = self.network_id - if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier - if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id - if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id - if self.workspace_name is not None: body['workspace_name'] = self.workspace_name + if self.aws_region is not None: + body["aws_region"] = self.aws_region + if self.cloud is not None: + body["cloud"] = self.cloud + if self.cloud_resource_container: + body["cloud_resource_container"] = self.cloud_resource_container + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.deployment_name is not None: + body["deployment_name"] = self.deployment_name + if self.gcp_managed_network_config: + body["gcp_managed_network_config"] = self.gcp_managed_network_config + if self.gke_config: + body["gke_config"] = self.gke_config + if self.is_no_public_ip_enabled is not None: + body["is_no_public_ip_enabled"] = 
self.is_no_public_ip_enabled + if self.location is not None: + body["location"] = self.location + if self.managed_services_customer_managed_key_id is not None: + body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id + if self.network_id is not None: + body["network_id"] = self.network_id + if self.pricing_tier is not None: + body["pricing_tier"] = self.pricing_tier + if self.private_access_settings_id is not None: + body["private_access_settings_id"] = self.private_access_settings_id + if self.storage_configuration_id is not None: + body["storage_configuration_id"] = self.storage_configuration_id + if self.storage_customer_managed_key_id is not None: + body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id + if self.workspace_name is not None: + body["workspace_name"] = self.workspace_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWorkspaceRequest: """Deserializes the CreateWorkspaceRequest from a dictionary.""" - return cls(aws_region=d.get('aws_region', None), cloud=d.get('cloud', None), cloud_resource_container=_from_dict(d, 'cloud_resource_container', CloudResourceContainer), credentials_id=d.get('credentials_id', None), custom_tags=d.get('custom_tags', None), deployment_name=d.get('deployment_name', None), gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config', GcpManagedNetworkConfig), gke_config=_from_dict(d, 'gke_config', GkeConfig), is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None), location=d.get('location', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), network_id=d.get('network_id', None), pricing_tier=_enum(d, 'pricing_tier', PricingTier), private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_name=d.get('workspace_name', None)) - - + return cls( + aws_region=d.get("aws_region", None), + cloud=d.get("cloud", None), + cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer), + credentials_id=d.get("credentials_id", None), + custom_tags=d.get("custom_tags", None), + deployment_name=d.get("deployment_name", None), + gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig), + gke_config=_from_dict(d, "gke_config", GkeConfig), + is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None), + location=d.get("location", None), + managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), + network_id=d.get("network_id", None), + pricing_tier=_enum(d, "pricing_tier", PricingTier), + private_access_settings_id=d.get("private_access_settings_id", None), + storage_configuration_id=d.get("storage_configuration_id", None), + storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), + workspace_name=d.get("workspace_name", None), + ) @dataclass class Credential: account_id: Optional[str] = None """The Databricks account ID that hosts the credential.""" - + aws_credentials: Optional[AwsCredentials] = None - + creation_time: Optional[int] = None """Time in epoch milliseconds when the credential was created.""" - + credentials_id: Optional[str] = None """Databricks credential configuration ID.""" - + credentials_name: Optional[str] = None """The human-readable name of the credential configuration object.""" - + 
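For the CreateWorkspaceRequest serialization just shown: because every branch of `as_dict()` is guarded by `is not None` (or truthiness), a sparsely populated request serializes to a minimal body. A sketch with placeholder IDs:

from databricks.sdk.service.provisioning import CreateWorkspaceRequest

req = CreateWorkspaceRequest(
    workspace_name="my-workspace",
    aws_region="us-west-2",
    credentials_id="<credentials-config-id>",        # placeholder
    storage_configuration_id="<storage-config-id>",  # placeholder
)

# Fields left at None are skipped by the guards above, so the body stays minimal.
assert set(req.as_dict()) == {"aws_region", "credentials_id", "storage_configuration_id", "workspace_name"}

# Round trip back into the dataclass.
assert CreateWorkspaceRequest.from_dict(req.as_dict()).workspace_name == "my-workspace"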
def as_dict(self) -> dict: """Serializes the Credential into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.aws_credentials: body['aws_credentials'] = self.aws_credentials.as_dict() - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.credentials_name is not None: body['credentials_name'] = self.credentials_name + if self.account_id is not None: + body["account_id"] = self.account_id + if self.aws_credentials: + body["aws_credentials"] = self.aws_credentials.as_dict() + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.credentials_name is not None: + body["credentials_name"] = self.credentials_name return body def as_shallow_dict(self) -> dict: """Serializes the Credential into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.aws_credentials: body['aws_credentials'] = self.aws_credentials - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.credentials_id is not None: body['credentials_id'] = self.credentials_id - if self.credentials_name is not None: body['credentials_name'] = self.credentials_name + if self.account_id is not None: + body["account_id"] = self.account_id + if self.aws_credentials: + body["aws_credentials"] = self.aws_credentials + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.credentials_id is not None: + body["credentials_id"] = self.credentials_id + if self.credentials_name is not None: + body["credentials_name"] = self.credentials_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Credential: """Deserializes the Credential from a dictionary.""" - return cls(account_id=d.get('account_id', None), aws_credentials=_from_dict(d, 'aws_credentials', AwsCredentials), creation_time=d.get('creation_time', None), credentials_id=d.get('credentials_id', None), credentials_name=d.get('credentials_name', None)) - - + return cls( + account_id=d.get("account_id", None), + aws_credentials=_from_dict(d, "aws_credentials", AwsCredentials), + creation_time=d.get("creation_time", None), + credentials_id=d.get("credentials_id", None), + credentials_name=d.get("credentials_name", None), + ) @dataclass class CustomerFacingGcpCloudResourceContainer: """The general workspace configurations that are specific to Google Cloud.""" - + project_id: Optional[str] = None """The Google Cloud project ID, which the workspace uses to instantiate cloud resources for your workspace.""" - + def as_dict(self) -> dict: """Serializes the CustomerFacingGcpCloudResourceContainer into a dictionary suitable for use as a JSON request body.""" body = {} - if self.project_id is not None: body['project_id'] = self.project_id + if self.project_id is not None: + body["project_id"] = self.project_id return body def as_shallow_dict(self) -> dict: """Serializes the CustomerFacingGcpCloudResourceContainer into a shallow dictionary of its immediate attributes.""" body = {} - if self.project_id is not None: body['project_id'] = self.project_id + if self.project_id is not None: + body["project_id"] = self.project_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
CustomerFacingGcpCloudResourceContainer: """Deserializes the CustomerFacingGcpCloudResourceContainer from a dictionary.""" - return cls(project_id=d.get('project_id', None)) - - + return cls(project_id=d.get("project_id", None)) @dataclass class CustomerManagedKey: account_id: Optional[str] = None """The Databricks account ID that holds the customer-managed key.""" - + aws_key_info: Optional[AwsKeyInfo] = None - + creation_time: Optional[int] = None """Time in epoch milliseconds when the customer key was created.""" - + customer_managed_key_id: Optional[str] = None """ID of the encryption key configuration object.""" - + gcp_key_info: Optional[GcpKeyInfo] = None - + use_cases: Optional[List[KeyUseCase]] = None """The cases that the key can be used for.""" - + def as_dict(self) -> dict: """Serializes the CustomerManagedKey into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.aws_key_info: body['aws_key_info'] = self.aws_key_info.as_dict() - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.customer_managed_key_id is not None: body['customer_managed_key_id'] = self.customer_managed_key_id - if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info.as_dict() - if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases] + if self.account_id is not None: + body["account_id"] = self.account_id + if self.aws_key_info: + body["aws_key_info"] = self.aws_key_info.as_dict() + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.customer_managed_key_id is not None: + body["customer_managed_key_id"] = self.customer_managed_key_id + if self.gcp_key_info: + body["gcp_key_info"] = self.gcp_key_info.as_dict() + if self.use_cases: + body["use_cases"] = [v.value for v in self.use_cases] return body def as_shallow_dict(self) -> dict: """Serializes the CustomerManagedKey into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.aws_key_info: body['aws_key_info'] = self.aws_key_info - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.customer_managed_key_id is not None: body['customer_managed_key_id'] = self.customer_managed_key_id - if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info - if self.use_cases: body['use_cases'] = self.use_cases + if self.account_id is not None: + body["account_id"] = self.account_id + if self.aws_key_info: + body["aws_key_info"] = self.aws_key_info + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.customer_managed_key_id is not None: + body["customer_managed_key_id"] = self.customer_managed_key_id + if self.gcp_key_info: + body["gcp_key_info"] = self.gcp_key_info + if self.use_cases: + body["use_cases"] = self.use_cases return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomerManagedKey: """Deserializes the CustomerManagedKey from a dictionary.""" - return cls(account_id=d.get('account_id', None), aws_key_info=_from_dict(d, 'aws_key_info', AwsKeyInfo), creation_time=d.get('creation_time', None), customer_managed_key_id=d.get('customer_managed_key_id', None), gcp_key_info=_from_dict(d, 'gcp_key_info', GcpKeyInfo), use_cases=_repeated_enum(d, 'use_cases', KeyUseCase)) - - - - - - - - - - - - - - + return cls( + account_id=d.get("account_id", None), + aws_key_info=_from_dict(d, "aws_key_info", 
AwsKeyInfo), + creation_time=d.get("creation_time", None), + customer_managed_key_id=d.get("customer_managed_key_id", None), + gcp_key_info=_from_dict(d, "gcp_key_info", GcpKeyInfo), + use_cases=_repeated_enum(d, "use_cases", KeyUseCase), + ) @dataclass @@ -754,96 +888,95 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - - - - - - - class EndpointUseCase(Enum): """This enumeration represents the type of Databricks VPC [endpoint service] that was used when creating this VPC endpoint. - + [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html""" - - DATAPLANE_RELAY_ACCESS = 'DATAPLANE_RELAY_ACCESS' - WORKSPACE_ACCESS = 'WORKSPACE_ACCESS' + + DATAPLANE_RELAY_ACCESS = "DATAPLANE_RELAY_ACCESS" + WORKSPACE_ACCESS = "WORKSPACE_ACCESS" + class ErrorType(Enum): """The AWS resource associated with this error: credentials, VPC, subnet, security group, or network ACL.""" - - CREDENTIALS = 'credentials' - NETWORK_ACL = 'networkAcl' - SECURITY_GROUP = 'securityGroup' - SUBNET = 'subnet' - VPC = 'vpc' + + CREDENTIALS = "credentials" + NETWORK_ACL = "networkAcl" + SECURITY_GROUP = "securityGroup" + SUBNET = "subnet" + VPC = "vpc" + @dataclass class ExternalCustomerInfo: authoritative_user_email: Optional[str] = None """Email of the authoritative user.""" - + authoritative_user_full_name: Optional[str] = None """The authoritative user full name.""" - + customer_name: Optional[str] = None """The legal entity name for the external workspace""" - + def as_dict(self) -> dict: """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authoritative_user_email is not None: body['authoritative_user_email'] = self.authoritative_user_email - if self.authoritative_user_full_name is not None: body['authoritative_user_full_name'] = self.authoritative_user_full_name - if self.customer_name is not None: body['customer_name'] = self.customer_name + if self.authoritative_user_email is not None: + body["authoritative_user_email"] = self.authoritative_user_email + if self.authoritative_user_full_name is not None: + body["authoritative_user_full_name"] = self.authoritative_user_full_name + if self.customer_name is not None: + body["customer_name"] = self.customer_name return body def as_shallow_dict(self) -> dict: """Serializes the ExternalCustomerInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.authoritative_user_email is not None: body['authoritative_user_email'] = self.authoritative_user_email - if self.authoritative_user_full_name is not None: body['authoritative_user_full_name'] = self.authoritative_user_full_name - if self.customer_name is not None: body['customer_name'] = self.customer_name + if self.authoritative_user_email is not None: + body["authoritative_user_email"] = self.authoritative_user_email + if self.authoritative_user_full_name is not None: + body["authoritative_user_full_name"] = self.authoritative_user_full_name + if self.customer_name is not None: + body["customer_name"] = self.customer_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalCustomerInfo: """Deserializes the ExternalCustomerInfo from a dictionary.""" - return cls(authoritative_user_email=d.get('authoritative_user_email', None), authoritative_user_full_name=d.get('authoritative_user_full_name', None), customer_name=d.get('customer_name', None)) - - + return 
cls(
+            authoritative_user_email=d.get("authoritative_user_email", None),
+            authoritative_user_full_name=d.get("authoritative_user_full_name", None),
+            customer_name=d.get("customer_name", None),
+        )


 @dataclass
 class GcpKeyInfo:
     kms_key_id: str
     """The GCP KMS key's resource name"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GcpKeyInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        if self.kms_key_id is not None:
+            body["kms_key_id"] = self.kms_key_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GcpKeyInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        if self.kms_key_id is not None:
+            body["kms_key_id"] = self.kms_key_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GcpKeyInfo:
         """Deserializes the GcpKeyInfo from a dictionary."""
-        return cls(kms_key_id=d.get('kms_key_id', None))
-
-
+        return cls(kms_key_id=d.get("kms_key_id", None))


 @dataclass
@@ -852,185 +985,204 @@ class GcpManagedNetworkConfig:
     It is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP
     range configurations must be mutually exclusive. An attempt to create a workspace fails if
     Databricks detects an IP range overlap.
-
+
     Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and
     all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
     `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
-
+
     The sizes of these IP ranges affect the maximum number of nodes for the workspace.
-
+
     **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
     workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
     your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
     determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
     Excel spreadsheet. See [calculate subnet sizes for a new workspace].
-
-    [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
-
+
+    [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
+    """
+
     gke_cluster_pod_ip_range: Optional[str] = None
     """The IP range from which to allocate GKE cluster pods. No bigger than `/9` and no smaller than
     `/21`."""
-
+
     gke_cluster_service_ip_range: Optional[str] = None
     """The IP range from which to allocate GKE cluster services. No bigger than `/16` and no smaller
     than `/27`."""
-
+
     subnet_cidr: Optional[str] = None
     """The IP range from which to allocate GKE cluster nodes.
No bigger than `/9` and no smaller than `/29`.""" - + def as_dict(self) -> dict: """Serializes the GcpManagedNetworkConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.gke_cluster_pod_ip_range is not None: body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range - if self.gke_cluster_service_ip_range is not None: body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range - if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr + if self.gke_cluster_pod_ip_range is not None: + body["gke_cluster_pod_ip_range"] = self.gke_cluster_pod_ip_range + if self.gke_cluster_service_ip_range is not None: + body["gke_cluster_service_ip_range"] = self.gke_cluster_service_ip_range + if self.subnet_cidr is not None: + body["subnet_cidr"] = self.subnet_cidr return body def as_shallow_dict(self) -> dict: """Serializes the GcpManagedNetworkConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.gke_cluster_pod_ip_range is not None: body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range - if self.gke_cluster_service_ip_range is not None: body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range - if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr + if self.gke_cluster_pod_ip_range is not None: + body["gke_cluster_pod_ip_range"] = self.gke_cluster_pod_ip_range + if self.gke_cluster_service_ip_range is not None: + body["gke_cluster_service_ip_range"] = self.gke_cluster_service_ip_range + if self.subnet_cidr is not None: + body["subnet_cidr"] = self.subnet_cidr return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpManagedNetworkConfig: """Deserializes the GcpManagedNetworkConfig from a dictionary.""" - return cls(gke_cluster_pod_ip_range=d.get('gke_cluster_pod_ip_range', None), gke_cluster_service_ip_range=d.get('gke_cluster_service_ip_range', None), subnet_cidr=d.get('subnet_cidr', None)) - - + return cls( + gke_cluster_pod_ip_range=d.get("gke_cluster_pod_ip_range", None), + gke_cluster_service_ip_range=d.get("gke_cluster_service_ip_range", None), + subnet_cidr=d.get("subnet_cidr", None), + ) @dataclass class GcpNetworkInfo: """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and secondary IP ranges).""" - + network_project_id: str """The Google Cloud project ID of the VPC network.""" - + vpc_id: str """The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations.""" - + subnet_id: str """The ID of the subnet associated with this network.""" - + subnet_region: str """The Google Cloud region of the workspace data plane (for example, `us-east4`).""" - + pod_ip_range_name: str """The name of the secondary IP range for pods. A Databricks-managed GKE cluster uses this IP range for its pods. This secondary IP range can be used by only one workspace.""" - + service_ip_range_name: str """The name of the secondary IP range for services. A Databricks-managed GKE cluster uses this IP range for its services. 
This secondary IP range can be used by only one workspace.""" - + def as_dict(self) -> dict: """Serializes the GcpNetworkInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.network_project_id is not None: body['network_project_id'] = self.network_project_id - if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name - if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name - if self.subnet_id is not None: body['subnet_id'] = self.subnet_id - if self.subnet_region is not None: body['subnet_region'] = self.subnet_region - if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + if self.network_project_id is not None: + body["network_project_id"] = self.network_project_id + if self.pod_ip_range_name is not None: + body["pod_ip_range_name"] = self.pod_ip_range_name + if self.service_ip_range_name is not None: + body["service_ip_range_name"] = self.service_ip_range_name + if self.subnet_id is not None: + body["subnet_id"] = self.subnet_id + if self.subnet_region is not None: + body["subnet_region"] = self.subnet_region + if self.vpc_id is not None: + body["vpc_id"] = self.vpc_id return body def as_shallow_dict(self) -> dict: """Serializes the GcpNetworkInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.network_project_id is not None: body['network_project_id'] = self.network_project_id - if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name - if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name - if self.subnet_id is not None: body['subnet_id'] = self.subnet_id - if self.subnet_region is not None: body['subnet_region'] = self.subnet_region - if self.vpc_id is not None: body['vpc_id'] = self.vpc_id + if self.network_project_id is not None: + body["network_project_id"] = self.network_project_id + if self.pod_ip_range_name is not None: + body["pod_ip_range_name"] = self.pod_ip_range_name + if self.service_ip_range_name is not None: + body["service_ip_range_name"] = self.service_ip_range_name + if self.subnet_id is not None: + body["subnet_id"] = self.subnet_id + if self.subnet_region is not None: + body["subnet_region"] = self.subnet_region + if self.vpc_id is not None: + body["vpc_id"] = self.vpc_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpNetworkInfo: """Deserializes the GcpNetworkInfo from a dictionary.""" - return cls(network_project_id=d.get('network_project_id', None), pod_ip_range_name=d.get('pod_ip_range_name', None), service_ip_range_name=d.get('service_ip_range_name', None), subnet_id=d.get('subnet_id', None), subnet_region=d.get('subnet_region', None), vpc_id=d.get('vpc_id', None)) - - + return cls( + network_project_id=d.get("network_project_id", None), + pod_ip_range_name=d.get("pod_ip_range_name", None), + service_ip_range_name=d.get("service_ip_range_name", None), + subnet_id=d.get("subnet_id", None), + subnet_region=d.get("subnet_region", None), + vpc_id=d.get("vpc_id", None), + ) @dataclass class GcpVpcEndpointInfo: """The Google Cloud specific information for this Private Service Connect endpoint.""" - + project_id: str """The Google Cloud project ID of the VPC network where the PSC connection resides.""" - + psc_endpoint_name: str """The name of the PSC endpoint in the Google Cloud project.""" - + endpoint_region: str """Region of the PSC endpoint.""" - + psc_connection_id: Optional[str] = None """The unique ID of this PSC 
connection.""" - + service_attachment_id: Optional[str] = None """The service attachment this PSC connection connects to.""" - + def as_dict(self) -> dict: """Serializes the GcpVpcEndpointInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region - if self.project_id is not None: body['project_id'] = self.project_id - if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id - if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name - if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id + if self.endpoint_region is not None: + body["endpoint_region"] = self.endpoint_region + if self.project_id is not None: + body["project_id"] = self.project_id + if self.psc_connection_id is not None: + body["psc_connection_id"] = self.psc_connection_id + if self.psc_endpoint_name is not None: + body["psc_endpoint_name"] = self.psc_endpoint_name + if self.service_attachment_id is not None: + body["service_attachment_id"] = self.service_attachment_id return body def as_shallow_dict(self) -> dict: """Serializes the GcpVpcEndpointInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region - if self.project_id is not None: body['project_id'] = self.project_id - if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id - if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name - if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id + if self.endpoint_region is not None: + body["endpoint_region"] = self.endpoint_region + if self.project_id is not None: + body["project_id"] = self.project_id + if self.psc_connection_id is not None: + body["psc_connection_id"] = self.psc_connection_id + if self.psc_endpoint_name is not None: + body["psc_endpoint_name"] = self.psc_endpoint_name + if self.service_attachment_id is not None: + body["service_attachment_id"] = self.service_attachment_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GcpVpcEndpointInfo: """Deserializes the GcpVpcEndpointInfo from a dictionary.""" - return cls(endpoint_region=d.get('endpoint_region', None), project_id=d.get('project_id', None), psc_connection_id=d.get('psc_connection_id', None), psc_endpoint_name=d.get('psc_endpoint_name', None), service_attachment_id=d.get('service_attachment_id', None)) - - - - - - - - - - - - - - - - - - - - - - - + return cls( + endpoint_region=d.get("endpoint_region", None), + project_id=d.get("project_id", None), + psc_connection_id=d.get("psc_connection_id", None), + psc_endpoint_name=d.get("psc_endpoint_name", None), + service_attachment_id=d.get("service_attachment_id", None), + ) @dataclass class GkeConfig: """The configurations for the GKE cluster of a Databricks workspace.""" - + connectivity_type: Optional[GkeConfigConnectivityType] = None """Specifies the network connectivity types for the GKE nodes and the GKE master network. @@ -1039,252 +1191,304 @@ class GkeConfig: Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster have public IP addresses.""" - + master_ip_range: Optional[str] = None """The IP range from which to allocate GKE cluster master resources. This field will be ignored if GKE private cluster is not enabled. 
It must be exactly as big as `/28`.""" - + def as_dict(self) -> dict: """Serializes the GkeConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type.value - if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range + if self.connectivity_type is not None: + body["connectivity_type"] = self.connectivity_type.value + if self.master_ip_range is not None: + body["master_ip_range"] = self.master_ip_range return body def as_shallow_dict(self) -> dict: """Serializes the GkeConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type - if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range + if self.connectivity_type is not None: + body["connectivity_type"] = self.connectivity_type + if self.master_ip_range is not None: + body["master_ip_range"] = self.master_ip_range return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GkeConfig: """Deserializes the GkeConfig from a dictionary.""" - return cls(connectivity_type=_enum(d, 'connectivity_type', GkeConfigConnectivityType), master_ip_range=d.get('master_ip_range', None)) - - + return cls( + connectivity_type=_enum(d, "connectivity_type", GkeConfigConnectivityType), + master_ip_range=d.get("master_ip_range", None), + ) class GkeConfigConnectivityType(Enum): """Specifies the network connectivity types for the GKE nodes and the GKE master network. - + Set to `PRIVATE_NODE_PUBLIC_MASTER` for a private GKE cluster for the workspace. The GKE nodes will not have public IPs. - + Set to `PUBLIC_NODE_PUBLIC_MASTER` for a public GKE cluster. The nodes of a public GKE cluster have public IP addresses.""" - - PRIVATE_NODE_PUBLIC_MASTER = 'PRIVATE_NODE_PUBLIC_MASTER' - PUBLIC_NODE_PUBLIC_MASTER = 'PUBLIC_NODE_PUBLIC_MASTER' + + PRIVATE_NODE_PUBLIC_MASTER = "PRIVATE_NODE_PUBLIC_MASTER" + PUBLIC_NODE_PUBLIC_MASTER = "PUBLIC_NODE_PUBLIC_MASTER" + class KeyUseCase(Enum): """Possible values are: * `MANAGED_SERVICES`: Encrypts notebook and secret data in the control plane * `STORAGE`: Encrypts the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes.""" - - MANAGED_SERVICES = 'MANAGED_SERVICES' - STORAGE = 'STORAGE' + + MANAGED_SERVICES = "MANAGED_SERVICES" + STORAGE = "STORAGE" + @dataclass class Network: account_id: Optional[str] = None """The Databricks account ID associated with this network configuration.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when the network was created.""" - + error_messages: Optional[List[NetworkHealth]] = None """Array of error messages about the network configuration.""" - + gcp_network_info: Optional[GcpNetworkInfo] = None """The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and secondary IP ranges).""" - + network_id: Optional[str] = None """The Databricks network configuration ID.""" - + network_name: Optional[str] = None """The human-readable name of the network configuration.""" - + security_group_ids: Optional[List[str]] = None - + subnet_ids: Optional[List[str]] = None - + vpc_endpoints: Optional[NetworkVpcEndpoints] = None """If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. 
[AWS PrivateLink]: https://aws.amazon.com/privatelink/""" - + vpc_id: Optional[str] = None """The ID of the VPC associated with this network configuration. VPC IDs can be used in multiple networks.""" - + vpc_status: Optional[VpcStatus] = None """The status of this network configuration object in terms of its use in a workspace: * `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned.""" - + warning_messages: Optional[List[NetworkWarning]] = None """Array of warning messages about the network configuration.""" - + workspace_id: Optional[int] = None """Workspace ID associated with this network configuration.""" - + def as_dict(self) -> dict: """Serializes the Network into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.error_messages: body['error_messages'] = [v.as_dict() for v in self.error_messages] - if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info.as_dict() - if self.network_id is not None: body['network_id'] = self.network_id - if self.network_name is not None: body['network_name'] = self.network_name - if self.security_group_ids: body['security_group_ids'] = [v for v in self.security_group_ids] - if self.subnet_ids: body['subnet_ids'] = [v for v in self.subnet_ids] - if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints.as_dict() - if self.vpc_id is not None: body['vpc_id'] = self.vpc_id - if self.vpc_status is not None: body['vpc_status'] = self.vpc_status.value - if self.warning_messages: body['warning_messages'] = [v.as_dict() for v in self.warning_messages] - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.error_messages: + body["error_messages"] = [v.as_dict() for v in self.error_messages] + if self.gcp_network_info: + body["gcp_network_info"] = self.gcp_network_info.as_dict() + if self.network_id is not None: + body["network_id"] = self.network_id + if self.network_name is not None: + body["network_name"] = self.network_name + if self.security_group_ids: + body["security_group_ids"] = [v for v in self.security_group_ids] + if self.subnet_ids: + body["subnet_ids"] = [v for v in self.subnet_ids] + if self.vpc_endpoints: + body["vpc_endpoints"] = self.vpc_endpoints.as_dict() + if self.vpc_id is not None: + body["vpc_id"] = self.vpc_id + if self.vpc_status is not None: + body["vpc_status"] = self.vpc_status.value + if self.warning_messages: + body["warning_messages"] = [v.as_dict() for v in self.warning_messages] + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the Network into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.error_messages: body['error_messages'] = self.error_messages - if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info - if self.network_id is not None: body['network_id'] = self.network_id - if self.network_name is not None: body['network_name'] = self.network_name - if self.security_group_ids: body['security_group_ids'] = self.security_group_ids - 
if self.subnet_ids: body['subnet_ids'] = self.subnet_ids - if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints - if self.vpc_id is not None: body['vpc_id'] = self.vpc_id - if self.vpc_status is not None: body['vpc_status'] = self.vpc_status - if self.warning_messages: body['warning_messages'] = self.warning_messages - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.error_messages: + body["error_messages"] = self.error_messages + if self.gcp_network_info: + body["gcp_network_info"] = self.gcp_network_info + if self.network_id is not None: + body["network_id"] = self.network_id + if self.network_name is not None: + body["network_name"] = self.network_name + if self.security_group_ids: + body["security_group_ids"] = self.security_group_ids + if self.subnet_ids: + body["subnet_ids"] = self.subnet_ids + if self.vpc_endpoints: + body["vpc_endpoints"] = self.vpc_endpoints + if self.vpc_id is not None: + body["vpc_id"] = self.vpc_id + if self.vpc_status is not None: + body["vpc_status"] = self.vpc_status + if self.warning_messages: + body["warning_messages"] = self.warning_messages + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Network: """Deserializes the Network from a dictionary.""" - return cls(account_id=d.get('account_id', None), creation_time=d.get('creation_time', None), error_messages=_repeated_dict(d, 'error_messages', NetworkHealth), gcp_network_info=_from_dict(d, 'gcp_network_info', GcpNetworkInfo), network_id=d.get('network_id', None), network_name=d.get('network_name', None), security_group_ids=d.get('security_group_ids', None), subnet_ids=d.get('subnet_ids', None), vpc_endpoints=_from_dict(d, 'vpc_endpoints', NetworkVpcEndpoints), vpc_id=d.get('vpc_id', None), vpc_status=_enum(d, 'vpc_status', VpcStatus), warning_messages=_repeated_dict(d, 'warning_messages', NetworkWarning), workspace_id=d.get('workspace_id', None)) - - + return cls( + account_id=d.get("account_id", None), + creation_time=d.get("creation_time", None), + error_messages=_repeated_dict(d, "error_messages", NetworkHealth), + gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo), + network_id=d.get("network_id", None), + network_name=d.get("network_name", None), + security_group_ids=d.get("security_group_ids", None), + subnet_ids=d.get("subnet_ids", None), + vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints), + vpc_id=d.get("vpc_id", None), + vpc_status=_enum(d, "vpc_status", VpcStatus), + warning_messages=_repeated_dict(d, "warning_messages", NetworkWarning), + workspace_id=d.get("workspace_id", None), + ) @dataclass class NetworkHealth: error_message: Optional[str] = None """Details of the error.""" - + error_type: Optional[ErrorType] = None """The AWS resource associated with this error: credentials, VPC, subnet, security group, or network ACL.""" - + def as_dict(self) -> dict: """Serializes the NetworkHealth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error_message is not None: body['error_message'] = self.error_message - if self.error_type is not None: body['error_type'] = self.error_type.value + if self.error_message is not None: + body["error_message"] = self.error_message + if self.error_type is not None: + body["error_type"] = self.error_type.value 
return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NetworkHealth into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.error_message is not None: body['error_message'] = self.error_message
-        if self.error_type is not None: body['error_type'] = self.error_type
+        if self.error_message is not None:
+            body["error_message"] = self.error_message
+        if self.error_type is not None:
+            body["error_type"] = self.error_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NetworkHealth:
         """Deserializes the NetworkHealth from a dictionary."""
-        return cls(error_message=d.get('error_message', None), error_type=_enum(d, 'error_type', ErrorType))
-
-
+        return cls(error_message=d.get("error_message", None), error_type=_enum(d, "error_type", ErrorType))


 @dataclass
 class NetworkVpcEndpoints:
     """If specified, contains the VPC endpoints used to allow cluster communication from this VPC
     over [AWS PrivateLink].
-
+
     [AWS PrivateLink]: https://aws.amazon.com/privatelink/"""
-
+
     rest_api: List[str]
     """The VPC endpoint ID used by this network to access the Databricks REST API."""
-
+
     dataplane_relay: List[str]
     """The VPC endpoint ID used by this network to access the Databricks secure cluster connectivity
     relay."""
-
+
     def as_dict(self) -> dict:
         """Serializes the NetworkVpcEndpoints into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dataplane_relay: body['dataplane_relay'] = [v for v in self.dataplane_relay]
-        if self.rest_api: body['rest_api'] = [v for v in self.rest_api]
+        if self.dataplane_relay:
+            body["dataplane_relay"] = [v for v in self.dataplane_relay]
+        if self.rest_api:
+            body["rest_api"] = [v for v in self.rest_api]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NetworkVpcEndpoints into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.dataplane_relay: body['dataplane_relay'] = self.dataplane_relay
-        if self.rest_api: body['rest_api'] = self.rest_api
+        if self.dataplane_relay:
+            body["dataplane_relay"] = self.dataplane_relay
+        if self.rest_api:
+            body["rest_api"] = self.rest_api
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NetworkVpcEndpoints:
         """Deserializes the NetworkVpcEndpoints from a dictionary."""
-        return cls(dataplane_relay=d.get('dataplane_relay', None), rest_api=d.get('rest_api', None))
-
-
+        return cls(dataplane_relay=d.get("dataplane_relay", None), rest_api=d.get("rest_api", None))
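Both NetworkVpcEndpoints fields are required lists: the registered VPC endpoint IDs for the REST API and for the secure cluster connectivity relay. A small sketch with placeholder endpoint IDs:

from databricks.sdk.service.provisioning import NetworkVpcEndpoints

endpoints = NetworkVpcEndpoints(
    rest_api=["vpce-0aa11bb22cc33dd44"],         # placeholder VPC endpoint IDs
    dataplane_relay=["vpce-0ee55ff66aa77bb88"],
)

# Non-empty lists pass the truthiness guards, so both keys appear in the body.
assert endpoints.as_dict() == {
    "dataplane_relay": ["vpce-0ee55ff66aa77bb88"],
    "rest_api": ["vpce-0aa11bb22cc33dd44"],
}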


 class PricingTier(Enum):
     """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
-
+
     [AWS Pricing]: https://databricks.com/product/aws-pricing"""
-
-    COMMUNITY_EDITION = 'COMMUNITY_EDITION'
-    DEDICATED = 'DEDICATED'
-    ENTERPRISE = 'ENTERPRISE'
-    PREMIUM = 'PREMIUM'
-    STANDARD = 'STANDARD'
-    UNKNOWN = 'UNKNOWN'
+
+    COMMUNITY_EDITION = "COMMUNITY_EDITION"
+    DEDICATED = "DEDICATED"
+    ENTERPRISE = "ENTERPRISE"
+    PREMIUM = "PREMIUM"
+    STANDARD = "STANDARD"
+    UNKNOWN = "UNKNOWN"
+

 class PrivateAccessLevel(Enum):
     """The private access level controls which VPC endpoints can connect to the UI or API of any
@@ -1292,70 +1496,91 @@ class PrivateAccessLevel(Enum):
     default) allows only VPC endpoints that are registered in your Databricks account connect to
     your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your
     workspace. For details, see `allowed_vpc_endpoint_ids`."""
-
-    ACCOUNT = 'ACCOUNT'
-    ENDPOINT = 'ENDPOINT'
+
+    ACCOUNT = "ACCOUNT"
+    ENDPOINT = "ENDPOINT"
+

 @dataclass
 class PrivateAccessSettings:
     account_id: Optional[str] = None
     """The Databricks account ID that hosts the credential."""
-
+
     allowed_vpc_endpoint_ids: Optional[List[str]] = None
     """An array of Databricks VPC endpoint IDs."""
-
+
     private_access_level: Optional[PrivateAccessLevel] = None
     """The private access level controls which VPC endpoints can connect to the UI or API of any
     workspace that attaches this private access settings object. * `ACCOUNT` level access (the
     default) allows only VPC endpoints that are registered in your Databricks account connect to
     your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your
     workspace. For details, see `allowed_vpc_endpoint_ids`."""
-
+
     private_access_settings_id: Optional[str] = None
     """Databricks private access settings ID."""
-
+
     private_access_settings_name: Optional[str] = None
     """The human-readable name of the private access settings object."""
-
+
     public_access_enabled: Optional[bool] = None
     """Determines if the workspace can be accessed over public internet. For fully private workspaces,
     you can optionally specify `false`, but only if you implement both the front-end and the
     back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is
     enabled."""
-
+
     region: Optional[str] = None
     """The cloud region for workspaces attached to this private access settings object."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PrivateAccessSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = [v for v in self.allowed_vpc_endpoint_ids]
-        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level.value
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.allowed_vpc_endpoint_ids:
+            body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids]
+        if self.private_access_level is not None:
+            body["private_access_level"] = self.private_access_level.value
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PrivateAccessSettings into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
-        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.allowed_vpc_endpoint_ids:
+            body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None:
+            body["private_access_level"] = self.private_access_level
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PrivateAccessSettings:
         """Deserializes the PrivateAccessSettings from a dictionary."""
-        return cls(account_id=d.get('account_id', None), allowed_vpc_endpoint_ids=d.get('allowed_vpc_endpoint_ids', None), private_access_level=_enum(d, 'private_access_level', PrivateAccessLevel), private_access_settings_id=d.get('private_access_settings_id', None), private_access_settings_name=d.get('private_access_settings_name', None), public_access_enabled=d.get('public_access_enabled', None), region=d.get('region', None))
-
-
+        return cls(
+            account_id=d.get("account_id", None),
+            allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None),
+            private_access_level=_enum(d, "private_access_level", PrivateAccessLevel),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            private_access_settings_name=d.get("private_access_settings_name", None),
+            public_access_enabled=d.get("public_access_enabled", None),
+            region=d.get("region", None),
+        )
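The two serializers above differ only in depth: `as_dict` converts enums to their `.value` (JSON-ready strings), while `as_shallow_dict` passes the live enum object through. A quick illustration (the region value is a placeholder):

    from databricks.sdk.service.provisioning import PrivateAccessLevel, PrivateAccessSettings

    pas = PrivateAccessSettings(private_access_level=PrivateAccessLevel.ENDPOINT, region="us-west-2")
    pas.as_dict()["private_access_level"]          # "ENDPOINT" -- wire-ready string
    pas.as_shallow_dict()["private_access_level"]  # PrivateAccessLevel.ENDPOINT -- live enum
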


 @dataclass
@@ -1374,80 +1599,92 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse:
         """Deserializes the ReplaceResponse from a dictionary."""
         return cls()
-
-

 @dataclass
 class RootBucketInfo:
     """Root S3 bucket information."""
-
+
     bucket_name: Optional[str] = None
     """The name of the S3 bucket."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RootBucketInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.bucket_name is not None:
+            body["bucket_name"] = self.bucket_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RootBucketInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.bucket_name is not None:
+            body["bucket_name"] = self.bucket_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RootBucketInfo:
         """Deserializes the RootBucketInfo from a dictionary."""
-        return cls(bucket_name=d.get('bucket_name', None))
-
-
+        return cls(bucket_name=d.get("bucket_name", None))


 @dataclass
 class StorageConfiguration:
     account_id: Optional[str] = None
     """The Databricks account ID that hosts the credential."""
-
+
     creation_time: Optional[int] = None
     """Time in epoch milliseconds when the storage configuration was created."""
-
+
     root_bucket_info: Optional[RootBucketInfo] = None
     """Root S3 bucket information."""
-
+
     storage_configuration_id: Optional[str] = None
     """Databricks storage configuration ID."""
-
+
     storage_configuration_name: Optional[str] = None
     """The human-readable name of the storage configuration."""
-
+
     def as_dict(self) -> dict:
         """Serializes the StorageConfiguration into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info.as_dict()
-        if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id
-        if self.storage_configuration_name is not None: body['storage_configuration_name'] = self.storage_configuration_name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.root_bucket_info:
+            body["root_bucket_info"] = self.root_bucket_info.as_dict()
+        if self.storage_configuration_id is not None:
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.storage_configuration_name is not None:
+            body["storage_configuration_name"] = self.storage_configuration_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the StorageConfiguration into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info
-        if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id
-        if self.storage_configuration_name is not None: body['storage_configuration_name'] = self.storage_configuration_name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.root_bucket_info:
+            body["root_bucket_info"] = self.root_bucket_info
+        if self.storage_configuration_id is not None:
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.storage_configuration_name is not None:
+            body["storage_configuration_name"] = self.storage_configuration_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> StorageConfiguration:
         """Deserializes the StorageConfiguration from a dictionary."""
-        return cls(account_id=d.get('account_id', None), creation_time=d.get('creation_time', None), root_bucket_info=_from_dict(d, 'root_bucket_info', RootBucketInfo), storage_configuration_id=d.get('storage_configuration_id', None), storage_configuration_name=d.get('storage_configuration_name', None))
-
-
+        return cls(
+            account_id=d.get("account_id", None),
+            creation_time=d.get("creation_time", None),
+            root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            storage_configuration_name=d.get("storage_configuration_name", None),
+        )


 @dataclass
@@ -1455,30 +1692,32 @@ class StsRole:
     external_id: Optional[str] = None
     """The external ID that needs to be trusted by the cross-account role. This is always your
     Databricks account ID."""
-
+
     role_arn: Optional[str] = None
     """The Amazon Resource Name (ARN) of the cross account role."""
-
+
     def as_dict(self) -> dict:
         """Serializes the StsRole into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the StsRole into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.external_id is not None: body['external_id'] = self.external_id
-        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.external_id is not None:
+            body["external_id"] = self.external_id
+        if self.role_arn is not None:
+            body["role_arn"] = self.role_arn
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> StsRole:
         """Deserializes the StsRole from a dictionary."""
-        return cls(external_id=d.get('external_id', None), role_arn=d.get('role_arn', None))
-
-
+        return cls(external_id=d.get("external_id", None), role_arn=d.get("role_arn", None))


 @dataclass
@@ -1497,8 +1736,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
         return cls()
-
-

 @dataclass
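As the `StsRole.external_id` docstring notes, the value maps onto the `sts:ExternalId` condition of the cross-account role's trust policy in AWS. A sketch of the implied trust document, written as a Python dict; the Databricks principal ARN below is a placeholder, not something this patch defines:

    # Hypothetical trust policy for the cross-account role. Only the shape is
    # implied by StsRole -- substitute real account IDs before use.
    trust_policy = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Principal": {"AWS": "arn:aws:iam::<databricks-aws-account>:root"},  # placeholder
                "Action": "sts:AssumeRole",
                # external_id from the Credential response goes here:
                "Condition": {"StringEquals": {"sts:ExternalId": "<your-databricks-account-id>"}},
            }
        ],
    }
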
@@ -1506,88 +1743,117 @@ class UpdateWorkspaceRequest:
     aws_region: Optional[str] = None
     """The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is
     available only for updating failed workspaces."""
-
+
     credentials_id: Optional[str] = None
     """ID of the workspace's credential configuration object. This parameter is available for
     updating both failed and running workspaces."""
-
-    custom_tags: Optional[Dict[str,str]] = None
+
+    custom_tags: Optional[Dict[str, str]] = None
     """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a
     string of utf-8 characters. The value can be an empty string, with maximum length of 255
     characters. The key can be of maximum length of 127 characters, and cannot be empty."""
-
+
     managed_services_customer_managed_key_id: Optional[str] = None
     """The ID of the workspace's managed services encryption key configuration object. This parameter
     is available only for updating failed workspaces."""
-
+
     network_connectivity_config_id: Optional[str] = None
-
+
     network_id: Optional[str] = None
     """The ID of the workspace's network configuration object. Used only if you already use a
     customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC
     to a customer-managed VPC by updating the workspace to add a network configuration ID."""
-
+
     private_access_settings_id: Optional[str] = None
     """The ID of the workspace's private access settings configuration object. This parameter is
     available only for updating failed workspaces."""
-
+
     storage_configuration_id: Optional[str] = None
     """The ID of the workspace's storage configuration object. This parameter is available only for
     updating failed workspaces."""
-
+
     storage_customer_managed_key_id: Optional[str] = None
     """The ID of the key configuration object for workspace storage. This parameter is available for
     updating both failed and running workspaces."""
-
+
     workspace_id: Optional[int] = None
     """Workspace ID."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id
-        if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.managed_services_customer_managed_key_id is not None:
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_connectivity_config_id is not None:
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id
-        if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.managed_services_customer_managed_key_id is not None:
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_connectivity_config_id is not None:
+            body["network_connectivity_config_id"] = self.network_connectivity_config_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceRequest:
         """Deserializes the UpdateWorkspaceRequest from a dictionary."""
-        return cls(aws_region=d.get('aws_region', None), credentials_id=d.get('credentials_id', None), custom_tags=d.get('custom_tags', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), network_id=d.get('network_id', None), private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_id=d.get('workspace_id', None))
-
-
+        return cls(
+            aws_region=d.get("aws_region", None),
+            credentials_id=d.get("credentials_id", None),
+            custom_tags=d.get("custom_tags", None),
+            managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None),
+            network_connectivity_config_id=d.get("network_connectivity_config_id", None),
+            network_id=d.get("network_id", None),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
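Note the asymmetry spelled out in the field docs above: `credentials_id` and `storage_customer_managed_key_id` can be changed on running workspaces, while most other fields apply only to failed ones. Because the serializer skips unset fields, a running-workspace update stays minimal (IDs are placeholders):

    from databricks.sdk.service.provisioning import UpdateWorkspaceRequest

    req = UpdateWorkspaceRequest(
        workspace_id=1234567890,
        credentials_id="<credential-configuration-id>",
        storage_customer_managed_key_id="<key-configuration-id>",
    )
    # Only the fields that were set appear in the JSON body.
    assert set(req.as_dict()) == {"workspace_id", "credentials_id", "storage_customer_managed_key_id"}
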


 @dataclass
 class UpsertPrivateAccessSettingsRequest:
     private_access_settings_name: str
     """The human-readable name of the private access settings object."""
-
+
     region: str
     """The cloud region for workspaces associated with this private access settings object."""
-
+
     allowed_vpc_endpoint_ids: Optional[List[str]] = None
     """An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when
     registering the VPC endpoint configuration in your Databricks account. This is not the ID of the
@@ -1601,61 +1867,78 @@ class UpsertPrivateAccessSettingsRequest:
     public internet, see [IP access lists].

     [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html"""
-
+
     private_access_level: Optional[PrivateAccessLevel] = None
     """The private access level controls which VPC endpoints can connect to the UI or API of any
     workspace that attaches this private access settings object. * `ACCOUNT` level access (the
     default) allows only VPC endpoints that are registered in your Databricks account connect to
     your workspace. * `ENDPOINT` level access allows only specified VPC endpoints connect to your
     workspace. For details, see `allowed_vpc_endpoint_ids`."""
-
+
     private_access_settings_id: Optional[str] = None
     """Databricks Account API private access settings ID."""
-
+
     public_access_enabled: Optional[bool] = None
     """Determines if the workspace can be accessed over public internet. For fully private workspaces,
     you can optionally specify `false`, but only if you implement both the front-end and the
     back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is
     enabled."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpsertPrivateAccessSettingsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = [v for v in self.allowed_vpc_endpoint_ids]
-        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level.value
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+        if self.allowed_vpc_endpoint_ids:
+            body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids]
+        if self.private_access_level is not None:
+            body["private_access_level"] = self.private_access_level.value
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
-        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.private_access_settings_name is not None: body['private_access_settings_name'] = self.private_access_settings_name
-        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
-        if self.region is not None: body['region'] = self.region
+        if self.allowed_vpc_endpoint_ids:
+            body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None:
+            body["private_access_level"] = self.private_access_level
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body["private_access_settings_name"] = self.private_access_settings_name
+        if self.public_access_enabled is not None:
+            body["public_access_enabled"] = self.public_access_enabled
+        if self.region is not None:
+            body["region"] = self.region
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpsertPrivateAccessSettingsRequest:
         """Deserializes the UpsertPrivateAccessSettingsRequest from a dictionary."""
-        return cls(allowed_vpc_endpoint_ids=d.get('allowed_vpc_endpoint_ids', None), private_access_level=_enum(d, 'private_access_level', PrivateAccessLevel), private_access_settings_id=d.get('private_access_settings_id', None), private_access_settings_name=d.get('private_access_settings_name', None), public_access_enabled=d.get('public_access_enabled', None), region=d.get('region', None))
-
-
+        return cls(
+            allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None),
+            private_access_level=_enum(d, "private_access_level", PrivateAccessLevel),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            private_access_settings_name=d.get("private_access_settings_name", None),
+            public_access_enabled=d.get("public_access_enabled", None),
+            region=d.get("region", None),
+        )


 @dataclass
 class VpcEndpoint:
     account_id: Optional[str] = None
     """The Databricks account ID that hosts the VPC endpoint configuration."""
-
+
     aws_account_id: Optional[str] = None
     """The AWS Account in which the VPC endpoint object exists."""
-
+
     aws_endpoint_service_id: Optional[str] = None
     """The ID of the Databricks [endpoint service] that this VPC endpoint is connected to. For a list
     of endpoint service IDs for each supported AWS region, see the [Databricks PrivateLink
@@ -1663,125 +1946,156 @@ class VpcEndpoint:

     [Databricks PrivateLink documentation]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
     [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html"""
-
+
     aws_vpc_endpoint_id: Optional[str] = None
     """The ID of the VPC endpoint object in AWS."""
-
+
     gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None
     """The Google Cloud specific information for this Private Service Connect endpoint."""
-
+
     region: Optional[str] = None
     """The AWS region in which this VPC endpoint object exists."""
-
+
     state: Optional[str] = None
     """The current state (such as `available` or `rejected`) of the VPC endpoint. Derived from AWS.
     For the full set of values, see [AWS DescribeVpcEndpoint documentation].

     [AWS DescribeVpcEndpoint documentation]: https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-vpc-endpoints.html"""
-
+
     use_case: Optional[EndpointUseCase] = None
     """This enumeration represents the type of Databricks VPC [endpoint service] that was used when
     creating this VPC endpoint.

     [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/endpoint-service.html"""
-
+
     vpc_endpoint_id: Optional[str] = None
     """Databricks VPC endpoint ID. This is the Databricks-specific name of the VPC endpoint. Do not
     confuse this with the `aws_vpc_endpoint_id`, which is the ID within AWS of the VPC endpoint."""
-
+
     vpc_endpoint_name: Optional[str] = None
     """The human-readable name of the storage configuration."""
-
+
     def as_dict(self) -> dict:
         """Serializes the VpcEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id
-        if self.aws_endpoint_service_id is not None: body['aws_endpoint_service_id'] = self.aws_endpoint_service_id
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info.as_dict()
-        if self.region is not None: body['region'] = self.region
-        if self.state is not None: body['state'] = self.state
-        if self.use_case is not None: body['use_case'] = self.use_case.value
-        if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_account_id is not None:
+            body["aws_account_id"] = self.aws_account_id
+        if self.aws_endpoint_service_id is not None:
+            body["aws_endpoint_service_id"] = self.aws_endpoint_service_id
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info.as_dict()
+        if self.region is not None:
+            body["region"] = self.region
+        if self.state is not None:
+            body["state"] = self.state
+        if self.use_case is not None:
+            body["use_case"] = self.use_case.value
+        if self.vpc_endpoint_id is not None:
+            body["vpc_endpoint_id"] = self.vpc_endpoint_id
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the VpcEndpoint into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id
-        if self.aws_endpoint_service_id is not None: body['aws_endpoint_service_id'] = self.aws_endpoint_service_id
-        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
-        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
-        if self.region is not None: body['region'] = self.region
-        if self.state is not None: body['state'] = self.state
-        if self.use_case is not None: body['use_case'] = self.use_case
-        if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id
-        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_account_id is not None:
+            body["aws_account_id"] = self.aws_account_id
+        if self.aws_endpoint_service_id is not None:
+            body["aws_endpoint_service_id"] = self.aws_endpoint_service_id
+        if self.aws_vpc_endpoint_id is not None:
+            body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info:
+            body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info
+        if self.region is not None:
+            body["region"] = self.region
+        if self.state is not None:
+            body["state"] = self.state
+        if self.use_case is not None:
+            body["use_case"] = self.use_case
+        if self.vpc_endpoint_id is not None:
+            body["vpc_endpoint_id"] = self.vpc_endpoint_id
+        if self.vpc_endpoint_name is not None:
+            body["vpc_endpoint_name"] = self.vpc_endpoint_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> VpcEndpoint:
         """Deserializes the VpcEndpoint from a dictionary."""
-        return cls(account_id=d.get('account_id', None), aws_account_id=d.get('aws_account_id', None), aws_endpoint_service_id=d.get('aws_endpoint_service_id', None), aws_vpc_endpoint_id=d.get('aws_vpc_endpoint_id', None), gcp_vpc_endpoint_info=_from_dict(d, 'gcp_vpc_endpoint_info', GcpVpcEndpointInfo), region=d.get('region', None), state=d.get('state', None), use_case=_enum(d, 'use_case', EndpointUseCase), vpc_endpoint_id=d.get('vpc_endpoint_id', None), vpc_endpoint_name=d.get('vpc_endpoint_name', None))
-
-
+        return cls(
+            account_id=d.get("account_id", None),
+            aws_account_id=d.get("aws_account_id", None),
+            aws_endpoint_service_id=d.get("aws_endpoint_service_id", None),
+            aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None),
+            gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo),
+            region=d.get("region", None),
+            state=d.get("state", None),
+            use_case=_enum(d, "use_case", EndpointUseCase),
+            vpc_endpoint_id=d.get("vpc_endpoint_id", None),
+            vpc_endpoint_name=d.get("vpc_endpoint_name", None),
+        )


 class VpcStatus(Enum):
     """The status of this network configuration object in terms of its use in a workspace: *
     `UNATTACHED`: Unattached. * `VALID`: Valid. * `BROKEN`: Broken. * `WARNED`: Warned."""
-
-    BROKEN = 'BROKEN'
-    UNATTACHED = 'UNATTACHED'
-    VALID = 'VALID'
-    WARNED = 'WARNED'
+
+    BROKEN = "BROKEN"
+    UNATTACHED = "UNATTACHED"
+    VALID = "VALID"
+    WARNED = "WARNED"
+

 class WarningType(Enum):
     """The AWS resource associated with this warning: a subnet or a security group."""
-
-    SECURITY_GROUP = 'securityGroup'
-    SUBNET = 'subnet'
+
+    SECURITY_GROUP = "securityGroup"
+    SUBNET = "subnet"
+

 @dataclass
 class Workspace:
     account_id: Optional[str] = None
     """Databricks account ID."""
-
+
     aws_region: Optional[str] = None
     """The AWS region of the workspace data plane (for example, `us-west-2`)."""
-
+
     azure_workspace_info: Optional[AzureWorkspaceInfo] = None
-
+
     cloud: Optional[str] = None
     """The cloud name. This field always has the value `gcp`."""
-
+
     cloud_resource_container: Optional[CloudResourceContainer] = None
     """The general workspace configurations that are specific to cloud providers."""
-
+
     creation_time: Optional[int] = None
     """Time in epoch milliseconds when the workspace was created."""
-
+
     credentials_id: Optional[str] = None
     """ID of the workspace's credential configuration object."""
-
-    custom_tags: Optional[Dict[str,str]] = None
+
+    custom_tags: Optional[Dict[str, str]] = None
     """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a
     string of utf-8 characters. The value can be an empty string, with maximum length of 255
     characters. The key can be of maximum length of 127 characters, and cannot be empty."""
-
+
     deployment_name: Optional[str] = None
     """The deployment name defines part of the subdomain for the workspace. The workspace URL for web
     application and REST APIs is `<workspace deployment name>.cloud.databricks.com`. This value must
     be unique across all non-deleted deployments across all AWS regions."""
-
+
     external_customer_info: Optional[ExternalCustomerInfo] = None
     """If this workspace is for a external customer, then external_customer_info is populated. If this
     workspace is not for a external customer, then external_customer_info is empty."""
-
+
     gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
     """The network settings for the workspace. The configurations are only for Databricks-managed
     VPCs. It is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP
@@ -1801,29 +2115,29 @@ class Workspace:
     Excel spreadsheet. See [calculate subnet sizes for a new workspace].

     [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html"""
-
+
     gke_config: Optional[GkeConfig] = None
     """The configurations for the GKE cluster of a Databricks workspace."""
-
+
     is_no_public_ip_enabled: Optional[bool] = None
     """Whether no public IP is enabled for the workspace."""
-
+
     location: Optional[str] = None
     """The Google Cloud region of the workspace data plane in your Google account (for example,
     `us-east4`)."""
-
+
     managed_services_customer_managed_key_id: Optional[str] = None
     """ID of the key configuration for encrypting managed services."""
-
+
     network_id: Optional[str] = None
     """The network configuration ID that is attached to the workspace. This field is available only if
     the network is a customer-managed network."""
-
+
     pricing_tier: Optional[PricingTier] = None
     """The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].

     [AWS Pricing]: https://databricks.com/product/aws-pricing"""
-
+
     private_access_settings_id: Optional[str] = None
     """ID of the workspace's private access settings object. Only used for PrivateLink. You must
     specify this ID if you are using [AWS PrivateLink] for either front-end (user-to-workspace
@@ -1833,103 +2147,173 @@ class Workspace:

     [AWS PrivateLink]: https://aws.amazon.com/privatelink/
     [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html"""
-
+
     storage_configuration_id: Optional[str] = None
     """ID of the workspace's storage configuration object."""
-
+
     storage_customer_managed_key_id: Optional[str] = None
     """ID of the key configuration for encrypting workspace storage."""
-
+
     workspace_id: Optional[int] = None
     """A unique integer ID for the workspace"""
-
+
     workspace_name: Optional[str] = None
     """The human-readable name of the workspace."""
-
+
     workspace_status: Optional[WorkspaceStatus] = None
     """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING`
     initially. Continue to check the status until the status is `RUNNING`."""
-
+
     workspace_status_message: Optional[str] = None
     """Message describing the current workspace status."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Workspace into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info.as_dict()
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container.as_dict()
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
-        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info.as_dict()
-        if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
-        if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
-        if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
-        if self.location is not None: body['location'] = self.location
-        if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier.value
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id
-        if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
-        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
-        if self.workspace_status is not None: body['workspace_status'] = self.workspace_status.value
-        if self.workspace_status_message is not None: body['workspace_status_message'] = self.workspace_status_message
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.azure_workspace_info:
+            body["azure_workspace_info"] = self.azure_workspace_info.as_dict()
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.cloud_resource_container:
+            body["cloud_resource_container"] = self.cloud_resource_container.as_dict()
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.deployment_name is not None:
+            body["deployment_name"] = self.deployment_name
+        if self.external_customer_info:
+            body["external_customer_info"] = self.external_customer_info.as_dict()
+        if self.gcp_managed_network_config:
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict()
+        if self.gke_config:
+            body["gke_config"] = self.gke_config.as_dict()
+        if self.is_no_public_ip_enabled is not None:
+            body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled
+        if self.location is not None:
+            body["location"] = self.location
+        if self.managed_services_customer_managed_key_id is not None:
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.pricing_tier is not None:
+            body["pricing_tier"] = self.pricing_tier.value
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
+        if self.workspace_name is not None:
+            body["workspace_name"] = self.workspace_name
+        if self.workspace_status is not None:
+            body["workspace_status"] = self.workspace_status.value
+        if self.workspace_status_message is not None:
+            body["workspace_status_message"] = self.workspace_status_message
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Workspace into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.account_id is not None: body['account_id'] = self.account_id
-        if self.aws_region is not None: body['aws_region'] = self.aws_region
-        if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
-        if self.custom_tags: body['custom_tags'] = self.custom_tags
-        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
-        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info
-        if self.gcp_managed_network_config: body['gcp_managed_network_config'] = self.gcp_managed_network_config
-        if self.gke_config: body['gke_config'] = self.gke_config
-        if self.is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
-        if self.location is not None: body['location'] = self.location
-        if self.managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
-        if self.network_id is not None: body['network_id'] = self.network_id
-        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier
-        if self.private_access_settings_id is not None: body['private_access_settings_id'] = self.private_access_settings_id
-        if self.storage_configuration_id is not None: body['storage_configuration_id'] = self.storage_configuration_id
-        if self.storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
-        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
-        if self.workspace_status is not None: body['workspace_status'] = self.workspace_status
-        if self.workspace_status_message is not None: body['workspace_status_message'] = self.workspace_status_message
+        if self.account_id is not None:
+            body["account_id"] = self.account_id
+        if self.aws_region is not None:
+            body["aws_region"] = self.aws_region
+        if self.azure_workspace_info:
+            body["azure_workspace_info"] = self.azure_workspace_info
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.cloud_resource_container:
+            body["cloud_resource_container"] = self.cloud_resource_container
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.credentials_id is not None:
+            body["credentials_id"] = self.credentials_id
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
+        if self.deployment_name is not None:
+            body["deployment_name"] = self.deployment_name
+        if self.external_customer_info:
+            body["external_customer_info"] = self.external_customer_info
+        if self.gcp_managed_network_config:
+            body["gcp_managed_network_config"] = self.gcp_managed_network_config
+        if self.gke_config:
+            body["gke_config"] = self.gke_config
+        if self.is_no_public_ip_enabled is not None:
+            body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled
+        if self.location is not None:
+            body["location"] = self.location
+        if self.managed_services_customer_managed_key_id is not None:
+            body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None:
+            body["network_id"] = self.network_id
+        if self.pricing_tier is not None:
+            body["pricing_tier"] = self.pricing_tier
+        if self.private_access_settings_id is not None:
+            body["private_access_settings_id"] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body["storage_configuration_id"] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
+        if self.workspace_name is not None:
+            body["workspace_name"] = self.workspace_name
+        if self.workspace_status is not None:
+            body["workspace_status"] = self.workspace_status
+        if self.workspace_status_message is not None:
+            body["workspace_status_message"] = self.workspace_status_message
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Workspace:
         """Deserializes the Workspace from a dictionary."""
-        return cls(account_id=d.get('account_id', None), aws_region=d.get('aws_region', None), azure_workspace_info=_from_dict(d, 'azure_workspace_info', AzureWorkspaceInfo), cloud=d.get('cloud', None), cloud_resource_container=_from_dict(d, 'cloud_resource_container', CloudResourceContainer), creation_time=d.get('creation_time', None), credentials_id=d.get('credentials_id', None), custom_tags=d.get('custom_tags', None), deployment_name=d.get('deployment_name', None), external_customer_info=_from_dict(d, 'external_customer_info', ExternalCustomerInfo), gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config', GcpManagedNetworkConfig), gke_config=_from_dict(d, 'gke_config', GkeConfig), is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None), location=d.get('location', None), managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id', None), network_id=d.get('network_id', None), pricing_tier=_enum(d, 'pricing_tier', PricingTier), private_access_settings_id=d.get('private_access_settings_id', None), storage_configuration_id=d.get('storage_configuration_id', None), storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None), workspace_id=d.get('workspace_id', None), workspace_name=d.get('workspace_name', None), workspace_status=_enum(d, 'workspace_status', WorkspaceStatus), workspace_status_message=d.get('workspace_status_message', None))
-
-
+        return cls(
+            account_id=d.get("account_id", None),
+            aws_region=d.get("aws_region", None),
+            azure_workspace_info=_from_dict(d, "azure_workspace_info", AzureWorkspaceInfo),
+            cloud=d.get("cloud", None),
+            cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer),
+            creation_time=d.get("creation_time", None),
+            credentials_id=d.get("credentials_id", None),
+            custom_tags=d.get("custom_tags", None),
+            deployment_name=d.get("deployment_name", None),
+            external_customer_info=_from_dict(d, "external_customer_info", ExternalCustomerInfo),
+            gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig),
+            gke_config=_from_dict(d, "gke_config", GkeConfig),
+            is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None),
+            location=d.get("location", None),
+            managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None),
+            network_id=d.get("network_id", None),
+            pricing_tier=_enum(d, "pricing_tier", PricingTier),
+            private_access_settings_id=d.get("private_access_settings_id", None),
+            storage_configuration_id=d.get("storage_configuration_id", None),
+            storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None),
+            workspace_id=d.get("workspace_id", None),
+            workspace_name=d.get("workspace_name", None),
+            workspace_status=_enum(d, "workspace_status", WorkspaceStatus),
+            workspace_status_message=d.get("workspace_status_message", None),
+        )


 class WorkspaceStatus(Enum):
     """The status of the workspace. For workspace creation, usually it is set to `PROVISIONING`
     initially. Continue to check the status until the status is `RUNNING`."""
-
-    BANNED = 'BANNED'
-    CANCELLING = 'CANCELLING'
-    FAILED = 'FAILED'
-    NOT_PROVISIONED = 'NOT_PROVISIONED'
-    PROVISIONING = 'PROVISIONING'
-    RUNNING = 'RUNNING'
+    BANNED = "BANNED"
+    CANCELLING = "CANCELLING"
+    FAILED = "FAILED"
+    NOT_PROVISIONED = "NOT_PROVISIONED"
+    PROVISIONING = "PROVISIONING"
+    RUNNING = "RUNNING"

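Per the `workspace_status` docs, creation should be polled until `RUNNING`. A hedged sketch; `AccountClient` and the account-level `workspaces.get` call are assumed from elsewhere in the SDK (not shown in this hunk), and the workspace ID is a placeholder:

    import time

    from databricks.sdk import AccountClient
    from databricks.sdk.service.provisioning import WorkspaceStatus

    a = AccountClient()  # account credentials read from the environment
    while True:
        ws = a.workspaces.get(workspace_id=1234567890)  # API assumed, ID is a placeholder
        if ws.workspace_status == WorkspaceStatus.RUNNING:
            break
        if ws.workspace_status in (WorkspaceStatus.FAILED, WorkspaceStatus.BANNED, WorkspaceStatus.CANCELLING):
            raise RuntimeError(ws.workspace_status_message)
        time.sleep(30)  # modest poll interval; tune as needed
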

 class CredentialsAPI:
@@ -1937,157 +2321,128 @@ class CredentialsAPI:
     service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate
     VPC for the new workspace. A credential configuration encapsulates this role information, and
     its ID is used when creating a new workspace."""
-
+
     def __init__(self, api_client):
         self._api = api_client

-
-
-
-
-
-
-
-
-    def create(self
-               , credentials_name: str, aws_credentials: CreateCredentialAwsCredentials
-               ) -> Credential:
+    def create(self, credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential:
         """Create credential configuration.
-
+
         Creates a Databricks credential configuration that represents cloud cross-account credentials for a
        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API
        account ID) in the returned credential object, and configure the required access policy.
-
+
        Save the response's `credentials_id` field, which is the ID for your new credential configuration
        object.
-
+
        For information about how to create a new workspace with this API, see [Create a new workspace using
        the Account API]
-
+
        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-
+
        :param credentials_name: str
          The human-readable name of the credential configuration object.
        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
-
+
        :returns: :class:`Credential`
        """
        body = {}
-        if aws_credentials is not None: body['aws_credentials'] = aws_credentials.as_dict()
-        if credentials_name is not None: body['credentials_name'] = credentials_name
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-
-        res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/credentials', body=body
-
-        , headers=headers
-        )
-        return Credential.from_dict(res)
+        if aws_credentials is not None:
+            body["aws_credentials"] = aws_credentials.as_dict()
+        if credentials_name is not None:
+            body["credentials_name"] = credentials_name
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/credentials", body=body, headers=headers)
+        return Credential.from_dict(res)

-    def delete(self
-               , credentials_id: str
-               ):
+    def delete(self, credentials_id: str):
        """Delete credential configuration.
-
+
        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
        delete a credential that is associated with any workspace.
-
+
        :param credentials_id: str
          Databricks Account API credential configuration ID
-
-
+
+
        """
-
-        headers = {'Accept': 'application/json',}
-
-        self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}'
-
-        , headers=headers
-        )
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE", f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}", headers=headers
+        )

-    def get(self
-            , credentials_id: str
-            ) -> Credential:
+    def get(self, credentials_id: str) -> Credential:
        """Get credential configuration.
-
+
        Gets a Databricks credential configuration object for an account, both specified by ID.
-
+
        :param credentials_id: str
          Databricks Account API credential configuration ID
-
+
        :returns: :class:`Credential`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}'
-
-        , headers=headers
-        )
-        return Credential.from_dict(res)
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do(
+            "GET", f"/api/2.0/accounts/{self._api.account_id}/credentials/{credentials_id}", headers=headers
+        )
+        return Credential.from_dict(res)

     def list(self) -> Iterator[Credential]:
        """Get all credential configurations.
-
+
        Gets all Databricks credential configurations associated with an account specified by ID.
-
+
        :returns: Iterator over :class:`Credential`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/credentials'
-        , headers=headers
-        )
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/credentials", headers=headers)
        return [Credential.from_dict(v) for v in res]
-
-
+

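End to end, the workflow described in `create`'s docstring looks roughly like the sketch below. The role ARN is a placeholder, and `CreateCredentialStsRole` is assumed to be the role wrapper defined earlier in this module (not shown in this hunk):

    from databricks.sdk import AccountClient
    from databricks.sdk.service.provisioning import (
        CreateCredentialAwsCredentials,
        CreateCredentialStsRole,  # assumed helper from earlier in this file
    )

    a = AccountClient()
    cred = a.credentials.create(
        credentials_name="main-vpc-credentials",
        aws_credentials=CreateCredentialAwsCredentials(
            sts_role=CreateCredentialStsRole(role_arn="arn:aws:iam::123456789012:role/databricks-cross-account")
        ),
    )
    # Save this ID: workspace creation and updates reference it.
    print(cred.credentials_id)
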

 class EncryptionKeysAPI:
     """These APIs manage encryption key configurations for this workspace (optional). A key configuration
     encapsulates the AWS KMS key information and some information about how the key configuration can be
     used. There are two possible uses for key configurations:
-
+
     * Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in
     the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration
     can be used to encrypt a workspace's DBFS and EBS data in the data plane.
-
+
     In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview
     feature is available if your account is on the E2 version of the platform. Updating a running workspace
     with workspace storage encryption requires that the workspace is on the E2 version of the platform. If
     you have an older workspace, it might not be on the E2 version of the platform. If you are not sure,
     contact your Databricks representative."""
-
+
     def __init__(self, api_client):
         self._api = api_client

-
-
-
-
-
-
-
-
-    def create(self
-               , use_cases: List[KeyUseCase]
-               , *
-               , aws_key_info: Optional[CreateAwsKeyInfo] = None, gcp_key_info: Optional[CreateGcpKeyInfo] = None) -> CustomerManagedKey:
+    def create(
+        self,
+        use_cases: List[KeyUseCase],
+        *,
+        aws_key_info: Optional[CreateAwsKeyInfo] = None,
+        gcp_key_info: Optional[CreateGcpKeyInfo] = None,
+    ) -> CustomerManagedKey:
        """Create encryption key configuration.
-
+
        Creates a customer-managed key configuration object for an account, specified by ID. This operation
        uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's
        customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks
@@ -2095,67 +2450,62 @@ def create(self
        specified as a workspace's customer-managed key for workspace storage, the key encrypts the
        workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and,
        optionally, cluster EBS volume data.
-
+
        **Important**: Customer-managed keys are supported only for some deployment types, subscription
        types, and AWS regions that currently support creation of Databricks workspaces.
-
+
        This operation is available only if your account is on the E2 version of the platform or on a select
        custom plan that allows multiple workspaces per account.
-
+
        :param use_cases: List[:class:`KeyUseCase`]
          The cases that the key can be used for.
        :param aws_key_info: :class:`CreateAwsKeyInfo` (optional)
        :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional)
-
+
        :returns: :class:`CustomerManagedKey`
        """
        body = {}
-        if aws_key_info is not None: body['aws_key_info'] = aws_key_info.as_dict()
-        if gcp_key_info is not None: body['gcp_key_info'] = gcp_key_info.as_dict()
-        if use_cases is not None: body['use_cases'] = [v.value for v in use_cases]
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-
-        res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys', body=body
-
-        , headers=headers
-        )
+        if aws_key_info is not None:
+            body["aws_key_info"] = aws_key_info.as_dict()
+        if gcp_key_info is not None:
+            body["gcp_key_info"] = gcp_key_info.as_dict()
+        if use_cases is not None:
+            body["use_cases"] = [v.value for v in use_cases]
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST", f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys", body=body, headers=headers
+        )
        return CustomerManagedKey.from_dict(res)
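A sketch of registering a key for both use cases. The `KeyUseCase` members and the `key_arn` field of `CreateAwsKeyInfo` are assumptions based on the rest of this module, not shown in this hunk:

    from databricks.sdk import AccountClient
    from databricks.sdk.service.provisioning import CreateAwsKeyInfo, KeyUseCase

    a = AccountClient()
    cmk = a.encryption_keys.create(
        use_cases=[KeyUseCase.MANAGED_SERVICES, KeyUseCase.STORAGE],  # enum members assumed
        aws_key_info=CreateAwsKeyInfo(key_arn="arn:aws:kms:us-west-2:123456789012:key/<key-id>"),  # field assumed
    )
    print(cmk.customer_managed_key_id)
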
:param aws_key_info: :class:`CreateAwsKeyInfo` (optional) :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional) - + :returns: :class:`CustomerManagedKey` """ body = {} - if aws_key_info is not None: body['aws_key_info'] = aws_key_info.as_dict() - if gcp_key_info is not None: body['gcp_key_info'] = gcp_key_info.as_dict() - if use_cases is not None: body['use_cases'] = [v.value for v in use_cases] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys', body=body - - , headers=headers - ) + if aws_key_info is not None: + body["aws_key_info"] = aws_key_info.as_dict() + if gcp_key_info is not None: + body["gcp_key_info"] = gcp_key_info.as_dict() + if use_cases is not None: + body["use_cases"] = [v.value for v in use_cases] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys", body=body, headers=headers + ) return CustomerManagedKey.from_dict(res) - - - - - def delete(self - , customer_managed_key_id: str - ): + def delete(self, customer_managed_key_id: str): """Delete encryption key configuration. - + Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. - + :param customer_managed_key_id: str Databricks encryption key configuration ID. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}", + headers=headers, + ) - def get(self - , customer_managed_key_id: str - ) -> CustomerManagedKey: + def get(self, customer_managed_key_id: str) -> CustomerManagedKey: """Get encryption key configuration. - + Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks @@ -2163,80 +2513,76 @@ def get(self specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume data. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions. - + This operation is available only if your account is on the E2 version of the platform.", - + :param customer_managed_key_id: str Databricks encryption key configuration ID. - + :returns: :class:`CustomerManagedKey` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}' - - , headers=headers - ) - return CustomerManagedKey.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys/{customer_managed_key_id}", + headers=headers, + ) + return CustomerManagedKey.from_dict(res) def list(self) -> Iterator[CustomerManagedKey]: """Get all encryption key configurations. 
-
+
        Gets all customer-managed key configuration objects for an account. If the key is specified as a
        workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's
        notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history.
        If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the
        workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane.
-
+
        **Important**: Customer-managed keys are supported only for some deployment types, subscription
        types, and AWS regions.
-
+
        This operation is available only if your account is on the E2 version of the platform.
-
+
        :returns: Iterator over :class:`CustomerManagedKey`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/customer-managed-keys'
-        , headers=headers
-        )
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/customer-managed-keys", headers=headers)
        return [CustomerManagedKey.from_dict(v) for v in res]
-
-
+

class NetworksAPI:
    """These APIs manage network configurations for customer-managed VPCs (optional). A network
    configuration's ID is used when creating a new workspace if you use customer-managed VPCs."""
-
+
    def __init__(self, api_client):
        self._api = api_client
-
-
-
-
-
-
-
-
-    def create(self
-               , network_name: str
-               , *
-               , gcp_network_info: Optional[GcpNetworkInfo] = None, security_group_ids: Optional[List[str]] = None, subnet_ids: Optional[List[str]] = None, vpc_endpoints: Optional[NetworkVpcEndpoints] = None, vpc_id: Optional[str] = None) -> Network:
+    def create(
+        self,
+        network_name: str,
+        *,
+        gcp_network_info: Optional[GcpNetworkInfo] = None,
+        security_group_ids: Optional[List[str]] = None,
+        subnet_ids: Optional[List[str]] = None,
+        vpc_endpoints: Optional[NetworkVpcEndpoints] = None,
+        vpc_id: Optional[str] = None,
+    ) -> Network:
        """Create network configuration.
-
+
        Creates a Databricks network configuration that represents a VPC and its resources. The VPC will be
        used for new Databricks clusters. This requires a pre-existing VPC and subnets.
-
+
        :param network_name: str
          The human-readable name of the network configuration.
        :param gcp_network_info: :class:`GcpNetworkInfo` (optional)
@@ -2251,138 +2597,121 @@ def create(self
        :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
          If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
          PrivateLink].
-
+
          [AWS PrivateLink]: https://aws.amazon.com/privatelink/
        :param vpc_id: str (optional)
          The ID of the VPC associated with this network. VPC IDs can be used in multiple network
          configurations.
- + :returns: :class:`Network` """ body = {} - if gcp_network_info is not None: body['gcp_network_info'] = gcp_network_info.as_dict() - if network_name is not None: body['network_name'] = network_name - if security_group_ids is not None: body['security_group_ids'] = [v for v in security_group_ids] - if subnet_ids is not None: body['subnet_ids'] = [v for v in subnet_ids] - if vpc_endpoints is not None: body['vpc_endpoints'] = vpc_endpoints.as_dict() - if vpc_id is not None: body['vpc_id'] = vpc_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/networks', body=body - - , headers=headers - ) + if gcp_network_info is not None: + body["gcp_network_info"] = gcp_network_info.as_dict() + if network_name is not None: + body["network_name"] = network_name + if security_group_ids is not None: + body["security_group_ids"] = [v for v in security_group_ids] + if subnet_ids is not None: + body["subnet_ids"] = [v for v in subnet_ids] + if vpc_endpoints is not None: + body["vpc_endpoints"] = vpc_endpoints.as_dict() + if vpc_id is not None: + body["vpc_id"] = vpc_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/accounts/{self._api.account_id}/networks", body=body, headers=headers) return Network.from_dict(res) - - - - - def delete(self - , network_id: str - ): + def delete(self, network_id: str): """Delete a network configuration. - + Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. - + This operation is available only if your account is on the E2 version of the platform. - + :param network_id: str Databricks Account API network configuration ID. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/networks/{network_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) - def get(self - , network_id: str - ) -> Network: + def get(self, network_id: str) -> Network: """Get a network configuration. - + Gets a Databricks network configuration, which represents a cloud VPC and its resources. - + :param network_id: str Databricks Account API network configuration ID. - + :returns: :class:`Network` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/networks/{network_id}' - - , headers=headers - ) - return Network.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/networks/{network_id}", headers=headers) + return Network.from_dict(res) def list(self) -> Iterator[Network]: """Get all network configurations. - + Gets a list of all Databricks network configurations for an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform. 
-
+
        :returns: Iterator over :class:`Network`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/networks'
-        , headers=headers
-        )
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/networks", headers=headers)
        return [Network.from_dict(v) for v in res]
-
-
+

class PrivateAccessAPI:
    """These APIs manage private access settings for this account."""
-
+
    def __init__(self, api_client):
        self._api = api_client
-
-
-
-
-
-
-
-
-    def create(self
-               , private_access_settings_name: str, region: str
-               , *
-               , allowed_vpc_endpoint_ids: Optional[List[str]] = None, private_access_level: Optional[PrivateAccessLevel] = None, public_access_enabled: Optional[bool] = None) -> PrivateAccessSettings:
+    def create(
+        self,
+        private_access_settings_name: str,
+        region: str,
+        *,
+        allowed_vpc_endpoint_ids: Optional[List[str]] = None,
+        private_access_level: Optional[PrivateAccessLevel] = None,
+        public_access_enabled: Optional[bool] = None,
+    ) -> PrivateAccessSettings:
        """Create private access settings.
-
+
        Creates a private access settings object, which specifies how your workspace is accessed over [AWS
        PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
        referenced by ID in the workspace's `private_access_settings_id` property.
-
+
        You can share one private access settings object with multiple workspaces in a single account.
        However, private access settings are specific to AWS regions, so only workspaces in the same AWS
        region can use a given private access settings object.
-
+
        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
+
        [AWS PrivateLink]: https://aws.amazon.com/privatelink
        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
+
        :param private_access_settings_name: str
          The human-readable name of the private access settings object.
        :param region: str
@@ -2391,14 +2720,14 @@ def create(self
          An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when
          registering the VPC endpoint configuration in your Databricks account. This is not the ID of the
          VPC endpoint in AWS.
-
+
          Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
          in your account that can connect to your workspace over AWS PrivateLink.
-
+
          If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
          control only works for PrivateLink connections. To control how your workspace is accessed via
          public internet, see [IP access lists].
-
+
          [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
        :param private_access_level: :class:`PrivateAccessLevel` (optional)
          The private access level controls which VPC endpoints can connect to the UI or API of any workspace
@@ -2410,132 +2739,130 @@ def create(self
          Determines if the workspace can be accessed over public internet. For fully private workspaces, you
          can optionally specify `false`, but only if you implement both the front-end and the back-end
          PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-
+
        :returns: :class:`PrivateAccessSettings`
        """
        body = {}
-        if allowed_vpc_endpoint_ids is not None: body['allowed_vpc_endpoint_ids'] = [v for v in allowed_vpc_endpoint_ids]
-        if private_access_level is not None: body['private_access_level'] = private_access_level.value
-        if private_access_settings_name is not None: body['private_access_settings_name'] = private_access_settings_name
-        if public_access_enabled is not None: body['public_access_enabled'] = public_access_enabled
-        if region is not None: body['region'] = region
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-
-        res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings', body=body
-
-        , headers=headers
-        )
+        if allowed_vpc_endpoint_ids is not None:
+            body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids]
+        if private_access_level is not None:
+            body["private_access_level"] = private_access_level.value
+        if private_access_settings_name is not None:
+            body["private_access_settings_name"] = private_access_settings_name
+        if public_access_enabled is not None:
+            body["public_access_enabled"] = public_access_enabled
+        if region is not None:
+            body["region"] = region
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do(
+            "POST", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings", body=body, headers=headers
+        )
        return PrivateAccessSettings.from_dict(res)
-
-
-
-
-    def delete(self
-               , private_access_settings_id: str
-               ):
+    def delete(self, private_access_settings_id: str):
        """Delete a private access settings object.
-
+
        Deletes a private access settings object, which determines how your workspace is accessed over [AWS
        PrivateLink].
-
+
        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
+
        [AWS PrivateLink]: https://aws.amazon.com/privatelink
        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
+
        :param private_access_settings_id: str
          Databricks Account API private access settings ID.
-
-
+
+
        """
-
-        headers = {'Accept': 'application/json',}
-
-        self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}'
-
-        , headers=headers
-        )
-
-
-
-
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do(
+            "DELETE",
+            f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}",
+            headers=headers,
+        )

-    def get(self
-            , private_access_settings_id: str
-            ) -> PrivateAccessSettings:
+    def get(self, private_access_settings_id: str) -> PrivateAccessSettings:
        """Get a private access settings object.
-
+
        Gets a private access settings object, which specifies how your workspace is accessed over [AWS
        PrivateLink].
-
+
        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
+
        [AWS PrivateLink]: https://aws.amazon.com/privatelink
        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
+
        :param private_access_settings_id: str
          Databricks Account API private access settings ID.
- + :returns: :class:`PrivateAccessSettings` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}' - - , headers=headers - ) - return PrivateAccessSettings.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}", + headers=headers, + ) + return PrivateAccessSettings.from_dict(res) def list(self) -> Iterator[PrivateAccessSettings]: """Get all private access settings objects. - + Gets a list of all private access settings objects for an account, specified by ID. - + :returns: Iterator over :class:`PrivateAccessSettings` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings' - , headers=headers - ) - return [PrivateAccessSettings.from_dict(v) for v in res] - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/private-access-settings", headers=headers) + return [PrivateAccessSettings.from_dict(v) for v in res] - def replace(self - , private_access_settings_id: str, private_access_settings_name: str, region: str - , * - , allowed_vpc_endpoint_ids: Optional[List[str]] = None, private_access_level: Optional[PrivateAccessLevel] = None, public_access_enabled: Optional[bool] = None): + def replace( + self, + private_access_settings_id: str, + private_access_settings_name: str, + region: str, + *, + allowed_vpc_endpoint_ids: Optional[List[str]] = None, + private_access_level: Optional[PrivateAccessLevel] = None, + public_access_enabled: Optional[bool] = None, + ): """Replace private access settings. - + Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. - + This operation completely overwrites your existing private access settings object attached to your workspaces. All workspaces attached to the private access settings are affected by any change. If `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of these changes might take several minutes to propagate to the workspace API. - + You can share one private access settings object with multiple workspaces in a single account. However, private access settings are specific to AWS regions, so only workspaces in the same AWS region can use a given private access settings object. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. :param private_access_settings_name: str @@ -2546,14 +2873,14 @@ def replace(self An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. - + Only used when `private_access_level` is set to `ENDPOINT`. 
This is an allow list of VPC endpoints
          in your account that can connect to your workspace over AWS PrivateLink.
-
+
          If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
          control only works for PrivateLink connections. To control how your workspace is accessed via
          public internet, see [IP access lists].
-
+
          [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
        :param private_access_level: :class:`PrivateAccessLevel` (optional)
          The private access level controls which VPC endpoints can connect to the UI or API of any workspace
@@ -2565,180 +2892,166 @@ def replace(self
          Determines if the workspace can be accessed over public internet. For fully private workspaces, you
          can optionally specify `false`, but only if you implement both the front-end and the back-end
          PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-
-
+
+
        """
        body = {}
-        if allowed_vpc_endpoint_ids is not None: body['allowed_vpc_endpoint_ids'] = [v for v in allowed_vpc_endpoint_ids]
-        if private_access_level is not None: body['private_access_level'] = private_access_level.value
-        if private_access_settings_name is not None: body['private_access_settings_name'] = private_access_settings_name
-        if public_access_enabled is not None: body['public_access_enabled'] = public_access_enabled
-        if region is not None: body['region'] = region
-        headers = {'Accept': 'application/json','Content-Type': 'application/json',}
-
-        self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}', body=body
-
-        , headers=headers
-        )
-
+        if allowed_vpc_endpoint_ids is not None:
+            body["allowed_vpc_endpoint_ids"] = [v for v in allowed_vpc_endpoint_ids]
+        if private_access_level is not None:
+            body["private_access_level"] = private_access_level.value
+        if private_access_settings_name is not None:
+            body["private_access_settings_name"] = private_access_settings_name
+        if public_access_enabled is not None:
+            body["public_access_enabled"] = public_access_enabled
+        if region is not None:
+            body["region"] = region
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        self._api.do(
+            "PUT",
+            f"/api/2.0/accounts/{self._api.account_id}/private-access-settings/{private_access_settings_id}",
+            body=body,
+            headers=headers,
+        )
+

-
-
class StorageAPI:
    """These APIs manage storage configurations for this workspace. A root storage S3 bucket in your
    account is required to store objects like cluster logs, notebook revisions, and job results. You can
    also use the root storage S3 bucket for storage of non-production DBFS data. A storage configuration
    encapsulates this bucket information, and its ID is used when creating a new workspace."""
-
+
    def __init__(self, api_client):
        self._api = api_client
-
-
-
-
-
-
-
-
-    def create(self
-               , storage_configuration_name: str, root_bucket_info: RootBucketInfo
-               ) -> StorageConfiguration:
+    def create(self, storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration:
        """Create new storage configuration.
-
+
        Creates a new storage configuration for an account, specified by ID. Uploads a storage configuration
        object that represents the root AWS S3 bucket in your account. Databricks stores related workspace
        assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure
        the required bucket policy.
- + For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param storage_configuration_name: str The human-readable name of the storage configuration. :param root_bucket_info: :class:`RootBucketInfo` Root S3 bucket information. - + :returns: :class:`StorageConfiguration` """ body = {} - if root_bucket_info is not None: body['root_bucket_info'] = root_bucket_info.as_dict() - if storage_configuration_name is not None: body['storage_configuration_name'] = storage_configuration_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations', body=body - - , headers=headers - ) + if root_bucket_info is not None: + body["root_bucket_info"] = root_bucket_info.as_dict() + if storage_configuration_name is not None: + body["storage_configuration_name"] = storage_configuration_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/storage-configurations", body=body, headers=headers + ) return StorageConfiguration.from_dict(res) - - - - - def delete(self - , storage_configuration_id: str - ): + def delete(self, storage_configuration_id: str): """Delete storage configuration. - + Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}", + headers=headers, + ) - def get(self - , storage_configuration_id: str - ) -> StorageConfiguration: + def get(self, storage_configuration_id: str) -> StorageConfiguration: """Get storage configuration. - + Gets a Databricks storage configuration for an account, both specified by ID. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - + :returns: :class:`StorageConfiguration` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}' - - , headers=headers - ) - return StorageConfiguration.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/storage-configurations/{storage_configuration_id}", + headers=headers, + ) + return StorageConfiguration.from_dict(res) def list(self) -> Iterator[StorageConfiguration]: """Get all storage configurations. - + Gets a list of all Databricks storage configurations for your account, specified by ID. 
- + :returns: Iterator over :class:`StorageConfiguration` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/storage-configurations' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/storage-configurations", headers=headers) return [StorageConfiguration.from_dict(v) for v in res] - - + class VpcEndpointsAPI: """These APIs manage VPC endpoint configurations for this account.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , vpc_endpoint_name: str - , * - , aws_vpc_endpoint_id: Optional[str] = None, gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None, region: Optional[str] = None) -> VpcEndpoint: + def create( + self, + vpc_endpoint_name: str, + *, + aws_vpc_endpoint_id: Optional[str] = None, + gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None, + region: Optional[str] = None, + ) -> VpcEndpoint: """Create VPC endpoint configuration. - + Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically accepts the VPC endpoint. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html - + :param vpc_endpoint_name: str The human-readable name of the storage configuration. :param aws_vpc_endpoint_id: str (optional) @@ -2747,170 +3060,173 @@ def create(self The Google Cloud specific information for this Private Service Connect endpoint. :param region: str (optional) The AWS region in which this VPC endpoint object exists. - + :returns: :class:`VpcEndpoint` """ body = {} - if aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = aws_vpc_endpoint_id - if gcp_vpc_endpoint_info is not None: body['gcp_vpc_endpoint_info'] = gcp_vpc_endpoint_info.as_dict() - if region is not None: body['region'] = region - if vpc_endpoint_name is not None: body['vpc_endpoint_name'] = vpc_endpoint_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints', body=body - - , headers=headers - ) + if aws_vpc_endpoint_id is not None: + body["aws_vpc_endpoint_id"] = aws_vpc_endpoint_id + if gcp_vpc_endpoint_info is not None: + body["gcp_vpc_endpoint_info"] = gcp_vpc_endpoint_info.as_dict() + if region is not None: + body["region"] = region + if vpc_endpoint_name is not None: + body["vpc_endpoint_name"] = vpc_endpoint_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints", body=body, headers=headers + ) return VpcEndpoint.from_dict(res) - - - - - def delete(self - , vpc_endpoint_id: str - ): + def delete(self, vpc_endpoint_id: str): """Delete VPC endpoint configuration. 
- + Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , vpc_endpoint_id: str - ) -> VpcEndpoint: + self._api.do( + "DELETE", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}", headers=headers + ) + + def get(self, vpc_endpoint_id: str) -> VpcEndpoint: """Get a VPC endpoint configuration. - + Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - + :returns: :class:`VpcEndpoint` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}' - - , headers=headers - ) - return VpcEndpoint.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints/{vpc_endpoint_id}", headers=headers + ) + return VpcEndpoint.from_dict(res) def list(self) -> Iterator[VpcEndpoint]: """Get all VPC endpoint configurations. - + Gets a list of all VPC endpoints for an account, specified by ID. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :returns: Iterator over :class:`VpcEndpoint` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/vpc-endpoints' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/vpc-endpoints", headers=headers) return [VpcEndpoint.from_dict(v) for v in res] - - + class WorkspacesAPI: """These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into folders, and provides access to data and computational resources such as clusters and jobs. 
- + These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_workspace_running(self, workspace_id: int, - timeout=timedelta(minutes=20), callback: Optional[Callable[[Workspace], None]] = None) -> Workspace: - deadline = time.time() + timeout.total_seconds() - target_states = (WorkspaceStatus.RUNNING, ) - failure_states = (WorkspaceStatus.BANNED, WorkspaceStatus.FAILED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(workspace_id=workspace_id) - status = poll.workspace_status - status_message = poll.workspace_status_message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach RUNNING, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"workspace_id={workspace_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - def create(self - , workspace_name: str - , * - , aws_region: Optional[str] = None, cloud: Optional[str] = None, cloud_resource_container: Optional[CloudResourceContainer] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, pricing_tier: Optional[PricingTier] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]: + def wait_get_workspace_running( + self, workspace_id: int, timeout=timedelta(minutes=20), callback: Optional[Callable[[Workspace], None]] = None + ) -> Workspace: + deadline = time.time() + timeout.total_seconds() + target_states = (WorkspaceStatus.RUNNING,) + failure_states = ( + WorkspaceStatus.BANNED, + WorkspaceStatus.FAILED, + ) + status_message = "polling..." 
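+        # Poll get() until the workspace reaches a target state, backing off
+        # linearly (1s, 2s, ..., capped at 10s per attempt) with random jitter
+        # so that concurrent waiters do not poll in lockstep.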
+ attempt = 1 + while time.time() < deadline: + poll = self.get(workspace_id=workspace_id) + status = poll.workspace_status + status_message = poll.workspace_status_message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach RUNNING, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"workspace_id={workspace_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create( + self, + workspace_name: str, + *, + aws_region: Optional[str] = None, + cloud: Optional[str] = None, + cloud_resource_container: Optional[CloudResourceContainer] = None, + credentials_id: Optional[str] = None, + custom_tags: Optional[Dict[str, str]] = None, + deployment_name: Optional[str] = None, + gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, + gke_config: Optional[GkeConfig] = None, + is_no_public_ip_enabled: Optional[bool] = None, + location: Optional[str] = None, + managed_services_customer_managed_key_id: Optional[str] = None, + network_id: Optional[str] = None, + pricing_tier: Optional[PricingTier] = None, + private_access_settings_id: Optional[str] = None, + storage_configuration_id: Optional[str] = None, + storage_customer_managed_key_id: Optional[str] = None, + ) -> Wait[Workspace]: """Create a new workspace. - + Creates a new workspace. - + **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request has been accepted and is in progress, but does not mean that the workspace deployed successfully and is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + :param workspace_name: str The workspace's human-readable name. :param aws_region: str (optional) @@ -2932,22 +3248,22 @@ def create(self deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the set of characters that are allowed in a subdomain. - + To set this value, you must have a deployment name prefix. Contact your Databricks account team to add an account deployment name prefix to your account. - + Workspace deployment names follow the account prefix and a hyphen. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be `acme-workspace-1.cloud.databricks.com`. - + You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment name to only include the deployment prefix. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and the workspace URL is `acme.cloud.databricks.com`. - + This value must be unique across all non-deleted deployments across all AWS regions. - + If a new workspace omits this property, the server generates a unique deployment name for you with the pattern `dbc-xxxxxxxx-xxxx`. 
:param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
@@ -2955,19 +3271,19 @@ def create(self
          is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP range
          configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
          detects an IP range overlap.
-
+
          Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
          addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
          `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
-
+
          The sizes of these IP ranges affect the maximum number of nodes for the workspace.
-
+
          **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
          workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
          your Databricks workspace are too small, IP exhaustion can occur, causing your Databricks jobs to
          fail. To determine the address range sizes that you need, Databricks provides a calculator as a
          Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace].
-
+
          [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
        :param gke_config: :class:`GkeConfig` (optional)
          The configurations for the GKE cluster of a Databricks workspace.
@@ -2982,15 +3298,15 @@ def create(self
        :param network_id: str (optional)
        :param pricing_tier: :class:`PricingTier` (optional)
          The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
-
+
          [AWS Pricing]: https://databricks.com/product/aws-pricing
        :param private_access_settings_id: str (optional)
          ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
          specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace
          connection), back-end (data plane to control plane connection), or both connection types.
-
+
          Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
+
          [AWS PrivateLink]: https://aws.amazon.com/privatelink/
          [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
        :param storage_configuration_id: str (optional)
@@ -2999,147 +3315,192 @@ def create(self
          The ID of the workspace's storage encryption key configuration object. This is used to encrypt the
          workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The
          provided key configuration object property `use_cases` must contain `STORAGE`.
-
+
        :returns: Long-running operation waiter for :class:`Workspace`.
          See :method:wait_get_workspace_running for more details.
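
        A minimal usage sketch (illustrative only; it assumes an ``AccountClient``
        named ``a`` plus pre-created credential and storage configuration objects)::

            waiter = a.workspaces.create(
                workspace_name="my-workspace",
                aws_region="us-west-2",
                credentials_id=credentials.credentials_id,
                storage_configuration_id=storage.storage_configuration_id,
            )
            workspace = waiter.result(timeout=timedelta(minutes=20))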
""" body = {} - if aws_region is not None: body['aws_region'] = aws_region - if cloud is not None: body['cloud'] = cloud - if cloud_resource_container is not None: body['cloud_resource_container'] = cloud_resource_container.as_dict() - if credentials_id is not None: body['credentials_id'] = credentials_id - if custom_tags is not None: body['custom_tags'] = custom_tags - if deployment_name is not None: body['deployment_name'] = deployment_name - if gcp_managed_network_config is not None: body['gcp_managed_network_config'] = gcp_managed_network_config.as_dict() - if gke_config is not None: body['gke_config'] = gke_config.as_dict() - if is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = is_no_public_ip_enabled - if location is not None: body['location'] = location - if managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id - if network_id is not None: body['network_id'] = network_id - if pricing_tier is not None: body['pricing_tier'] = pricing_tier.value - if private_access_settings_id is not None: body['private_access_settings_id'] = private_access_settings_id - if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id - if storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = storage_customer_managed_key_id - if workspace_name is not None: body['workspace_name'] = workspace_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/workspaces', body=body - - , headers=headers - ) - return Wait(self.wait_get_workspace_running - , response = Workspace.from_dict(op_response) - , workspace_id=op_response['workspace_id']) - - - def create_and_wait(self - , workspace_name: str - , * - , aws_region: Optional[str] = None, cloud: Optional[str] = None, cloud_resource_container: Optional[CloudResourceContainer] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, deployment_name: Optional[str] = None, gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, gke_config: Optional[GkeConfig] = None, is_no_public_ip_enabled: Optional[bool] = None, location: Optional[str] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_id: Optional[str] = None, pricing_tier: Optional[PricingTier] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, - timeout=timedelta(minutes=20)) -> Workspace: - return self.create(aws_region=aws_region, cloud=cloud, cloud_resource_container=cloud_resource_container, credentials_id=credentials_id, custom_tags=custom_tags, deployment_name=deployment_name, gcp_managed_network_config=gcp_managed_network_config, gke_config=gke_config, is_no_public_ip_enabled=is_no_public_ip_enabled, location=location, managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, network_id=network_id, pricing_tier=pricing_tier, private_access_settings_id=private_access_settings_id, storage_configuration_id=storage_configuration_id, storage_customer_managed_key_id=storage_customer_managed_key_id, workspace_name=workspace_name).result(timeout=timeout) - - - - - def delete(self - , workspace_id: int - ): + if aws_region is not None: + body["aws_region"] = aws_region + if cloud is not None: + body["cloud"] = cloud + if 
cloud_resource_container is not None: + body["cloud_resource_container"] = cloud_resource_container.as_dict() + if credentials_id is not None: + body["credentials_id"] = credentials_id + if custom_tags is not None: + body["custom_tags"] = custom_tags + if deployment_name is not None: + body["deployment_name"] = deployment_name + if gcp_managed_network_config is not None: + body["gcp_managed_network_config"] = gcp_managed_network_config.as_dict() + if gke_config is not None: + body["gke_config"] = gke_config.as_dict() + if is_no_public_ip_enabled is not None: + body["is_no_public_ip_enabled"] = is_no_public_ip_enabled + if location is not None: + body["location"] = location + if managed_services_customer_managed_key_id is not None: + body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id + if network_id is not None: + body["network_id"] = network_id + if pricing_tier is not None: + body["pricing_tier"] = pricing_tier.value + if private_access_settings_id is not None: + body["private_access_settings_id"] = private_access_settings_id + if storage_configuration_id is not None: + body["storage_configuration_id"] = storage_configuration_id + if storage_customer_managed_key_id is not None: + body["storage_customer_managed_key_id"] = storage_customer_managed_key_id + if workspace_name is not None: + body["workspace_name"] = workspace_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/workspaces", body=body, headers=headers + ) + return Wait( + self.wait_get_workspace_running, + response=Workspace.from_dict(op_response), + workspace_id=op_response["workspace_id"], + ) + + def create_and_wait( + self, + workspace_name: str, + *, + aws_region: Optional[str] = None, + cloud: Optional[str] = None, + cloud_resource_container: Optional[CloudResourceContainer] = None, + credentials_id: Optional[str] = None, + custom_tags: Optional[Dict[str, str]] = None, + deployment_name: Optional[str] = None, + gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None, + gke_config: Optional[GkeConfig] = None, + is_no_public_ip_enabled: Optional[bool] = None, + location: Optional[str] = None, + managed_services_customer_managed_key_id: Optional[str] = None, + network_id: Optional[str] = None, + pricing_tier: Optional[PricingTier] = None, + private_access_settings_id: Optional[str] = None, + storage_configuration_id: Optional[str] = None, + storage_customer_managed_key_id: Optional[str] = None, + timeout=timedelta(minutes=20), + ) -> Workspace: + return self.create( + aws_region=aws_region, + cloud=cloud, + cloud_resource_container=cloud_resource_container, + credentials_id=credentials_id, + custom_tags=custom_tags, + deployment_name=deployment_name, + gcp_managed_network_config=gcp_managed_network_config, + gke_config=gke_config, + is_no_public_ip_enabled=is_no_public_ip_enabled, + location=location, + managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, + network_id=network_id, + pricing_tier=pricing_tier, + private_access_settings_id=private_access_settings_id, + storage_configuration_id=storage_configuration_id, + storage_customer_managed_key_id=storage_customer_managed_key_id, + workspace_name=workspace_name, + ).result(timeout=timeout) + + def delete(self, workspace_id: int): """Delete a workspace. - + Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. 
However, it might take a few minutes for all workspaces resources to be deleted, depending on the size and number of workspace resources. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :param workspace_id: int Workspace ID. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers) - def get(self - , workspace_id: int - ) -> Workspace: + def get(self, workspace_id: int) -> Workspace: """Get a workspace. - + Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + For information about how to create a new workspace with this API **including error handling**, see [Create a new workspace using the Account API]. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. - + :returns: :class:`Workspace` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}' - - , headers=headers - ) - return Workspace.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", headers=headers + ) + return Workspace.from_dict(res) def list(self) -> Iterator[Workspace]: """Get all workspaces. - + Gets a list of all workspaces associated with an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. 
-
+
        :returns: Iterator over :class:`Workspace`
        """
-
-        headers = {'Accept': 'application/json',}
-
-        res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces'
-        , headers=headers
-        )
-        return [Workspace.from_dict(v) for v in res]
-
-
-
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces", headers=headers)
+        return [Workspace.from_dict(v) for v in res]

-    def update(self
-               , workspace_id: int
-               , *
-               , aws_region: Optional[str] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]:
+    def update(
+        self,
+        workspace_id: int,
+        *,
+        aws_region: Optional[str] = None,
+        credentials_id: Optional[str] = None,
+        custom_tags: Optional[Dict[str, str]] = None,
+        managed_services_customer_managed_key_id: Optional[str] = None,
+        network_connectivity_config_id: Optional[str] = None,
+        network_id: Optional[str] = None,
+        private_access_settings_id: Optional[str] = None,
+        storage_configuration_id: Optional[str] = None,
+        storage_customer_managed_key_id: Optional[str] = None,
+    ) -> Wait[Workspace]:
        """Update workspace configuration.
-
+
        Updates a workspace configuration for either a running workspace or a failed workspace. The elements
        that can be updated vary between these two use cases.
-
+
        ### Update a failed workspace You can update a Databricks workspace configuration for failed
        workspace deployment for some fields, but not all fields. For a failed workspace, this request
        supports updates to the following fields only: - Credential configuration ID - Storage configuration
        ID - Network
If you do not terminate all cluster instances in the workspace before calling this API, the request will fail. - + ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` requests with the workspace ID and check the workspace status and the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes @@ -3215,22 +3576,22 @@ def update(self silently to its original configuration. After the workspace has been updated, you cannot use or create clusters for another 20 minutes. If you create or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could cause other unexpected behavior. - + If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20 minute wait. - + **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment types and subscription types. If you have questions about availability, contact your Databricks representative. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. :param aws_region: str (optional) @@ -3260,37 +3621,66 @@ def update(self :param storage_customer_managed_key_id: str (optional) The ID of the key configuration object for workspace storage. This parameter is available for updating both failed and running workspaces. - + :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. 
""" body = {} - if aws_region is not None: body['aws_region'] = aws_region - if credentials_id is not None: body['credentials_id'] = credentials_id - if custom_tags is not None: body['custom_tags'] = custom_tags - if managed_services_customer_managed_key_id is not None: body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id - if network_connectivity_config_id is not None: body['network_connectivity_config_id'] = network_connectivity_config_id - if network_id is not None: body['network_id'] = network_id - if private_access_settings_id is not None: body['private_access_settings_id'] = private_access_settings_id - if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id - if storage_customer_managed_key_id is not None: body['storage_customer_managed_key_id'] = storage_customer_managed_key_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}', body=body - - , headers=headers - ) - return Wait(self.wait_get_workspace_running - , response = UpdateResponse.from_dict(op_response) - , workspace_id=workspace_id) - - - def update_and_wait(self - , workspace_id: int - , * - , aws_region: Optional[str] = None, credentials_id: Optional[str] = None, custom_tags: Optional[Dict[str,str]] = None, managed_services_customer_managed_key_id: Optional[str] = None, network_connectivity_config_id: Optional[str] = None, network_id: Optional[str] = None, private_access_settings_id: Optional[str] = None, storage_configuration_id: Optional[str] = None, storage_customer_managed_key_id: Optional[str] = None, - timeout=timedelta(minutes=20)) -> Workspace: - return self.update(aws_region=aws_region, credentials_id=credentials_id, custom_tags=custom_tags, managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, network_connectivity_config_id=network_connectivity_config_id, network_id=network_id, private_access_settings_id=private_access_settings_id, storage_configuration_id=storage_configuration_id, storage_customer_managed_key_id=storage_customer_managed_key_id, workspace_id=workspace_id).result(timeout=timeout) - - \ No newline at end of file + if aws_region is not None: + body["aws_region"] = aws_region + if credentials_id is not None: + body["credentials_id"] = credentials_id + if custom_tags is not None: + body["custom_tags"] = custom_tags + if managed_services_customer_managed_key_id is not None: + body["managed_services_customer_managed_key_id"] = managed_services_customer_managed_key_id + if network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = network_connectivity_config_id + if network_id is not None: + body["network_id"] = network_id + if private_access_settings_id is not None: + body["private_access_settings_id"] = private_access_settings_id + if storage_configuration_id is not None: + body["storage_configuration_id"] = storage_configuration_id + if storage_customer_managed_key_id is not None: + body["storage_customer_managed_key_id"] = storage_customer_managed_key_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do( + "PATCH", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}", body=body, headers=headers + ) + return Wait( + self.wait_get_workspace_running, response=UpdateResponse.from_dict(op_response), workspace_id=workspace_id + ) + + def 
update_and_wait( + self, + workspace_id: int, + *, + aws_region: Optional[str] = None, + credentials_id: Optional[str] = None, + custom_tags: Optional[Dict[str, str]] = None, + managed_services_customer_managed_key_id: Optional[str] = None, + network_connectivity_config_id: Optional[str] = None, + network_id: Optional[str] = None, + private_access_settings_id: Optional[str] = None, + storage_configuration_id: Optional[str] = None, + storage_customer_managed_key_id: Optional[str] = None, + timeout=timedelta(minutes=20), + ) -> Workspace: + return self.update( + aws_region=aws_region, + credentials_id=credentials_id, + custom_tags=custom_tags, + managed_services_customer_managed_key_id=managed_services_customer_managed_key_id, + network_connectivity_config_id=network_connectivity_config_id, + network_id=network_id, + private_access_settings_id=private_access_settings_id, + storage_configuration_id=storage_configuration_id, + storage_customer_managed_key_id=storage_customer_managed_key_id, + workspace_id=workspace_id, + ).result(timeout=timeout) diff --git a/databricks/sdk/service/qualitymonitorv2.py b/databricks/sdk/service/qualitymonitorv2.py index c304fd95d..bf3ef953f 100755 --- a/databricks/sdk/service/qualitymonitorv2.py +++ b/databricks/sdk/service/qualitymonitorv2.py @@ -1,72 +1,66 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token - -_LOG = logging.getLogger('databricks.sdk') +from ._internal import _enum, _from_dict, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AnomalyDetectionConfig: last_run_id: Optional[str] = None """Run id of the last run of the workflow""" - + latest_run_status: Optional[AnomalyDetectionRunStatus] = None """The status of the last run of the workflow.""" - + def as_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_run_id is not None: body['last_run_id'] = self.last_run_id - if self.latest_run_status is not None: body['latest_run_status'] = self.latest_run_status.value + if self.last_run_id is not None: + body["last_run_id"] = self.last_run_id + if self.latest_run_status is not None: + body["latest_run_status"] = self.latest_run_status.value return body def as_shallow_dict(self) -> dict: """Serializes the AnomalyDetectionConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_run_id is not None: body['last_run_id'] = self.last_run_id - if self.latest_run_status is not None: body['latest_run_status'] = self.latest_run_status + if self.last_run_id is not None: + body["last_run_id"] = self.last_run_id + if self.latest_run_status is not None: + body["latest_run_status"] = self.latest_run_status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AnomalyDetectionConfig: """Deserializes the AnomalyDetectionConfig from a dictionary.""" - return 
cls(last_run_id=d.get('last_run_id', None), latest_run_status=_enum(d, 'latest_run_status', AnomalyDetectionRunStatus)) - - + return cls( + last_run_id=d.get("last_run_id", None), + latest_run_status=_enum(d, "latest_run_status", AnomalyDetectionRunStatus), + ) class AnomalyDetectionRunStatus(Enum): """Status of Anomaly Detection Job Run""" - - ANOMALY_DETECTION_RUN_STATUS_CANCELED = 'ANOMALY_DETECTION_RUN_STATUS_CANCELED' - ANOMALY_DETECTION_RUN_STATUS_FAILED = 'ANOMALY_DETECTION_RUN_STATUS_FAILED' - ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED = 'ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED' - ANOMALY_DETECTION_RUN_STATUS_PENDING = 'ANOMALY_DETECTION_RUN_STATUS_PENDING' - ANOMALY_DETECTION_RUN_STATUS_RUNNING = 'ANOMALY_DETECTION_RUN_STATUS_RUNNING' - ANOMALY_DETECTION_RUN_STATUS_SUCCESS = 'ANOMALY_DETECTION_RUN_STATUS_SUCCESS' - ANOMALY_DETECTION_RUN_STATUS_UNKNOWN = 'ANOMALY_DETECTION_RUN_STATUS_UNKNOWN' - ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR = 'ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR' - - - - + ANOMALY_DETECTION_RUN_STATUS_CANCELED = "ANOMALY_DETECTION_RUN_STATUS_CANCELED" + ANOMALY_DETECTION_RUN_STATUS_FAILED = "ANOMALY_DETECTION_RUN_STATUS_FAILED" + ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED = "ANOMALY_DETECTION_RUN_STATUS_JOB_DELETED" + ANOMALY_DETECTION_RUN_STATUS_PENDING = "ANOMALY_DETECTION_RUN_STATUS_PENDING" + ANOMALY_DETECTION_RUN_STATUS_RUNNING = "ANOMALY_DETECTION_RUN_STATUS_RUNNING" + ANOMALY_DETECTION_RUN_STATUS_SUCCESS = "ANOMALY_DETECTION_RUN_STATUS_SUCCESS" + ANOMALY_DETECTION_RUN_STATUS_UNKNOWN = "ANOMALY_DETECTION_RUN_STATUS_UNKNOWN" + ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR = "ANOMALY_DETECTION_RUN_STATUS_WORKSPACE_MISMATCH_ERROR" @dataclass @@ -85,238 +79,197 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteQualityMonitorResponse: """Deserializes the DeleteQualityMonitorResponse from a dictionary.""" return cls() - - - - - - - - @dataclass class ListQualityMonitorResponse: next_page_token: Optional[str] = None - + quality_monitors: Optional[List[QualityMonitor]] = None - + def as_dict(self) -> dict: """Serializes the ListQualityMonitorResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.quality_monitors: body['quality_monitors'] = [v.as_dict() for v in self.quality_monitors] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.quality_monitors: + body["quality_monitors"] = [v.as_dict() for v in self.quality_monitors] return body def as_shallow_dict(self) -> dict: """Serializes the ListQualityMonitorResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.quality_monitors: body['quality_monitors'] = self.quality_monitors + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.quality_monitors: + body["quality_monitors"] = self.quality_monitors return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListQualityMonitorResponse: """Deserializes the ListQualityMonitorResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), quality_monitors=_repeated_dict(d, 'quality_monitors', QualityMonitor)) - - + return cls( + next_page_token=d.get("next_page_token", None), + quality_monitors=_repeated_dict(d, "quality_monitors", QualityMonitor), + ) 
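These generated dataclasses share one pattern: `as_dict` builds a JSON-ready request body (serializing nested objects and enum values), `as_shallow_dict` keeps nested attributes as-is, and `from_dict` rebuilds the dataclass from a response dictionary. A minimal round-trip sketch with illustrative values:

    cfg = AnomalyDetectionConfig(
        last_run_id="run-123",
        latest_run_status=AnomalyDetectionRunStatus.ANOMALY_DETECTION_RUN_STATUS_SUCCESS,
    )
    body = cfg.as_dict()
    # {'last_run_id': 'run-123',
    #  'latest_run_status': 'ANOMALY_DETECTION_RUN_STATUS_SUCCESS'}
    assert AnomalyDetectionConfig.from_dict(body) == cfg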
@dataclass class QualityMonitor: object_type: str """The type of the monitored object. Can be one of the following: schema.""" - + object_id: str """The uuid of the request object. For example, schema id.""" - + anomaly_detection_config: Optional[AnomalyDetectionConfig] = None - + def as_dict(self) -> dict: """Serializes the QualityMonitor into a dictionary suitable for use as a JSON request body.""" body = {} - if self.anomaly_detection_config: body['anomaly_detection_config'] = self.anomaly_detection_config.as_dict() - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.anomaly_detection_config: + body["anomaly_detection_config"] = self.anomaly_detection_config.as_dict() + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the QualityMonitor into a shallow dictionary of its immediate attributes.""" body = {} - if self.anomaly_detection_config: body['anomaly_detection_config'] = self.anomaly_detection_config - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.anomaly_detection_config: + body["anomaly_detection_config"] = self.anomaly_detection_config + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QualityMonitor: """Deserializes the QualityMonitor from a dictionary.""" - return cls(anomaly_detection_config=_from_dict(d, 'anomaly_detection_config', AnomalyDetectionConfig), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - - - - - - + return cls( + anomaly_detection_config=_from_dict(d, "anomaly_detection_config", AnomalyDetectionConfig), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) class QualityMonitorV2API: """Manage data quality of UC objects (currently supports `schema`)""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_quality_monitor(self - , quality_monitor: QualityMonitor - ) -> QualityMonitor: + def create_quality_monitor(self, quality_monitor: QualityMonitor) -> QualityMonitor: """Create a quality monitor. - + Create a quality monitor on a UC object - + :param quality_monitor: :class:`QualityMonitor` - + :returns: :class:`QualityMonitor` """ body = quality_monitor.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/quality-monitors', body=body - - , headers=headers - ) - return QualityMonitor.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/quality-monitors", body=body, headers=headers) + return QualityMonitor.from_dict(res) - def delete_quality_monitor(self - , object_type: str, object_id: str - ): + def delete_quality_monitor(self, object_type: str, object_id: str): """Delete a quality monitor. - + Delete a quality monitor on a UC object - + :param object_type: str The type of the monitored object. Can be one of the following: schema. :param object_id: str The uuid of the request object. For example, schema id.
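Example (a sketch, assuming a `WorkspaceClient` `w` that exposes this service as `quality_monitor_v2`; the schema UUID is a placeholder):

    w.quality_monitor_v2.delete_quality_monitor(
        object_type="schema", object_id="<schema-uuid>"
    )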
- - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/quality-monitors/{object_type}/{object_id}' - - , headers=headers - ) - - - - - - - def get_quality_monitor(self - , object_type: str, object_id: str - ) -> QualityMonitor: + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/quality-monitors/{object_type}/{object_id}", headers=headers) + + def get_quality_monitor(self, object_type: str, object_id: str) -> QualityMonitor: """Read a quality monitor. - + Read a quality monitor on UC object - + :param object_type: str The type of the monitored object. Can be one of the following: schema. :param object_id: str The uuid of the request object. For example, schema id. - + :returns: :class:`QualityMonitor` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/quality-monitors/{object_type}/{object_id}' - - , headers=headers - ) - return QualityMonitor.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/quality-monitors/{object_type}/{object_id}", headers=headers) + return QualityMonitor.from_dict(res) - def list_quality_monitor(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[QualityMonitor]: + def list_quality_monitor( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[QualityMonitor]: """List quality monitors. - + (Unimplemented) List quality monitors - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`QualityMonitor` """ - + query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + while True: - json = self._api.do('GET','/api/2.0/quality-monitors', query=query - - , headers=headers - ) - if 'quality_monitors' in json: - for v in json['quality_monitors']: - yield QualityMonitor.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update_quality_monitor(self - , object_type: str, object_id: str, quality_monitor: QualityMonitor - ) -> QualityMonitor: + json = self._api.do("GET", "/api/2.0/quality-monitors", query=query, headers=headers) + if "quality_monitors" in json: + for v in json["quality_monitors"]: + yield QualityMonitor.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_quality_monitor( + self, object_type: str, object_id: str, quality_monitor: QualityMonitor + ) -> QualityMonitor: """Update a quality monitor. - + (Unimplemented) Update a quality monitor on UC object - + :param object_type: str The type of the monitored object. Can be one of the following: schema. :param object_id: str The uuid of the request object. For example, schema id. 
:param quality_monitor: :class:`QualityMonitor` - + :returns: :class:`QualityMonitor` """ body = quality_monitor.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/quality-monitors/{object_type}/{object_id}', body=body - - , headers=headers - ) - return QualityMonitor.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - \ No newline at end of file + res = self._api.do("PUT", f"/api/2.0/quality-monitors/{object_type}/{object_id}", body=body, headers=headers) + return QualityMonitor.from_dict(res)
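Taken together, the service maps one method per REST verb on `/api/2.0/quality-monitors`. A usage sketch (assuming a `WorkspaceClient` that wires this service up as `quality_monitor_v2`; the schema UUID is a placeholder):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.qualitymonitorv2 import QualityMonitor

    w = WorkspaceClient()
    # Create, then read back, a monitor on a schema.
    created = w.quality_monitor_v2.create_quality_monitor(
        QualityMonitor(object_type="schema", object_id="<schema-uuid>")
    )
    fetched = w.quality_monitor_v2.get_quality_monitor("schema", "<schema-uuid>")
    # list_quality_monitor() pages transparently via next_page_token.
    for m in w.quality_monitor_v2.list_quality_monitor():
        print(m.object_type, m.object_id)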
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 3d4e9ed1c..8d8c09ff8 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -1,58 +1,64 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging +import random +import threading +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, BinaryIO, Callable, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +import requests -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class Ai21LabsConfig: ai21labs_api_key: Optional[str] = None """The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" - + ai21labs_api_key_plaintext: Optional[str] = None """An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the Ai21LabsConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key - if self.ai21labs_api_key_plaintext is not None: body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext + if self.ai21labs_api_key is not None: + body["ai21labs_api_key"] = self.ai21labs_api_key + if self.ai21labs_api_key_plaintext is not None: + body["ai21labs_api_key_plaintext"] = self.ai21labs_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the Ai21LabsConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key - if self.ai21labs_api_key_plaintext is not None: body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext + if self.ai21labs_api_key is not None: + body["ai21labs_api_key"] = self.ai21labs_api_key + if self.ai21labs_api_key_plaintext is not None: + body["ai21labs_api_key_plaintext"] = self.ai21labs_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Ai21LabsConfig: """Deserializes the Ai21LabsConfig from a dictionary.""" - return cls(ai21labs_api_key=d.get('ai21labs_api_key', None), ai21labs_api_key_plaintext=d.get('ai21labs_api_key_plaintext', None)) - - + return cls( + ai21labs_api_key=d.get("ai21labs_api_key", None), + ai21labs_api_key_plaintext=d.get("ai21labs_api_key_plaintext", None), + ) @dataclass @@ -60,48 +66,62 @@ class AiGatewayConfig: fallback_config: Optional[FallbackConfig] = None """Configuration for traffic fallback which automatically falls back to other served entities if the request to a served entity fails with certain error codes, to increase availability.""" - + guardrails: Optional[AiGatewayGuardrails] = None """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.""" - + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None """Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.""" - + rate_limits: Optional[List[AiGatewayRateLimit]] = None """Configuration for rate limits which can be set to limit endpoint traffic.""" - + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None """Configuration to enable usage tracking using system tables.
These tables allow you to monitor operational usage on endpoints and their associated costs.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fallback_config: body['fallback_config'] = self.fallback_config.as_dict() - if self.guardrails: body['guardrails'] = self.guardrails.as_dict() - if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict() - if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() + if self.fallback_config: + body["fallback_config"] = self.fallback_config.as_dict() + if self.guardrails: + body["guardrails"] = self.guardrails.as_dict() + if self.inference_table_config: + body["inference_table_config"] = self.inference_table_config.as_dict() + if self.rate_limits: + body["rate_limits"] = [v.as_dict() for v in self.rate_limits] + if self.usage_tracking_config: + body["usage_tracking_config"] = self.usage_tracking_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.fallback_config: body['fallback_config'] = self.fallback_config - if self.guardrails: body['guardrails'] = self.guardrails - if self.inference_table_config: body['inference_table_config'] = self.inference_table_config - if self.rate_limits: body['rate_limits'] = self.rate_limits - if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config + if self.fallback_config: + body["fallback_config"] = self.fallback_config + if self.guardrails: + body["guardrails"] = self.guardrails + if self.inference_table_config: + body["inference_table_config"] = self.inference_table_config + if self.rate_limits: + body["rate_limits"] = self.rate_limits + if self.usage_tracking_config: + body["usage_tracking_config"] = self.usage_tracking_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayConfig: """Deserializes the AiGatewayConfig from a dictionary.""" - return cls(fallback_config=_from_dict(d, 'fallback_config', FallbackConfig), guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails), inference_table_config=_from_dict(d, 'inference_table_config', AiGatewayInferenceTableConfig), rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit), usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig)) - - + return cls( + fallback_config=_from_dict(d, "fallback_config", FallbackConfig), + guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), + inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig), + rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), + usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), + ) @dataclass @@ -109,102 +129,118 @@ class AiGatewayGuardrailParameters: invalid_keywords: Optional[List[str]] = None """List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.""" - + pii: Optional[AiGatewayGuardrailPiiBehavior] = None """Configuration for guardrail PII filter.""" - + safety: Optional[bool] = None """Indicates whether the safety filter is enabled.""" - + valid_topics: Optional[List[str]] = None """The list of allowed topics. 
Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayGuardrailParameters into a dictionary suitable for use as a JSON request body.""" body = {} - if self.invalid_keywords: body['invalid_keywords'] = [v for v in self.invalid_keywords] - if self.pii: body['pii'] = self.pii.as_dict() - if self.safety is not None: body['safety'] = self.safety - if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics] + if self.invalid_keywords: + body["invalid_keywords"] = [v for v in self.invalid_keywords] + if self.pii: + body["pii"] = self.pii.as_dict() + if self.safety is not None: + body["safety"] = self.safety + if self.valid_topics: + body["valid_topics"] = [v for v in self.valid_topics] return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayGuardrailParameters into a shallow dictionary of its immediate attributes.""" body = {} - if self.invalid_keywords: body['invalid_keywords'] = self.invalid_keywords - if self.pii: body['pii'] = self.pii - if self.safety is not None: body['safety'] = self.safety - if self.valid_topics: body['valid_topics'] = self.valid_topics + if self.invalid_keywords: + body["invalid_keywords"] = self.invalid_keywords + if self.pii: + body["pii"] = self.pii + if self.safety is not None: + body["safety"] = self.safety + if self.valid_topics: + body["valid_topics"] = self.valid_topics return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayGuardrailParameters: """Deserializes the AiGatewayGuardrailParameters from a dictionary.""" - return cls(invalid_keywords=d.get('invalid_keywords', None), pii=_from_dict(d, 'pii', AiGatewayGuardrailPiiBehavior), safety=d.get('safety', None), valid_topics=d.get('valid_topics', None)) - - + return cls( + invalid_keywords=d.get("invalid_keywords", None), + pii=_from_dict(d, "pii", AiGatewayGuardrailPiiBehavior), + safety=d.get("safety", None), + valid_topics=d.get("valid_topics", None), + ) @dataclass class AiGatewayGuardrailPiiBehavior: behavior: Optional[AiGatewayGuardrailPiiBehaviorBehavior] = None """Configuration for input guardrail filters.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body.""" body = {} - if self.behavior is not None: body['behavior'] = self.behavior.value + if self.behavior is not None: + body["behavior"] = self.behavior.value return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayGuardrailPiiBehavior into a shallow dictionary of its immediate attributes.""" body = {} - if self.behavior is not None: body['behavior'] = self.behavior + if self.behavior is not None: + body["behavior"] = self.behavior return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayGuardrailPiiBehavior: """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary.""" - return cls(behavior=_enum(d, 'behavior', AiGatewayGuardrailPiiBehaviorBehavior)) - - + return cls(behavior=_enum(d, "behavior", AiGatewayGuardrailPiiBehaviorBehavior)) class AiGatewayGuardrailPiiBehaviorBehavior(Enum): - - - BLOCK = 'BLOCK' - NONE = 'NONE' + + BLOCK = "BLOCK" + NONE = "NONE" + @dataclass class AiGatewayGuardrails: input: Optional[AiGatewayGuardrailParameters] = None """Configuration for input guardrail filters.""" - + output: Optional[AiGatewayGuardrailParameters] = None """Configuration for output guardrail filters.""" - + def as_dict(self) -> dict: """Serializes the 
AiGatewayGuardrails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.input: body['input'] = self.input.as_dict() - if self.output: body['output'] = self.output.as_dict() + if self.input: + body["input"] = self.input.as_dict() + if self.output: + body["output"] = self.output.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayGuardrails into a shallow dictionary of its immediate attributes.""" body = {} - if self.input: body['input'] = self.input - if self.output: body['output'] = self.output + if self.input: + body["input"] = self.input + if self.output: + body["output"] = self.output return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayGuardrails: """Deserializes the AiGatewayGuardrails from a dictionary.""" - return cls(input=_from_dict(d, 'input', AiGatewayGuardrailParameters), output=_from_dict(d, 'output', AiGatewayGuardrailParameters)) - - + return cls( + input=_from_dict(d, "input", AiGatewayGuardrailParameters), + output=_from_dict(d, "output", AiGatewayGuardrailParameters), + ) @dataclass @@ -212,194 +248,233 @@ class AiGatewayInferenceTableConfig: catalog_name: Optional[str] = None """The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.""" - + enabled: Optional[bool] = None """Indicates whether the inference table is enabled.""" - + schema_name: Optional[str] = None """The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.""" - + table_name_prefix: Optional[str] = None """The prefix of the table in Unity Catalog. 
NOTE: On update, you have to disable inference table first in order to change the prefix name.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayInferenceTableConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.enabled is not None: body['enabled'] = self.enabled - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.enabled is not None: + body["enabled"] = self.enabled + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayInferenceTableConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.enabled is not None: body['enabled'] = self.enabled - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.enabled is not None: + body["enabled"] = self.enabled + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayInferenceTableConfig: """Deserializes the AiGatewayInferenceTableConfig from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), enabled=d.get('enabled', None), schema_name=d.get('schema_name', None), table_name_prefix=d.get('table_name_prefix', None)) - - + return cls( + catalog_name=d.get("catalog_name", None), + enabled=d.get("enabled", None), + schema_name=d.get("schema_name", None), + table_name_prefix=d.get("table_name_prefix", None), + ) @dataclass class AiGatewayRateLimit: calls: int """Used to specify how many calls are allowed for a key within the renewal_period.""" - + renewal_period: AiGatewayRateLimitRenewalPeriod """Renewal period field for a rate limit. Currently, only 'minute' is supported.""" - + key: Optional[AiGatewayRateLimitKey] = None """Key field for a rate limit. 
Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayRateLimit into a dictionary suitable for use as a JSON request body.""" body = {} - if self.calls is not None: body['calls'] = self.calls - if self.key is not None: body['key'] = self.key.value - if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value + if self.calls is not None: + body["calls"] = self.calls + if self.key is not None: + body["key"] = self.key.value + if self.renewal_period is not None: + body["renewal_period"] = self.renewal_period.value return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayRateLimit into a shallow dictionary of its immediate attributes.""" body = {} - if self.calls is not None: body['calls'] = self.calls - if self.key is not None: body['key'] = self.key - if self.renewal_period is not None: body['renewal_period'] = self.renewal_period + if self.calls is not None: + body["calls"] = self.calls + if self.key is not None: + body["key"] = self.key + if self.renewal_period is not None: + body["renewal_period"] = self.renewal_period return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayRateLimit: """Deserializes the AiGatewayRateLimit from a dictionary.""" - return cls(calls=d.get('calls', None), key=_enum(d, 'key', AiGatewayRateLimitKey), renewal_period=_enum(d, 'renewal_period', AiGatewayRateLimitRenewalPeriod)) - - + return cls( + calls=d.get("calls", None), + key=_enum(d, "key", AiGatewayRateLimitKey), + renewal_period=_enum(d, "renewal_period", AiGatewayRateLimitRenewalPeriod), + ) class AiGatewayRateLimitKey(Enum): - - - ENDPOINT = 'endpoint' - USER = 'user' + + ENDPOINT = "endpoint" + USER = "user" + class AiGatewayRateLimitRenewalPeriod(Enum): - - - MINUTE = 'minute' + + MINUTE = "minute" + @dataclass class AiGatewayUsageTrackingConfig: enabled: Optional[bool] = None """Whether to enable usage tracking.""" - + def as_dict(self) -> dict: """Serializes the AiGatewayUsageTrackingConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body def as_shallow_dict(self) -> dict: """Serializes the AiGatewayUsageTrackingConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AiGatewayUsageTrackingConfig: """Deserializes the AiGatewayUsageTrackingConfig from a dictionary.""" - return cls(enabled=d.get('enabled', None)) - - + return cls(enabled=d.get("enabled", None))
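The AI Gateway pieces above compose into a single `AiGatewayConfig`. A construction sketch using only the classes defined in this file (illustrative values; catalog and schema names are placeholders):

    gateway = AiGatewayConfig(
        # Block requests whose input contains PII; enable the safety filter.
        guardrails=AiGatewayGuardrails(
            input=AiGatewayGuardrailParameters(
                pii=AiGatewayGuardrailPiiBehavior(
                    behavior=AiGatewayGuardrailPiiBehaviorBehavior.BLOCK
                ),
                safety=True,
            )
        ),
        # At most 100 calls per user per minute.
        rate_limits=[
            AiGatewayRateLimit(
                calls=100,
                renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
                key=AiGatewayRateLimitKey.USER,
            )
        ],
        inference_table_config=AiGatewayInferenceTableConfig(
            enabled=True, catalog_name="<catalog>", schema_name="<schema>"
        ),
        usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
    )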
@dataclass class AmazonBedrockConfig: aws_region: str """The AWS region to use. Bedrock has to be enabled there.""" - + bedrock_provider: AmazonBedrockConfigBedrockProvider """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.""" - + aws_access_key_id: Optional[str] = None """The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id_plaintext`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.""" - + aws_access_key_id_plaintext: Optional[str] = None """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.""" - + aws_secret_access_key: Optional[str] = None """The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" - + aws_secret_access_key_plaintext: Optional[str] = None """An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the external model will use to access AWS resources. You must authenticate using an instance profile or access keys. If you prefer to authenticate using access keys, see `aws_access_key_id`, `aws_access_key_id_plaintext`, `aws_secret_access_key` and `aws_secret_access_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id - if self.aws_access_key_id_plaintext is not None: body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext - if self.aws_region is not None: body['aws_region'] = self.aws_region - if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key - if self.aws_secret_access_key_plaintext is not None: body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext - if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.aws_access_key_id is not None: + body["aws_access_key_id"] = self.aws_access_key_id + if self.aws_access_key_id_plaintext is not None: + body["aws_access_key_id_plaintext"] = self.aws_access_key_id_plaintext + if self.aws_region is not None: + body["aws_region"] = self.aws_region + if self.aws_secret_access_key is not None: + body["aws_secret_access_key"] = self.aws_secret_access_key + if self.aws_secret_access_key_plaintext is not None: + body["aws_secret_access_key_plaintext"] = self.aws_secret_access_key_plaintext + if self.bedrock_provider is not None: + body["bedrock_provider"] = self.bedrock_provider.value + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn return body def as_shallow_dict(self) -> dict: """Serializes the AmazonBedrockConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id - if
self.aws_access_key_id_plaintext is not None: body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext - if self.aws_region is not None: body['aws_region'] = self.aws_region - if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key - if self.aws_secret_access_key_plaintext is not None: body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext - if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.aws_access_key_id is not None: + body["aws_access_key_id"] = self.aws_access_key_id + if self.aws_access_key_id_plaintext is not None: + body["aws_access_key_id_plaintext"] = self.aws_access_key_id_plaintext + if self.aws_region is not None: + body["aws_region"] = self.aws_region + if self.aws_secret_access_key is not None: + body["aws_secret_access_key"] = self.aws_secret_access_key + if self.aws_secret_access_key_plaintext is not None: + body["aws_secret_access_key_plaintext"] = self.aws_secret_access_key_plaintext + if self.bedrock_provider is not None: + body["bedrock_provider"] = self.bedrock_provider + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AmazonBedrockConfig: """Deserializes the AmazonBedrockConfig from a dictionary.""" - return cls(aws_access_key_id=d.get('aws_access_key_id', None), aws_access_key_id_plaintext=d.get('aws_access_key_id_plaintext', None), aws_region=d.get('aws_region', None), aws_secret_access_key=d.get('aws_secret_access_key', None), aws_secret_access_key_plaintext=d.get('aws_secret_access_key_plaintext', None), bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider), instance_profile_arn=d.get('instance_profile_arn', None)) - - + return cls( + aws_access_key_id=d.get("aws_access_key_id", None), + aws_access_key_id_plaintext=d.get("aws_access_key_id_plaintext", None), + aws_region=d.get("aws_region", None), + aws_secret_access_key=d.get("aws_secret_access_key", None), + aws_secret_access_key_plaintext=d.get("aws_secret_access_key_plaintext", None), + bedrock_provider=_enum(d, "bedrock_provider", AmazonBedrockConfigBedrockProvider), + instance_profile_arn=d.get("instance_profile_arn", None), + ) class AmazonBedrockConfigBedrockProvider(Enum): - - - AI21LABS = 'ai21labs' - AMAZON = 'amazon' - ANTHROPIC = 'anthropic' - COHERE = 'cohere' + + AI21LABS = "ai21labs" + AMAZON = "amazon" + ANTHROPIC = "anthropic" + COHERE = "cohere" + @dataclass class AnthropicConfig: @@ -407,69 +482,78 @@ class AnthropicConfig: """The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" - + anthropic_api_key_plaintext: Optional[str] = None """The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. 
You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key - if self.anthropic_api_key_plaintext is not None: body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext + if self.anthropic_api_key is not None: + body["anthropic_api_key"] = self.anthropic_api_key + if self.anthropic_api_key_plaintext is not None: + body["anthropic_api_key_plaintext"] = self.anthropic_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the AnthropicConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key - if self.anthropic_api_key_plaintext is not None: body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext + if self.anthropic_api_key is not None: + body["anthropic_api_key"] = self.anthropic_api_key + if self.anthropic_api_key_plaintext is not None: + body["anthropic_api_key_plaintext"] = self.anthropic_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AnthropicConfig: """Deserializes the AnthropicConfig from a dictionary.""" - return cls(anthropic_api_key=d.get('anthropic_api_key', None), anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None)) - - + return cls( + anthropic_api_key=d.get("anthropic_api_key", None), + anthropic_api_key_plaintext=d.get("anthropic_api_key_plaintext", None), + ) @dataclass class ApiKeyAuth: key: str """The name of the API key parameter used for authentication.""" - + value: Optional[str] = None """The Databricks secret key reference for an API Key. If you prefer to paste your token directly, see `value_plaintext`.""" - + value_plaintext: Optional[str] = None """The API Key provided as a plaintext string. 
If you prefer to reference your token using Databricks Secrets, see `value`.""" - + def as_dict(self) -> dict: """Serializes the ApiKeyAuth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value - if self.value_plaintext is not None: body['value_plaintext'] = self.value_plaintext + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + if self.value_plaintext is not None: + body["value_plaintext"] = self.value_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the ApiKeyAuth into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value - if self.value_plaintext is not None: body['value_plaintext'] = self.value_plaintext + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + if self.value_plaintext is not None: + body["value_plaintext"] = self.value_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ApiKeyAuth: """Deserializes the ApiKeyAuth from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None), value_plaintext=d.get('value_plaintext', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None), value_plaintext=d.get("value_plaintext", None)) @dataclass @@ -477,42 +561,53 @@ class AutoCaptureConfigInput: catalog_name: Optional[str] = None """The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.""" - + enabled: Optional[bool] = None """Indicates whether the inference table is enabled.""" - + schema_name: Optional[str] = None """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.""" - + table_name_prefix: Optional[str] = None """The prefix of the table in Unity Catalog. 
NOTE: On update, you cannot change the prefix name if the inference table is already enabled.""" - + def as_dict(self) -> dict: """Serializes the AutoCaptureConfigInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.enabled is not None: body['enabled'] = self.enabled - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.enabled is not None: + body["enabled"] = self.enabled + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix return body def as_shallow_dict(self) -> dict: """Serializes the AutoCaptureConfigInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.enabled is not None: body['enabled'] = self.enabled - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.enabled is not None: + body["enabled"] = self.enabled + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoCaptureConfigInput: """Deserializes the AutoCaptureConfigInput from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), enabled=d.get('enabled', None), schema_name=d.get('schema_name', None), table_name_prefix=d.get('table_name_prefix', None)) - - + return cls( + catalog_name=d.get("catalog_name", None), + enabled=d.get("enabled", None), + schema_name=d.get("schema_name", None), + table_name_prefix=d.get("table_name_prefix", None), + ) @dataclass @@ -520,70 +615,84 @@ class AutoCaptureConfigOutput: catalog_name: Optional[str] = None """The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.""" - + enabled: Optional[bool] = None """Indicates whether the inference table is enabled.""" - + schema_name: Optional[str] = None """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.""" - + state: Optional[AutoCaptureState] = None - + table_name_prefix: Optional[str] = None """The prefix of the table in Unity Catalog. 
NOTE: On update, you cannot change the prefix name if the inference table is already enabled.""" - + def as_dict(self) -> dict: """Serializes the AutoCaptureConfigOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.enabled is not None: body['enabled'] = self.enabled - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.state: body['state'] = self.state.as_dict() - if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.enabled is not None: + body["enabled"] = self.enabled + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.state: + body["state"] = self.state.as_dict() + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix return body def as_shallow_dict(self) -> dict: """Serializes the AutoCaptureConfigOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: body['catalog_name'] = self.catalog_name - if self.enabled is not None: body['enabled'] = self.enabled - if self.schema_name is not None: body['schema_name'] = self.schema_name - if self.state: body['state'] = self.state - if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.enabled is not None: + body["enabled"] = self.enabled + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.state: + body["state"] = self.state + if self.table_name_prefix is not None: + body["table_name_prefix"] = self.table_name_prefix return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoCaptureConfigOutput: """Deserializes the AutoCaptureConfigOutput from a dictionary.""" - return cls(catalog_name=d.get('catalog_name', None), enabled=d.get('enabled', None), schema_name=d.get('schema_name', None), state=_from_dict(d, 'state', AutoCaptureState), table_name_prefix=d.get('table_name_prefix', None)) - - + return cls( + catalog_name=d.get("catalog_name", None), + enabled=d.get("enabled", None), + schema_name=d.get("schema_name", None), + state=_from_dict(d, "state", AutoCaptureState), + table_name_prefix=d.get("table_name_prefix", None), + ) @dataclass class AutoCaptureState: payload_table: Optional[PayloadTable] = None - + def as_dict(self) -> dict: """Serializes the AutoCaptureState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.payload_table: body['payload_table'] = self.payload_table.as_dict() + if self.payload_table: + body["payload_table"] = self.payload_table.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AutoCaptureState into a shallow dictionary of its immediate attributes.""" body = {} - if self.payload_table: body['payload_table'] = self.payload_table + if self.payload_table: + body["payload_table"] = self.payload_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutoCaptureState: """Deserializes the AutoCaptureState from a dictionary.""" - return cls(payload_table=_from_dict(d, 'payload_table', PayloadTable)) - - + return cls(payload_table=_from_dict(d, "payload_table", PayloadTable)) @dataclass @@ -591,136 +700,146 @@ class BearerTokenAuth: token: Optional[str] = None """The Databricks secret key reference for a 
token. If you prefer to paste your token directly, see `token_plaintext`.""" - + token_plaintext: Optional[str] = None """The token provided as a plaintext string. If you prefer to reference your token using Databricks Secrets, see `token`.""" - + def as_dict(self) -> dict: """Serializes the BearerTokenAuth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token is not None: body['token'] = self.token - if self.token_plaintext is not None: body['token_plaintext'] = self.token_plaintext + if self.token is not None: + body["token"] = self.token + if self.token_plaintext is not None: + body["token_plaintext"] = self.token_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the BearerTokenAuth into a shallow dictionary of its immediate attributes.""" body = {} - if self.token is not None: body['token'] = self.token - if self.token_plaintext is not None: body['token_plaintext'] = self.token_plaintext + if self.token is not None: + body["token"] = self.token + if self.token_plaintext is not None: + body["token_plaintext"] = self.token_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BearerTokenAuth: """Deserializes the BearerTokenAuth from a dictionary.""" - return cls(token=d.get('token', None), token_plaintext=d.get('token_plaintext', None)) - - - - - + return cls(token=d.get("token", None), token_plaintext=d.get("token_plaintext", None)) @dataclass class BuildLogsResponse: logs: str """The logs associated with building the served entity's environment.""" - + def as_dict(self) -> dict: """Serializes the BuildLogsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.logs is not None: body['logs'] = self.logs + if self.logs is not None: + body["logs"] = self.logs return body def as_shallow_dict(self) -> dict: """Serializes the BuildLogsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.logs is not None: body['logs'] = self.logs + if self.logs is not None: + body["logs"] = self.logs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BuildLogsResponse: """Deserializes the BuildLogsResponse from a dictionary.""" - return cls(logs=d.get('logs', None)) - - + return cls(logs=d.get("logs", None)) @dataclass class ChatMessage: content: Optional[str] = None """The content of the message.""" - + role: Optional[ChatMessageRole] = None """The role of the message. 
One of [system, user, assistant].""" - + def as_dict(self) -> dict: """Serializes the ChatMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: body['content'] = self.content - if self.role is not None: body['role'] = self.role.value + if self.content is not None: + body["content"] = self.content + if self.role is not None: + body["role"] = self.role.value return body def as_shallow_dict(self) -> dict: """Serializes the ChatMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: body['content'] = self.content - if self.role is not None: body['role'] = self.role + if self.content is not None: + body["content"] = self.content + if self.role is not None: + body["role"] = self.role return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ChatMessage: """Deserializes the ChatMessage from a dictionary.""" - return cls(content=d.get('content', None), role=_enum(d, 'role', ChatMessageRole)) - - + return cls(content=d.get("content", None), role=_enum(d, "role", ChatMessageRole)) class ChatMessageRole(Enum): """The role of the message. One of [system, user, assistant].""" - - ASSISTANT = 'assistant' - SYSTEM = 'system' - USER = 'user' + + ASSISTANT = "assistant" + SYSTEM = "system" + USER = "user" + @dataclass class CohereConfig: cohere_api_base: Optional[str] = None """This is an optional field to provide a customized base URL for the Cohere API. If left unspecified, the standard Cohere base URL is used.""" - + cohere_api_key: Optional[str] = None """The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" - + cohere_api_key_plaintext: Optional[str] = None """The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. 
You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the CohereConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base - if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key - if self.cohere_api_key_plaintext is not None: body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext + if self.cohere_api_base is not None: + body["cohere_api_base"] = self.cohere_api_base + if self.cohere_api_key is not None: + body["cohere_api_key"] = self.cohere_api_key + if self.cohere_api_key_plaintext is not None: + body["cohere_api_key_plaintext"] = self.cohere_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the CohereConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base - if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key - if self.cohere_api_key_plaintext is not None: body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext + if self.cohere_api_base is not None: + body["cohere_api_base"] = self.cohere_api_base + if self.cohere_api_key is not None: + body["cohere_api_key"] = self.cohere_api_key + if self.cohere_api_key_plaintext is not None: + body["cohere_api_key_plaintext"] = self.cohere_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CohereConfig: """Deserializes the CohereConfig from a dictionary.""" - return cls(cohere_api_base=d.get('cohere_api_base', None), cohere_api_key=d.get('cohere_api_key', None), cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None)) - - + return cls( + cohere_api_base=d.get("cohere_api_base", None), + cohere_api_key=d.get("cohere_api_key", None), + cohere_api_key_plaintext=d.get("cohere_api_key_plaintext", None), + ) @dataclass @@ -728,45 +847,59 @@ class CreatePtEndpointRequest: name: str """The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. 
An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - + config: PtEndpointCoreConfig """The core config of the serving endpoint.""" - + ai_gateway: Optional[AiGatewayConfig] = None """The AI Gateway configuration for the serving endpoint.""" - + budget_policy_id: Optional[str] = None """The budget policy associated with the endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - + def as_dict(self) -> dict: """Serializes the CreatePtEndpointRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config.as_dict() - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config.as_dict() + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreatePtEndpointRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config - if self.name is not None: body['name'] = self.name - if self.tags: body['tags'] = self.tags + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config + if self.name is not None: + body["name"] = self.name + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePtEndpointRequest: """Deserializes the CreatePtEndpointRequest from a dictionary.""" - return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', PtEndpointCoreConfig), name=d.get('name', None), tags=_repeated_dict(d, 'tags', EndpointTag)) - - + return cls( + ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), + budget_policy_id=d.get("budget_policy_id", None), + config=_from_dict(d, "config", PtEndpointCoreConfig), + name=d.get("name", None), + tags=_repeated_dict(d, "tags", EndpointTag), + ) @dataclass @@ -774,129 +907,159 @@ class CreateServingEndpoint: name: str """The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - + ai_gateway: Optional[AiGatewayConfig] = None """The AI Gateway configuration for the serving endpoint. 
NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.""" - + budget_policy_id: Optional[str] = None """The budget policy to be applied to the serving endpoint.""" - + config: Optional[EndpointCoreConfigInput] = None """The core config of the serving endpoint.""" - + rate_limits: Optional[List[RateLimit]] = None """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.""" - + route_optimized: Optional[bool] = None """Enable route optimization for the serving endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - + def as_dict(self) -> dict: """Serializes the CreateServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config.as_dict() - if self.name is not None: body['name'] = self.name - if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] - if self.route_optimized is not None: body['route_optimized'] = self.route_optimized - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config.as_dict() + if self.name is not None: + body["name"] = self.name + if self.rate_limits: + body["rate_limits"] = [v.as_dict() for v in self.rate_limits] + if self.route_optimized is not None: + body["route_optimized"] = self.route_optimized + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config - if self.name is not None: body['name'] = self.name - if self.rate_limits: body['rate_limits'] = self.rate_limits - if self.route_optimized is not None: body['route_optimized'] = self.route_optimized - if self.tags: body['tags'] = self.tags + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config + if self.name is not None: + body["name"] = self.name + if self.rate_limits: + body["rate_limits"] = self.rate_limits + if self.route_optimized is not None: + body["route_optimized"] = self.route_optimized + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateServingEndpoint: """Deserializes the CreateServingEndpoint from a dictionary.""" - return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', EndpointCoreConfigInput), name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit), route_optimized=d.get('route_optimized', None), tags=_repeated_dict(d, 'tags', EndpointTag)) - - + return cls( + ai_gateway=_from_dict(d, 
"ai_gateway", AiGatewayConfig), + budget_policy_id=d.get("budget_policy_id", None), + config=_from_dict(d, "config", EndpointCoreConfigInput), + name=d.get("name", None), + rate_limits=_repeated_dict(d, "rate_limits", RateLimit), + route_optimized=d.get("route_optimized", None), + tags=_repeated_dict(d, "tags", EndpointTag), + ) @dataclass class CustomProviderConfig: """Configs needed to create a custom provider model route.""" - + custom_provider_url: str """This is a field to provide the URL of the custom provider API.""" - + api_key_auth: Optional[ApiKeyAuth] = None """This is a field to provide API key authentication for the custom provider API. You can only specify one authentication method.""" - + bearer_token_auth: Optional[BearerTokenAuth] = None """This is a field to provide bearer token authentication for the custom provider API. You can only specify one authentication method.""" - + def as_dict(self) -> dict: """Serializes the CustomProviderConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.api_key_auth: body['api_key_auth'] = self.api_key_auth.as_dict() - if self.bearer_token_auth: body['bearer_token_auth'] = self.bearer_token_auth.as_dict() - if self.custom_provider_url is not None: body['custom_provider_url'] = self.custom_provider_url + if self.api_key_auth: + body["api_key_auth"] = self.api_key_auth.as_dict() + if self.bearer_token_auth: + body["bearer_token_auth"] = self.bearer_token_auth.as_dict() + if self.custom_provider_url is not None: + body["custom_provider_url"] = self.custom_provider_url return body def as_shallow_dict(self) -> dict: """Serializes the CustomProviderConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.api_key_auth: body['api_key_auth'] = self.api_key_auth - if self.bearer_token_auth: body['bearer_token_auth'] = self.bearer_token_auth - if self.custom_provider_url is not None: body['custom_provider_url'] = self.custom_provider_url + if self.api_key_auth: + body["api_key_auth"] = self.api_key_auth + if self.bearer_token_auth: + body["bearer_token_auth"] = self.bearer_token_auth + if self.custom_provider_url is not None: + body["custom_provider_url"] = self.custom_provider_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomProviderConfig: """Deserializes the CustomProviderConfig from a dictionary.""" - return cls(api_key_auth=_from_dict(d, 'api_key_auth', ApiKeyAuth), bearer_token_auth=_from_dict(d, 'bearer_token_auth', BearerTokenAuth), custom_provider_url=d.get('custom_provider_url', None)) - - + return cls( + api_key_auth=_from_dict(d, "api_key_auth", ApiKeyAuth), + bearer_token_auth=_from_dict(d, "bearer_token_auth", BearerTokenAuth), + custom_provider_url=d.get("custom_provider_url", None), + ) @dataclass class DataPlaneInfo: """Details necessary to query this object's API through the DataPlane APIs.""" - + authorization_details: Optional[str] = None """Authorization details as a string.""" - + endpoint_url: Optional[str] = None """The URL of the endpoint for this operation in the dataplane.""" - + def as_dict(self) -> dict: """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authorization_details is not None: body['authorization_details'] = self.authorization_details - if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url + if self.authorization_details is not None: + body["authorization_details"] = self.authorization_details + if self.endpoint_url is not None: + 
body["endpoint_url"] = self.endpoint_url return body def as_shallow_dict(self) -> dict: """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.authorization_details is not None: body['authorization_details'] = self.authorization_details - if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url + if self.authorization_details is not None: + body["authorization_details"] = self.authorization_details + if self.endpoint_url is not None: + body["endpoint_url"] = self.endpoint_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataPlaneInfo: """Deserializes the DataPlaneInfo from a dictionary.""" - return cls(authorization_details=d.get('authorization_details', None), endpoint_url=d.get('endpoint_url', None)) - - + return cls(authorization_details=d.get("authorization_details", None), endpoint_url=d.get("endpoint_url", None)) @dataclass @@ -904,75 +1067,87 @@ class DatabricksModelServingConfig: databricks_workspace_url: str """The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.""" - + databricks_api_token: Optional[str] = None """The Databricks secret key reference for a Databricks API token that corresponds to a user or service principal with Can Query access to the model serving endpoint pointed to by this external model. If you prefer to paste your API key directly, see `databricks_api_token_plaintext`. You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.""" - + databricks_api_token_plaintext: Optional[str] = None """The Databricks API token that corresponds to a user or service principal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`. 
You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the DatabricksModelServingConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token - if self.databricks_api_token_plaintext is not None: body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext - if self.databricks_workspace_url is not None: body['databricks_workspace_url'] = self.databricks_workspace_url + if self.databricks_api_token is not None: + body["databricks_api_token"] = self.databricks_api_token + if self.databricks_api_token_plaintext is not None: + body["databricks_api_token_plaintext"] = self.databricks_api_token_plaintext + if self.databricks_workspace_url is not None: + body["databricks_workspace_url"] = self.databricks_workspace_url return body def as_shallow_dict(self) -> dict: """Serializes the DatabricksModelServingConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token - if self.databricks_api_token_plaintext is not None: body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext - if self.databricks_workspace_url is not None: body['databricks_workspace_url'] = self.databricks_workspace_url + if self.databricks_api_token is not None: + body["databricks_api_token"] = self.databricks_api_token + if self.databricks_api_token_plaintext is not None: + body["databricks_api_token_plaintext"] = self.databricks_api_token_plaintext + if self.databricks_workspace_url is not None: + body["databricks_workspace_url"] = self.databricks_workspace_url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DatabricksModelServingConfig: """Deserializes the DatabricksModelServingConfig from a dictionary.""" - return cls(databricks_api_token=d.get('databricks_api_token', None), databricks_api_token_plaintext=d.get('databricks_api_token_plaintext', None), databricks_workspace_url=d.get('databricks_workspace_url', None)) - - + return cls( + databricks_api_token=d.get("databricks_api_token", None), + databricks_api_token_plaintext=d.get("databricks_api_token_plaintext", None), + databricks_workspace_url=d.get("databricks_workspace_url", None), + ) @dataclass class DataframeSplitInput: columns: Optional[List[Any]] = None - + data: Optional[List[Any]] = None - + index: Optional[List[int]] = None - + def as_dict(self) -> dict: """Serializes the DataframeSplitInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: body['columns'] = [v for v in self.columns] - if self.data: body['data'] = [v for v in self.data] - if self.index: body['index'] = [v for v in self.index] + if self.columns: + body["columns"] = [v for v in self.columns] + if self.data: + body["data"] = [v for v in self.data] + if self.index: + body["index"] = [v for v in self.index] return body def as_shallow_dict(self) -> dict: """Serializes the DataframeSplitInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: body['columns'] = self.columns - if self.data: body['data'] = self.data - if self.index: body['index'] = self.index + if self.columns: + body["columns"] = self.columns + if self.data: + body["data"] = self.data + if self.index: + body["index"] = self.index return body @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> DataframeSplitInput: """Deserializes the DataframeSplitInput from a dictionary.""" - return cls(columns=d.get('columns', None), data=d.get('data', None), index=d.get('index', None)) - - + return cls(columns=d.get("columns", None), data=d.get("data", None), index=d.get("index", None)) @dataclass @@ -991,51 +1166,55 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - @dataclass class EmbeddingsV1ResponseEmbeddingElement: embedding: Optional[List[float]] = None - + index: Optional[int] = None """The index of the embedding in the response.""" - + object: Optional[EmbeddingsV1ResponseEmbeddingElementObject] = None """This will always be 'embedding'.""" - + def as_dict(self) -> dict: """Serializes the EmbeddingsV1ResponseEmbeddingElement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding: body['embedding'] = [v for v in self.embedding] - if self.index is not None: body['index'] = self.index - if self.object is not None: body['object'] = self.object.value + if self.embedding: + body["embedding"] = [v for v in self.embedding] + if self.index is not None: + body["index"] = self.index + if self.object is not None: + body["object"] = self.object.value return body def as_shallow_dict(self) -> dict: """Serializes the EmbeddingsV1ResponseEmbeddingElement into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding: body['embedding'] = self.embedding - if self.index is not None: body['index'] = self.index - if self.object is not None: body['object'] = self.object + if self.embedding: + body["embedding"] = self.embedding + if self.index is not None: + body["index"] = self.index + if self.object is not None: + body["object"] = self.object return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingsV1ResponseEmbeddingElement: """Deserializes the EmbeddingsV1ResponseEmbeddingElement from a dictionary.""" - return cls(embedding=d.get('embedding', None), index=d.get('index', None), object=_enum(d, 'object', EmbeddingsV1ResponseEmbeddingElementObject)) - - + return cls( + embedding=d.get("embedding", None), + index=d.get("index", None), + object=_enum(d, "object", EmbeddingsV1ResponseEmbeddingElementObject), + ) class EmbeddingsV1ResponseEmbeddingElementObject(Enum): """This will always be 'embedding'.""" - - EMBEDDING = 'embedding' + + EMBEDDING = "embedding" + @dataclass class EndpointCoreConfigInput: @@ -1044,46 +1223,60 @@ class EndpointCoreConfigInput: Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - + name: Optional[str] = None """The name of the serving endpoint to update. 
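As a usage sketch for the config input types defined here, an endpoint could be created through the SDK's serving_endpoints API. The model name, version, and sizing below are placeholders, and ServedEntityInput is assumed to expose the fields defined later in this module:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.serving import EndpointCoreConfigInput, ServedEntityInput

    w = WorkspaceClient()
    config = EndpointCoreConfigInput(
        served_entities=[
            ServedEntityInput(
                entity_name="main.default.my_model",  # placeholder UC model path
                entity_version="1",
                workload_size="Small",
                scale_to_zero_enabled=True,
            )
        ]
    )
    w.serving_endpoints.create(name="my-endpoint", config=config)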
This field is required.""" - + served_entities: Optional[List[ServedEntityInput]] = None """The list of served entities under the serving endpoint config.""" - + served_models: Optional[List[ServedModelInput]] = None """(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.""" - + traffic_config: Optional[TrafficConfig] = None """The traffic configuration associated with the serving endpoint config.""" - + def as_dict(self) -> dict: """Serializes the EndpointCoreConfigInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() - if self.name is not None: body['name'] = self.name - if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] - if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] - if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() + if self.auto_capture_config: + body["auto_capture_config"] = self.auto_capture_config.as_dict() + if self.name is not None: + body["name"] = self.name + if self.served_entities: + body["served_entities"] = [v.as_dict() for v in self.served_entities] + if self.served_models: + body["served_models"] = [v.as_dict() for v in self.served_models] + if self.traffic_config: + body["traffic_config"] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EndpointCoreConfigInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config - if self.name is not None: body['name'] = self.name - if self.served_entities: body['served_entities'] = self.served_entities - if self.served_models: body['served_models'] = self.served_models - if self.traffic_config: body['traffic_config'] = self.traffic_config + if self.auto_capture_config: + body["auto_capture_config"] = self.auto_capture_config + if self.name is not None: + body["name"] = self.name + if self.served_entities: + body["served_entities"] = self.served_entities + if self.served_models: + body["served_models"] = self.served_models + if self.traffic_config: + body["traffic_config"] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointCoreConfigInput: """Deserializes the EndpointCoreConfigInput from a dictionary.""" - return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigInput), name=d.get('name', None), served_entities=_repeated_dict(d, 'served_entities', ServedEntityInput), served_models=_repeated_dict(d, 'served_models', ServedModelInput), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) - - + return cls( + auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigInput), + name=d.get("name", None), + served_entities=_repeated_dict(d, "served_entities", ServedEntityInput), + served_models=_repeated_dict(d, "served_models", ServedModelInput), + traffic_config=_from_dict(d, "traffic_config", TrafficConfig), + ) @dataclass @@ -1093,77 +1286,96 @@ class EndpointCoreConfigOutput: Catalog. 
Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - + config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" - + served_entities: Optional[List[ServedEntityOutput]] = None """The list of served entities under the serving endpoint config.""" - + served_models: Optional[List[ServedModelOutput]] = None """(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.""" - + traffic_config: Optional[TrafficConfig] = None """The traffic configuration associated with the serving endpoint config.""" - + def as_dict(self) -> dict: """Serializes the EndpointCoreConfigOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() - if self.config_version is not None: body['config_version'] = self.config_version - if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] - if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] - if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() + if self.auto_capture_config: + body["auto_capture_config"] = self.auto_capture_config.as_dict() + if self.config_version is not None: + body["config_version"] = self.config_version + if self.served_entities: + body["served_entities"] = [v.as_dict() for v in self.served_entities] + if self.served_models: + body["served_models"] = [v.as_dict() for v in self.served_models] + if self.traffic_config: + body["traffic_config"] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EndpointCoreConfigOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config - if self.config_version is not None: body['config_version'] = self.config_version - if self.served_entities: body['served_entities'] = self.served_entities - if self.served_models: body['served_models'] = self.served_models - if self.traffic_config: body['traffic_config'] = self.traffic_config + if self.auto_capture_config: + body["auto_capture_config"] = self.auto_capture_config + if self.config_version is not None: + body["config_version"] = self.config_version + if self.served_entities: + body["served_entities"] = self.served_entities + if self.served_models: + body["served_models"] = self.served_models + if self.traffic_config: + body["traffic_config"] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointCoreConfigOutput: """Deserializes the EndpointCoreConfigOutput from a dictionary.""" - return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput), config_version=d.get('config_version', None), served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput), served_models=_repeated_dict(d, 'served_models', ServedModelOutput), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) - - + return cls( + auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigOutput), + config_version=d.get("config_version", None), + served_entities=_repeated_dict(d, "served_entities", ServedEntityOutput), + served_models=_repeated_dict(d, 
"served_models", ServedModelOutput), + traffic_config=_from_dict(d, "traffic_config", TrafficConfig), + ) @dataclass class EndpointCoreConfigSummary: served_entities: Optional[List[ServedEntitySpec]] = None """The list of served entities under the serving endpoint config.""" - + served_models: Optional[List[ServedModelSpec]] = None """(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.""" - + def as_dict(self) -> dict: """Serializes the EndpointCoreConfigSummary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] - if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] + if self.served_entities: + body["served_entities"] = [v.as_dict() for v in self.served_entities] + if self.served_models: + body["served_models"] = [v.as_dict() for v in self.served_models] return body def as_shallow_dict(self) -> dict: """Serializes the EndpointCoreConfigSummary into a shallow dictionary of its immediate attributes.""" body = {} - if self.served_entities: body['served_entities'] = self.served_entities - if self.served_models: body['served_models'] = self.served_models + if self.served_entities: + body["served_entities"] = self.served_entities + if self.served_models: + body["served_models"] = self.served_models return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointCoreConfigSummary: """Deserializes the EndpointCoreConfigSummary from a dictionary.""" - return cls(served_entities=_repeated_dict(d, 'served_entities', ServedEntitySpec), served_models=_repeated_dict(d, 'served_models', ServedModelSpec)) - - + return cls( + served_entities=_repeated_dict(d, "served_entities", ServedEntitySpec), + served_models=_repeated_dict(d, "served_models", ServedModelSpec), + ) @dataclass @@ -1173,51 +1385,68 @@ class EndpointPendingConfig: Catalog. 
Note: this field is deprecated for creating new provisioned throughput endpoints, or updating existing provisioned throughput endpoints that never have inference table configured; in these cases please use AI Gateway to manage inference tables.""" - + config_version: Optional[int] = None """The config version that the serving endpoint is currently serving.""" - + served_entities: Optional[List[ServedEntityOutput]] = None """The list of served entities belonging to the last issued update to the serving endpoint.""" - + served_models: Optional[List[ServedModelOutput]] = None """(Deprecated, use served_entities instead) The list of served models belonging to the last issued update to the serving endpoint.""" - + start_time: Optional[int] = None """The timestamp when the update to the pending config started.""" - + traffic_config: Optional[TrafficConfig] = None """The traffic config defining how invocations to the serving endpoint should be routed.""" - + def as_dict(self) -> dict: """Serializes the EndpointPendingConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict() - if self.config_version is not None: body['config_version'] = self.config_version - if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] - if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models] - if self.start_time is not None: body['start_time'] = self.start_time - if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() + if self.auto_capture_config: + body["auto_capture_config"] = self.auto_capture_config.as_dict() + if self.config_version is not None: + body["config_version"] = self.config_version + if self.served_entities: + body["served_entities"] = [v.as_dict() for v in self.served_entities] + if self.served_models: + body["served_models"] = [v.as_dict() for v in self.served_models] + if self.start_time is not None: + body["start_time"] = self.start_time + if self.traffic_config: + body["traffic_config"] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EndpointPendingConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config - if self.config_version is not None: body['config_version'] = self.config_version - if self.served_entities: body['served_entities'] = self.served_entities - if self.served_models: body['served_models'] = self.served_models - if self.start_time is not None: body['start_time'] = self.start_time - if self.traffic_config: body['traffic_config'] = self.traffic_config + if self.auto_capture_config: + body["auto_capture_config"] = self.auto_capture_config + if self.config_version is not None: + body["config_version"] = self.config_version + if self.served_entities: + body["served_entities"] = self.served_entities + if self.served_models: + body["served_models"] = self.served_models + if self.start_time is not None: + body["start_time"] = self.start_time + if self.traffic_config: + body["traffic_config"] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointPendingConfig: """Deserializes the EndpointPendingConfig from a dictionary.""" - return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput), config_version=d.get('config_version', None), 
served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput), served_models=_repeated_dict(d, 'served_models', ServedModelOutput), start_time=d.get('start_time', None), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) - - + return cls( + auto_capture_config=_from_dict(d, "auto_capture_config", AutoCaptureConfigOutput), + config_version=d.get("config_version", None), + served_entities=_repeated_dict(d, "served_entities", ServedEntityOutput), + served_models=_repeated_dict(d, "served_models", ServedModelOutput), + start_time=d.get("start_time", None), + traffic_config=_from_dict(d, "traffic_config", TrafficConfig), + ) @dataclass @@ -1227,190 +1456,211 @@ class EndpointState: progress, if the update failed, or if there is no update in progress. Note that if the endpoint's config_update state value is IN_PROGRESS, another update can not be made until the update completes or fails.""" - + ready: Optional[EndpointStateReady] = None """The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is READY if all of the served entities in its active configuration are ready. If any of the actively served entities are in a non-ready state, the endpoint state will be NOT_READY.""" - + def as_dict(self) -> dict: """Serializes the EndpointState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config_update is not None: body['config_update'] = self.config_update.value - if self.ready is not None: body['ready'] = self.ready.value + if self.config_update is not None: + body["config_update"] = self.config_update.value + if self.ready is not None: + body["ready"] = self.ready.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointState into a shallow dictionary of its immediate attributes.""" body = {} - if self.config_update is not None: body['config_update'] = self.config_update - if self.ready is not None: body['ready'] = self.ready + if self.config_update is not None: + body["config_update"] = self.config_update + if self.ready is not None: + body["ready"] = self.ready return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointState: """Deserializes the EndpointState from a dictionary.""" - return cls(config_update=_enum(d, 'config_update', EndpointStateConfigUpdate), ready=_enum(d, 'ready', EndpointStateReady)) - - + return cls( + config_update=_enum(d, "config_update", EndpointStateConfigUpdate), + ready=_enum(d, "ready", EndpointStateReady), + ) class EndpointStateConfigUpdate(Enum): - - - IN_PROGRESS = 'IN_PROGRESS' - NOT_UPDATING = 'NOT_UPDATING' - UPDATE_CANCELED = 'UPDATE_CANCELED' - UPDATE_FAILED = 'UPDATE_FAILED' + + IN_PROGRESS = "IN_PROGRESS" + NOT_UPDATING = "NOT_UPDATING" + UPDATE_CANCELED = "UPDATE_CANCELED" + UPDATE_FAILED = "UPDATE_FAILED" + class EndpointStateReady(Enum): - - - NOT_READY = 'NOT_READY' - READY = 'READY' + + NOT_READY = "NOT_READY" + READY = "READY" + @dataclass class EndpointTag: key: str """Key field for a serving endpoint tag.""" - + value: Optional[str] = None """Optional value field for a serving endpoint tag.""" - + def as_dict(self) -> dict: """Serializes the EndpointTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointTag into a 
shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTag: """Deserializes the EndpointTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class EndpointTags: tags: Optional[List[EndpointTag]] = None - + def as_dict(self) -> dict: """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body.""" body = {} - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the EndpointTags into a shallow dictionary of its immediate attributes.""" body = {} - if self.tags: body['tags'] = self.tags + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTags: """Deserializes the EndpointTags from a dictionary.""" - return cls(tags=_repeated_dict(d, 'tags', EndpointTag)) - - - - - + return cls(tags=_repeated_dict(d, "tags", EndpointTag)) @dataclass class ExportMetricsResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the ExportMetricsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the ExportMetricsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExportMetricsResponse: """Deserializes the ExportMetricsResponse from a dictionary.""" - return cls(contents=d.get('contents', None)) - - + return cls(contents=d.get("contents", None)) @dataclass class ExternalFunctionRequest: """Simple Proto message for testing""" - + connection_name: str """The connection name to use. This is required to identify the external connection.""" - + method: ExternalFunctionRequestHttpMethod """The HTTP method to use (e.g., 'GET', 'POST').""" - + path: str """The relative path for the API endpoint. This is required.""" - + headers: Optional[str] = None """Additional headers for the request. 
If not provided, only auth headers from connections would be passed.""" - + json: Optional[str] = None """The JSON payload to send in the request body.""" - + params: Optional[str] = None """Query parameters for the request.""" - + def as_dict(self) -> dict: """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.headers is not None: body['headers'] = self.headers - if self.json is not None: body['json'] = self.json - if self.method is not None: body['method'] = self.method.value - if self.params is not None: body['params'] = self.params - if self.path is not None: body['path'] = self.path + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.headers is not None: + body["headers"] = self.headers + if self.json is not None: + body["json"] = self.json + if self.method is not None: + body["method"] = self.method.value + if self.params is not None: + body["params"] = self.params + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.headers is not None: body['headers'] = self.headers - if self.json is not None: body['json'] = self.json - if self.method is not None: body['method'] = self.method - if self.params is not None: body['params'] = self.params - if self.path is not None: body['path'] = self.path + if self.connection_name is not None: + body["connection_name"] = self.connection_name + if self.headers is not None: + body["headers"] = self.headers + if self.json is not None: + body["json"] = self.json + if self.method is not None: + body["method"] = self.method + if self.params is not None: + body["params"] = self.params + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalFunctionRequest: """Deserializes the ExternalFunctionRequest from a dictionary.""" - return cls(connection_name=d.get('connection_name', None), headers=d.get('headers', None), json=d.get('json', None), method=_enum(d, 'method', ExternalFunctionRequestHttpMethod), params=d.get('params', None), path=d.get('path', None)) - - + return cls( + connection_name=d.get("connection_name", None), + headers=d.get("headers", None), + json=d.get("json", None), + method=_enum(d, "method", ExternalFunctionRequestHttpMethod), + params=d.get("params", None), + path=d.get("path", None), + ) class ExternalFunctionRequestHttpMethod(Enum): - - - DELETE = 'DELETE' - GET = 'GET' - PATCH = 'PATCH' - POST = 'POST' - PUT = 'PUT' + + DELETE = "DELETE" + GET = "GET" + PATCH = "PATCH" + POST = "POST" + PUT = "PUT" + @dataclass class ExternalModel: @@ -1418,128 +1668,173 @@ class ExternalModel: """The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'.""" - + name: str """The name of the external model.""" - + task: str """The task type of the external model.""" - + ai21labs_config: Optional[Ai21LabsConfig] = None """AI21Labs Config. 
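A short sketch of ExternalFunctionRequest, using only the fields defined above; the connection name and path are placeholders:

    from databricks.sdk.service.serving import (
        ExternalFunctionRequest,
        ExternalFunctionRequestHttpMethod,
    )

    req = ExternalFunctionRequest(
        connection_name="my_connection",  # placeholder UC connection
        method=ExternalFunctionRequestHttpMethod.GET,
        path="/v1/models",
    )
    # The HTTP method enum serializes by value.
    assert req.as_dict()["method"] == "GET"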
Only required if the provider is 'ai21labs'.""" - + amazon_bedrock_config: Optional[AmazonBedrockConfig] = None """Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.""" - + anthropic_config: Optional[AnthropicConfig] = None """Anthropic Config. Only required if the provider is 'anthropic'.""" - + cohere_config: Optional[CohereConfig] = None """Cohere Config. Only required if the provider is 'cohere'.""" - + custom_provider_config: Optional[CustomProviderConfig] = None """Custom Provider Config. Only required if the provider is 'custom'.""" - + databricks_model_serving_config: Optional[DatabricksModelServingConfig] = None """Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.""" - + google_cloud_vertex_ai_config: Optional[GoogleCloudVertexAiConfig] = None """Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.""" - + openai_config: Optional[OpenAiConfig] = None """OpenAI Config. Only required if the provider is 'openai'.""" - + palm_config: Optional[PaLmConfig] = None """PaLM Config. Only required if the provider is 'palm'.""" - + def as_dict(self) -> dict: """Serializes the ExternalModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config.as_dict() - if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config.as_dict() - if self.anthropic_config: body['anthropic_config'] = self.anthropic_config.as_dict() - if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict() - if self.custom_provider_config: body['custom_provider_config'] = self.custom_provider_config.as_dict() - if self.databricks_model_serving_config: body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict() - if self.google_cloud_vertex_ai_config: body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config.as_dict() - if self.name is not None: body['name'] = self.name - if self.openai_config: body['openai_config'] = self.openai_config.as_dict() - if self.palm_config: body['palm_config'] = self.palm_config.as_dict() - if self.provider is not None: body['provider'] = self.provider.value - if self.task is not None: body['task'] = self.task + if self.ai21labs_config: + body["ai21labs_config"] = self.ai21labs_config.as_dict() + if self.amazon_bedrock_config: + body["amazon_bedrock_config"] = self.amazon_bedrock_config.as_dict() + if self.anthropic_config: + body["anthropic_config"] = self.anthropic_config.as_dict() + if self.cohere_config: + body["cohere_config"] = self.cohere_config.as_dict() + if self.custom_provider_config: + body["custom_provider_config"] = self.custom_provider_config.as_dict() + if self.databricks_model_serving_config: + body["databricks_model_serving_config"] = self.databricks_model_serving_config.as_dict() + if self.google_cloud_vertex_ai_config: + body["google_cloud_vertex_ai_config"] = self.google_cloud_vertex_ai_config.as_dict() + if self.name is not None: + body["name"] = self.name + if self.openai_config: + body["openai_config"] = self.openai_config.as_dict() + if self.palm_config: + body["palm_config"] = self.palm_config.as_dict() + if self.provider is not None: + body["provider"] = self.provider.value + if self.task is not None: + body["task"] = self.task return body def as_shallow_dict(self) -> dict: """Serializes the ExternalModel into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config - if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config - if self.anthropic_config: body['anthropic_config'] = self.anthropic_config - if self.cohere_config: body['cohere_config'] = self.cohere_config - if self.custom_provider_config: body['custom_provider_config'] = self.custom_provider_config - if self.databricks_model_serving_config: body['databricks_model_serving_config'] = self.databricks_model_serving_config - if self.google_cloud_vertex_ai_config: body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config - if self.name is not None: body['name'] = self.name - if self.openai_config: body['openai_config'] = self.openai_config - if self.palm_config: body['palm_config'] = self.palm_config - if self.provider is not None: body['provider'] = self.provider - if self.task is not None: body['task'] = self.task + if self.ai21labs_config: + body["ai21labs_config"] = self.ai21labs_config + if self.amazon_bedrock_config: + body["amazon_bedrock_config"] = self.amazon_bedrock_config + if self.anthropic_config: + body["anthropic_config"] = self.anthropic_config + if self.cohere_config: + body["cohere_config"] = self.cohere_config + if self.custom_provider_config: + body["custom_provider_config"] = self.custom_provider_config + if self.databricks_model_serving_config: + body["databricks_model_serving_config"] = self.databricks_model_serving_config + if self.google_cloud_vertex_ai_config: + body["google_cloud_vertex_ai_config"] = self.google_cloud_vertex_ai_config + if self.name is not None: + body["name"] = self.name + if self.openai_config: + body["openai_config"] = self.openai_config + if self.palm_config: + body["palm_config"] = self.palm_config + if self.provider is not None: + body["provider"] = self.provider + if self.task is not None: + body["task"] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalModel: """Deserializes the ExternalModel from a dictionary.""" - return cls(ai21labs_config=_from_dict(d, 'ai21labs_config', Ai21LabsConfig), amazon_bedrock_config=_from_dict(d, 'amazon_bedrock_config', AmazonBedrockConfig), anthropic_config=_from_dict(d, 'anthropic_config', AnthropicConfig), cohere_config=_from_dict(d, 'cohere_config', CohereConfig), custom_provider_config=_from_dict(d, 'custom_provider_config', CustomProviderConfig), databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config', DatabricksModelServingConfig), google_cloud_vertex_ai_config=_from_dict(d, 'google_cloud_vertex_ai_config', GoogleCloudVertexAiConfig), name=d.get('name', None), openai_config=_from_dict(d, 'openai_config', OpenAiConfig), palm_config=_from_dict(d, 'palm_config', PaLmConfig), provider=_enum(d, 'provider', ExternalModelProvider), task=d.get('task', None)) - - + return cls( + ai21labs_config=_from_dict(d, "ai21labs_config", Ai21LabsConfig), + amazon_bedrock_config=_from_dict(d, "amazon_bedrock_config", AmazonBedrockConfig), + anthropic_config=_from_dict(d, "anthropic_config", AnthropicConfig), + cohere_config=_from_dict(d, "cohere_config", CohereConfig), + custom_provider_config=_from_dict(d, "custom_provider_config", CustomProviderConfig), + databricks_model_serving_config=_from_dict( + d, "databricks_model_serving_config", DatabricksModelServingConfig + ), + google_cloud_vertex_ai_config=_from_dict(d, "google_cloud_vertex_ai_config", GoogleCloudVertexAiConfig), + name=d.get("name", None), + openai_config=_from_dict(d, "openai_config", 
OpenAiConfig), + palm_config=_from_dict(d, "palm_config", PaLmConfig), + provider=_enum(d, "provider", ExternalModelProvider), + task=d.get("task", None), + ) class ExternalModelProvider(Enum): - - - AI21LABS = 'ai21labs' - AMAZON_BEDROCK = 'amazon-bedrock' - ANTHROPIC = 'anthropic' - COHERE = 'cohere' - CUSTOM = 'custom' - DATABRICKS_MODEL_SERVING = 'databricks-model-serving' - GOOGLE_CLOUD_VERTEX_AI = 'google-cloud-vertex-ai' - OPENAI = 'openai' - PALM = 'palm' + + AI21LABS = "ai21labs" + AMAZON_BEDROCK = "amazon-bedrock" + ANTHROPIC = "anthropic" + COHERE = "cohere" + CUSTOM = "custom" + DATABRICKS_MODEL_SERVING = "databricks-model-serving" + GOOGLE_CLOUD_VERTEX_AI = "google-cloud-vertex-ai" + OPENAI = "openai" + PALM = "palm" + @dataclass class ExternalModelUsageElement: completion_tokens: Optional[int] = None """The number of tokens in the chat/completions response.""" - + prompt_tokens: Optional[int] = None """The number of tokens in the prompt.""" - + total_tokens: Optional[int] = None """The total number of tokens in the prompt and response.""" - + def as_dict(self) -> dict: """Serializes the ExternalModelUsageElement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens - if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens - if self.total_tokens is not None: body['total_tokens'] = self.total_tokens + if self.completion_tokens is not None: + body["completion_tokens"] = self.completion_tokens + if self.prompt_tokens is not None: + body["prompt_tokens"] = self.prompt_tokens + if self.total_tokens is not None: + body["total_tokens"] = self.total_tokens return body def as_shallow_dict(self) -> dict: """Serializes the ExternalModelUsageElement into a shallow dictionary of its immediate attributes.""" body = {} - if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens - if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens - if self.total_tokens is not None: body['total_tokens'] = self.total_tokens + if self.completion_tokens is not None: + body["completion_tokens"] = self.completion_tokens + if self.prompt_tokens is not None: + body["prompt_tokens"] = self.prompt_tokens + if self.total_tokens is not None: + body["total_tokens"] = self.total_tokens return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExternalModelUsageElement: """Deserializes the ExternalModelUsageElement from a dictionary.""" - return cls(completion_tokens=d.get('completion_tokens', None), prompt_tokens=d.get('prompt_tokens', None), total_tokens=d.get('total_tokens', None)) - - + return cls( + completion_tokens=d.get("completion_tokens", None), + prompt_tokens=d.get("prompt_tokens", None), + total_tokens=d.get("total_tokens", None), + ) @dataclass @@ -1550,138 +1845,137 @@ class FallbackConfig: other served entities in the same endpoint, following the order of served entity list, until a successful response is returned. 
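To illustrate how the provider enum and the per-provider configs above fit together, a hypothetical Cohere-backed external model could be declared as follows; the model name, task string, and secret reference are placeholders:

    from databricks.sdk.service.serving import CohereConfig, ExternalModel, ExternalModelProvider

    model = ExternalModel(
        provider=ExternalModelProvider.COHERE,
        name="command-r",    # placeholder external model name
        task="llm/v1/chat",  # placeholder task type
        # Exactly one provider config should be set, matching `provider`.
        cohere_config=CohereConfig(cohere_api_key="{{secrets/my_scope/cohere_key}}"),
    )
    assert model.as_dict()["provider"] == "cohere"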
If all attempts fail, return the last response with the error code.""" - + def as_dict(self) -> dict: """Serializes the FallbackConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body def as_shallow_dict(self) -> dict: """Serializes the FallbackConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled + if self.enabled is not None: + body["enabled"] = self.enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FallbackConfig: """Deserializes the FallbackConfig from a dictionary.""" - return cls(enabled=d.get('enabled', None)) - - + return cls(enabled=d.get("enabled", None)) @dataclass class FoundationModel: """All fields are not sensitive as they are hard-coded in the system and made available to customers.""" - + description: Optional[str] = None - + display_name: Optional[str] = None - + docs: Optional[str] = None - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the FoundationModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.display_name is not None: body['display_name'] = self.display_name - if self.docs is not None: body['docs'] = self.docs - if self.name is not None: body['name'] = self.name + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.docs is not None: + body["docs"] = self.docs + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the FoundationModel into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.display_name is not None: body['display_name'] = self.display_name - if self.docs is not None: body['docs'] = self.docs - if self.name is not None: body['name'] = self.name + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.docs is not None: + body["docs"] = self.docs + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FoundationModel: """Deserializes the FoundationModel from a dictionary.""" - return cls(description=d.get('description', None), display_name=d.get('display_name', None), docs=d.get('docs', None), name=d.get('name', None)) - - - - - + return cls( + description=d.get("description", None), + display_name=d.get("display_name", None), + docs=d.get("docs", None), + name=d.get("name", None), + ) @dataclass class GetOpenApiResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the GetOpenApiResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the GetOpenApiResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
GetOpenApiResponse: """Deserializes the GetOpenApiResponse from a dictionary.""" - return cls(contents=d.get('contents', None)) - - - - - + return cls(contents=d.get("contents", None)) @dataclass class GetServingEndpointPermissionLevelsResponse: permission_levels: Optional[List[ServingEndpointPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetServingEndpointPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetServingEndpointPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetServingEndpointPermissionLevelsResponse: """Deserializes the GetServingEndpointPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', ServingEndpointPermissionsDescription)) - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", ServingEndpointPermissionsDescription)) @dataclass class GoogleCloudVertexAiConfig: project_id: str """This is the Google Cloud project id that the service account is associated with.""" - + region: str """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more details. Some models are only available in specific regions. [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations""" - + private_key: Optional[str] = None """The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys]. @@ -1690,7 +1984,7 @@ class GoogleCloudVertexAiConfig: [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" - + private_key_plaintext: Optional[str] = None """The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys]. 
If you @@ -1699,206 +1993,246 @@ class GoogleCloudVertexAiConfig: [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" - + def as_dict(self) -> dict: """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.private_key is not None: body['private_key'] = self.private_key - if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext - if self.project_id is not None: body['project_id'] = self.project_id - if self.region is not None: body['region'] = self.region + if self.private_key is not None: + body["private_key"] = self.private_key + if self.private_key_plaintext is not None: + body["private_key_plaintext"] = self.private_key_plaintext + if self.project_id is not None: + body["project_id"] = self.project_id + if self.region is not None: + body["region"] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the GoogleCloudVertexAiConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.private_key is not None: body['private_key'] = self.private_key - if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext - if self.project_id is not None: body['project_id'] = self.project_id - if self.region is not None: body['region'] = self.region + if self.private_key is not None: + body["private_key"] = self.private_key + if self.private_key_plaintext is not None: + body["private_key_plaintext"] = self.private_key_plaintext + if self.project_id is not None: + body["project_id"] = self.project_id + if self.region is not None: + body["region"] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GoogleCloudVertexAiConfig: """Deserializes the GoogleCloudVertexAiConfig from a dictionary.""" - return cls(private_key=d.get('private_key', None), private_key_plaintext=d.get('private_key_plaintext', None), project_id=d.get('project_id', None), region=d.get('region', None)) - - + return cls( + private_key=d.get("private_key", None), + private_key_plaintext=d.get("private_key_plaintext", None), + project_id=d.get("project_id", None), + region=d.get("region", None), + ) @dataclass class HttpRequestResponse: contents: Optional[BinaryIO] = None - + def as_dict(self) -> dict: """Serializes the HttpRequestResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body def as_shallow_dict(self) -> dict: """Serializes the HttpRequestResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.contents: body['contents'] = self.contents + if self.contents: + body["contents"] = self.contents return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> HttpRequestResponse: """Deserializes the HttpRequestResponse from a dictionary.""" - return cls(contents=d.get('contents', None)) - - + return cls(contents=d.get("contents", None)) @dataclass class ListEndpointsResponse: endpoints: Optional[List[ServingEndpoint]] = None """The list of endpoints.""" - + def as_dict(self) -> dict: """Serializes the ListEndpointsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints] + if self.endpoints: + body["endpoints"] = [v.as_dict() for v in self.endpoints] 
return body def as_shallow_dict(self) -> dict: """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoints: body['endpoints'] = self.endpoints + if self.endpoints: + body["endpoints"] = self.endpoints return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListEndpointsResponse: """Deserializes the ListEndpointsResponse from a dictionary.""" - return cls(endpoints=_repeated_dict(d, 'endpoints', ServingEndpoint)) - - - - - + return cls(endpoints=_repeated_dict(d, "endpoints", ServingEndpoint)) @dataclass class ModelDataPlaneInfo: """A representation of all DataPlaneInfo for operations that can be done on a model through Data Plane APIs.""" - + query_info: Optional[DataPlaneInfo] = None """Information required to query DataPlane API 'query' endpoint.""" - + def as_dict(self) -> dict: """Serializes the ModelDataPlaneInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.query_info: body['query_info'] = self.query_info.as_dict() + if self.query_info: + body["query_info"] = self.query_info.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ModelDataPlaneInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.query_info: body['query_info'] = self.query_info + if self.query_info: + body["query_info"] = self.query_info return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ModelDataPlaneInfo: """Deserializes the ModelDataPlaneInfo from a dictionary.""" - return cls(query_info=_from_dict(d, 'query_info', DataPlaneInfo)) - - + return cls(query_info=_from_dict(d, "query_info", DataPlaneInfo)) @dataclass class OpenAiConfig: """Configs needed to create an OpenAI model route.""" - + microsoft_entra_client_id: Optional[str] = None """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.""" - + microsoft_entra_client_secret: Optional[str] = None """The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication. If you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`. You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.""" - + microsoft_entra_client_secret_plaintext: Optional[str] = None """The client secret used for Microsoft Entra ID authentication provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`. You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.""" - + microsoft_entra_tenant_id: Optional[str] = None """This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.""" - + openai_api_base: Optional[str] = None """This is a field to provide a customized base URL for the OpenAI API. For Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service provided by Azure. For other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.""" - + openai_api_key: Optional[str] = None """The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`.
You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" - + openai_api_key_plaintext: Optional[str] = None """The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" - + openai_api_type: Optional[str] = None """This is an optional field to specify the type of OpenAI API to use. For Azure OpenAI, this field is required; set it to the preferred security access validation protocol. For access token validation, use azure. For authentication using Azure Active Directory (Azure AD), use azuread.""" - + openai_api_version: Optional[str] = None """This is an optional field to specify the OpenAI API version. For Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to utilize, specified by a date.""" - + openai_deployment_name: Optional[str] = None """This field is only required for Azure OpenAI and is the name of the deployment resource for the Azure OpenAI service.""" - + openai_organization: Optional[str] = None """This is an optional field to specify the organization in OpenAI or Azure OpenAI.""" - + def as_dict(self) -> dict: """Serializes the OpenAiConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.microsoft_entra_client_id is not None: body['microsoft_entra_client_id'] = self.microsoft_entra_client_id - if self.microsoft_entra_client_secret is not None: body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret - if self.microsoft_entra_client_secret_plaintext is not None: body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext - if self.microsoft_entra_tenant_id is not None: body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id - if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base - if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key - if self.openai_api_key_plaintext is not None: body['openai_api_key_plaintext'] = self.openai_api_key_plaintext - if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type - if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version - if self.openai_deployment_name is not None: body['openai_deployment_name'] = self.openai_deployment_name - if self.openai_organization is not None: body['openai_organization'] = self.openai_organization + if self.microsoft_entra_client_id is not None: + body["microsoft_entra_client_id"] = self.microsoft_entra_client_id + if self.microsoft_entra_client_secret is not None: + body["microsoft_entra_client_secret"] = self.microsoft_entra_client_secret + if self.microsoft_entra_client_secret_plaintext is not None: + body["microsoft_entra_client_secret_plaintext"] = self.microsoft_entra_client_secret_plaintext + if self.microsoft_entra_tenant_id is not None: + body["microsoft_entra_tenant_id"] = self.microsoft_entra_tenant_id + if self.openai_api_base is not None: + body["openai_api_base"] = self.openai_api_base + if self.openai_api_key is not None: + body["openai_api_key"] = self.openai_api_key + if self.openai_api_key_plaintext is not None: + body["openai_api_key_plaintext"] = self.openai_api_key_plaintext + if self.openai_api_type is not None: + body["openai_api_type"] = 
self.openai_api_type + if self.openai_api_version is not None: + body["openai_api_version"] = self.openai_api_version + if self.openai_deployment_name is not None: + body["openai_deployment_name"] = self.openai_deployment_name + if self.openai_organization is not None: + body["openai_organization"] = self.openai_organization return body def as_shallow_dict(self) -> dict: """Serializes the OpenAiConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.microsoft_entra_client_id is not None: body['microsoft_entra_client_id'] = self.microsoft_entra_client_id - if self.microsoft_entra_client_secret is not None: body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret - if self.microsoft_entra_client_secret_plaintext is not None: body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext - if self.microsoft_entra_tenant_id is not None: body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id - if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base - if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key - if self.openai_api_key_plaintext is not None: body['openai_api_key_plaintext'] = self.openai_api_key_plaintext - if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type - if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version - if self.openai_deployment_name is not None: body['openai_deployment_name'] = self.openai_deployment_name - if self.openai_organization is not None: body['openai_organization'] = self.openai_organization + if self.microsoft_entra_client_id is not None: + body["microsoft_entra_client_id"] = self.microsoft_entra_client_id + if self.microsoft_entra_client_secret is not None: + body["microsoft_entra_client_secret"] = self.microsoft_entra_client_secret + if self.microsoft_entra_client_secret_plaintext is not None: + body["microsoft_entra_client_secret_plaintext"] = self.microsoft_entra_client_secret_plaintext + if self.microsoft_entra_tenant_id is not None: + body["microsoft_entra_tenant_id"] = self.microsoft_entra_tenant_id + if self.openai_api_base is not None: + body["openai_api_base"] = self.openai_api_base + if self.openai_api_key is not None: + body["openai_api_key"] = self.openai_api_key + if self.openai_api_key_plaintext is not None: + body["openai_api_key_plaintext"] = self.openai_api_key_plaintext + if self.openai_api_type is not None: + body["openai_api_type"] = self.openai_api_type + if self.openai_api_version is not None: + body["openai_api_version"] = self.openai_api_version + if self.openai_deployment_name is not None: + body["openai_deployment_name"] = self.openai_deployment_name + if self.openai_organization is not None: + body["openai_organization"] = self.openai_organization return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> OpenAiConfig: """Deserializes the OpenAiConfig from a dictionary.""" - return cls(microsoft_entra_client_id=d.get('microsoft_entra_client_id', None), microsoft_entra_client_secret=d.get('microsoft_entra_client_secret', None), microsoft_entra_client_secret_plaintext=d.get('microsoft_entra_client_secret_plaintext', None), microsoft_entra_tenant_id=d.get('microsoft_entra_tenant_id', None), openai_api_base=d.get('openai_api_base', None), openai_api_key=d.get('openai_api_key', None), openai_api_key_plaintext=d.get('openai_api_key_plaintext', None), openai_api_type=d.get('openai_api_type', None), 
openai_api_version=d.get('openai_api_version', None), openai_deployment_name=d.get('openai_deployment_name', None), openai_organization=d.get('openai_organization', None)) - - + return cls( + microsoft_entra_client_id=d.get("microsoft_entra_client_id", None), + microsoft_entra_client_secret=d.get("microsoft_entra_client_secret", None), + microsoft_entra_client_secret_plaintext=d.get("microsoft_entra_client_secret_plaintext", None), + microsoft_entra_tenant_id=d.get("microsoft_entra_tenant_id", None), + openai_api_base=d.get("openai_api_base", None), + openai_api_key=d.get("openai_api_key", None), + openai_api_key_plaintext=d.get("openai_api_key_plaintext", None), + openai_api_type=d.get("openai_api_type", None), + openai_api_version=d.get("openai_api_version", None), + openai_deployment_name=d.get("openai_deployment_name", None), + openai_organization=d.get("openai_organization", None), + ) @dataclass @@ -1907,128 +2241,149 @@ class PaLmConfig: """The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.""" - + palm_api_key_plaintext: Optional[str] = None """The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.""" - + def as_dict(self) -> dict: """Serializes the PaLmConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key - if self.palm_api_key_plaintext is not None: body['palm_api_key_plaintext'] = self.palm_api_key_plaintext + if self.palm_api_key is not None: + body["palm_api_key"] = self.palm_api_key + if self.palm_api_key_plaintext is not None: + body["palm_api_key_plaintext"] = self.palm_api_key_plaintext return body def as_shallow_dict(self) -> dict: """Serializes the PaLmConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key - if self.palm_api_key_plaintext is not None: body['palm_api_key_plaintext'] = self.palm_api_key_plaintext + if self.palm_api_key is not None: + body["palm_api_key"] = self.palm_api_key + if self.palm_api_key_plaintext is not None: + body["palm_api_key_plaintext"] = self.palm_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PaLmConfig: """Deserializes the PaLmConfig from a dictionary.""" - return cls(palm_api_key=d.get('palm_api_key', None), palm_api_key_plaintext=d.get('palm_api_key_plaintext', None)) - - + return cls( + palm_api_key=d.get("palm_api_key", None), palm_api_key_plaintext=d.get("palm_api_key_plaintext", None) + ) @dataclass class PatchServingEndpointTags: add_tags: Optional[List[EndpointTag]] = None """List of endpoint tags to add""" - + delete_tags: Optional[List[str]] = None """List of tag keys to delete""" - + name: Optional[str] = None """The name of the serving endpoint whose tags to patch.
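To make the OpenAiConfig field interplay concrete, here is a sketch of an Azure OpenAI route configuration; the resource URL, deployment name, and secret reference are hypothetical:

from databricks.sdk.service.serving import OpenAiConfig

azure_cfg = OpenAiConfig(
    openai_api_type="azure",  # access-token validation, per the docstring above
    openai_api_base="https://my-resource.openai.azure.com/",  # required for Azure OpenAI
    openai_api_version="2023-05-15",
    openai_deployment_name="my-gpt-deployment",
    openai_api_key="{{secrets/my_scope/azure_openai_key}}",
)
# Unset fields (e.g. the Microsoft Entra ones) are omitted from the body.
request_body = azure_cfg.as_dict()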
This field is required.""" - + def as_dict(self) -> dict: """Serializes the PatchServingEndpointTags into a dictionary suitable for use as a JSON request body.""" body = {} - if self.add_tags: body['add_tags'] = [v.as_dict() for v in self.add_tags] - if self.delete_tags: body['delete_tags'] = [v for v in self.delete_tags] - if self.name is not None: body['name'] = self.name + if self.add_tags: + body["add_tags"] = [v.as_dict() for v in self.add_tags] + if self.delete_tags: + body["delete_tags"] = [v for v in self.delete_tags] + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes.""" body = {} - if self.add_tags: body['add_tags'] = self.add_tags - if self.delete_tags: body['delete_tags'] = self.delete_tags - if self.name is not None: body['name'] = self.name + if self.add_tags: + body["add_tags"] = self.add_tags + if self.delete_tags: + body["delete_tags"] = self.delete_tags + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PatchServingEndpointTags: """Deserializes the PatchServingEndpointTags from a dictionary.""" - return cls(add_tags=_repeated_dict(d, 'add_tags', EndpointTag), delete_tags=d.get('delete_tags', None), name=d.get('name', None)) - - + return cls( + add_tags=_repeated_dict(d, "add_tags", EndpointTag), + delete_tags=d.get("delete_tags", None), + name=d.get("name", None), + ) @dataclass class PayloadTable: name: Optional[str] = None - + status: Optional[str] = None - + status_message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the PayloadTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.status is not None: body['status'] = self.status - if self.status_message is not None: body['status_message'] = self.status_message + if self.name is not None: + body["name"] = self.name + if self.status is not None: + body["status"] = self.status + if self.status_message is not None: + body["status_message"] = self.status_message return body def as_shallow_dict(self) -> dict: """Serializes the PayloadTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.status is not None: body['status'] = self.status - if self.status_message is not None: body['status_message'] = self.status_message + if self.name is not None: + body["name"] = self.name + if self.status is not None: + body["status"] = self.status + if self.status_message is not None: + body["status_message"] = self.status_message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PayloadTable: """Deserializes the PayloadTable from a dictionary.""" - return cls(name=d.get('name', None), status=d.get('status', None), status_message=d.get('status_message', None)) - - + return cls(name=d.get("name", None), status=d.get("status", None), status_message=d.get("status_message", None)) @dataclass class PtEndpointCoreConfig: served_entities: Optional[List[PtServedModel]] = None """The list of served entities under the serving endpoint config.""" - + traffic_config: Optional[TrafficConfig] = None - + def as_dict(self) -> dict: """Serializes the PtEndpointCoreConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities] - if 
self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict() + if self.served_entities: + body["served_entities"] = [v.as_dict() for v in self.served_entities] + if self.traffic_config: + body["traffic_config"] = self.traffic_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PtEndpointCoreConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.served_entities: body['served_entities'] = self.served_entities - if self.traffic_config: body['traffic_config'] = self.traffic_config + if self.served_entities: + body["served_entities"] = self.served_entities + if self.traffic_config: + body["traffic_config"] = self.traffic_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PtEndpointCoreConfig: """Deserializes the PtEndpointCoreConfig from a dictionary.""" - return cls(served_entities=_repeated_dict(d, 'served_entities', PtServedModel), traffic_config=_from_dict(d, 'traffic_config', TrafficConfig)) - - + return cls( + served_entities=_repeated_dict(d, "served_entities", PtServedModel), + traffic_config=_from_dict(d, "traffic_config", TrafficConfig), + ) @dataclass @@ -2038,42 +2393,53 @@ class PtServedModel: a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.""" - + provisioned_model_units: int """The number of model units to be provisioned.""" - + entity_version: Optional[str] = None - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' 
and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + def as_dict(self) -> dict: """Serializes the PtServedModel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entity_name is not None: body['entity_name'] = self.entity_name - if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units return body def as_shallow_dict(self) -> dict: """Serializes the PtServedModel into a shallow dictionary of its immediate attributes.""" body = {} - if self.entity_name is not None: body['entity_name'] = self.entity_name - if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PtServedModel: """Deserializes the PtServedModel from a dictionary.""" - return cls(entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None)) - - + return cls( + entity_name=d.get("entity_name", None), + entity_version=d.get("entity_version", None), + name=d.get("name", None), + provisioned_model_units=d.get("provisioned_model_units", None), + ) @dataclass @@ -2081,53 +2447,70 @@ class PutAiGatewayRequest: fallback_config: Optional[FallbackConfig] = None """Configuration for traffic fallback which automatically falls back to other served entities if the request to a served entity fails with certain error codes, to increase availability.""" - + guardrails: Optional[AiGatewayGuardrails] = None """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.""" - + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None """Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.""" - + name: Optional[str] = None """The name of the serving endpoint whose AI Gateway is being updated. This field is required.""" - + rate_limits: Optional[List[AiGatewayRateLimit]] = None """Configuration for rate limits which can be set to limit endpoint traffic.""" - + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None """Configuration to enable usage tracking using system tables.
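A sketch of a provisioned-throughput config assembled from PtEndpointCoreConfig and PtServedModel; the Unity Catalog model name and version are hypothetical:

from databricks.sdk.service.serving import PtEndpointCoreConfig, PtServedModel

config = PtEndpointCoreConfig(
    served_entities=[
        PtServedModel(
            entity_name="main.default.my_model",  # catalog.schema.model form
            entity_version="1",
            provisioned_model_units=1,
        )
    ]
)
# Nested served entities are serialized through their own as_dict().
assert config.as_dict()["served_entities"][0]["entity_name"] == "main.default.my_model"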
These tables allow you to monitor operational usage on endpoints and their associated costs.""" - + def as_dict(self) -> dict: """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fallback_config: body['fallback_config'] = self.fallback_config.as_dict() - if self.guardrails: body['guardrails'] = self.guardrails.as_dict() - if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict() - if self.name is not None: body['name'] = self.name - if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() + if self.fallback_config: + body["fallback_config"] = self.fallback_config.as_dict() + if self.guardrails: + body["guardrails"] = self.guardrails.as_dict() + if self.inference_table_config: + body["inference_table_config"] = self.inference_table_config.as_dict() + if self.name is not None: + body["name"] = self.name + if self.rate_limits: + body["rate_limits"] = [v.as_dict() for v in self.rate_limits] + if self.usage_tracking_config: + body["usage_tracking_config"] = self.usage_tracking_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.fallback_config: body['fallback_config'] = self.fallback_config - if self.guardrails: body['guardrails'] = self.guardrails - if self.inference_table_config: body['inference_table_config'] = self.inference_table_config - if self.name is not None: body['name'] = self.name - if self.rate_limits: body['rate_limits'] = self.rate_limits - if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config + if self.fallback_config: + body["fallback_config"] = self.fallback_config + if self.guardrails: + body["guardrails"] = self.guardrails + if self.inference_table_config: + body["inference_table_config"] = self.inference_table_config + if self.name is not None: + body["name"] = self.name + if self.rate_limits: + body["rate_limits"] = self.rate_limits + if self.usage_tracking_config: + body["usage_tracking_config"] = self.usage_tracking_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayRequest: """Deserializes the PutAiGatewayRequest from a dictionary.""" - return cls(fallback_config=_from_dict(d, 'fallback_config', FallbackConfig), guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails), inference_table_config=_from_dict(d, 'inference_table_config', AiGatewayInferenceTableConfig), name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit), usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig)) - - + return cls( + fallback_config=_from_dict(d, "fallback_config", FallbackConfig), + guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), + inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig), + name=d.get("name", None), + rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), + usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), + ) @dataclass @@ -2135,209 +2518,266 @@ class PutAiGatewayResponse: fallback_config: Optional[FallbackConfig] = None """Configuration for traffic fallback which automatically falls back to other served entities if the request to a served entity fails with certain error codes, to
increase availability.""" - + guardrails: Optional[AiGatewayGuardrails] = None """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.""" - + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None """Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.""" - + rate_limits: Optional[List[AiGatewayRateLimit]] = None """Configuration for rate limits which can be set to limit endpoint traffic.""" - + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None """Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.""" - + def as_dict(self) -> dict: """Serializes the PutAiGatewayResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fallback_config: body['fallback_config'] = self.fallback_config.as_dict() - if self.guardrails: body['guardrails'] = self.guardrails.as_dict() - if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict() - if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict() + if self.fallback_config: + body["fallback_config"] = self.fallback_config.as_dict() + if self.guardrails: + body["guardrails"] = self.guardrails.as_dict() + if self.inference_table_config: + body["inference_table_config"] = self.inference_table_config.as_dict() + if self.rate_limits: + body["rate_limits"] = [v.as_dict() for v in self.rate_limits] + if self.usage_tracking_config: + body["usage_tracking_config"] = self.usage_tracking_config.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the PutAiGatewayResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.fallback_config: body['fallback_config'] = self.fallback_config - if self.guardrails: body['guardrails'] = self.guardrails - if self.inference_table_config: body['inference_table_config'] = self.inference_table_config - if self.rate_limits: body['rate_limits'] = self.rate_limits - if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config + if self.fallback_config: + body["fallback_config"] = self.fallback_config + if self.guardrails: + body["guardrails"] = self.guardrails + if self.inference_table_config: + body["inference_table_config"] = self.inference_table_config + if self.rate_limits: + body["rate_limits"] = self.rate_limits + if self.usage_tracking_config: + body["usage_tracking_config"] = self.usage_tracking_config return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayResponse: """Deserializes the PutAiGatewayResponse from a dictionary.""" - return cls(fallback_config=_from_dict(d, 'fallback_config', FallbackConfig), guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails), inference_table_config=_from_dict(d, 'inference_table_config', AiGatewayInferenceTableConfig), rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit), usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig)) - - + return cls( + fallback_config=_from_dict(d, "fallback_config", FallbackConfig), + guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), + inference_table_config=_from_dict(d, "inference_table_config", 
AiGatewayInferenceTableConfig), + rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), + usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), + ) @dataclass class PutRequest: name: Optional[str] = None """The name of the serving endpoint whose rate limits are being updated. This field is required.""" - + rate_limits: Optional[List[RateLimit]] = None """The list of endpoint rate limits.""" - + def as_dict(self) -> dict: """Serializes the PutRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + if self.name is not None: + body["name"] = self.name + if self.rate_limits: + body["rate_limits"] = [v.as_dict() for v in self.rate_limits] return body def as_shallow_dict(self) -> dict: """Serializes the PutRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.name is not None: + body["name"] = self.name + if self.rate_limits: + body["rate_limits"] = self.rate_limits return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutRequest: """Deserializes the PutRequest from a dictionary.""" - return cls(name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit)) - - + return cls(name=d.get("name", None), rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) @dataclass class PutResponse: rate_limits: Optional[List[RateLimit]] = None """The list of endpoint rate limits.""" - + def as_dict(self) -> dict: """Serializes the PutResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits] + if self.rate_limits: + body["rate_limits"] = [v.as_dict() for v in self.rate_limits] return body def as_shallow_dict(self) -> dict: """Serializes the PutResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.rate_limits: body['rate_limits'] = self.rate_limits + if self.rate_limits: + body["rate_limits"] = self.rate_limits return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutResponse: """Deserializes the PutResponse from a dictionary.""" - return cls(rate_limits=_repeated_dict(d, 'rate_limits', RateLimit)) - - + return cls(rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) @dataclass class QueryEndpointInput: dataframe_records: Optional[List[Any]] = None """Pandas Dataframe input in the records orientation.""" - + dataframe_split: Optional[DataframeSplitInput] = None """Pandas Dataframe input in the split orientation.""" - - extra_params: Optional[Dict[str,str]] = None + + extra_params: Optional[Dict[str, str]] = None """The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & foundation model__ serving endpoints. 
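A sketch of a PutAiGatewayRequest body, assuming the AiGatewayRateLimit, AiGatewayRateLimitKey, AiGatewayRateLimitRenewalPeriod, and AiGatewayUsageTrackingConfig helpers defined earlier in this module; the endpoint name is hypothetical:

from databricks.sdk.service.serving import (
    AiGatewayRateLimit,
    AiGatewayRateLimitKey,
    AiGatewayRateLimitRenewalPeriod,
    AiGatewayUsageTrackingConfig,
    PutAiGatewayRequest,
)

req = PutAiGatewayRequest(
    name="my-endpoint",
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
    rate_limits=[
        AiGatewayRateLimit(
            calls=100,
            key=AiGatewayRateLimitKey.USER,
            renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
        )
    ],
)
body = req.as_dict()  # enums serialize via .value, nested configs via as_dict()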
This is a map of strings and should only be used with other external/foundation model query fields.""" - + input: Optional[Any] = None """The input string (or array of strings) field used ONLY for __embeddings external & foundation model__ serving endpoints and is the only field (along with extra_params if needed) used by embeddings queries.""" - + inputs: Optional[Any] = None """Tensor-based input in columnar format.""" - + instances: Optional[List[Any]] = None """Tensor-based input in row format.""" - + max_tokens: Optional[int] = None """The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is an integer and should only be used with other chat/completions query fields.""" - + messages: Optional[List[ChatMessage]] = None """The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a map of strings and should only be used with other chat query fields.""" - + n: Optional[int] = None """The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be used with other chat/completions query fields.""" - + name: Optional[str] = None """The name of the serving endpoint. This field is required.""" - + prompt: Optional[Any] = None """The prompt string (or array of strings) field used ONLY for __completions external & foundation model__ serving endpoints and should only be used with other completions query fields.""" - + stop: Optional[List[str]] = None """The stop sequences field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a list of strings and should only be used with other chat/completions query fields.""" - + stream: Optional[bool] = None """The stream field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a boolean defaulting to false and should only be used with other chat/completions query fields.""" - + temperature: Optional[float] = None """The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. 
This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields.""" - + def as_dict(self) -> dict: """Serializes the QueryEndpointInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dataframe_records: body['dataframe_records'] = [v for v in self.dataframe_records] - if self.dataframe_split: body['dataframe_split'] = self.dataframe_split.as_dict() - if self.extra_params: body['extra_params'] = self.extra_params - if self.input: body['input'] = self.input - if self.inputs: body['inputs'] = self.inputs - if self.instances: body['instances'] = [v for v in self.instances] - if self.max_tokens is not None: body['max_tokens'] = self.max_tokens - if self.messages: body['messages'] = [v.as_dict() for v in self.messages] - if self.n is not None: body['n'] = self.n - if self.name is not None: body['name'] = self.name - if self.prompt: body['prompt'] = self.prompt - if self.stop: body['stop'] = [v for v in self.stop] - if self.stream is not None: body['stream'] = self.stream - if self.temperature is not None: body['temperature'] = self.temperature + if self.dataframe_records: + body["dataframe_records"] = [v for v in self.dataframe_records] + if self.dataframe_split: + body["dataframe_split"] = self.dataframe_split.as_dict() + if self.extra_params: + body["extra_params"] = self.extra_params + if self.input: + body["input"] = self.input + if self.inputs: + body["inputs"] = self.inputs + if self.instances: + body["instances"] = [v for v in self.instances] + if self.max_tokens is not None: + body["max_tokens"] = self.max_tokens + if self.messages: + body["messages"] = [v.as_dict() for v in self.messages] + if self.n is not None: + body["n"] = self.n + if self.name is not None: + body["name"] = self.name + if self.prompt: + body["prompt"] = self.prompt + if self.stop: + body["stop"] = [v for v in self.stop] + if self.stream is not None: + body["stream"] = self.stream + if self.temperature is not None: + body["temperature"] = self.temperature return body def as_shallow_dict(self) -> dict: """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.dataframe_records: body['dataframe_records'] = self.dataframe_records - if self.dataframe_split: body['dataframe_split'] = self.dataframe_split - if self.extra_params: body['extra_params'] = self.extra_params - if self.input: body['input'] = self.input - if self.inputs: body['inputs'] = self.inputs - if self.instances: body['instances'] = self.instances - if self.max_tokens is not None: body['max_tokens'] = self.max_tokens - if self.messages: body['messages'] = self.messages - if self.n is not None: body['n'] = self.n - if self.name is not None: body['name'] = self.name - if self.prompt: body['prompt'] = self.prompt - if self.stop: body['stop'] = self.stop - if self.stream is not None: body['stream'] = self.stream - if self.temperature is not None: body['temperature'] = self.temperature + if self.dataframe_records: + body["dataframe_records"] = self.dataframe_records + if self.dataframe_split: + body["dataframe_split"] = self.dataframe_split + if self.extra_params: + body["extra_params"] = self.extra_params + if self.input: + body["input"] = self.input + if self.inputs: + body["inputs"] = self.inputs + if self.instances: + body["instances"] = self.instances + if self.max_tokens is not None: + body["max_tokens"] = self.max_tokens + if self.messages: + body["messages"] = self.messages + if self.n is not None: + 
body["n"] = self.n + if self.name is not None: + body["name"] = self.name + if self.prompt: + body["prompt"] = self.prompt + if self.stop: + body["stop"] = self.stop + if self.stream is not None: + body["stream"] = self.stream + if self.temperature is not None: + body["temperature"] = self.temperature return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryEndpointInput: """Deserializes the QueryEndpointInput from a dictionary.""" - return cls(dataframe_records=d.get('dataframe_records', None), dataframe_split=_from_dict(d, 'dataframe_split', DataframeSplitInput), extra_params=d.get('extra_params', None), input=d.get('input', None), inputs=d.get('inputs', None), instances=d.get('instances', None), max_tokens=d.get('max_tokens', None), messages=_repeated_dict(d, 'messages', ChatMessage), n=d.get('n', None), name=d.get('name', None), prompt=d.get('prompt', None), stop=d.get('stop', None), stream=d.get('stream', None), temperature=d.get('temperature', None)) - - + return cls( + dataframe_records=d.get("dataframe_records", None), + dataframe_split=_from_dict(d, "dataframe_split", DataframeSplitInput), + extra_params=d.get("extra_params", None), + input=d.get("input", None), + inputs=d.get("inputs", None), + instances=d.get("instances", None), + max_tokens=d.get("max_tokens", None), + messages=_repeated_dict(d, "messages", ChatMessage), + n=d.get("n", None), + name=d.get("name", None), + prompt=d.get("prompt", None), + stop=d.get("stop", None), + stream=d.get("stream", None), + temperature=d.get("temperature", None), + ) @dataclass @@ -2345,158 +2785,197 @@ class QueryEndpointResponse: choices: Optional[List[V1ResponseChoiceElement]] = None """The list of choices returned by the __chat or completions external/foundation model__ serving endpoint.""" - + created: Optional[int] = None """The timestamp in seconds when the query was created in Unix time returned by a __completions or chat external/foundation model__ serving endpoint.""" - + data: Optional[List[EmbeddingsV1ResponseEmbeddingElement]] = None """The list of the embeddings returned by the __embeddings external/foundation model__ serving endpoint.""" - + id: Optional[str] = None """The ID of the query that may be returned by a __completions or chat external/foundation model__ serving endpoint.""" - + model: Optional[str] = None """The name of the __external/foundation model__ used for querying. This is the name of the model that was specified in the endpoint config.""" - + object: Optional[QueryEndpointResponseObject] = None """The type of object returned by the __external/foundation model__ serving endpoint, one of [text_completion, chat.completion, list (of embeddings)].""" - + predictions: Optional[List[Any]] = None """The predictions returned by the serving endpoint.""" - + served_model_name: Optional[str] = None """The name of the served model that served the request. This is useful when there are multiple models behind the same endpoint with traffic split.""" - + usage: Optional[ExternalModelUsageElement] = None """The usage object that may be returned by the __external/foundation model__ serving endpoint. 
This contains information about the number of tokens used in the prompt and response.""" - + def as_dict(self) -> dict: """Serializes the QueryEndpointResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.choices: body['choices'] = [v.as_dict() for v in self.choices] - if self.created is not None: body['created'] = self.created - if self.data: body['data'] = [v.as_dict() for v in self.data] - if self.id is not None: body['id'] = self.id - if self.model is not None: body['model'] = self.model - if self.object is not None: body['object'] = self.object.value - if self.predictions: body['predictions'] = [v for v in self.predictions] - if self.served_model_name is not None: body['served-model-name'] = self.served_model_name - if self.usage: body['usage'] = self.usage.as_dict() + if self.choices: + body["choices"] = [v.as_dict() for v in self.choices] + if self.created is not None: + body["created"] = self.created + if self.data: + body["data"] = [v.as_dict() for v in self.data] + if self.id is not None: + body["id"] = self.id + if self.model is not None: + body["model"] = self.model + if self.object is not None: + body["object"] = self.object.value + if self.predictions: + body["predictions"] = [v for v in self.predictions] + if self.served_model_name is not None: + body["served-model-name"] = self.served_model_name + if self.usage: + body["usage"] = self.usage.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the QueryEndpointResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.choices: body['choices'] = self.choices - if self.created is not None: body['created'] = self.created - if self.data: body['data'] = self.data - if self.id is not None: body['id'] = self.id - if self.model is not None: body['model'] = self.model - if self.object is not None: body['object'] = self.object - if self.predictions: body['predictions'] = self.predictions - if self.served_model_name is not None: body['served-model-name'] = self.served_model_name - if self.usage: body['usage'] = self.usage + if self.choices: + body["choices"] = self.choices + if self.created is not None: + body["created"] = self.created + if self.data: + body["data"] = self.data + if self.id is not None: + body["id"] = self.id + if self.model is not None: + body["model"] = self.model + if self.object is not None: + body["object"] = self.object + if self.predictions: + body["predictions"] = self.predictions + if self.served_model_name is not None: + body["served-model-name"] = self.served_model_name + if self.usage: + body["usage"] = self.usage return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryEndpointResponse: """Deserializes the QueryEndpointResponse from a dictionary.""" - return cls(choices=_repeated_dict(d, 'choices', V1ResponseChoiceElement), created=d.get('created', None), data=_repeated_dict(d, 'data', EmbeddingsV1ResponseEmbeddingElement), id=d.get('id', None), model=d.get('model', None), object=_enum(d, 'object', QueryEndpointResponseObject), predictions=d.get('predictions', None), served_model_name=d.get('served-model-name', None), usage=_from_dict(d, 'usage', ExternalModelUsageElement)) - - + return cls( + choices=_repeated_dict(d, "choices", V1ResponseChoiceElement), + created=d.get("created", None), + data=_repeated_dict(d, "data", EmbeddingsV1ResponseEmbeddingElement), + id=d.get("id", None), + model=d.get("model", None), + object=_enum(d, "object", QueryEndpointResponseObject), + predictions=d.get("predictions", None), 
+ served_model_name=d.get("served-model-name", None), + usage=_from_dict(d, "usage", ExternalModelUsageElement), + ) class QueryEndpointResponseObject(Enum): """The type of object returned by the __external/foundation model__ serving endpoint, one of [text_completion, chat.completion, list (of embeddings)].""" - - CHAT_COMPLETION = 'chat.completion' - LIST = 'list' - TEXT_COMPLETION = 'text_completion' + + CHAT_COMPLETION = "chat.completion" + LIST = "list" + TEXT_COMPLETION = "text_completion" + @dataclass class RateLimit: calls: int """Used to specify how many calls are allowed for a key within the renewal_period.""" - + renewal_period: RateLimitRenewalPeriod """Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.""" - + key: Optional[RateLimitKey] = None """Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.""" - + def as_dict(self) -> dict: """Serializes the RateLimit into a dictionary suitable for use as a JSON request body.""" body = {} - if self.calls is not None: body['calls'] = self.calls - if self.key is not None: body['key'] = self.key.value - if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value + if self.calls is not None: + body["calls"] = self.calls + if self.key is not None: + body["key"] = self.key.value + if self.renewal_period is not None: + body["renewal_period"] = self.renewal_period.value return body def as_shallow_dict(self) -> dict: """Serializes the RateLimit into a shallow dictionary of its immediate attributes.""" body = {} - if self.calls is not None: body['calls'] = self.calls - if self.key is not None: body['key'] = self.key - if self.renewal_period is not None: body['renewal_period'] = self.renewal_period + if self.calls is not None: + body["calls"] = self.calls + if self.key is not None: + body["key"] = self.key + if self.renewal_period is not None: + body["renewal_period"] = self.renewal_period return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RateLimit: """Deserializes the RateLimit from a dictionary.""" - return cls(calls=d.get('calls', None), key=_enum(d, 'key', RateLimitKey), renewal_period=_enum(d, 'renewal_period', RateLimitRenewalPeriod)) - - + return cls( + calls=d.get("calls", None), + key=_enum(d, "key", RateLimitKey), + renewal_period=_enum(d, "renewal_period", RateLimitRenewalPeriod), + ) class RateLimitKey(Enum): - - - ENDPOINT = 'endpoint' - USER = 'user' + + ENDPOINT = "endpoint" + USER = "user" + class RateLimitRenewalPeriod(Enum): - - - MINUTE = 'minute' + + MINUTE = "minute" + @dataclass class Route: served_model_name: str """The name of the served model this route configures traffic for.""" - + traffic_percentage: int """The percentage of endpoint traffic to send to this route. 
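The RateLimit serializers above can be exercised directly; note how as_dict() stores the enum values while as_shallow_dict() keeps the enum members:

from databricks.sdk.service.serving import RateLimit, RateLimitKey, RateLimitRenewalPeriod

limit = RateLimit(calls=100, key=RateLimitKey.ENDPOINT, renewal_period=RateLimitRenewalPeriod.MINUTE)
assert limit.as_dict() == {"calls": 100, "key": "endpoint", "renewal_period": "minute"}
assert limit.as_shallow_dict()["key"] is RateLimitKey.ENDPOINT
assert RateLimit.from_dict(limit.as_dict()) == limit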
It must be an integer between 0 and 100 inclusive.""" - + def as_dict(self) -> dict: """Serializes the Route into a dictionary suitable for use as a JSON request body.""" body = {} - if self.served_model_name is not None: body['served_model_name'] = self.served_model_name - if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage + if self.served_model_name is not None: + body["served_model_name"] = self.served_model_name + if self.traffic_percentage is not None: + body["traffic_percentage"] = self.traffic_percentage return body def as_shallow_dict(self) -> dict: """Serializes the Route into a shallow dictionary of its immediate attributes.""" body = {} - if self.served_model_name is not None: body['served_model_name'] = self.served_model_name - if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage + if self.served_model_name is not None: + body["served_model_name"] = self.served_model_name + if self.traffic_percentage is not None: + body["traffic_percentage"] = self.traffic_percentage return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Route: """Deserializes the Route from a dictionary.""" - return cls(served_model_name=d.get('served_model_name', None), traffic_percentage=d.get('traffic_percentage', None)) - - + return cls( + served_model_name=d.get("served_model_name", None), traffic_percentage=d.get("traffic_percentage", None) + ) @dataclass @@ -2506,15 +2985,15 @@ class ServedEntityInput: a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.""" - + entity_version: Optional[str] = None - - environment_vars: Optional[Dict[str,str]] = None + + environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + external_model: Optional[ExternalModel] = None """The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with @@ -2522,36 +3001,36 @@ class ServedEntityInput: existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + max_provisioned_concurrency: Optional[int] = None """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.""" - + max_provisioned_throughput: Optional[int] = None """The maximum tokens per second that the endpoint can scale up to.""" - + min_provisioned_concurrency: Optional[int] = None """The minimum provisioned concurrency that the endpoint can scale down to. 
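A sketch of a traffic split built from Route objects, assuming the TrafficConfig dataclass defined later in this module; the served model names are hypothetical and the percentages are meant to sum to 100:

from databricks.sdk.service.serving import Route, TrafficConfig

traffic = TrafficConfig(
    routes=[
        Route(served_model_name="my_model-1", traffic_percentage=90),
        Route(served_model_name="my_model-2", traffic_percentage=10),
    ]
)
body = traffic.as_dict()  # each route serializes via Route.as_dict()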
Do not use if workload_size is specified.""" - + min_provisioned_throughput: Optional[int] = None """The minimum tokens per second that the endpoint can scale down to.""" - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can @@ -2560,7 +3039,7 @@ class ServedEntityInput: Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" - + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -2568,73 +3047,114 @@ class ServedEntityInput: available [GPU types]. [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedEntityInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entity_name is not None: body['entity_name'] = self.entity_name - if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.environment_vars: body['environment_vars'] = self.environment_vars - if self.external_model: body['external_model'] = self.external_model.as_dict() - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type.value + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.external_model: + 
body["external_model"] = self.external_model.as_dict() + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: + body["max_provisioned_throughput"] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: + body["min_provisioned_throughput"] = self.min_provisioned_throughput + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ServedEntityInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.entity_name is not None: body['entity_name'] = self.entity_name - if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.environment_vars: body['environment_vars'] = self.environment_vars - if self.external_model: body['external_model'] = self.external_model - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.external_model: + body["external_model"] = self.external_model + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: + body["max_provisioned_throughput"] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: + body["min_provisioned_throughput"] = self.min_provisioned_throughput + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is 
not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedEntityInput: """Deserializes the ServedEntityInput from a dictionary.""" - return cls(entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), environment_vars=d.get('environment_vars', None), external_model=_from_dict(d, 'external_model', ExternalModel), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), max_provisioned_throughput=d.get('max_provisioned_throughput', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), min_provisioned_throughput=d.get('min_provisioned_throughput', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) - - + return cls( + entity_name=d.get("entity_name", None), + entity_version=d.get("entity_version", None), + environment_vars=d.get("environment_vars", None), + external_model=_from_dict(d, "external_model", ExternalModel), + instance_profile_arn=d.get("instance_profile_arn", None), + max_provisioned_concurrency=d.get("max_provisioned_concurrency", None), + max_provisioned_throughput=d.get("max_provisioned_throughput", None), + min_provisioned_concurrency=d.get("min_provisioned_concurrency", None), + min_provisioned_throughput=d.get("min_provisioned_throughput", None), + name=d.get("name", None), + provisioned_model_units=d.get("provisioned_model_units", None), + scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), + workload_size=d.get("workload_size", None), + workload_type=_enum(d, "workload_type", ServingModelWorkloadType), + ) @dataclass class ServedEntityOutput: creation_timestamp: Optional[int] = None - + creator: Optional[str] = None - + entity_name: Optional[str] = None """The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.""" - + entity_version: Optional[str] = None - - environment_vars: Optional[Dict[str,str]] = None + + environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + external_model: Optional[ExternalModel] = None """The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with @@ -2642,42 +3162,42 @@ class ServedEntityOutput: existing endpoint with external_model, it cannot be updated to an endpoint without external_model. 
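# A minimal sketch (names are hypothetical) of the serialization pattern defined
# above: as_dict() drops unset (None) fields, and from_dict() rebuilds an equal
# dataclass from the wire body.
from databricks.sdk.service.serving import ServedEntityInput

entity = ServedEntityInput(
    entity_name="main.default.my_model",  # hypothetical UC model
    entity_version="1",
    workload_size="Small",
    scale_to_zero_enabled=True,
)
body = entity.as_dict()
assert "external_model" not in body  # unset fields are omitted from the body
assert ServedEntityInput.from_dict(body) == entity  # dataclass round-trip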
If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.""" - + foundation_model: Optional[FoundationModel] = None """All fields are not sensitive as they are hard-coded in the system and made available to customers.""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + max_provisioned_concurrency: Optional[int] = None """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.""" - + max_provisioned_throughput: Optional[int] = None """The maximum tokens per second that the endpoint can scale up to.""" - + min_provisioned_concurrency: Optional[int] = None """The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.""" - + min_provisioned_throughput: Optional[int] = None """The minimum tokens per second that the endpoint can scale down to.""" - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" - + state: Optional[ServedModelState] = None - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can @@ -2686,7 +3206,7 @@ class ServedEntityOutput: Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" - + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -2694,144 +3214,211 @@ class ServedEntityOutput: available [GPU types]. 
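# Sketch of selecting GPU compute per the workload_type/workload_size docs above.
# Assumes ServingModelWorkloadType carries the same members as the
# ServedModelInputWorkloadType enum shown later (CPU, GPU_SMALL, ...).
from databricks.sdk.service.serving import ServedEntityInput, ServingModelWorkloadType

gpu_entity = ServedEntityInput(
    entity_name="main.default.my_model",  # hypothetical
    entity_version="2",
    workload_type=ServingModelWorkloadType.GPU_SMALL,  # default would be "CPU"
    workload_size="Small",
    scale_to_zero_enabled=False,
)
assert gpu_entity.as_dict()["workload_type"] == "GPU_SMALL"  # enums serialize to .value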
[GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedEntityOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.entity_name is not None: body['entity_name'] = self.entity_name - if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.environment_vars: body['environment_vars'] = self.environment_vars - if self.external_model: body['external_model'] = self.external_model.as_dict() - if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict() - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.state: body['state'] = self.state.as_dict() - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type.value + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.external_model: + body["external_model"] = self.external_model.as_dict() + if self.foundation_model: + body["foundation_model"] = self.foundation_model.as_dict() + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: + body["max_provisioned_throughput"] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: + body["min_provisioned_throughput"] = self.min_provisioned_throughput + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.state: + body["state"] = self.state.as_dict() + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = 
self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ServedEntityOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.entity_name is not None: body['entity_name'] = self.entity_name - if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.environment_vars: body['environment_vars'] = self.environment_vars - if self.external_model: body['external_model'] = self.external_model - if self.foundation_model: body['foundation_model'] = self.foundation_model - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.state: body['state'] = self.state - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.external_model: + body["external_model"] = self.external_model + if self.foundation_model: + body["foundation_model"] = self.foundation_model + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: + body["max_provisioned_throughput"] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: + body["min_provisioned_throughput"] = self.min_provisioned_throughput + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.state: + body["state"] = self.state + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedEntityOutput: """Deserializes the ServedEntityOutput from a dictionary.""" - return 
cls(creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), environment_vars=d.get('environment_vars', None), external_model=_from_dict(d, 'external_model', ExternalModel), foundation_model=_from_dict(d, 'foundation_model', FoundationModel), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), max_provisioned_throughput=d.get('max_provisioned_throughput', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), min_provisioned_throughput=d.get('min_provisioned_throughput', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), state=_from_dict(d, 'state', ServedModelState), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + creator=d.get("creator", None), + entity_name=d.get("entity_name", None), + entity_version=d.get("entity_version", None), + environment_vars=d.get("environment_vars", None), + external_model=_from_dict(d, "external_model", ExternalModel), + foundation_model=_from_dict(d, "foundation_model", FoundationModel), + instance_profile_arn=d.get("instance_profile_arn", None), + max_provisioned_concurrency=d.get("max_provisioned_concurrency", None), + max_provisioned_throughput=d.get("max_provisioned_throughput", None), + min_provisioned_concurrency=d.get("min_provisioned_concurrency", None), + min_provisioned_throughput=d.get("min_provisioned_throughput", None), + name=d.get("name", None), + provisioned_model_units=d.get("provisioned_model_units", None), + scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), + state=_from_dict(d, "state", ServedModelState), + workload_size=d.get("workload_size", None), + workload_type=_enum(d, "workload_type", ServingModelWorkloadType), + ) @dataclass class ServedEntitySpec: entity_name: Optional[str] = None - + entity_version: Optional[str] = None - + external_model: Optional[ExternalModel] = None - + foundation_model: Optional[FoundationModel] = None """All fields are not sensitive as they are hard-coded in the system and made available to customers.""" - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServedEntitySpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entity_name is not None: body['entity_name'] = self.entity_name - if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.external_model: body['external_model'] = self.external_model.as_dict() - if self.foundation_model: body['foundation_model'] = self.foundation_model.as_dict() - if self.name is not None: body['name'] = self.name + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.external_model: + body["external_model"] = self.external_model.as_dict() + if self.foundation_model: + body["foundation_model"] = self.foundation_model.as_dict() + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the ServedEntitySpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.entity_name is not None: body['entity_name'] = self.entity_name - 
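# Sketch of deep vs. shallow serialization as implemented above: as_dict()
# recurses into nested objects via their own as_dict(), while as_shallow_dict()
# keeps them as dataclass/enum instances.
from databricks.sdk.service.serving import (
    ServedEntityOutput,
    ServedModelState,
    ServedModelStateDeployment,
)

out = ServedEntityOutput(
    name="my-entity",  # hypothetical
    state=ServedModelState(deployment=ServedModelStateDeployment.READY),
)
assert out.as_dict()["state"] == {"deployment": "DEPLOYMENT_READY"}  # recursive
assert out.as_shallow_dict()["state"] is out.state  # nested object left intact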
if self.entity_version is not None: body['entity_version'] = self.entity_version - if self.external_model: body['external_model'] = self.external_model - if self.foundation_model: body['foundation_model'] = self.foundation_model - if self.name is not None: body['name'] = self.name + if self.entity_name is not None: + body["entity_name"] = self.entity_name + if self.entity_version is not None: + body["entity_version"] = self.entity_version + if self.external_model: + body["external_model"] = self.external_model + if self.foundation_model: + body["foundation_model"] = self.foundation_model + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedEntitySpec: """Deserializes the ServedEntitySpec from a dictionary.""" - return cls(entity_name=d.get('entity_name', None), entity_version=d.get('entity_version', None), external_model=_from_dict(d, 'external_model', ExternalModel), foundation_model=_from_dict(d, 'foundation_model', FoundationModel), name=d.get('name', None)) - - + return cls( + entity_name=d.get("entity_name", None), + entity_version=d.get("entity_version", None), + external_model=_from_dict(d, "external_model", ExternalModel), + foundation_model=_from_dict(d, "foundation_model", FoundationModel), + name=d.get("name", None), + ) @dataclass class ServedModelInput: scale_to_zero_enabled: bool """Whether the compute resources for the served entity should scale down to zero.""" - + model_name: str - + model_version: str - - environment_vars: Optional[Dict[str,str]] = None + + environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + max_provisioned_concurrency: Optional[int] = None """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.""" - + max_provisioned_throughput: Optional[int] = None """The maximum tokens per second that the endpoint can scale up to.""" - + min_provisioned_concurrency: Optional[int] = None """The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.""" - + min_provisioned_throughput: Optional[int] = None """The minimum tokens per second that the endpoint can scale down to.""" - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. 
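# Sketch of ServedModelInput's knobs. scale_to_zero_enabled, model_name, and
# model_version are the required fields; per the field docs above, a named
# workload_size and explicit min/max provisioned-concurrency bounds are
# alternatives -- set one or the other, not both. Names are hypothetical.
from databricks.sdk.service.serving import ServedModelInput

# Named workload size (autoscaling range chosen by the platform):
sized = ServedModelInput(
    scale_to_zero_enabled=True, model_name="my_model", model_version="3",
    workload_size="Medium",
)
# Explicit concurrency bounds instead of a named size:
bounded = ServedModelInput(
    scale_to_zero_enabled=False, model_name="my_model", model_version="3",
    min_provisioned_concurrency=4, max_provisioned_concurrency=16,
)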
A single unit of provisioned concurrency can @@ -2840,7 +3427,7 @@ class ServedModelInput: Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" - + workload_type: Optional[ServedModelInputWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -2848,101 +3435,140 @@ class ServedModelInput: available [GPU types]. [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedModelInput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.environment_vars: body['environment_vars'] = self.environment_vars - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version is not None: body['model_version'] = self.model_version - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type.value + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: + body["max_provisioned_throughput"] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: + body["min_provisioned_throughput"] = self.min_provisioned_throughput + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version is not None: + body["model_version"] = self.model_version + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the 
ServedModelInput into a shallow dictionary of its immediate attributes.""" body = {} - if self.environment_vars: body['environment_vars'] = self.environment_vars - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.max_provisioned_throughput is not None: body['max_provisioned_throughput'] = self.max_provisioned_throughput - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.min_provisioned_throughput is not None: body['min_provisioned_throughput'] = self.min_provisioned_throughput - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version is not None: body['model_version'] = self.model_version - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.max_provisioned_throughput is not None: + body["max_provisioned_throughput"] = self.max_provisioned_throughput + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.min_provisioned_throughput is not None: + body["min_provisioned_throughput"] = self.min_provisioned_throughput + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version is not None: + body["model_version"] = self.model_version + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelInput: """Deserializes the ServedModelInput from a dictionary.""" - return cls(environment_vars=d.get('environment_vars', None), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), max_provisioned_throughput=d.get('max_provisioned_throughput', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), min_provisioned_throughput=d.get('min_provisioned_throughput', None), model_name=d.get('model_name', None), model_version=d.get('model_version', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServedModelInputWorkloadType)) - - + return cls( + environment_vars=d.get("environment_vars", None), + 
instance_profile_arn=d.get("instance_profile_arn", None), + max_provisioned_concurrency=d.get("max_provisioned_concurrency", None), + max_provisioned_throughput=d.get("max_provisioned_throughput", None), + min_provisioned_concurrency=d.get("min_provisioned_concurrency", None), + min_provisioned_throughput=d.get("min_provisioned_throughput", None), + model_name=d.get("model_name", None), + model_version=d.get("model_version", None), + name=d.get("name", None), + provisioned_model_units=d.get("provisioned_model_units", None), + scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), + workload_size=d.get("workload_size", None), + workload_type=_enum(d, "workload_type", ServedModelInputWorkloadType), + ) class ServedModelInputWorkloadType(Enum): """Please keep this in sync with workload types in InferenceEndpointEntities.scala""" - - CPU = 'CPU' - GPU_LARGE = 'GPU_LARGE' - GPU_MEDIUM = 'GPU_MEDIUM' - GPU_SMALL = 'GPU_SMALL' - MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM' + + CPU = "CPU" + GPU_LARGE = "GPU_LARGE" + GPU_MEDIUM = "GPU_MEDIUM" + GPU_SMALL = "GPU_SMALL" + MULTIGPU_MEDIUM = "MULTIGPU_MEDIUM" + @dataclass class ServedModelOutput: creation_timestamp: Optional[int] = None - + creator: Optional[str] = None - - environment_vars: Optional[Dict[str,str]] = None + + environment_vars: Optional[Dict[str, str]] = None """An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`""" - + instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" - + max_provisioned_concurrency: Optional[int] = None """The maximum provisioned concurrency that the endpoint can scale up to. Do not use if workload_size is specified.""" - + min_provisioned_concurrency: Optional[int] = None """The minimum provisioned concurrency that the endpoint can scale down to. Do not use if workload_size is specified.""" - + model_name: Optional[str] = None - + model_version: Optional[str] = None - + name: Optional[str] = None """The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.""" - + provisioned_model_units: Optional[int] = None """The number of model units provisioned.""" - + scale_to_zero_enabled: Optional[bool] = None """Whether the compute resources for the served entity should scale down to zero.""" - + state: Optional[ServedModelState] = None - + workload_size: Optional[str] = None """The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can @@ -2951,7 +3577,7 @@ class ServedModelOutput: Additional custom workload sizes can also be used when available in the workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
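# Sketch of enum handling on the wire: from_dict() converts the raw string back
# into the ServedModelInputWorkloadType member via the _enum helper. The model
# name and version are hypothetical.
from databricks.sdk.service.serving import ServedModelInput, ServedModelInputWorkloadType

restored = ServedModelInput.from_dict(
    {"model_name": "my_model", "model_version": "1",
     "scale_to_zero_enabled": True, "workload_type": "MULTIGPU_MEDIUM"}
)
assert restored.workload_type is ServedModelInputWorkloadType.MULTIGPU_MEDIUM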
Do not use if min_provisioned_concurrency and max_provisioned_concurrency are specified.""" - + workload_type: Optional[ServingModelWorkloadType] = None """The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU @@ -2959,147 +3585,199 @@ class ServedModelOutput: available [GPU types]. [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types""" - + def as_dict(self) -> dict: """Serializes the ServedModelOutput into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.environment_vars: body['environment_vars'] = self.environment_vars - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version is not None: body['model_version'] = self.model_version - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.state: body['state'] = self.state.as_dict() - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type.value + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version is not None: + body["model_version"] = self.model_version + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.state: + body["state"] = self.state.as_dict() + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = self.workload_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ServedModelOutput into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.environment_vars: body['environment_vars'] = self.environment_vars - if 
self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_provisioned_concurrency is not None: body['max_provisioned_concurrency'] = self.max_provisioned_concurrency - if self.min_provisioned_concurrency is not None: body['min_provisioned_concurrency'] = self.min_provisioned_concurrency - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version is not None: body['model_version'] = self.model_version - if self.name is not None: body['name'] = self.name - if self.provisioned_model_units is not None: body['provisioned_model_units'] = self.provisioned_model_units - if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled - if self.state: body['state'] = self.state - if self.workload_size is not None: body['workload_size'] = self.workload_size - if self.workload_type is not None: body['workload_type'] = self.workload_type + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.environment_vars: + body["environment_vars"] = self.environment_vars + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_provisioned_concurrency is not None: + body["max_provisioned_concurrency"] = self.max_provisioned_concurrency + if self.min_provisioned_concurrency is not None: + body["min_provisioned_concurrency"] = self.min_provisioned_concurrency + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version is not None: + body["model_version"] = self.model_version + if self.name is not None: + body["name"] = self.name + if self.provisioned_model_units is not None: + body["provisioned_model_units"] = self.provisioned_model_units + if self.scale_to_zero_enabled is not None: + body["scale_to_zero_enabled"] = self.scale_to_zero_enabled + if self.state: + body["state"] = self.state + if self.workload_size is not None: + body["workload_size"] = self.workload_size + if self.workload_type is not None: + body["workload_type"] = self.workload_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelOutput: """Deserializes the ServedModelOutput from a dictionary.""" - return cls(creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), environment_vars=d.get('environment_vars', None), instance_profile_arn=d.get('instance_profile_arn', None), max_provisioned_concurrency=d.get('max_provisioned_concurrency', None), min_provisioned_concurrency=d.get('min_provisioned_concurrency', None), model_name=d.get('model_name', None), model_version=d.get('model_version', None), name=d.get('name', None), provisioned_model_units=d.get('provisioned_model_units', None), scale_to_zero_enabled=d.get('scale_to_zero_enabled', None), state=_from_dict(d, 'state', ServedModelState), workload_size=d.get('workload_size', None), workload_type=_enum(d, 'workload_type', ServingModelWorkloadType)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + creator=d.get("creator", None), + environment_vars=d.get("environment_vars", None), + instance_profile_arn=d.get("instance_profile_arn", None), + max_provisioned_concurrency=d.get("max_provisioned_concurrency", None), + min_provisioned_concurrency=d.get("min_provisioned_concurrency", None), + model_name=d.get("model_name", None), + model_version=d.get("model_version", None), + name=d.get("name", 
None), + provisioned_model_units=d.get("provisioned_model_units", None), + scale_to_zero_enabled=d.get("scale_to_zero_enabled", None), + state=_from_dict(d, "state", ServedModelState), + workload_size=d.get("workload_size", None), + workload_type=_enum(d, "workload_type", ServingModelWorkloadType), + ) @dataclass class ServedModelSpec: model_name: Optional[str] = None """Only one of model_name and entity_name should be populated""" - + model_version: Optional[str] = None """Only one of model_version and entity_version should be populated""" - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServedModelSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version is not None: body['model_version'] = self.model_version - if self.name is not None: body['name'] = self.name + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version is not None: + body["model_version"] = self.model_version + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the ServedModelSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_name is not None: body['model_name'] = self.model_name - if self.model_version is not None: body['model_version'] = self.model_version - if self.name is not None: body['name'] = self.name + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version is not None: + body["model_version"] = self.model_version + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelSpec: """Deserializes the ServedModelSpec from a dictionary.""" - return cls(model_name=d.get('model_name', None), model_version=d.get('model_version', None), name=d.get('name', None)) - - + return cls( + model_name=d.get("model_name", None), model_version=d.get("model_version", None), name=d.get("name", None) + ) @dataclass class ServedModelState: deployment: Optional[ServedModelStateDeployment] = None - + deployment_state_message: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServedModelState into a dictionary suitable for use as a JSON request body.""" body = {} - if self.deployment is not None: body['deployment'] = self.deployment.value - if self.deployment_state_message is not None: body['deployment_state_message'] = self.deployment_state_message + if self.deployment is not None: + body["deployment"] = self.deployment.value + if self.deployment_state_message is not None: + body["deployment_state_message"] = self.deployment_state_message return body def as_shallow_dict(self) -> dict: """Serializes the ServedModelState into a shallow dictionary of its immediate attributes.""" body = {} - if self.deployment is not None: body['deployment'] = self.deployment - if self.deployment_state_message is not None: body['deployment_state_message'] = self.deployment_state_message + if self.deployment is not None: + body["deployment"] = self.deployment + if self.deployment_state_message is not None: + body["deployment_state_message"] = self.deployment_state_message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServedModelState: """Deserializes the ServedModelState from a dictionary.""" - return cls(deployment=_enum(d, 'deployment', ServedModelStateDeployment), deployment_state_message=d.get('deployment_state_message', 
None)) - - + return cls( + deployment=_enum(d, "deployment", ServedModelStateDeployment), + deployment_state_message=d.get("deployment_state_message", None), + ) class ServedModelStateDeployment(Enum): - - - ABORTED = 'DEPLOYMENT_ABORTED' - CREATING = 'DEPLOYMENT_CREATING' - FAILED = 'DEPLOYMENT_FAILED' - READY = 'DEPLOYMENT_READY' - RECOVERING = 'DEPLOYMENT_RECOVERING' + + ABORTED = "DEPLOYMENT_ABORTED" + CREATING = "DEPLOYMENT_CREATING" + FAILED = "DEPLOYMENT_FAILED" + READY = "DEPLOYMENT_READY" + RECOVERING = "DEPLOYMENT_RECOVERING" + @dataclass class ServerLogsResponse: logs: str """The most recent log lines of the model server processing invocation requests.""" - + def as_dict(self) -> dict: """Serializes the ServerLogsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.logs is not None: body['logs'] = self.logs + if self.logs is not None: + body["logs"] = self.logs return body def as_shallow_dict(self) -> dict: """Serializes the ServerLogsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.logs is not None: body['logs'] = self.logs + if self.logs is not None: + body["logs"] = self.logs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServerLogsResponse: """Deserializes the ServerLogsResponse from a dictionary.""" - return cls(logs=d.get('logs', None)) - - + return cls(logs=d.get("logs", None)) @dataclass @@ -3108,160 +3786,217 @@ class ServingEndpoint: """The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.""" - + budget_policy_id: Optional[str] = None """The budget policy associated with the endpoint.""" - + config: Optional[EndpointCoreConfigSummary] = None """The config that is currently being served by the endpoint.""" - + creation_timestamp: Optional[int] = None """The timestamp when the endpoint was created in Unix time.""" - + creator: Optional[str] = None """The email of the user who created the serving endpoint.""" - + id: Optional[str] = None """System-generated ID of the endpoint, included to be used by the Permissions API.""" - + last_updated_timestamp: Optional[int] = None """The timestamp when the endpoint was last updated by a user in Unix time.""" - + name: Optional[str] = None """The name of the serving endpoint.""" - + state: Optional[EndpointState] = None """Information corresponding to the state of the serving endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags attached to the serving endpoint.""" - + task: Optional[str] = None """The task type of the serving endpoint.""" - + def as_dict(self) -> dict: """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config.as_dict() - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.state: body['state'] = self.state.as_dict() - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.task is 
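# Sketch of inspecting a served model's deployment state. Note the member names
# of ServedModelStateDeployment differ from their wire values (READY <->
# "DEPLOYMENT_READY"); Enum lookup by value still works. Names are hypothetical.
from databricks.sdk.service.serving import ServedModelOutput, ServedModelStateDeployment

assert ServedModelStateDeployment("DEPLOYMENT_FAILED") is ServedModelStateDeployment.FAILED

served = ServedModelOutput.from_dict(
    {"model_name": "my_model", "model_version": "1",
     "state": {"deployment": "DEPLOYMENT_CREATING", "deployment_state_message": "provisioning"}}
)
if served.state and served.state.deployment is not ServedModelStateDeployment.READY:
    print(served.state.deployment_state_message)  # e.g. "provisioning"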
not None: body['task'] = self.task + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config.as_dict() + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.state: + body["state"] = self.state.as_dict() + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.task is not None: + body["task"] = self.task return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.state: body['state'] = self.state - if self.tags: body['tags'] = self.tags - if self.task is not None: body['task'] = self.task + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.state: + body["state"] = self.state + if self.tags: + body["tags"] = self.tags + if self.task is not None: + body["task"] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpoint: """Deserializes the ServingEndpoint from a dictionary.""" - return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', EndpointCoreConfigSummary), creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), state=_from_dict(d, 'state', EndpointState), tags=_repeated_dict(d, 'tags', EndpointTag), task=d.get('task', None)) - - + return cls( + ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), + budget_policy_id=d.get("budget_policy_id", None), + config=_from_dict(d, "config", EndpointCoreConfigSummary), + creation_timestamp=d.get("creation_timestamp", None), + creator=d.get("creator", None), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + name=d.get("name", None), + state=_from_dict(d, "state", EndpointState), + tags=_repeated_dict(d, "tags", EndpointTag), + task=d.get("task", None), + ) @dataclass class 
ServingEndpointAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[ServingEndpointPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointAccessControlRequest: """Deserializes the ServingEndpointAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class ServingEndpointAccessControlResponse: all_permissions: Optional[List[ServingEndpointPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if 
self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointAccessControlResponse: """Deserializes the ServingEndpointAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', ServingEndpointPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", ServingEndpointPermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass @@ -3270,353 +4005,442 @@ class ServingEndpointDetailed: """The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables.""" - + budget_policy_id: Optional[str] = None """The budget policy associated with the endpoint.""" - + config: Optional[EndpointCoreConfigOutput] = None """The config that is currently being served by the endpoint.""" - + creation_timestamp: Optional[int] = None """The timestamp when the endpoint was created in Unix time.""" - + creator: Optional[str] = None """The email of the user who created the serving endpoint.""" - + data_plane_info: Optional[ModelDataPlaneInfo] = None """Information required to query DataPlane APIs.""" - + endpoint_url: Optional[str] = None """Endpoint invocation url if route optimization is enabled for endpoint""" - + id: Optional[str] = None """System-generated ID of the endpoint. 
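# Sketch of granting query access on an endpoint via the ACL request type above;
# the user name is hypothetical.
from databricks.sdk.service.serving import (
    ServingEndpointAccessControlRequest,
    ServingEndpointPermissionLevel,
)

acl = ServingEndpointAccessControlRequest(
    user_name="someone@example.com",
    permission_level=ServingEndpointPermissionLevel.CAN_QUERY,
)
assert acl.as_dict() == {"user_name": "someone@example.com", "permission_level": "CAN_QUERY"}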
This is used to refer to the endpoint in the Permissions API""" - + last_updated_timestamp: Optional[int] = None """The timestamp when the endpoint was last updated by a user in Unix time.""" - + name: Optional[str] = None """The name of the serving endpoint.""" - + pending_config: Optional[EndpointPendingConfig] = None """The config that the endpoint is attempting to update to.""" - + permission_level: Optional[ServingEndpointDetailedPermissionLevel] = None """The permission level of the principal making the request.""" - + route_optimized: Optional[bool] = None """Boolean representing if route optimization has been enabled for the endpoint""" - + state: Optional[EndpointState] = None """Information corresponding to the state of the serving endpoint.""" - + tags: Optional[List[EndpointTag]] = None """Tags attached to the serving endpoint.""" - + task: Optional[str] = None """The task type of the serving endpoint.""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointDetailed into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config.as_dict() - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.data_plane_info: body['data_plane_info'] = self.data_plane_info.as_dict() - if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.pending_config: body['pending_config'] = self.pending_config.as_dict() - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.route_optimized is not None: body['route_optimized'] = self.route_optimized - if self.state: body['state'] = self.state.as_dict() - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] - if self.task is not None: body['task'] = self.task + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway.as_dict() + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config.as_dict() + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.data_plane_info: + body["data_plane_info"] = self.data_plane_info.as_dict() + if self.endpoint_url is not None: + body["endpoint_url"] = self.endpoint_url + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.pending_config: + body["pending_config"] = self.pending_config.as_dict() + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.route_optimized is not None: + body["route_optimized"] = self.route_optimized + if self.state: + body["state"] = self.state.as_dict() + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] + if self.task is not None: + body["task"] = self.task return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointDetailed into a 
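# Sketch of reading an ACL entry back: _repeated_dict deserializes each element
# of all_permissions into a ServingEndpointPermission (defined below). The user
# name is hypothetical.
from databricks.sdk.service.serving import (
    ServingEndpointAccessControlResponse,
    ServingEndpointPermissionLevel,
)

resp = ServingEndpointAccessControlResponse.from_dict(
    {"user_name": "someone@example.com",
     "all_permissions": [{"permission_level": "CAN_VIEW", "inherited": False}]}
)
first = resp.all_permissions[0]
assert first.permission_level is ServingEndpointPermissionLevel.CAN_VIEW
assert first.inherited is False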
shallow dictionary of its immediate attributes.""" body = {} - if self.ai_gateway: body['ai_gateway'] = self.ai_gateway - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.config: body['config'] = self.config - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.data_plane_info: body['data_plane_info'] = self.data_plane_info - if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.name is not None: body['name'] = self.name - if self.pending_config: body['pending_config'] = self.pending_config - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.route_optimized is not None: body['route_optimized'] = self.route_optimized - if self.state: body['state'] = self.state - if self.tags: body['tags'] = self.tags - if self.task is not None: body['task'] = self.task + if self.ai_gateway: + body["ai_gateway"] = self.ai_gateway + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.config: + body["config"] = self.config + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.data_plane_info: + body["data_plane_info"] = self.data_plane_info + if self.endpoint_url is not None: + body["endpoint_url"] = self.endpoint_url + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.name is not None: + body["name"] = self.name + if self.pending_config: + body["pending_config"] = self.pending_config + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.route_optimized is not None: + body["route_optimized"] = self.route_optimized + if self.state: + body["state"] = self.state + if self.tags: + body["tags"] = self.tags + if self.task is not None: + body["task"] = self.task return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointDetailed: """Deserializes the ServingEndpointDetailed from a dictionary.""" - return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig), budget_policy_id=d.get('budget_policy_id', None), config=_from_dict(d, 'config', EndpointCoreConfigOutput), creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), data_plane_info=_from_dict(d, 'data_plane_info', ModelDataPlaneInfo), endpoint_url=d.get('endpoint_url', None), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), name=d.get('name', None), pending_config=_from_dict(d, 'pending_config', EndpointPendingConfig), permission_level=_enum(d, 'permission_level', ServingEndpointDetailedPermissionLevel), route_optimized=d.get('route_optimized', None), state=_from_dict(d, 'state', EndpointState), tags=_repeated_dict(d, 'tags', EndpointTag), task=d.get('task', None)) - - + return cls( + ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), + budget_policy_id=d.get("budget_policy_id", None), + config=_from_dict(d, "config", EndpointCoreConfigOutput), + creation_timestamp=d.get("creation_timestamp", None), + creator=d.get("creator", None), + 
data_plane_info=_from_dict(d, "data_plane_info", ModelDataPlaneInfo), + endpoint_url=d.get("endpoint_url", None), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + name=d.get("name", None), + pending_config=_from_dict(d, "pending_config", EndpointPendingConfig), + permission_level=_enum(d, "permission_level", ServingEndpointDetailedPermissionLevel), + route_optimized=d.get("route_optimized", None), + state=_from_dict(d, "state", EndpointState), + tags=_repeated_dict(d, "tags", EndpointTag), + task=d.get("task", None), + ) class ServingEndpointDetailedPermissionLevel(Enum): - - - CAN_MANAGE = 'CAN_MANAGE' - CAN_QUERY = 'CAN_QUERY' - CAN_VIEW = 'CAN_VIEW' + + CAN_MANAGE = "CAN_MANAGE" + CAN_QUERY = "CAN_QUERY" + CAN_VIEW = "CAN_VIEW" + @dataclass class ServingEndpointPermission: inherited: Optional[bool] = None - + inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[ServingEndpointPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointPermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermission: """Deserializes the ServingEndpointPermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel), + ) class ServingEndpointPermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = 'CAN_MANAGE' - CAN_QUERY = 'CAN_QUERY' - CAN_VIEW = 'CAN_VIEW' + + CAN_MANAGE = "CAN_MANAGE" + CAN_QUERY = "CAN_QUERY" + CAN_VIEW = "CAN_VIEW" + @dataclass class ServingEndpointPermissions: access_control_list: Optional[List[ServingEndpointAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the ServingEndpointPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = 
[v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissions: """Deserializes the ServingEndpointPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', ServingEndpointAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class ServingEndpointPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[ServingEndpointPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the ServingEndpointPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissionsDescription: """Deserializes the ServingEndpointPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', ServingEndpointPermissionLevel)) - - + return cls( + description=d.get("description", None), + permission_level=_enum(d, "permission_level", ServingEndpointPermissionLevel), + ) @dataclass class ServingEndpointPermissionsRequest: access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None - + serving_endpoint_id: Optional[str] = None """The serving endpoint for which to get or manage permissions.""" - + def as_dict(self) -> dict: 
"""Serializes the ServingEndpointPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.serving_endpoint_id is not None: + body["serving_endpoint_id"] = self.serving_endpoint_id return body def as_shallow_dict(self) -> dict: """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.serving_endpoint_id is not None: + body["serving_endpoint_id"] = self.serving_endpoint_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissionsRequest: """Deserializes the ServingEndpointPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', ServingEndpointAccessControlRequest), serving_endpoint_id=d.get('serving_endpoint_id', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlRequest), + serving_endpoint_id=d.get("serving_endpoint_id", None), + ) class ServingModelWorkloadType(Enum): """Please keep this in sync with with workload types in InferenceEndpointEntities.scala""" - - CPU = 'CPU' - GPU_LARGE = 'GPU_LARGE' - GPU_MEDIUM = 'GPU_MEDIUM' - GPU_SMALL = 'GPU_SMALL' - MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM' + + CPU = "CPU" + GPU_LARGE = "GPU_LARGE" + GPU_MEDIUM = "GPU_MEDIUM" + GPU_SMALL = "GPU_SMALL" + MULTIGPU_MEDIUM = "MULTIGPU_MEDIUM" + @dataclass class TrafficConfig: routes: Optional[List[Route]] = None """The list of routes that define traffic to each served entity.""" - + def as_dict(self) -> dict: """Serializes the TrafficConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.routes: body['routes'] = [v.as_dict() for v in self.routes] + if self.routes: + body["routes"] = [v.as_dict() for v in self.routes] return body def as_shallow_dict(self) -> dict: """Serializes the TrafficConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.routes: body['routes'] = self.routes + if self.routes: + body["routes"] = self.routes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TrafficConfig: """Deserializes the TrafficConfig from a dictionary.""" - return cls(routes=_repeated_dict(d, 'routes', Route)) - - + return cls(routes=_repeated_dict(d, "routes", Route)) @dataclass class UpdateProvisionedThroughputEndpointConfigRequest: config: PtEndpointCoreConfig - + name: Optional[str] = None """The name of the pt endpoint to update. 
This field is required.""" - + def as_dict(self) -> dict: """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: body['config'] = self.config.as_dict() - if self.name is not None: body['name'] = self.name + if self.config: + body["config"] = self.config.as_dict() + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: body['config'] = self.config - if self.name is not None: body['name'] = self.name + if self.config: + body["config"] = self.config + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProvisionedThroughputEndpointConfigRequest: """Deserializes the UpdateProvisionedThroughputEndpointConfigRequest from a dictionary.""" - return cls(config=_from_dict(d, 'config', PtEndpointCoreConfig), name=d.get('name', None)) - - + return cls(config=_from_dict(d, "config", PtEndpointCoreConfig), name=d.get("name", None)) @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None """The finish reason returned by the endpoint.""" - + index: Optional[int] = None """The index of the choice in the __chat or completions__ response.""" - + logprobs: Optional[int] = None """The logprobs returned only by the __completions__ endpoint.""" - + message: Optional[ChatMessage] = None """The message response from the __chat__ endpoint.""" - + text: Optional[str] = None """The text response from the __completions__ endpoint.""" - + def as_dict(self) -> dict: """Serializes the V1ResponseChoiceElement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.finish_reason is not None: body['finishReason'] = self.finish_reason - if self.index is not None: body['index'] = self.index - if self.logprobs is not None: body['logprobs'] = self.logprobs - if self.message: body['message'] = self.message.as_dict() - if self.text is not None: body['text'] = self.text + if self.finish_reason is not None: + body["finishReason"] = self.finish_reason + if self.index is not None: + body["index"] = self.index + if self.logprobs is not None: + body["logprobs"] = self.logprobs + if self.message: + body["message"] = self.message.as_dict() + if self.text is not None: + body["text"] = self.text return body def as_shallow_dict(self) -> dict: """Serializes the V1ResponseChoiceElement into a shallow dictionary of its immediate attributes.""" body = {} - if self.finish_reason is not None: body['finishReason'] = self.finish_reason - if self.index is not None: body['index'] = self.index - if self.logprobs is not None: body['logprobs'] = self.logprobs - if self.message: body['message'] = self.message - if self.text is not None: body['text'] = self.text + if self.finish_reason is not None: + body["finishReason"] = self.finish_reason + if self.index is not None: + body["index"] = self.index + if self.logprobs is not None: + body["logprobs"] = self.logprobs + if self.message: + body["message"] = self.message + if self.text is not None: + body["text"] = self.text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> V1ResponseChoiceElement: """Deserializes the V1ResponseChoiceElement from a dictionary.""" - return cls(finish_reason=d.get('finishReason', None), index=d.get('index', None), logprobs=d.get('logprobs', None), 
message=_from_dict(d, 'message', ChatMessage), text=d.get('text', None)) - - - - + return cls( + finish_reason=d.get("finishReason", None), + index=d.get("index", None), + logprobs=d.get("logprobs", None), + message=_from_dict(d, "message", ChatMessage), + text=d.get("text", None), + ) class ServingEndpointsAPI: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints. - + You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog. Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means the endpoints and associated compute resources are fully managed by Databricks and will not appear in your @@ -3625,79 +4449,80 @@ class ServingEndpointsAPI: configure traffic settings to define how requests should be routed to your served entities behind an endpoint. Additionally, you can configure the scale of resources that should be applied to each served entity.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_serving_endpoint_not_updating(self, name: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[ServingEndpointDetailed], None]] = None) -> ServingEndpointDetailed: - deadline = time.time() + timeout.total_seconds() - target_states = (EndpointStateConfigUpdate.NOT_UPDATING, ) - failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, EndpointStateConfigUpdate.UPDATE_CANCELED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.state.config_update - status_message = f'current status: {status}' - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach NOT_UPDATING, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - def build_logs(self - , name: str, served_model_name: str - ) -> BuildLogsResponse: + def wait_get_serving_endpoint_not_updating( + self, + name: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[ServingEndpointDetailed], None]] = None, + ) -> ServingEndpointDetailed: + deadline = time.time() + timeout.total_seconds() + target_states = (EndpointStateConfigUpdate.NOT_UPDATING,) + failure_states = ( + EndpointStateConfigUpdate.UPDATE_FAILED, + EndpointStateConfigUpdate.UPDATE_CANCELED, + ) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.state.config_update + status_message = f"current status: {status}" + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach NOT_UPDATING, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse: """Get build logs for a served model. 
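A note on the waiter defined above: it polls get() with a linearly growing sleep, capped at 10 seconds and jittered, until the config update reaches NOT_UPDATING, and raises OperationFailed on UPDATE_FAILED or UPDATE_CANCELED. A minimal usage sketch (the endpoint name and the progress callback are illustrative, assuming an already-configured WorkspaceClient):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Block until the endpoint's config update settles, printing each poll result.
endpoint = w.serving_endpoints.wait_get_serving_endpoint_not_updating(
    name="my-endpoint",  # hypothetical endpoint name
    callback=lambda ep: print(f"config update state: {ep.state.config_update}"),
)
print(endpoint.state.ready)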
- + Retrieves the build logs associated with the provided served model. - + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. :param served_model_name: str The name of the served model that build logs will be retrieved for. This field is required. - + :returns: :class:`BuildLogsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/build-logs' - - , headers=headers - ) - return BuildLogsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/build-logs", headers=headers + ) + return BuildLogsResponse.from_dict(res) - def create(self - , name: str - , * - , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, config: Optional[EndpointCoreConfigInput] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]: + def create( + self, + name: str, + *, + ai_gateway: Optional[AiGatewayConfig] = None, + budget_policy_id: Optional[str] = None, + config: Optional[EndpointCoreConfigInput] = None, + rate_limits: Optional[List[RateLimit]] = None, + route_optimized: Optional[bool] = None, + tags: Optional[List[EndpointTag]] = None, + ) -> Wait[ServingEndpointDetailed]: """Create a new serving endpoint. - + :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -3716,46 +4541,71 @@ def create(self Enable route optimization for the serving endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. 
""" body = {} - if ai_gateway is not None: body['ai_gateway'] = ai_gateway.as_dict() - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - if config is not None: body['config'] = config.as_dict() - if name is not None: body['name'] = name - if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] - if route_optimized is not None: body['route_optimized'] = route_optimized - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.0/serving-endpoints', body=body - - , headers=headers - ) - return Wait(self.wait_get_serving_endpoint_not_updating - , response = ServingEndpointDetailed.from_dict(op_response) - , name=op_response['name']) - - - def create_and_wait(self - , name: str - , * - , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, config: Optional[EndpointCoreConfigInput] = None, rate_limits: Optional[List[RateLimit]] = None, route_optimized: Optional[bool] = None, tags: Optional[List[EndpointTag]] = None, - timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: - return self.create(ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, name=name, rate_limits=rate_limits, route_optimized=route_optimized, tags=tags).result(timeout=timeout) - - - - - def create_provisioned_throughput_endpoint(self - , name: str, config: PtEndpointCoreConfig - , * - , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]: + if ai_gateway is not None: + body["ai_gateway"] = ai_gateway.as_dict() + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + if config is not None: + body["config"] = config.as_dict() + if name is not None: + body["name"] = name + if rate_limits is not None: + body["rate_limits"] = [v.as_dict() for v in rate_limits] + if route_optimized is not None: + body["route_optimized"] = route_optimized + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.0/serving-endpoints", body=body, headers=headers) + return Wait( + self.wait_get_serving_endpoint_not_updating, + response=ServingEndpointDetailed.from_dict(op_response), + name=op_response["name"], + ) + + def create_and_wait( + self, + name: str, + *, + ai_gateway: Optional[AiGatewayConfig] = None, + budget_policy_id: Optional[str] = None, + config: Optional[EndpointCoreConfigInput] = None, + rate_limits: Optional[List[RateLimit]] = None, + route_optimized: Optional[bool] = None, + tags: Optional[List[EndpointTag]] = None, + timeout=timedelta(minutes=20), + ) -> ServingEndpointDetailed: + return self.create( + ai_gateway=ai_gateway, + budget_policy_id=budget_policy_id, + config=config, + name=name, + rate_limits=rate_limits, + route_optimized=route_optimized, + tags=tags, + ).result(timeout=timeout) + + def create_provisioned_throughput_endpoint( + self, + name: str, + config: PtEndpointCoreConfig, + *, + ai_gateway: Optional[AiGatewayConfig] = None, + budget_policy_id: Optional[str] = None, + tags: Optional[List[EndpointTag]] = None, + ) -> Wait[ServingEndpointDetailed]: """Create a new PT serving endpoint. - + :param name: str The name of the serving endpoint. 
This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -3767,194 +4617,167 @@ def create_provisioned_throughput_endpoint(self The budget policy associated with the endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. """ body = {} - if ai_gateway is not None: body['ai_gateway'] = ai_gateway.as_dict() - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - if config is not None: body['config'] = config.as_dict() - if name is not None: body['name'] = name - if tags is not None: body['tags'] = [v.as_dict() for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.0/serving-endpoints/pt', body=body - - , headers=headers - ) - return Wait(self.wait_get_serving_endpoint_not_updating - , response = ServingEndpointDetailed.from_dict(op_response) - , name=op_response['name']) - - - def create_provisioned_throughput_endpoint_and_wait(self - , name: str, config: PtEndpointCoreConfig - , * - , ai_gateway: Optional[AiGatewayConfig] = None, budget_policy_id: Optional[str] = None, tags: Optional[List[EndpointTag]] = None, - timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: - return self.create_provisioned_throughput_endpoint(ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, name=name, tags=tags).result(timeout=timeout) - - - - - def delete(self - , name: str - ): + if ai_gateway is not None: + body["ai_gateway"] = ai_gateway.as_dict() + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + if config is not None: + body["config"] = config.as_dict() + if name is not None: + body["name"] = name + if tags is not None: + body["tags"] = [v.as_dict() for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.0/serving-endpoints/pt", body=body, headers=headers) + return Wait( + self.wait_get_serving_endpoint_not_updating, + response=ServingEndpointDetailed.from_dict(op_response), + name=op_response["name"], + ) + + def create_provisioned_throughput_endpoint_and_wait( + self, + name: str, + config: PtEndpointCoreConfig, + *, + ai_gateway: Optional[AiGatewayConfig] = None, + budget_policy_id: Optional[str] = None, + tags: Optional[List[EndpointTag]] = None, + timeout=timedelta(minutes=20), + ) -> ServingEndpointDetailed: + return self.create_provisioned_throughput_endpoint( + ai_gateway=ai_gateway, budget_policy_id=budget_policy_id, config=config, name=name, tags=tags + ).result(timeout=timeout) + + def delete(self, name: str): """Delete a serving endpoint. - + :param name: str - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/serving-endpoints/{name}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.0/serving-endpoints/{name}", headers=headers) - def export_metrics(self - , name: str - ) -> ExportMetricsResponse: + def export_metrics(self, name: str) -> ExportMetricsResponse: """Get metrics of a serving endpoint. - + Retrieves the metrics associated with the provided serving endpoint in either Prometheus or OpenMetrics exposition format. 
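As a sketch of the metrics call just described (hedged: this assumes ExportMetricsResponse exposes the raw body as a contents stream, consistent with the raw=True handling above; the endpoint name is hypothetical):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Fetch metrics for one endpoint in Prometheus exposition format.
resp = w.serving_endpoints.export_metrics(name="my-endpoint")  # hypothetical name
text = resp.contents.read().decode("utf-8")  # assumes `contents` wraps the raw body
print(text.splitlines()[:5])  # first few metric lines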
- + :param name: str The name of the serving endpoint to retrieve metrics for. This field is required. - + :returns: :class:`ExportMetricsResponse` """ - - headers = {'Accept': 'text/plain',} - - res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/metrics' - - , headers=headers - , raw=True) - return ExportMetricsResponse.from_dict(res) - - - + headers = { + "Accept": "text/plain", + } - def get(self - , name: str - ) -> ServingEndpointDetailed: + res = self._api.do("GET", f"/api/2.0/serving-endpoints/{name}/metrics", headers=headers, raw=True) + return ExportMetricsResponse.from_dict(res) + + def get(self, name: str) -> ServingEndpointDetailed: """Get a single serving endpoint. - + Retrieves the details for a single serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. - + :returns: :class:`ServingEndpointDetailed` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}' - - , headers=headers - ) - return ServingEndpointDetailed.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/serving-endpoints/{name}", headers=headers) + return ServingEndpointDetailed.from_dict(res) - def get_open_api(self - , name: str - ) -> GetOpenApiResponse: + def get_open_api(self, name: str) -> GetOpenApiResponse: """Get the schema for a serving endpoint. - + Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for the supported paths, input and output format and datatypes. - + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. - + :returns: :class:`GetOpenApiResponse` """ - - headers = {'Accept': 'text/plain',} - - res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/openapi' - - , headers=headers - , raw=True) - return GetOpenApiResponse.from_dict(res) - - - + headers = { + "Accept": "text/plain", + } - def get_permission_levels(self - , serving_endpoint_id: str - ) -> GetServingEndpointPermissionLevelsResponse: + res = self._api.do("GET", f"/api/2.0/serving-endpoints/{name}/openapi", headers=headers, raw=True) + return GetOpenApiResponse.from_dict(res) + + def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse: """Get serving endpoint permission levels. - + Gets the permission levels that a user can have on an object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. - + :returns: :class:`GetServingEndpointPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}/permissionLevels' - - , headers=headers - ) - return GetServingEndpointPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}/permissionLevels", headers=headers + ) + return GetServingEndpointPermissionLevelsResponse.from_dict(res) - def get_permissions(self - , serving_endpoint_id: str - ) -> ServingEndpointPermissions: + def get_permissions(self, serving_endpoint_id: str) -> ServingEndpointPermissions: """Get serving endpoint permissions. - + Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root object. 
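Permissions calls are keyed by the endpoint's system-generated ID rather than its name, so a typical flow resolves the ID first. A minimal sketch (the endpoint name is hypothetical):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Resolve the endpoint's system-generated ID, then read back its ACL.
endpoint = w.serving_endpoints.get(name="my-endpoint")  # hypothetical name
perms = w.serving_endpoints.get_permissions(serving_endpoint_id=endpoint.id)
for acl in perms.access_control_list or []:
    principal = acl.user_name or acl.group_name or acl.service_principal_name
    levels = [p.permission_level for p in acl.all_permissions or []]
    print(principal, levels)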
- + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. - + :returns: :class:`ServingEndpointPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}' - - , headers=headers - ) - return ServingEndpointPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def http_request(self - , connection_name: str, method: ExternalFunctionRequestHttpMethod, path: str - , * - , headers: Optional[str] = None, json: Optional[str] = None, params: Optional[str] = None) -> HttpRequestResponse: + res = self._api.do("GET", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}", headers=headers) + return ServingEndpointPermissions.from_dict(res) + + def http_request( + self, + connection_name: str, + method: ExternalFunctionRequestHttpMethod, + path: str, + *, + headers: Optional[str] = None, + json: Optional[str] = None, + params: Optional[str] = None, + ) -> HttpRequestResponse: """Make an external service call using the credentials stored in a UC Connection. - + :param connection_name: str The connection name to use. This is required to identify the external connection. :param method: :class:`ExternalFunctionRequestHttpMethod` @@ -3968,144 +4791,133 @@ def http_request(self The JSON payload to send in the request body. :param params: str (optional) Query parameters for the request. - 
This field is required. - + :returns: :class:`ServerLogsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/logs' - - , headers=headers - ) - return ServerLogsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/serving-endpoints/{name}/served-models/{served_model_name}/logs", headers=headers + ) + return ServerLogsResponse.from_dict(res) - def patch(self - , name: str - , * - , add_tags: Optional[List[EndpointTag]] = None, delete_tags: Optional[List[str]] = None) -> EndpointTags: + def patch( + self, name: str, *, add_tags: Optional[List[EndpointTag]] = None, delete_tags: Optional[List[str]] = None + ) -> EndpointTags: """Update tags of a serving endpoint. - + Used to batch add and delete tags from a serving endpoint with a single API call. - + :param name: str The name of the serving endpoint whose tags to patch. This field is required. :param add_tags: List[:class:`EndpointTag`] (optional) List of endpoint tags to add :param delete_tags: List[str] (optional) List of tag keys to delete - + :returns: :class:`EndpointTags` """ body = {} - if add_tags is not None: body['add_tags'] = [v.as_dict() for v in add_tags] - if delete_tags is not None: body['delete_tags'] = [v for v in delete_tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/serving-endpoints/{name}/tags', body=body - - , headers=headers - ) - return EndpointTags.from_dict(res) + if add_tags is not None: + body["add_tags"] = [v.as_dict() for v in add_tags] + if delete_tags is not None: + body["delete_tags"] = [v for v in delete_tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PATCH", f"/api/2.0/serving-endpoints/{name}/tags", body=body, headers=headers) + return EndpointTags.from_dict(res) - def put(self - , name: str - , * - , rate_limits: Optional[List[RateLimit]] = None) -> PutResponse: + def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse: """Update rate limits of a serving endpoint. - + Deprecated: Please use AI Gateway to manage rate limits instead. - + :param name: str The name of the serving endpoint whose rate limits are being updated. This field is required. :param rate_limits: List[:class:`RateLimit`] (optional) The list of endpoint rate limits. 
- + :returns: :class:`PutResponse` """ body = {} - if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/serving-endpoints/{name}/rate-limits', body=body - - , headers=headers - ) - return PutResponse.from_dict(res) + if rate_limits is not None: + body["rate_limits"] = [v.as_dict() for v in rate_limits] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", f"/api/2.0/serving-endpoints/{name}/rate-limits", body=body, headers=headers) + return PutResponse.from_dict(res) - def put_ai_gateway(self - , name: str - , * - , fallback_config: Optional[FallbackConfig] = None, guardrails: Optional[AiGatewayGuardrails] = None, inference_table_config: Optional[AiGatewayInferenceTableConfig] = None, rate_limits: Optional[List[AiGatewayRateLimit]] = None, usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse: + def put_ai_gateway( + self, + name: str, + *, + fallback_config: Optional[FallbackConfig] = None, + guardrails: Optional[AiGatewayGuardrails] = None, + inference_table_config: Optional[AiGatewayInferenceTableConfig] = None, + rate_limits: Optional[List[AiGatewayRateLimit]] = None, + usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None, + ) -> PutAiGatewayResponse: """Update AI Gateway of a serving endpoint. - + Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables. - + :param name: str The name of the serving endpoint whose AI Gateway is being updated. This field is required. :param fallback_config: :class:`FallbackConfig` (optional) @@ -4121,33 +4933,48 @@ def put_ai_gateway(self :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional) Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs. 
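For example, a sketch that caps per-user traffic and enables usage tracking via this call (the endpoint name and limit values are illustrative):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (AiGatewayRateLimit,
                                            AiGatewayRateLimitKey,
                                            AiGatewayRateLimitRenewalPeriod,
                                            AiGatewayUsageTrackingConfig)

w = WorkspaceClient()

# Cap each user at 100 calls per minute and log usage to system tables.
w.serving_endpoints.put_ai_gateway(
    name="my-endpoint",  # hypothetical name
    rate_limits=[
        AiGatewayRateLimit(
            calls=100,
            key=AiGatewayRateLimitKey.USER,
            renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
        )
    ],
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
)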
- + :returns: :class:`PutAiGatewayResponse` """ body = {} - if fallback_config is not None: body['fallback_config'] = fallback_config.as_dict() - if guardrails is not None: body['guardrails'] = guardrails.as_dict() - if inference_table_config is not None: body['inference_table_config'] = inference_table_config.as_dict() - if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits] - if usage_tracking_config is not None: body['usage_tracking_config'] = usage_tracking_config.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/serving-endpoints/{name}/ai-gateway', body=body - - , headers=headers - ) + if fallback_config is not None: + body["fallback_config"] = fallback_config.as_dict() + if guardrails is not None: + body["guardrails"] = guardrails.as_dict() + if inference_table_config is not None: + body["inference_table_config"] = inference_table_config.as_dict() + if rate_limits is not None: + body["rate_limits"] = [v.as_dict() for v in rate_limits] + if usage_tracking_config is not None: + body["usage_tracking_config"] = usage_tracking_config.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PUT", f"/api/2.0/serving-endpoints/{name}/ai-gateway", body=body, headers=headers) return PutAiGatewayResponse.from_dict(res) - - - - - def query(self - , name: str - , * - , dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str,str]] = None, input: Optional[Any] = None, inputs: Optional[Any] = None, instances: Optional[List[Any]] = None, max_tokens: Optional[int] = None, messages: Optional[List[ChatMessage]] = None, n: Optional[int] = None, prompt: Optional[Any] = None, stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None) -> QueryEndpointResponse: + def query( + self, + name: str, + *, + dataframe_records: Optional[List[Any]] = None, + dataframe_split: Optional[DataframeSplitInput] = None, + extra_params: Optional[Dict[str, str]] = None, + input: Optional[Any] = None, + inputs: Optional[Any] = None, + instances: Optional[List[Any]] = None, + max_tokens: Optional[int] = None, + messages: Optional[List[ChatMessage]] = None, + n: Optional[int] = None, + prompt: Optional[Any] = None, + stop: Optional[List[str]] = None, + stream: Optional[bool] = None, + temperature: Optional[float] = None, + ) -> QueryEndpointResponse: """Query a serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. :param dataframe_records: List[Any] (optional) @@ -4191,74 +5018,97 @@ def query(self The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. 
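A usage sketch of the chat-style fields described above (the endpoint name is hypothetical, and reading choices[0].message assumes a chat response):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

w = WorkspaceClient()

# Query a chat endpoint using the chat-specific fields.
response = w.serving_endpoints.query(
    name="my-chat-endpoint",  # hypothetical name
    messages=[ChatMessage(role=ChatMessageRole.USER, content="What is MLflow?")],
    max_tokens=128,
    temperature=0.5,
)
print(response.choices[0].message.content)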
- + :returns: :class:`QueryEndpointResponse` """ body = {} - if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records] - if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict() - if extra_params is not None: body['extra_params'] = extra_params - if input is not None: body['input'] = input - if inputs is not None: body['inputs'] = inputs - if instances is not None: body['instances'] = [v for v in instances] - if max_tokens is not None: body['max_tokens'] = max_tokens - if messages is not None: body['messages'] = [v.as_dict() for v in messages] - if n is not None: body['n'] = n - if prompt is not None: body['prompt'] = prompt - if stop is not None: body['stop'] = [v for v in stop] - if stream is not None: body['stream'] = stream - if temperature is not None: body['temperature'] = temperature - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - response_headers = ['served-model-name',] - res = self._api.do('POST',f'/serving-endpoints/{name}/invocations', body=body - - , headers=headers - , response_headers=response_headers) + if dataframe_records is not None: + body["dataframe_records"] = [v for v in dataframe_records] + if dataframe_split is not None: + body["dataframe_split"] = dataframe_split.as_dict() + if extra_params is not None: + body["extra_params"] = extra_params + if input is not None: + body["input"] = input + if inputs is not None: + body["inputs"] = inputs + if instances is not None: + body["instances"] = [v for v in instances] + if max_tokens is not None: + body["max_tokens"] = max_tokens + if messages is not None: + body["messages"] = [v.as_dict() for v in messages] + if n is not None: + body["n"] = n + if prompt is not None: + body["prompt"] = prompt + if stop is not None: + body["stop"] = [v for v in stop] + if stream is not None: + body["stream"] = stream + if temperature is not None: + body["temperature"] = temperature + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + response_headers = [ + "served-model-name", + ] + res = self._api.do( + "POST", + f"/serving-endpoints/{name}/invocations", + body=body, + headers=headers, + response_headers=response_headers, + ) return QueryEndpointResponse.from_dict(res) - - - - - def set_permissions(self - , serving_endpoint_id: str - , * - , access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None) -> ServingEndpointPermissions: + def set_permissions( + self, + serving_endpoint_id: str, + *, + access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None, + ) -> ServingEndpointPermissions: """Set serving endpoint permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. 
:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}', body=body - - , headers=headers - ) - return ServingEndpointPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PUT", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}", body=body, headers=headers + ) + return ServingEndpointPermissions.from_dict(res) - def update_config(self - , name: str - , * - , auto_capture_config: Optional[AutoCaptureConfigInput] = None, served_entities: Optional[List[ServedEntityInput]] = None, served_models: Optional[List[ServedModelInput]] = None, traffic_config: Optional[TrafficConfig] = None) -> Wait[ServingEndpointDetailed]: + def update_config( + self, + name: str, + *, + auto_capture_config: Optional[AutoCaptureConfigInput] = None, + served_entities: Optional[List[ServedEntityInput]] = None, + served_models: Optional[List[ServedModelInput]] = None, + traffic_config: Optional[TrafficConfig] = None, + ) -> Wait[ServingEndpointDetailed]: """Update config of a serving endpoint. - + Updates any combination of the serving endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. An endpoint that already has an update in progress cannot be updated until the current update completes or fails. - + :param name: str The name of the serving endpoint to update. This field is required. :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional) @@ -4273,147 +5123,168 @@ def update_config(self config. :param traffic_config: :class:`TrafficConfig` (optional) The traffic configuration associated with the serving endpoint config. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. 
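A sketch of rolling a new model version onto an endpoint with this call and blocking on the rollout (the UC model name, version, and route name are illustrative; the route name assumes the default <model>-<version> served-entity naming):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import Route, ServedEntityInput, TrafficConfig

w = WorkspaceClient()

# Serve version 2 of a UC-registered model and send it all traffic.
w.serving_endpoints.update_config_and_wait(
    name="my-endpoint",  # hypothetical name
    served_entities=[
        ServedEntityInput(
            entity_name="main.default.my_model",  # hypothetical UC model
            entity_version="2",
            workload_size="Small",
            scale_to_zero_enabled=True,
        )
    ],
    traffic_config=TrafficConfig(routes=[Route(served_model_name="my_model-2", traffic_percentage=100)]),
)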
""" body = {} - if auto_capture_config is not None: body['auto_capture_config'] = auto_capture_config.as_dict() - if served_entities is not None: body['served_entities'] = [v.as_dict() for v in served_entities] - if served_models is not None: body['served_models'] = [v.as_dict() for v in served_models] - if traffic_config is not None: body['traffic_config'] = traffic_config.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('PUT',f'/api/2.0/serving-endpoints/{name}/config', body=body - - , headers=headers - ) - return Wait(self.wait_get_serving_endpoint_not_updating - , response = ServingEndpointDetailed.from_dict(op_response) - , name=op_response['name']) - - - def update_config_and_wait(self - , name: str - , * - , auto_capture_config: Optional[AutoCaptureConfigInput] = None, served_entities: Optional[List[ServedEntityInput]] = None, served_models: Optional[List[ServedModelInput]] = None, traffic_config: Optional[TrafficConfig] = None, - timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: - return self.update_config(auto_capture_config=auto_capture_config, name=name, served_entities=served_entities, served_models=served_models, traffic_config=traffic_config).result(timeout=timeout) - - - - - def update_permissions(self - , serving_endpoint_id: str - , * - , access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None) -> ServingEndpointPermissions: + if auto_capture_config is not None: + body["auto_capture_config"] = auto_capture_config.as_dict() + if served_entities is not None: + body["served_entities"] = [v.as_dict() for v in served_entities] + if served_models is not None: + body["served_models"] = [v.as_dict() for v in served_models] + if traffic_config is not None: + body["traffic_config"] = traffic_config.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("PUT", f"/api/2.0/serving-endpoints/{name}/config", body=body, headers=headers) + return Wait( + self.wait_get_serving_endpoint_not_updating, + response=ServingEndpointDetailed.from_dict(op_response), + name=op_response["name"], + ) + + def update_config_and_wait( + self, + name: str, + *, + auto_capture_config: Optional[AutoCaptureConfigInput] = None, + served_entities: Optional[List[ServedEntityInput]] = None, + served_models: Optional[List[ServedModelInput]] = None, + traffic_config: Optional[TrafficConfig] = None, + timeout=timedelta(minutes=20), + ) -> ServingEndpointDetailed: + return self.update_config( + auto_capture_config=auto_capture_config, + name=name, + served_entities=served_entities, + served_models=served_models, + traffic_config=traffic_config, + ).result(timeout=timeout) + + def update_permissions( + self, + serving_endpoint_id: str, + *, + access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None, + ) -> ServingEndpointPermissions: """Update serving endpoint permissions. - + Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. 
:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}', body=body - - , headers=headers - ) - return ServingEndpointPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PATCH", f"/api/2.0/permissions/serving-endpoints/{serving_endpoint_id}", body=body, headers=headers + ) + return ServingEndpointPermissions.from_dict(res) - def update_provisioned_throughput_endpoint_config(self - , name: str, config: PtEndpointCoreConfig - ) -> Wait[ServingEndpointDetailed]: + def update_provisioned_throughput_endpoint_config( + self, name: str, config: PtEndpointCoreConfig + ) -> Wait[ServingEndpointDetailed]: """Update config of a PT serving endpoint. - + Updates any combination of the PT endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. Updates are instantaneous and the endpoint should be updated instantly. - + :param name: str The name of the PT endpoint to update. This field is required. :param config: :class:`PtEndpointCoreConfig` - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. """ body = {} - if config is not None: body['config'] = config.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('PUT',f'/api/2.0/serving-endpoints/pt/{name}/config', body=body - - , headers=headers - ) - return Wait(self.wait_get_serving_endpoint_not_updating - , response = ServingEndpointDetailed.from_dict(op_response) - , name=op_response['name']) - - - def update_provisioned_throughput_endpoint_config_and_wait(self - , name: str, config: PtEndpointCoreConfig - , - timeout=timedelta(minutes=20)) -> ServingEndpointDetailed: + if config is not None: + body["config"] = config.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("PUT", f"/api/2.0/serving-endpoints/pt/{name}/config", body=body, headers=headers) + return Wait( + self.wait_get_serving_endpoint_not_updating, + response=ServingEndpointDetailed.from_dict(op_response), + name=op_response["name"], + ) + + def update_provisioned_throughput_endpoint_config_and_wait( + self, name: str, config: PtEndpointCoreConfig, timeout=timedelta(minutes=20) + ) -> ServingEndpointDetailed: return self.update_provisioned_throughput_endpoint_config(config=config, name=name).result(timeout=timeout) - - + + class ServingEndpointsDataPlaneAPI: """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" - + def __init__(self, api_client, control_plane_service, dpts): self._api = api_client self._lock = threading.Lock() self._control_plane_service = control_plane_service self._dpts = dpts self._data_plane_details = {} - - - - - - - - def _data_plane_info_query (self - , name: str - ) -> DataPlaneInfo: - key = "query" + "/".join([ - 
str(name), - ]) + def _data_plane_info_query(self, name: str) -> DataPlaneInfo: + key = "query" + "/".join( + [ + str(name), + ] + ) with self._lock: if key in self._data_plane_details: return self._data_plane_details[key] response = self._control_plane_service.get( - name = name, - ) + name=name, + ) if response.data_plane_info is None: raise Exception("Resource does not support direct Data Plane access") result = response.data_plane_info.query_info with self._lock: self._data_plane_details[key] = result return result - - def query(self - , name: str - , * - , dataframe_records: Optional[List[Any]] = None, dataframe_split: Optional[DataframeSplitInput] = None, extra_params: Optional[Dict[str,str]] = None, input: Optional[Any] = None, inputs: Optional[Any] = None, instances: Optional[List[Any]] = None, max_tokens: Optional[int] = None, messages: Optional[List[ChatMessage]] = None, n: Optional[int] = None, prompt: Optional[Any] = None, stop: Optional[List[str]] = None, stream: Optional[bool] = None, temperature: Optional[float] = None) -> QueryEndpointResponse: + def query( + self, + name: str, + *, + dataframe_records: Optional[List[Any]] = None, + dataframe_split: Optional[DataframeSplitInput] = None, + extra_params: Optional[Dict[str, str]] = None, + input: Optional[Any] = None, + inputs: Optional[Any] = None, + instances: Optional[List[Any]] = None, + max_tokens: Optional[int] = None, + messages: Optional[List[ChatMessage]] = None, + n: Optional[int] = None, + prompt: Optional[Any] = None, + stop: Optional[List[str]] = None, + stream: Optional[bool] = None, + temperature: Optional[float] = None, + ) -> QueryEndpointResponse: """Query a serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. :param dataframe_records: List[Any] (optional) @@ -4457,39 +5328,59 @@ def query(self The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. 
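This data-plane variant resolves the endpoint's direct invocation URL once via the control plane (cached per endpoint under the lock above) and signs each request with a data-plane OAuth token instead of routing through the workspace API. A sketch, assuming the workspace client exposes this API as serving_endpoints_data_plane and the endpoint is route-optimized:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Query the endpoint's data-plane URL directly; requires route optimization.
response = w.serving_endpoints_data_plane.query(
    name="my-route-optimized-endpoint",  # hypothetical name
    inputs={"prompt": "hello"},
)
print(response.as_dict())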
- + :returns: :class:`QueryEndpointResponse` """ body = {} - if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records] - if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict() - if extra_params is not None: body['extra_params'] = extra_params - if input is not None: body['input'] = input - if inputs is not None: body['inputs'] = inputs - if instances is not None: body['instances'] = [v for v in instances] - if max_tokens is not None: body['max_tokens'] = max_tokens - if messages is not None: body['messages'] = [v.as_dict() for v in messages] - if n is not None: body['n'] = n - if prompt is not None: body['prompt'] = prompt - if stop is not None: body['stop'] = [v for v in stop] - if stream is not None: body['stream'] = stream - if temperature is not None: body['temperature'] = temperature + if dataframe_records is not None: + body["dataframe_records"] = [v for v in dataframe_records] + if dataframe_split is not None: + body["dataframe_split"] = dataframe_split.as_dict() + if extra_params is not None: + body["extra_params"] = extra_params + if input is not None: + body["input"] = input + if inputs is not None: + body["inputs"] = inputs + if instances is not None: + body["instances"] = [v for v in instances] + if max_tokens is not None: + body["max_tokens"] = max_tokens + if messages is not None: + body["messages"] = [v.as_dict() for v in messages] + if n is not None: + body["n"] = n + if prompt is not None: + body["prompt"] = prompt + if stop is not None: + body["stop"] = [v for v in stop] + if stream is not None: + body["stream"] = stream + if temperature is not None: + body["temperature"] = temperature data_plane_info = self._data_plane_info_query( - name = name, - ) + name=name, + ) token = self._dpts.token(data_plane_info.endpoint_url, data_plane_info.authorization_details) def auth(r: requests.PreparedRequest) -> requests.PreparedRequest: authorization = f"{token.token_type} {token.access_token}" r.headers["Authorization"] = authorization return r - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - response_headers = ['served-model-name',] - res = self._api.do('POST',url=data_plane_info.endpoint_url, body=body - - , headers=headers - , response_headers=response_headers,auth=auth) - return QueryEndpointResponse.from_dict(res) - - \ No newline at end of file + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + response_headers = [ + "served-model-name", + ] + res = self._api.do( + "POST", + url=data_plane_info.endpoint_url, + body=body, + headers=headers, + response_headers=response_headers, + auth=auth, + ) + return QueryEndpointResponse.from_dict(res) diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index f3d019dcf..3bdbffb31 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -1,30 +1,24 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
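# NOTE: an illustrative sketch, not generated code. Most settings in this module are
# read and written through the etag-based get -> modify -> update pattern described in
# the docstrings below; assuming a WorkspaceClient `w`, and with the angle-bracket
# placeholders standing in for a concrete setting API and field (assumptions, not
# generated identifiers):
#
#     current = w.settings.<some_setting>.get()   # the response carries the latest etag
#     current.<field> = <new_value>               # modify the setting in place
#     w.settings.<some_setting>.update(           # the etag travels back with the
#         allow_missing=True,                     # setting so the server can detect
#         setting=current,                        # conflicting concurrent writes
#         field_mask="<field>",
#     )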
from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AccountIpAccessEnable: acct_ip_acl_enable: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -32,107 +26,125 @@ class AccountIpAccessEnable: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AccountIpAccessEnable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.acct_ip_acl_enable: + body["acct_ip_acl_enable"] = self.acct_ip_acl_enable.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AccountIpAccessEnable into a shallow dictionary of its immediate attributes.""" body = {} - if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.acct_ip_acl_enable: + body["acct_ip_acl_enable"] = self.acct_ip_acl_enable + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountIpAccessEnable: """Deserializes the AccountIpAccessEnable from a dictionary.""" - return cls(acct_ip_acl_enable=_from_dict(d, 'acct_ip_acl_enable', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + acct_ip_acl_enable=_from_dict(d, "acct_ip_acl_enable", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class AccountNetworkPolicy: account_id: Optional[str] = None """The associated account ID for this Network Policy object.""" - + egress: Optional[NetworkPolicyEgress] = None """The network policies 
applying to egress traffic.""" - + network_policy_id: Optional[str] = None """The unique identifier for the network policy.""" - + def as_dict(self) -> dict: """Serializes the AccountNetworkPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.egress: body['egress'] = self.egress.as_dict() - if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.egress: + body["egress"] = self.egress.as_dict() + if self.network_policy_id is not None: + body["network_policy_id"] = self.network_policy_id return body def as_shallow_dict(self) -> dict: """Serializes the AccountNetworkPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.egress: body['egress'] = self.egress - if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.egress: + body["egress"] = self.egress + if self.network_policy_id is not None: + body["network_policy_id"] = self.network_policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccountNetworkPolicy: """Deserializes the AccountNetworkPolicy from a dictionary.""" - return cls(account_id=d.get('account_id', None), egress=_from_dict(d, 'egress', NetworkPolicyEgress), network_policy_id=d.get('network_policy_id', None)) - - + return cls( + account_id=d.get("account_id", None), + egress=_from_dict(d, "egress", NetworkPolicyEgress), + network_policy_id=d.get("network_policy_id", None), + ) @dataclass class AibiDashboardEmbeddingAccessPolicy: access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type.value + if self.access_policy_type is not None: + body["access_policy_type"] = self.access_policy_type.value return body def as_shallow_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type + if self.access_policy_type is not None: + body["access_policy_type"] = self.access_policy_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingAccessPolicy: """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary.""" - return cls(access_policy_type=_enum(d, 'access_policy_type', AibiDashboardEmbeddingAccessPolicyAccessPolicyType)) - - + return cls( + access_policy_type=_enum(d, "access_policy_type", AibiDashboardEmbeddingAccessPolicyAccessPolicyType) + ) class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum): - - - ALLOW_ALL_DOMAINS = 'ALLOW_ALL_DOMAINS' - ALLOW_APPROVED_DOMAINS = 'ALLOW_APPROVED_DOMAINS' - DENY_ALL_DOMAINS = 'DENY_ALL_DOMAINS' + + ALLOW_ALL_DOMAINS = "ALLOW_ALL_DOMAINS" + ALLOW_APPROVED_DOMAINS = "ALLOW_APPROVED_DOMAINS" + DENY_ALL_DOMAINS = "DENY_ALL_DOMAINS" + @dataclass class AibiDashboardEmbeddingAccessPolicySetting: aibi_dashboard_embedding_access_policy: AibiDashboardEmbeddingAccessPolicy - + etag: Optional[str] = None """etag
used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -140,65 +152,75 @@ class AibiDashboardEmbeddingAccessPolicySetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aibi_dashboard_embedding_access_policy: body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.aibi_dashboard_embedding_access_policy: + body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.aibi_dashboard_embedding_access_policy: body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.aibi_dashboard_embedding_access_policy: + body["aibi_dashboard_embedding_access_policy"] = self.aibi_dashboard_embedding_access_policy + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingAccessPolicySetting: """Deserializes the AibiDashboardEmbeddingAccessPolicySetting from a dictionary.""" - return cls(aibi_dashboard_embedding_access_policy=_from_dict(d, 'aibi_dashboard_embedding_access_policy', AibiDashboardEmbeddingAccessPolicy), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + aibi_dashboard_embedding_access_policy=_from_dict( + d, "aibi_dashboard_embedding_access_policy", AibiDashboardEmbeddingAccessPolicy + ), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class AibiDashboardEmbeddingApprovedDomains: approved_domains: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body.""" body = {} - if self.approved_domains: body['approved_domains'] = [v for v in self.approved_domains] + if self.approved_domains: + body["approved_domains"] = [v for v in self.approved_domains] return body def as_shallow_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes.""" body = {} - if self.approved_domains: 
body['approved_domains'] = self.approved_domains + if self.approved_domains: + body["approved_domains"] = self.approved_domains return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingApprovedDomains: """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary.""" - return cls(approved_domains=d.get('approved_domains', None)) - - + return cls(approved_domains=d.get("approved_domains", None)) @dataclass class AibiDashboardEmbeddingApprovedDomainsSetting: aibi_dashboard_embedding_approved_domains: AibiDashboardEmbeddingApprovedDomains - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -206,41 +228,51 @@ class AibiDashboardEmbeddingApprovedDomainsSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aibi_dashboard_embedding_approved_domains: body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.aibi_dashboard_embedding_approved_domains: + body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.aibi_dashboard_embedding_approved_domains: body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.aibi_dashboard_embedding_approved_domains: + body["aibi_dashboard_embedding_approved_domains"] = self.aibi_dashboard_embedding_approved_domains + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AibiDashboardEmbeddingApprovedDomainsSetting: """Deserializes the AibiDashboardEmbeddingApprovedDomainsSetting from a dictionary.""" - return cls(aibi_dashboard_embedding_approved_domains=_from_dict(d, 'aibi_dashboard_embedding_approved_domains', AibiDashboardEmbeddingApprovedDomains), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + aibi_dashboard_embedding_approved_domains=_from_dict( + d, "aibi_dashboard_embedding_approved_domains", AibiDashboardEmbeddingApprovedDomains + ), + etag=d.get("etag", 
None), + setting_name=d.get("setting_name", None), + ) @dataclass class AutomaticClusterUpdateSetting: automatic_cluster_update_workspace: ClusterAutoRestartMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -248,104 +280,128 @@ class AutomaticClusterUpdateSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the AutomaticClusterUpdateSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.automatic_cluster_update_workspace: body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.automatic_cluster_update_workspace: + body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the AutomaticClusterUpdateSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.automatic_cluster_update_workspace: body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.automatic_cluster_update_workspace: + body["automatic_cluster_update_workspace"] = self.automatic_cluster_update_workspace + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AutomaticClusterUpdateSetting: """Deserializes the AutomaticClusterUpdateSetting from a dictionary.""" - return cls(automatic_cluster_update_workspace=_from_dict(d, 'automatic_cluster_update_workspace', ClusterAutoRestartMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + automatic_cluster_update_workspace=_from_dict( + d, "automatic_cluster_update_workspace", ClusterAutoRestartMessage + ), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class BooleanMessage: value: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the BooleanMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: body['value'] = self.value + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the BooleanMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.value is not None: body['value'] = self.value + if self.value is not None: + body["value"] = self.value 
return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BooleanMessage: """Deserializes the BooleanMessage from a dictionary.""" - return cls(value=d.get('value', None)) - - + return cls(value=d.get("value", None)) @dataclass class ClusterAutoRestartMessage: can_toggle: Optional[bool] = None - + enabled: Optional[bool] = None - + enablement_details: Optional[ClusterAutoRestartMessageEnablementDetails] = None """Contains information about the enablement status (e.g. whether the enterprise tier is enabled). This is only additional information that MUST NOT be used to decide whether the setting is enabled or not. This is intended to be used only for purposes like showing an error message to the customer with the additional details. For example, using these details we can check why exactly the feature is disabled for this customer.""" - + maintenance_window: Optional[ClusterAutoRestartMessageMaintenanceWindow] = None - + restart_even_if_no_updates_available: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_toggle is not None: body['can_toggle'] = self.can_toggle - if self.enabled is not None: body['enabled'] = self.enabled - if self.enablement_details: body['enablement_details'] = self.enablement_details.as_dict() - if self.maintenance_window: body['maintenance_window'] = self.maintenance_window.as_dict() - if self.restart_even_if_no_updates_available is not None: body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available + if self.can_toggle is not None: + body["can_toggle"] = self.can_toggle + if self.enabled is not None: + body["enabled"] = self.enabled + if self.enablement_details: + body["enablement_details"] = self.enablement_details.as_dict() + if self.maintenance_window: + body["maintenance_window"] = self.maintenance_window.as_dict() + if self.restart_even_if_no_updates_available is not None: + body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.can_toggle is not None: body['can_toggle'] = self.can_toggle - if self.enabled is not None: body['enabled'] = self.enabled - if self.enablement_details: body['enablement_details'] = self.enablement_details - if self.maintenance_window: body['maintenance_window'] = self.maintenance_window - if self.restart_even_if_no_updates_available is not None: body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available + if self.can_toggle is not None: + body["can_toggle"] = self.can_toggle + if self.enabled is not None: + body["enabled"] = self.enabled + if self.enablement_details: + body["enablement_details"] = self.enablement_details + if self.maintenance_window: + body["maintenance_window"] = self.maintenance_window + if self.restart_even_if_no_updates_available is not None: + body["restart_even_if_no_updates_available"] = self.restart_even_if_no_updates_available return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessage: """Deserializes the ClusterAutoRestartMessage from a dictionary.""" - return cls(can_toggle=d.get('can_toggle', None), enabled=d.get('enabled', None), enablement_details=_from_dict(d, 'enablement_details', ClusterAutoRestartMessageEnablementDetails), maintenance_window=_from_dict(d,
'maintenance_window', ClusterAutoRestartMessageMaintenanceWindow), restart_even_if_no_updates_available=d.get('restart_even_if_no_updates_available', None)) - - + return cls( + can_toggle=d.get("can_toggle", None), + enabled=d.get("enabled", None), + enablement_details=_from_dict(d, "enablement_details", ClusterAutoRestartMessageEnablementDetails), + maintenance_window=_from_dict(d, "maintenance_window", ClusterAutoRestartMessageMaintenanceWindow), + restart_even_if_no_updates_available=d.get("restart_even_if_no_updates_available", None), + ) @dataclass @@ -355,183 +411,212 @@ class ClusterAutoRestartMessageEnablementDetails: is enabled or not. This is intended to be used only for purposes like showing an error message to the customer with the additional details. For example, using these details we can check why exactly the feature is disabled for this customer.""" - + forced_for_compliance_mode: Optional[bool] = None """The feature is force-enabled if compliance mode is active""" - + unavailable_for_disabled_entitlement: Optional[bool] = None """The feature is unavailable if the corresponding entitlement is disabled (see getShieldEntitlementEnable)""" - + unavailable_for_non_enterprise_tier: Optional[bool] = None """The feature is unavailable if the customer doesn't have the enterprise tier""" - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageEnablementDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.forced_for_compliance_mode is not None: body['forced_for_compliance_mode'] = self.forced_for_compliance_mode - if self.unavailable_for_disabled_entitlement is not None: body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement - if self.unavailable_for_non_enterprise_tier is not None: body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier + if self.forced_for_compliance_mode is not None: + body["forced_for_compliance_mode"] = self.forced_for_compliance_mode + if self.unavailable_for_disabled_entitlement is not None: + body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement + if self.unavailable_for_non_enterprise_tier is not None: + body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.forced_for_compliance_mode is not None: body['forced_for_compliance_mode'] = self.forced_for_compliance_mode - if self.unavailable_for_disabled_entitlement is not None: body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement - if self.unavailable_for_non_enterprise_tier is not None: body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier + if self.forced_for_compliance_mode is not None: + body["forced_for_compliance_mode"] = self.forced_for_compliance_mode + if self.unavailable_for_disabled_entitlement is not None: + body["unavailable_for_disabled_entitlement"] = self.unavailable_for_disabled_entitlement + if self.unavailable_for_non_enterprise_tier is not None: + body["unavailable_for_non_enterprise_tier"] = self.unavailable_for_non_enterprise_tier return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageEnablementDetails: """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary.""" - return
cls(forced_for_compliance_mode=d.get('forced_for_compliance_mode', None), unavailable_for_disabled_entitlement=d.get('unavailable_for_disabled_entitlement', None), unavailable_for_non_enterprise_tier=d.get('unavailable_for_non_enterprise_tier', None)) - - + return cls( + forced_for_compliance_mode=d.get("forced_for_compliance_mode", None), + unavailable_for_disabled_entitlement=d.get("unavailable_for_disabled_entitlement", None), + unavailable_for_non_enterprise_tier=d.get("unavailable_for_non_enterprise_tier", None), + ) @dataclass class ClusterAutoRestartMessageMaintenanceWindow: week_day_based_schedule: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule] = None - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a dictionary suitable for use as a JSON request body.""" body = {} - if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule.as_dict() + if self.week_day_based_schedule: + body["week_day_based_schedule"] = self.week_day_based_schedule.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes.""" body = {} - if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule + if self.week_day_based_schedule: + body["week_day_based_schedule"] = self.week_day_based_schedule return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindow: """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary.""" - return cls(week_day_based_schedule=_from_dict(d, 'week_day_based_schedule', ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule)) - - + return cls( + week_day_based_schedule=_from_dict( + d, "week_day_based_schedule", ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule + ) + ) class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum): - - - FRIDAY = 'FRIDAY' - MONDAY = 'MONDAY' - SATURDAY = 'SATURDAY' - SUNDAY = 'SUNDAY' - THURSDAY = 'THURSDAY' - TUESDAY = 'TUESDAY' - WEDNESDAY = 'WEDNESDAY' + + FRIDAY = "FRIDAY" + MONDAY = "MONDAY" + SATURDAY = "SATURDAY" + SUNDAY = "SUNDAY" + THURSDAY = "THURSDAY" + TUESDAY = "TUESDAY" + WEDNESDAY = "WEDNESDAY" + @dataclass class ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: day_of_week: Optional[ClusterAutoRestartMessageMaintenanceWindowDayOfWeek] = None - + frequency: Optional[ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency] = None - + window_start_time: Optional[ClusterAutoRestartMessageMaintenanceWindowWindowStartTime] = None - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.day_of_week is not None: body['day_of_week'] = self.day_of_week.value - if self.frequency is not None: body['frequency'] = self.frequency.value - if self.window_start_time: body['window_start_time'] = self.window_start_time.as_dict() + if self.day_of_week is not None: + body["day_of_week"] = self.day_of_week.value + if self.frequency is not None: + body["frequency"] = self.frequency.value + if self.window_start_time: + body["window_start_time"] = self.window_start_time.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes.""" body = 
{} - if self.day_of_week is not None: body['day_of_week'] = self.day_of_week - if self.frequency is not None: body['frequency'] = self.frequency - if self.window_start_time: body['window_start_time'] = self.window_start_time + if self.day_of_week is not None: + body["day_of_week"] = self.day_of_week + if self.frequency is not None: + body["frequency"] = self.frequency + if self.window_start_time: + body["window_start_time"] = self.window_start_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule: """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary.""" - return cls(day_of_week=_enum(d, 'day_of_week', ClusterAutoRestartMessageMaintenanceWindowDayOfWeek), frequency=_enum(d, 'frequency', ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency), window_start_time=_from_dict(d, 'window_start_time', ClusterAutoRestartMessageMaintenanceWindowWindowStartTime)) - - + return cls( + day_of_week=_enum(d, "day_of_week", ClusterAutoRestartMessageMaintenanceWindowDayOfWeek), + frequency=_enum(d, "frequency", ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency), + window_start_time=_from_dict( + d, "window_start_time", ClusterAutoRestartMessageMaintenanceWindowWindowStartTime + ), + ) class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum): - - - EVERY_WEEK = 'EVERY_WEEK' - FIRST_AND_THIRD_OF_MONTH = 'FIRST_AND_THIRD_OF_MONTH' - FIRST_OF_MONTH = 'FIRST_OF_MONTH' - FOURTH_OF_MONTH = 'FOURTH_OF_MONTH' - SECOND_AND_FOURTH_OF_MONTH = 'SECOND_AND_FOURTH_OF_MONTH' - SECOND_OF_MONTH = 'SECOND_OF_MONTH' - THIRD_OF_MONTH = 'THIRD_OF_MONTH' + + EVERY_WEEK = "EVERY_WEEK" + FIRST_AND_THIRD_OF_MONTH = "FIRST_AND_THIRD_OF_MONTH" + FIRST_OF_MONTH = "FIRST_OF_MONTH" + FOURTH_OF_MONTH = "FOURTH_OF_MONTH" + SECOND_AND_FOURTH_OF_MONTH = "SECOND_AND_FOURTH_OF_MONTH" + SECOND_OF_MONTH = "SECOND_OF_MONTH" + THIRD_OF_MONTH = "THIRD_OF_MONTH" + @dataclass class ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: hours: Optional[int] = None - + minutes: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a dictionary suitable for use as a JSON request body.""" body = {} - if self.hours is not None: body['hours'] = self.hours - if self.minutes is not None: body['minutes'] = self.minutes + if self.hours is not None: + body["hours"] = self.hours + if self.minutes is not None: + body["minutes"] = self.minutes return body def as_shallow_dict(self) -> dict: """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes.""" body = {} - if self.hours is not None: body['hours'] = self.hours - if self.minutes is not None: body['minutes'] = self.minutes + if self.hours is not None: + body["hours"] = self.hours + if self.minutes is not None: + body["minutes"] = self.minutes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime: """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary.""" - return cls(hours=d.get('hours', None), minutes=d.get('minutes', None)) - - + return cls(hours=d.get("hours", None), minutes=d.get("minutes", None)) @dataclass class ComplianceSecurityProfile: """SHIELD feature: CSP""" - + compliance_standards: Optional[List[ComplianceStandard]] = None """Set by customers when they request Compliance Security 
Profile (CSP)""" - + is_enabled: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.compliance_standards: body['compliance_standards'] = [v.value for v in self.compliance_standards] - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + if self.compliance_standards: + body["compliance_standards"] = [v.value for v in self.compliance_standards] + if self.is_enabled is not None: + body["is_enabled"] = self.is_enabled return body def as_shallow_dict(self) -> dict: """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.compliance_standards: body['compliance_standards'] = self.compliance_standards - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + if self.compliance_standards: + body["compliance_standards"] = self.compliance_standards + if self.is_enabled is not None: + body["is_enabled"] = self.is_enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfile: """Deserializes the ComplianceSecurityProfile from a dictionary.""" - return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard), is_enabled=d.get('is_enabled', None)) - - + return cls( + compliance_standards=_repeated_enum(d, "compliance_standards", ComplianceStandard), + is_enabled=d.get("is_enabled", None), + ) @dataclass class ComplianceSecurityProfileSetting: compliance_security_profile_workspace: ComplianceSecurityProfile """SHIELD feature: CSP""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -539,461 +624,522 @@ class ComplianceSecurityProfileSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the ComplianceSecurityProfileSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.compliance_security_profile_workspace: body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.compliance_security_profile_workspace: + body["compliance_security_profile_workspace"] = self.compliance_security_profile_workspace.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the ComplianceSecurityProfileSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.compliance_security_profile_workspace: body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.compliance_security_profile_workspace: + body["compliance_security_profile_workspace"] = self.compliance_security_profile_workspace + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ComplianceSecurityProfileSetting: """Deserializes the ComplianceSecurityProfileSetting from a dictionary.""" - return cls(compliance_security_profile_workspace=_from_dict(d, 'compliance_security_profile_workspace', ComplianceSecurityProfile), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + compliance_security_profile_workspace=_from_dict( + d, "compliance_security_profile_workspace", ComplianceSecurityProfile + ), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) class ComplianceStandard(Enum): """Compliance standard for SHIELD customers""" - - CANADA_PROTECTED_B = 'CANADA_PROTECTED_B' - CYBER_ESSENTIAL_PLUS = 'CYBER_ESSENTIAL_PLUS' - FEDRAMP_HIGH = 'FEDRAMP_HIGH' - FEDRAMP_IL5 = 'FEDRAMP_IL5' - FEDRAMP_MODERATE = 'FEDRAMP_MODERATE' - HIPAA = 'HIPAA' - HITRUST = 'HITRUST' - IRAP_PROTECTED = 'IRAP_PROTECTED' - ISMAP = 'ISMAP' - ITAR_EAR = 'ITAR_EAR' - K_FSI = 'K_FSI' - NONE = 'NONE' - PCI_DSS = 'PCI_DSS' + + CANADA_PROTECTED_B = "CANADA_PROTECTED_B" + CYBER_ESSENTIAL_PLUS = "CYBER_ESSENTIAL_PLUS" + FEDRAMP_HIGH = "FEDRAMP_HIGH" + FEDRAMP_IL5 = "FEDRAMP_IL5" + FEDRAMP_MODERATE = "FEDRAMP_MODERATE" + HIPAA = "HIPAA" + HITRUST = "HITRUST" + IRAP_PROTECTED = "IRAP_PROTECTED" + ISMAP = "ISMAP" + ITAR_EAR = "ITAR_EAR" + K_FSI = "K_FSI" + NONE = "NONE" + PCI_DSS = "PCI_DSS" + @dataclass class Config: email: Optional[EmailConfig] = None - + generic_webhook: Optional[GenericWebhookConfig] = None - + microsoft_teams: Optional[MicrosoftTeamsConfig] = None - + pagerduty: Optional[PagerdutyConfig] = None - + slack: Optional[SlackConfig] = None - + def as_dict(self) -> dict: """Serializes the Config into a dictionary suitable for use as a JSON request body.""" body = {} - if self.email: body['email'] = self.email.as_dict() - if self.generic_webhook: body['generic_webhook'] = self.generic_webhook.as_dict() - if self.microsoft_teams: body['microsoft_teams'] =
self.microsoft_teams.as_dict() - if self.pagerduty: body['pagerduty'] = self.pagerduty.as_dict() - if self.slack: body['slack'] = self.slack.as_dict() + if self.email: + body["email"] = self.email.as_dict() + if self.generic_webhook: + body["generic_webhook"] = self.generic_webhook.as_dict() + if self.microsoft_teams: + body["microsoft_teams"] = self.microsoft_teams.as_dict() + if self.pagerduty: + body["pagerduty"] = self.pagerduty.as_dict() + if self.slack: + body["slack"] = self.slack.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Config into a shallow dictionary of its immediate attributes.""" body = {} - if self.email: body['email'] = self.email - if self.generic_webhook: body['generic_webhook'] = self.generic_webhook - if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams - if self.pagerduty: body['pagerduty'] = self.pagerduty - if self.slack: body['slack'] = self.slack + if self.email: + body["email"] = self.email + if self.generic_webhook: + body["generic_webhook"] = self.generic_webhook + if self.microsoft_teams: + body["microsoft_teams"] = self.microsoft_teams + if self.pagerduty: + body["pagerduty"] = self.pagerduty + if self.slack: + body["slack"] = self.slack return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Config: """Deserializes the Config from a dictionary.""" - return cls(email=_from_dict(d, 'email', EmailConfig), generic_webhook=_from_dict(d, 'generic_webhook', GenericWebhookConfig), microsoft_teams=_from_dict(d, 'microsoft_teams', MicrosoftTeamsConfig), pagerduty=_from_dict(d, 'pagerduty', PagerdutyConfig), slack=_from_dict(d, 'slack', SlackConfig)) - - + return cls( + email=_from_dict(d, "email", EmailConfig), + generic_webhook=_from_dict(d, "generic_webhook", GenericWebhookConfig), + microsoft_teams=_from_dict(d, "microsoft_teams", MicrosoftTeamsConfig), + pagerduty=_from_dict(d, "pagerduty", PagerdutyConfig), + slack=_from_dict(d, "slack", SlackConfig), + ) @dataclass class CreateIpAccessList: """Details required to configure a block list or allow list.""" - + label: str """Label for the IP access list. This **cannot** be empty.""" - + list_type: ListType """Type of IP access list. Valid values are as follows and are case-sensitive: * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" - + ip_addresses: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the CreateIpAccessList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] - if self.label is not None: body['label'] = self.label - if self.list_type is not None: body['list_type'] = self.list_type.value + if self.ip_addresses: + body["ip_addresses"] = [v for v in self.ip_addresses] + if self.label is not None: + body["label"] = self.label + if self.list_type is not None: + body["list_type"] = self.list_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_addresses: body['ip_addresses'] = self.ip_addresses - if self.label is not None: body['label'] = self.label - if self.list_type is not None: body['list_type'] = self.list_type + if self.ip_addresses: + body["ip_addresses"] = self.ip_addresses + if self.label is not None: + body["label"] = self.label + if self.list_type is not None: + body["list_type"] = self.list_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateIpAccessList: """Deserializes the CreateIpAccessList from a dictionary.""" - return cls(ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_type=_enum(d, 'list_type', ListType)) - - + return cls( + ip_addresses=d.get("ip_addresses", None), + label=d.get("label", None), + list_type=_enum(d, "list_type", ListType), + ) @dataclass class CreateIpAccessListResponse: """An IP access list was successfully created.""" - + ip_access_list: Optional[IpAccessListInfo] = None """Definition of an IP Access list""" - + def as_dict(self) -> dict: """Serializes the CreateIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateIpAccessListResponse: """Deserializes the CreateIpAccessListResponse from a dictionary.""" - return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) - - - - - + return cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo)) @dataclass class CreateNetworkConnectivityConfiguration: """Properties of the new network connectivity configuration.""" - + name: str """The name of the network connectivity configuration. The name can contain alphanumeric characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the regular expression ^[0-9a-zA-Z-_]{3,30}$""" - + region: str """The region for the network connectivity configuration. 
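For example (a minimal sketch with illustrative values; the exact region string depends on your cloud):

        CreateNetworkConnectivityConfiguration(name="my-ncc-01", region="us-east-1")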
Only workspaces in the same region can be attached to the network connectivity configuration.""" - + def as_dict(self) -> dict: """Serializes the CreateNetworkConnectivityConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.region is not None: body['region'] = self.region + if self.name is not None: + body["name"] = self.name + if self.region is not None: + body["region"] = self.region return body def as_shallow_dict(self) -> dict: """Serializes the CreateNetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.region is not None: body['region'] = self.region + if self.name is not None: + body["name"] = self.name + if self.region is not None: + body["region"] = self.region return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkConnectivityConfiguration: """Deserializes the CreateNetworkConnectivityConfiguration from a dictionary.""" - return cls(name=d.get('name', None), region=d.get('region', None)) - - - - - + return cls(name=d.get("name", None), region=d.get("region", None)) @dataclass class CreateNotificationDestinationRequest: config: Optional[Config] = None """The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + def as_dict(self) -> dict: """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: body['config'] = self.config.as_dict() - if self.display_name is not None: body['display_name'] = self.display_name + if self.config: + body["config"] = self.config.as_dict() + if self.display_name is not None: + body["display_name"] = self.display_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: body['config'] = self.config - if self.display_name is not None: body['display_name'] = self.display_name + if self.config: + body["config"] = self.config + if self.display_name is not None: + body["display_name"] = self.display_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateNotificationDestinationRequest: """Deserializes the CreateNotificationDestinationRequest from a dictionary.""" - return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None)) - - + return cls(config=_from_dict(d, "config", Config), display_name=d.get("display_name", None)) @dataclass class CreateOboTokenRequest: """Configuration details for creating on-behalf tokens.""" - + application_id: str """Application ID of the service principal.""" - + comment: Optional[str] = None """Comment that describes the purpose of the token.""" - + lifetime_seconds: Optional[int] = None """The number of seconds before the token expires.""" - + def as_dict(self) -> dict: """Serializes the CreateOboTokenRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.application_id is not None: body['application_id'] = self.application_id - if self.comment is not None: body['comment'] = self.comment - if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds + if self.application_id is not None: + body["application_id"] = self.application_id + 
if self.comment is not None: + body["comment"] = self.comment + if self.lifetime_seconds is not None: + body["lifetime_seconds"] = self.lifetime_seconds return body def as_shallow_dict(self) -> dict: """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.application_id is not None: body['application_id'] = self.application_id - if self.comment is not None: body['comment'] = self.comment - if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds + if self.application_id is not None: + body["application_id"] = self.application_id + if self.comment is not None: + body["comment"] = self.comment + if self.lifetime_seconds is not None: + body["lifetime_seconds"] = self.lifetime_seconds return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateOboTokenRequest: """Deserializes the CreateOboTokenRequest from a dictionary.""" - return cls(application_id=d.get('application_id', None), comment=d.get('comment', None), lifetime_seconds=d.get('lifetime_seconds', None)) - - + return cls( + application_id=d.get("application_id", None), + comment=d.get("comment", None), + lifetime_seconds=d.get("lifetime_seconds", None), + ) @dataclass class CreateOboTokenResponse: """An on-behalf token was successfully created for the service principal.""" - + token_info: Optional[TokenInfo] = None - + token_value: Optional[str] = None """Value of the token.""" - + def as_dict(self) -> dict: """Serializes the CreateOboTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_info: body['token_info'] = self.token_info.as_dict() - if self.token_value is not None: body['token_value'] = self.token_value + if self.token_info: + body["token_info"] = self.token_info.as_dict() + if self.token_value is not None: + body["token_value"] = self.token_value return body def as_shallow_dict(self) -> dict: """Serializes the CreateOboTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_info: body['token_info'] = self.token_info - if self.token_value is not None: body['token_value'] = self.token_value + if self.token_info: + body["token_info"] = self.token_info + if self.token_value is not None: + body["token_value"] = self.token_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateOboTokenResponse: """Deserializes the CreateOboTokenResponse from a dictionary.""" - return cls(token_info=_from_dict(d, 'token_info', TokenInfo), token_value=d.get('token_value', None)) - - + return cls(token_info=_from_dict(d, "token_info", TokenInfo), token_value=d.get("token_value", None)) @dataclass class CreatePrivateEndpointRule: """Properties of the new private endpoint rule. Note that you must approve the endpoint in the Azure portal after initialization.""" - + domain_names: Optional[List[str]] = None """Only used by private endpoints to customer-managed private endpoint services. Domain names of the target private link service. When updating this field, the full list of target domain_names must be specified.""" - + endpoint_service: Optional[str] = None """The full target AWS endpoint service name that connects to the destination resources of the private endpoint.""" - + group_id: Optional[str] = None """Not used by customer-managed private endpoint services. The sub-resource type (group ID) of the target resource.
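For example (a sketch with placeholder values; the elided resource ID is illustrative only):

        CreatePrivateEndpointRule(
            resource_id="/subscriptions/.../storageAccounts/mysa",  # placeholder Azure resource ID
            group_id="blob",
        )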
Note that to connect to workspace root storage (root DBFS), you need two endpoints, one for blob and one for dfs.""" - + resource_id: Optional[str] = None """The Azure resource ID of the target resource.""" - + resource_names: Optional[List[str]] = None """Only used by private endpoints towards the AWS S3 service. The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names must be in the same region as the NCC/endpoint service. When updating this field, we perform a full update of this field. Please ensure a full list of desired resource_names is provided.""" - + def as_dict(self) -> dict: """Serializes the CreatePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.domain_names: body['domain_names'] = [v for v in self.domain_names] - if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service - if self.group_id is not None: body['group_id'] = self.group_id - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.resource_names: body['resource_names'] = [v for v in self.resource_names] + if self.domain_names: + body["domain_names"] = [v for v in self.domain_names] + if self.endpoint_service is not None: + body["endpoint_service"] = self.endpoint_service + if self.group_id is not None: + body["group_id"] = self.group_id + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.resource_names: + body["resource_names"] = [v for v in self.resource_names] return body def as_shallow_dict(self) -> dict: """Serializes the CreatePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.domain_names: body['domain_names'] = self.domain_names - if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service - if self.group_id is not None: body['group_id'] = self.group_id - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.resource_names: body['resource_names'] = self.resource_names + if self.domain_names: + body["domain_names"] = self.domain_names + if self.endpoint_service is not None: + body["endpoint_service"] = self.endpoint_service + if self.group_id is not None: + body["group_id"] = self.group_id + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.resource_names: + body["resource_names"] = self.resource_names return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreatePrivateEndpointRule: """Deserializes the CreatePrivateEndpointRule from a dictionary.""" - return cls(domain_names=d.get('domain_names', None), endpoint_service=d.get('endpoint_service', None), group_id=d.get('group_id', None), resource_id=d.get('resource_id', None), resource_names=d.get('resource_names', None)) - - - - - + return cls( + domain_names=d.get("domain_names", None), + endpoint_service=d.get("endpoint_service", None), + group_id=d.get("group_id", None), + resource_id=d.get("resource_id", None), + resource_names=d.get("resource_names", None), + ) @dataclass class CreateTokenRequest: comment: Optional[str] = None """Optional description to attach to the token.""" - + lifetime_seconds: Optional[int] = None """The lifetime of the token, in seconds.
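For example, a request for a one-hour token might look like this (illustrative sketch only):

        CreateTokenRequest(comment="short-lived automation token", lifetime_seconds=3600)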
If the lifetime is not specified, this token remains valid indefinitely.""" - + def as_dict(self) -> dict: """Serializes the CreateTokenRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds + if self.comment is not None: + body["comment"] = self.comment + if self.lifetime_seconds is not None: + body["lifetime_seconds"] = self.lifetime_seconds return body def as_shallow_dict(self) -> dict: """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds + if self.comment is not None: + body["comment"] = self.comment + if self.lifetime_seconds is not None: + body["lifetime_seconds"] = self.lifetime_seconds return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateTokenRequest: """Deserializes the CreateTokenRequest from a dictionary.""" - return cls(comment=d.get('comment', None), lifetime_seconds=d.get('lifetime_seconds', None)) - - + return cls(comment=d.get("comment", None), lifetime_seconds=d.get("lifetime_seconds", None)) @dataclass class CreateTokenResponse: token_info: Optional[PublicTokenInfo] = None """The information for the new token.""" - + token_value: Optional[str] = None """The value of the new token.""" - + def as_dict(self) -> dict: """Serializes the CreateTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_info: body['token_info'] = self.token_info.as_dict() - if self.token_value is not None: body['token_value'] = self.token_value + if self.token_info: + body["token_info"] = self.token_info.as_dict() + if self.token_value is not None: + body["token_value"] = self.token_value return body def as_shallow_dict(self) -> dict: """Serializes the CreateTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_info: body['token_info'] = self.token_info - if self.token_value is not None: body['token_value'] = self.token_value + if self.token_info: + body["token_info"] = self.token_info + if self.token_value is not None: + body["token_value"] = self.token_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateTokenResponse: """Deserializes the CreateTokenResponse from a dictionary.""" - return cls(token_info=_from_dict(d, 'token_info', PublicTokenInfo), token_value=d.get('token_value', None)) - - + return cls(token_info=_from_dict(d, "token_info", PublicTokenInfo), token_value=d.get("token_value", None)) @dataclass class CspEnablementAccount: """Account level policy for CSP""" - + compliance_standards: Optional[List[ComplianceStandard]] = None """Set by customers when they request Compliance Security Profile (CSP). Invariants are enforced in Settings policy.""" - + is_enforced: Optional[bool] = None """Enforced = it cannot be overridden at the workspace level.""" - + def as_dict(self) -> dict: """Serializes the CspEnablementAccount into a dictionary suitable for use as a JSON request body.""" body = {} - if self.compliance_standards: body['compliance_standards'] = [v.value for v in self.compliance_standards] - if
self.is_enforced is not None: + body["is_enforced"] = self.is_enforced return body def as_shallow_dict(self) -> dict: """Serializes the CspEnablementAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.compliance_standards: body['compliance_standards'] = self.compliance_standards - if self.is_enforced is not None: body['is_enforced'] = self.is_enforced + if self.compliance_standards: + body["compliance_standards"] = self.compliance_standards + if self.is_enforced is not None: + body["is_enforced"] = self.is_enforced return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CspEnablementAccount: """Deserializes the CspEnablementAccount from a dictionary.""" - return cls(compliance_standards=_repeated_enum(d, 'compliance_standards', ComplianceStandard), is_enforced=d.get('is_enforced', None)) - - + return cls( + compliance_standards=_repeated_enum(d, "compliance_standards", ComplianceStandard), + is_enforced=d.get("is_enforced", None), + ) @dataclass class CspEnablementAccountSetting: csp_enablement_account: CspEnablementAccount """Account level policy for CSP""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1001,35 +1147,43 @@ class CspEnablementAccountSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
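# --- Editor's example (not part of the generated patch) ----------------------
# A hedged usage sketch for the CreateTokenRequest/CreateTokenResponse pair
# above; it assumes the `w.tokens` wrapper that this module generates further
# down. The secret token_value is returned only once, at creation time.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.tokens.create(comment="ci token", lifetime_seconds=3600)  # CreateTokenResponse
print(resp.token_info.token_id, resp.token_value)
# ------------------------------------------------------------------------------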
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the CspEnablementAccountSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.csp_enablement_account: + body["csp_enablement_account"] = self.csp_enablement_account.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the CspEnablementAccountSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.csp_enablement_account: + body["csp_enablement_account"] = self.csp_enablement_account + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CspEnablementAccountSetting: """Deserializes the CspEnablementAccountSetting from a dictionary.""" - return cls(csp_enablement_account=_from_dict(d, 'csp_enablement_account', CspEnablementAccount), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + csp_enablement_account=_from_dict(d, "csp_enablement_account", CspEnablementAccount), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass @@ -1037,11 +1191,13 @@ class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule: """Properties of the new private endpoint rule. Note that for private endpoints towards a VPC endpoint service behind a customer-managed NLB, you must approve the endpoint in AWS console after initialization.""" - + account_id: Optional[str] = None """Databricks account ID. You can find your account ID from the Accounts Console.""" - - connection_state: Optional[CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState] = None + + connection_state: Optional[ + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState + ] = None """The current status of this private endpoint. The private endpoint rules are effective only if the connection state is ESTABLISHED. Remember that you must approve new endpoints on your resources in the AWS console before they take effect. The possible values are: - PENDING: The @@ -1050,16 +1206,16 @@ class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule: the private link resource owner. - DISCONNECTED: Connection was removed by the private link resource owner, the private endpoint becomes informative and should be deleted for clean-up. 
- EXPIRED: If the endpoint is created but not approved in 14 days, it is EXPIRED.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when this object was created.""" - + deactivated: Optional[bool] = None """Whether this private endpoint is deactivated.""" - + deactivated_at: Optional[int] = None """Time in epoch milliseconds when this object was deactivated.""" - + domain_names: Optional[List[str]] = None """Only used by private endpoints towards a VPC endpoint service for customer-managed VPC endpoint service. @@ -1067,94 +1223,136 @@ class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule: The target AWS resource FQDNs accessible via the VPC endpoint service. When updating this field, we perform full update on this field. Please ensure a full list of desired domain_names is provided.""" - + enabled: Optional[bool] = None """Only used by private endpoints towards an AWS S3 service. Update this field to activate/deactivate this private endpoint to allow egress access from serverless compute resources.""" - + endpoint_service: Optional[str] = None """The full target AWS endpoint service name that connects to the destination resources of the private endpoint.""" - + network_connectivity_config_id: Optional[str] = None """The ID of a network connectivity configuration, which is the parent resource of this private endpoint rule object.""" - + resource_names: Optional[List[str]] = None """Only used by private endpoints towards AWS S3 service. The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names must be in the same region as the NCC/endpoint service. When updating this field, we perform full update on this field. Please ensure a full list of desired resource_names is provided.""" - + rule_id: Optional[str] = None """The ID of a private endpoint rule.""" - + updated_time: Optional[int] = None """Time in epoch milliseconds when this object was updated.""" - + vpc_endpoint_id: Optional[str] = None """The AWS VPC endpoint ID. 
You can use this ID to identify VPC endpoint created by Databricks.""" - + def as_dict(self) -> dict: """Serializes the CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.connection_state is not None: body['connection_state'] = self.connection_state.value - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.deactivated is not None: body['deactivated'] = self.deactivated - if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at - if self.domain_names: body['domain_names'] = [v for v in self.domain_names] - if self.enabled is not None: body['enabled'] = self.enabled - if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.resource_names: body['resource_names'] = [v for v in self.resource_names] - if self.rule_id is not None: body['rule_id'] = self.rule_id - if self.updated_time is not None: body['updated_time'] = self.updated_time - if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.connection_state is not None: + body["connection_state"] = self.connection_state.value + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.deactivated is not None: + body["deactivated"] = self.deactivated + if self.deactivated_at is not None: + body["deactivated_at"] = self.deactivated_at + if self.domain_names: + body["domain_names"] = [v for v in self.domain_names] + if self.enabled is not None: + body["enabled"] = self.enabled + if self.endpoint_service is not None: + body["endpoint_service"] = self.endpoint_service + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.resource_names: + body["resource_names"] = [v for v in self.resource_names] + if self.rule_id is not None: + body["rule_id"] = self.rule_id + if self.updated_time is not None: + body["updated_time"] = self.updated_time + if self.vpc_endpoint_id is not None: + body["vpc_endpoint_id"] = self.vpc_endpoint_id return body def as_shallow_dict(self) -> dict: """Serializes the CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.connection_state is not None: body['connection_state'] = self.connection_state - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.deactivated is not None: body['deactivated'] = self.deactivated - if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at - if self.domain_names: body['domain_names'] = self.domain_names - if self.enabled is not None: body['enabled'] = self.enabled - if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.resource_names: body['resource_names'] = self.resource_names - if self.rule_id is not None: body['rule_id'] = self.rule_id - if self.updated_time is not None: body['updated_time'] = 
self.updated_time - if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.connection_state is not None: + body["connection_state"] = self.connection_state + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.deactivated is not None: + body["deactivated"] = self.deactivated + if self.deactivated_at is not None: + body["deactivated_at"] = self.deactivated_at + if self.domain_names: + body["domain_names"] = self.domain_names + if self.enabled is not None: + body["enabled"] = self.enabled + if self.endpoint_service is not None: + body["endpoint_service"] = self.endpoint_service + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.resource_names: + body["resource_names"] = self.resource_names + if self.rule_id is not None: + body["rule_id"] = self.rule_id + if self.updated_time is not None: + body["updated_time"] = self.updated_time + if self.vpc_endpoint_id is not None: + body["vpc_endpoint_id"] = self.vpc_endpoint_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule: """Deserializes the CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule from a dictionary.""" - return cls(account_id=d.get('account_id', None), connection_state=_enum(d, 'connection_state', CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState), creation_time=d.get('creation_time', None), deactivated=d.get('deactivated', None), deactivated_at=d.get('deactivated_at', None), domain_names=d.get('domain_names', None), enabled=d.get('enabled', None), endpoint_service=d.get('endpoint_service', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), resource_names=d.get('resource_names', None), rule_id=d.get('rule_id', None), updated_time=d.get('updated_time', None), vpc_endpoint_id=d.get('vpc_endpoint_id', None)) - - + return cls( + account_id=d.get("account_id", None), + connection_state=_enum( + d, + "connection_state", + CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState, + ), + creation_time=d.get("creation_time", None), + deactivated=d.get("deactivated", None), + deactivated_at=d.get("deactivated_at", None), + domain_names=d.get("domain_names", None), + enabled=d.get("enabled", None), + endpoint_service=d.get("endpoint_service", None), + network_connectivity_config_id=d.get("network_connectivity_config_id", None), + resource_names=d.get("resource_names", None), + rule_id=d.get("rule_id", None), + updated_time=d.get("updated_time", None), + vpc_endpoint_id=d.get("vpc_endpoint_id", None), + ) class CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState(Enum): - - - DISCONNECTED = 'DISCONNECTED' - ESTABLISHED = 'ESTABLISHED' - EXPIRED = 'EXPIRED' - PENDING = 'PENDING' - REJECTED = 'REJECTED' + + DISCONNECTED = "DISCONNECTED" + ESTABLISHED = "ESTABLISHED" + EXPIRED = "EXPIRED" + PENDING = "PENDING" + REJECTED = "REJECTED" + @dataclass class DashboardEmailSubscriptions: boolean_val: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. 
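# --- Editor's example (not part of the generated patch) ----------------------
# An illustrative check against the connection-state enum defined above: a
# private endpoint rule only carries traffic once ESTABLISHED; PENDING rules
# still await approval on the resource side, and EXPIRED ones were never
# approved within 14 days. The payload below is a hypothetical API response.
State = CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState
rule = CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule.from_dict(
    {"rule_id": "rule-123", "connection_state": "PENDING"}
)
if rule.connection_state is not State.ESTABLISHED:
    print(f"rule {rule.rule_id} is not ready yet: {rule.connection_state.value}")
# ------------------------------------------------------------------------------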
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1162,35 +1360,43 @@ class DashboardEmailSubscriptions: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DashboardEmailSubscriptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DashboardEmailSubscriptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardEmailSubscriptions: """Deserializes the DashboardEmailSubscriptions from a dictionary.""" - return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + boolean_val=_from_dict(d, "boolean_val", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass @@ -1202,9 +1408,9 @@ class DefaultNamespaceSetting: 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute.""" - + namespace: StringMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1212,44 +1418,49 @@ class DefaultNamespaceSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DefaultNamespaceSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.namespace: body['namespace'] = self.namespace.as_dict() - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.etag is not None: + body["etag"] = self.etag + if self.namespace: + body["namespace"] = self.namespace.as_dict() + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DefaultNamespaceSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.namespace: body['namespace'] = self.namespace - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.etag is not None: + body["etag"] = self.etag + if self.namespace: + body["namespace"] = self.namespace + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DefaultNamespaceSetting: """Deserializes the DefaultNamespaceSetting from a dictionary.""" - return cls(etag=d.get('etag', None), namespace=_from_dict(d, 'namespace', StringMessage), setting_name=d.get('setting_name', None)) - - - - - + return cls( + etag=d.get("etag", None), + namespace=_from_dict(d, "namespace", StringMessage), + setting_name=d.get("setting_name", None), + ) @dataclass class DeleteAccountIpAccessEnableResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1257,37 +1468,31 @@ class DeleteAccountIpAccessEnableResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteAccountIpAccessEnableResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAccountIpAccessEnableResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAccountIpAccessEnableResponse: """Deserializes the DeleteAccountIpAccessEnableResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1295,34 +1500,31 @@ class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
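# --- Editor's example (not part of the generated patch) ----------------------
# A hedged sketch of the etag-based read -> update pattern that the docstrings
# above describe for DefaultNamespaceSetting, assuming the
# `w.settings.default_namespace` wrapper generated later in this module.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
current = w.settings.default_namespace.get()  # a GET supplies a fresh etag
w.settings.default_namespace.update(
    allow_missing=True,
    field_mask="namespace.value",
    setting=DefaultNamespaceSetting(
        namespace=StringMessage(value="retail_prod"),
        etag=current.etag,  # echoing the etag lets the server detect concurrent writes
    ),
)
# ------------------------------------------------------------------------------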
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: """Deserializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1330,34 +1532,31 @@ class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: """Deserializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteDashboardEmailSubscriptionsResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1365,34 +1564,31 @@ class DeleteDashboardEmailSubscriptionsResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDashboardEmailSubscriptionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDashboardEmailSubscriptionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDashboardEmailSubscriptionsResponse: """Deserializes the DeleteDashboardEmailSubscriptionsResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteDefaultNamespaceSettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1400,34 +1596,31 @@ class DeleteDefaultNamespaceSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDefaultNamespaceSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDefaultNamespaceSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDefaultNamespaceSettingResponse: """Deserializes the DeleteDefaultNamespaceSettingResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteDisableLegacyAccessResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1435,34 +1628,31 @@ class DeleteDisableLegacyAccessResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
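# --- Editor's example (not part of the generated patch) ----------------------
# The delete half of the same etag handshake: a hedged sketch assuming the
# `w.settings.default_namespace.delete` wrapper generated later in this module.
# The Delete*Response classes above carry only the server's latest etag.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
etag = w.settings.default_namespace.get().etag
resp = w.settings.default_namespace.delete(etag=etag)  # DeleteDefaultNamespaceSettingResponse
print(f"deleted; server now at etag {resp.etag}")
# ------------------------------------------------------------------------------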
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDisableLegacyAccessResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDisableLegacyAccessResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDisableLegacyAccessResponse: """Deserializes the DeleteDisableLegacyAccessResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteDisableLegacyDbfsResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1470,34 +1660,31 @@ class DeleteDisableLegacyDbfsResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDisableLegacyDbfsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDisableLegacyDbfsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDisableLegacyDbfsResponse: """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteDisableLegacyFeaturesResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1505,37 +1692,31 @@ class DeleteDisableLegacyFeaturesResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteDisableLegacyFeaturesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDisableLegacyFeaturesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDisableLegacyFeaturesResponse: """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteLlmProxyPartnerPoweredWorkspaceResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1543,28 +1724,25 @@ class DeleteLlmProxyPartnerPoweredWorkspaceResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteLlmProxyPartnerPoweredWorkspaceResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteLlmProxyPartnerPoweredWorkspaceResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse: """Deserializes the DeleteLlmProxyPartnerPoweredWorkspaceResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass @@ -1583,11 +1761,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteNetworkConnectivityConfigurationResponse: """Deserializes the DeleteNetworkConnectivityConfigurationResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -1606,20 +1779,12 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteNetworkPolicyRpcResponse: """Deserializes the DeleteNetworkPolicyRpcResponse from a dictionary.""" return cls() - - - - - - - - @dataclass class DeletePersonalComputeSettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1627,28 +1792,25 @@ class DeletePersonalComputeSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeletePersonalComputeSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeletePersonalComputeSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeletePersonalComputeSettingResponse: """Deserializes the DeletePersonalComputeSettingResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass @@ -1667,17 +1829,12 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - @dataclass class DeleteRestrictWorkspaceAdminsSettingResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1685,34 +1842,31 @@ class DeleteRestrictWorkspaceAdminsSettingResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteRestrictWorkspaceAdminsSettingResponse: """Deserializes the DeleteRestrictWorkspaceAdminsSettingResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - - - + return cls(etag=d.get("etag", None)) @dataclass class DeleteSqlResultsDownloadResponse: """The etag is returned.""" - + etag: str """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1720,43 +1874,40 @@ class DeleteSqlResultsDownloadResponse: -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.""" - + def as_dict(self) -> dict: """Serializes the DeleteSqlResultsDownloadResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body def as_shallow_dict(self) -> dict: """Serializes the DeleteSqlResultsDownloadResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag + if self.etag is not None: + body["etag"] = self.etag return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteSqlResultsDownloadResponse: """Deserializes the DeleteSqlResultsDownloadResponse from a dictionary.""" - return cls(etag=d.get('etag', None)) - - - + return cls(etag=d.get("etag", None)) +class DestinationType(Enum): + EMAIL = "EMAIL" + MICROSOFT_TEAMS = "MICROSOFT_TEAMS" + PAGERDUTY = "PAGERDUTY" + SLACK = "SLACK" + WEBHOOK = "WEBHOOK" -class DestinationType(Enum): - - - EMAIL = 'EMAIL' - MICROSOFT_TEAMS = 'MICROSOFT_TEAMS' - PAGERDUTY = 'PAGERDUTY' - SLACK = 'SLACK' - WEBHOOK = 'WEBHOOK' @dataclass class DisableLegacyAccess: disable_legacy_access: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1764,41 +1915,49 @@ class DisableLegacyAccess: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
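# --- Editor's example (not part of the generated patch) ----------------------
# These enums set each member's value to its own name, so plain Enum lookup by
# value is what `_enum` relies on when from_dict maps API strings to members.
assert DestinationType("SLACK") is DestinationType.SLACK
# ------------------------------------------------------------------------------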
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DisableLegacyAccess into a dictionary suitable for use as a JSON request body.""" body = {} - if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.disable_legacy_access: + body["disable_legacy_access"] = self.disable_legacy_access.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DisableLegacyAccess into a shallow dictionary of its immediate attributes.""" body = {} - if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.disable_legacy_access: + body["disable_legacy_access"] = self.disable_legacy_access + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DisableLegacyAccess: """Deserializes the DisableLegacyAccess from a dictionary.""" - return cls(disable_legacy_access=_from_dict(d, 'disable_legacy_access', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + disable_legacy_access=_from_dict(d, "disable_legacy_access", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class DisableLegacyDbfs: disable_legacy_dbfs: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1806,41 +1965,49 @@ class DisableLegacyDbfs: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DisableLegacyDbfs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.disable_legacy_dbfs: + body["disable_legacy_dbfs"] = self.disable_legacy_dbfs.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DisableLegacyDbfs into a shallow dictionary of its immediate attributes.""" body = {} - if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.disable_legacy_dbfs: + body["disable_legacy_dbfs"] = self.disable_legacy_dbfs + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DisableLegacyDbfs: """Deserializes the DisableLegacyDbfs from a dictionary.""" - return cls(disable_legacy_dbfs=_from_dict(d, 'disable_legacy_dbfs', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + disable_legacy_dbfs=_from_dict(d, "disable_legacy_dbfs", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class DisableLegacyFeatures: disable_legacy_features: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -1848,35 +2015,43 @@ class DisableLegacyFeatures: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the DisableLegacyFeatures into a dictionary suitable for use as a JSON request body.""" body = {} - if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.disable_legacy_features: + body["disable_legacy_features"] = self.disable_legacy_features.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the DisableLegacyFeatures into a shallow dictionary of its immediate attributes.""" body = {} - if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.disable_legacy_features: + body["disable_legacy_features"] = self.disable_legacy_features + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DisableLegacyFeatures: """Deserializes the DisableLegacyFeatures from a dictionary.""" - return cls(disable_legacy_features=_from_dict(d, 'disable_legacy_features', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + disable_legacy_features=_from_dict(d, "disable_legacy_features", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass @@ -1884,70 +2059,85 @@ class EgressNetworkPolicy: """The network policies applying for egress traffic. This message is used by the UI/REST API. 
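# --- Editor's example (not part of the generated patch) ----------------------
# A small sketch of the shared shape of the DisableLegacy* settings above: the
# flag itself is wrapped in a BooleanMessage (assumed here, as defined earlier
# in this module, to carry a single optional `value` field), while `etag` and
# `setting_name` ride along for the optimistic-concurrency handshake.
payload = DisableLegacyDbfs(disable_legacy_dbfs=BooleanMessage(value=True)).as_dict()
# -> {'disable_legacy_dbfs': {'value': True}}
assert DisableLegacyDbfs.from_dict(payload).disable_legacy_dbfs.value is True
# ------------------------------------------------------------------------------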
We translate this message to the format expected by the dataplane in Lakehouse Network Manager (for the format expected by the dataplane, see networkconfig.textproto).""" - + internet_access: Optional[EgressNetworkPolicyInternetAccessPolicy] = None """The access policy enforced for egress traffic to the internet.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.internet_access: body['internet_access'] = self.internet_access.as_dict() + if self.internet_access: + body["internet_access"] = self.internet_access.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.internet_access: body['internet_access'] = self.internet_access + if self.internet_access: + body["internet_access"] = self.internet_access return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicy: """Deserializes the EgressNetworkPolicy from a dictionary.""" - return cls(internet_access=_from_dict(d, 'internet_access', EgressNetworkPolicyInternetAccessPolicy)) - - + return cls(internet_access=_from_dict(d, "internet_access", EgressNetworkPolicyInternetAccessPolicy)) @dataclass class EgressNetworkPolicyInternetAccessPolicy: allowed_internet_destinations: Optional[List[EgressNetworkPolicyInternetAccessPolicyInternetDestination]] = None - + allowed_storage_destinations: Optional[List[EgressNetworkPolicyInternetAccessPolicyStorageDestination]] = None - + log_only_mode: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyMode] = None """Optional. If not specified, assume the policy is enforced for all workloads.""" - + restriction_mode: Optional[EgressNetworkPolicyInternetAccessPolicyRestrictionMode] = None """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can only access explicitly allowed internet and storage destinations, as well as UC connections and external locations. 
PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via private link.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_internet_destinations: body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations] - if self.allowed_storage_destinations: body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations] - if self.log_only_mode: body['log_only_mode'] = self.log_only_mode.as_dict() - if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value + if self.allowed_internet_destinations: + body["allowed_internet_destinations"] = [v.as_dict() for v in self.allowed_internet_destinations] + if self.allowed_storage_destinations: + body["allowed_storage_destinations"] = [v.as_dict() for v in self.allowed_storage_destinations] + if self.log_only_mode: + body["log_only_mode"] = self.log_only_mode.as_dict() + if self.restriction_mode is not None: + body["restriction_mode"] = self.restriction_mode.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_internet_destinations: body['allowed_internet_destinations'] = self.allowed_internet_destinations - if self.allowed_storage_destinations: body['allowed_storage_destinations'] = self.allowed_storage_destinations - if self.log_only_mode: body['log_only_mode'] = self.log_only_mode - if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode + if self.allowed_internet_destinations: + body["allowed_internet_destinations"] = self.allowed_internet_destinations + if self.allowed_storage_destinations: + body["allowed_storage_destinations"] = self.allowed_storage_destinations + if self.log_only_mode: + body["log_only_mode"] = self.log_only_mode + if self.restriction_mode is not None: + body["restriction_mode"] = self.restriction_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicy: """Deserializes the EgressNetworkPolicyInternetAccessPolicy from a dictionary.""" - return cls(allowed_internet_destinations=_repeated_dict(d, 'allowed_internet_destinations', EgressNetworkPolicyInternetAccessPolicyInternetDestination), allowed_storage_destinations=_repeated_dict(d, 'allowed_storage_destinations', EgressNetworkPolicyInternetAccessPolicyStorageDestination), log_only_mode=_from_dict(d, 'log_only_mode', EgressNetworkPolicyInternetAccessPolicyLogOnlyMode), restriction_mode=_enum(d, 'restriction_mode', EgressNetworkPolicyInternetAccessPolicyRestrictionMode)) - - + return cls( + allowed_internet_destinations=_repeated_dict( + d, "allowed_internet_destinations", EgressNetworkPolicyInternetAccessPolicyInternetDestination + ), + allowed_storage_destinations=_repeated_dict( + d, "allowed_storage_destinations", EgressNetworkPolicyInternetAccessPolicyStorageDestination + ), + log_only_mode=_from_dict(d, "log_only_mode", EgressNetworkPolicyInternetAccessPolicyLogOnlyMode), + restriction_mode=_enum(d, "restriction_mode", EgressNetworkPolicyInternetAccessPolicyRestrictionMode), + ) @dataclass @@ -1955,39 +2145,53 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestination: """Users can specify accessible internet destinations when outbound access is restricted. 
We only support domain name (FQDN) destinations for the time being, though going forward we want to support host names and IP addresses.""" - + destination: Optional[str] = None - - protocol: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol] = None + + protocol: Optional[ + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol + ] = None """The filtering protocol used by the DP (dataplane). For private and public preview, SEG will only support TCP filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be set to TCP by default and hidden from the user. In the future, users may be able to select HTTP filtering (i.e. SNI based filtering, filtering by FQDN).""" - + type: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType] = None - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination - if self.protocol is not None: body['protocol'] = self.protocol.value - if self.type is not None: body['type'] = self.type.value + if self.destination is not None: + body["destination"] = self.destination + if self.protocol is not None: + body["protocol"] = self.protocol.value + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination - if self.protocol is not None: body['protocol'] = self.protocol - if self.type is not None: body['type'] = self.type + if self.destination is not None: + body["destination"] = self.destination + if self.protocol is not None: + body["protocol"] = self.protocol + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicyInternetDestination: """Deserializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination from a dictionary.""" - return cls(destination=d.get('destination', None), protocol=_enum(d, 'protocol', EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol), type=_enum(d, 'type', EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType)) - - + return cls( + destination=d.get("destination", None), + protocol=_enum( + d, + "protocol", + EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol, + ), + type=_enum(d, "type", EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType), + ) class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol(Enum): @@ -1995,53 +2199,62 @@ class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinat filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be set to TCP by default and hidden from the user. In the future, users may be able to select HTTP filtering (i.e.
SNI based filtering, filtering by FQDN).""" - - TCP = 'TCP' + + TCP = "TCP" + class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType(Enum): - - - FQDN = 'FQDN' + + FQDN = "FQDN" + @dataclass class EgressNetworkPolicyInternetAccessPolicyLogOnlyMode: log_only_mode_type: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType] = None - + workloads: Optional[List[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType]] = None - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type.value - if self.workloads: body['workloads'] = [v.value for v in self.workloads] + if self.log_only_mode_type is not None: + body["log_only_mode_type"] = self.log_only_mode_type.value + if self.workloads: + body["workloads"] = [v.value for v in self.workloads] return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a shallow dictionary of its immediate attributes.""" body = {} - if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type - if self.workloads: body['workloads'] = self.workloads + if self.log_only_mode_type is not None: + body["log_only_mode_type"] = self.log_only_mode_type + if self.workloads: + body["workloads"] = self.workloads return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicyLogOnlyMode: """Deserializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode from a dictionary.""" - return cls(log_only_mode_type=_enum(d, 'log_only_mode_type', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType), workloads=_repeated_enum(d, 'workloads', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType)) - - + return cls( + log_only_mode_type=_enum( + d, "log_only_mode_type", EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType + ), + workloads=_repeated_enum(d, "workloads", EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType), + ) class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType(Enum): - - - ALL_SERVICES = 'ALL_SERVICES' - SELECTED_SERVICES = 'SELECTED_SERVICES' + + ALL_SERVICES = "ALL_SERVICES" + SELECTED_SERVICES = "SELECTED_SERVICES" + class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType(Enum): """The values should match the list of workloads used in networkconfig.proto""" - - DBSQL = 'DBSQL' - ML_SERVING = 'ML_SERVING' + + DBSQL = "DBSQL" + ML_SERVING = "ML_SERVING" + class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum): """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: @@ -2049,113 +2262,154 @@ class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum): only access explicitly allowed internet and storage destinations, as well as UC connections and external locations. 
PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via private link.""" - - FULL_ACCESS = 'FULL_ACCESS' - PRIVATE_ACCESS_ONLY = 'PRIVATE_ACCESS_ONLY' - RESTRICTED_ACCESS = 'RESTRICTED_ACCESS' + + FULL_ACCESS = "FULL_ACCESS" + PRIVATE_ACCESS_ONLY = "PRIVATE_ACCESS_ONLY" + RESTRICTED_ACCESS = "RESTRICTED_ACCESS" + @dataclass class EgressNetworkPolicyInternetAccessPolicyStorageDestination: """Users can specify accessible storage destinations.""" - + allowed_paths: Optional[List[str]] = None - + azure_container: Optional[str] = None - + azure_dns_zone: Optional[str] = None - + azure_storage_account: Optional[str] = None - + azure_storage_service: Optional[str] = None - + bucket_name: Optional[str] = None - + region: Optional[str] = None - + type: Optional[EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType] = None - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_paths: body['allowed_paths'] = [v for v in self.allowed_paths] - if self.azure_container is not None: body['azure_container'] = self.azure_container - if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone - if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account - if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service - if self.bucket_name is not None: body['bucket_name'] = self.bucket_name - if self.region is not None: body['region'] = self.region - if self.type is not None: body['type'] = self.type.value + if self.allowed_paths: + body["allowed_paths"] = [v for v in self.allowed_paths] + if self.azure_container is not None: + body["azure_container"] = self.azure_container + if self.azure_dns_zone is not None: + body["azure_dns_zone"] = self.azure_dns_zone + if self.azure_storage_account is not None: + body["azure_storage_account"] = self.azure_storage_account + if self.azure_storage_service is not None: + body["azure_storage_service"] = self.azure_storage_service + if self.bucket_name is not None: + body["bucket_name"] = self.bucket_name + if self.region is not None: + body["region"] = self.region + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_paths: body['allowed_paths'] = self.allowed_paths - if self.azure_container is not None: body['azure_container'] = self.azure_container - if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone - if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account - if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service - if self.bucket_name is not None: body['bucket_name'] = self.bucket_name - if self.region is not None: body['region'] = self.region - if self.type is not None: body['type'] = self.type + if self.allowed_paths: + body["allowed_paths"] = self.allowed_paths + if self.azure_container is not None: + body["azure_container"] = self.azure_container + if self.azure_dns_zone is not None: + body["azure_dns_zone"] = self.azure_dns_zone + if self.azure_storage_account is not None: + body["azure_storage_account"] = self.azure_storage_account + if 
self.azure_storage_service is not None: + body["azure_storage_service"] = self.azure_storage_service + if self.bucket_name is not None: + body["bucket_name"] = self.bucket_name + if self.region is not None: + body["region"] = self.region + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyInternetAccessPolicyStorageDestination: """Deserializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination from a dictionary.""" - return cls(allowed_paths=d.get('allowed_paths', None), azure_container=d.get('azure_container', None), azure_dns_zone=d.get('azure_dns_zone', None), azure_storage_account=d.get('azure_storage_account', None), azure_storage_service=d.get('azure_storage_service', None), bucket_name=d.get('bucket_name', None), region=d.get('region', None), type=_enum(d, 'type', EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType)) - - + return cls( + allowed_paths=d.get("allowed_paths", None), + azure_container=d.get("azure_container", None), + azure_dns_zone=d.get("azure_dns_zone", None), + azure_storage_account=d.get("azure_storage_account", None), + azure_storage_service=d.get("azure_storage_service", None), + bucket_name=d.get("bucket_name", None), + region=d.get("region", None), + type=_enum(d, "type", EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType), + ) class EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType(Enum): - - - AWS_S3 = 'AWS_S3' - AZURE_STORAGE = 'AZURE_STORAGE' - CLOUDFLARE_R2 = 'CLOUDFLARE_R2' - GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE' + + AWS_S3 = "AWS_S3" + AZURE_STORAGE = "AZURE_STORAGE" + CLOUDFLARE_R2 = "CLOUDFLARE_R2" + GOOGLE_CLOUD_STORAGE = "GOOGLE_CLOUD_STORAGE" + @dataclass class EgressNetworkPolicyNetworkAccessPolicy: restriction_mode: EgressNetworkPolicyNetworkAccessPolicyRestrictionMode """The restriction mode that controls how serverless workloads can access the internet.""" - + allowed_internet_destinations: Optional[List[EgressNetworkPolicyNetworkAccessPolicyInternetDestination]] = None """List of internet destinations that serverless workloads are allowed to access when in RESTRICTED_ACCESS mode.""" - + allowed_storage_destinations: Optional[List[EgressNetworkPolicyNetworkAccessPolicyStorageDestination]] = None """List of storage destinations that serverless workloads are allowed to access when in RESTRICTED_ACCESS mode.""" - + policy_enforcement: Optional[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement] = None """Optional. 
When policy_enforcement is not provided, we default to ENFORCE_MODE_ALL_SERVICES""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicy into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_internet_destinations: body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations] - if self.allowed_storage_destinations: body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations] - if self.policy_enforcement: body['policy_enforcement'] = self.policy_enforcement.as_dict() - if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value + if self.allowed_internet_destinations: + body["allowed_internet_destinations"] = [v.as_dict() for v in self.allowed_internet_destinations] + if self.allowed_storage_destinations: + body["allowed_storage_destinations"] = [v.as_dict() for v in self.allowed_storage_destinations] + if self.policy_enforcement: + body["policy_enforcement"] = self.policy_enforcement.as_dict() + if self.restriction_mode is not None: + body["restriction_mode"] = self.restriction_mode.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicy into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_internet_destinations: body['allowed_internet_destinations'] = self.allowed_internet_destinations - if self.allowed_storage_destinations: body['allowed_storage_destinations'] = self.allowed_storage_destinations - if self.policy_enforcement: body['policy_enforcement'] = self.policy_enforcement - if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode + if self.allowed_internet_destinations: + body["allowed_internet_destinations"] = self.allowed_internet_destinations + if self.allowed_storage_destinations: + body["allowed_storage_destinations"] = self.allowed_storage_destinations + if self.policy_enforcement: + body["policy_enforcement"] = self.policy_enforcement + if self.restriction_mode is not None: + body["restriction_mode"] = self.restriction_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicy: """Deserializes the EgressNetworkPolicyNetworkAccessPolicy from a dictionary.""" - return cls(allowed_internet_destinations=_repeated_dict(d, 'allowed_internet_destinations', EgressNetworkPolicyNetworkAccessPolicyInternetDestination), allowed_storage_destinations=_repeated_dict(d, 'allowed_storage_destinations', EgressNetworkPolicyNetworkAccessPolicyStorageDestination), policy_enforcement=_from_dict(d, 'policy_enforcement', EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement), restriction_mode=_enum(d, 'restriction_mode', EgressNetworkPolicyNetworkAccessPolicyRestrictionMode)) - - + return cls( + allowed_internet_destinations=_repeated_dict( + d, "allowed_internet_destinations", EgressNetworkPolicyNetworkAccessPolicyInternetDestination + ), + allowed_storage_destinations=_repeated_dict( + d, "allowed_storage_destinations", EgressNetworkPolicyNetworkAccessPolicyStorageDestination + ), + policy_enforcement=_from_dict( + d, "policy_enforcement", EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement + ), + restriction_mode=_enum(d, "restriction_mode", EgressNetworkPolicyNetworkAccessPolicyRestrictionMode), + ) @dataclass @@ -2163,177 +2417,224 @@ class EgressNetworkPolicyNetworkAccessPolicyInternetDestination: """Users can specify accessible internet destinations 
when outbound access is restricted. We only support DNS_NAME (FQDN format) destinations for the time being. Going forward we may extend support to host names and IP addresses.""" - + destination: Optional[str] = None """The internet destination to which access will be allowed. Format dependent on the destination type.""" - - internet_destination_type: Optional[EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType] = None + + internet_destination_type: Optional[ + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType + ] = None """The type of internet destination. Currently only DNS_NAME is supported.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination is not None: body['destination'] = self.destination - if self.internet_destination_type is not None: body['internet_destination_type'] = self.internet_destination_type.value + if self.destination is not None: + body["destination"] = self.destination + if self.internet_destination_type is not None: + body["internet_destination_type"] = self.internet_destination_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination is not None: body['destination'] = self.destination - if self.internet_destination_type is not None: body['internet_destination_type'] = self.internet_destination_type + if self.destination is not None: + body["destination"] = self.destination + if self.internet_destination_type is not None: + body["internet_destination_type"] = self.internet_destination_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicyInternetDestination: """Deserializes the EgressNetworkPolicyNetworkAccessPolicyInternetDestination from a dictionary.""" - return cls(destination=d.get('destination', None), internet_destination_type=_enum(d, 'internet_destination_type', EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType)) - - + return cls( + destination=d.get("destination", None), + internet_destination_type=_enum( + d, + "internet_destination_type", + EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType, + ), + ) class EgressNetworkPolicyNetworkAccessPolicyInternetDestinationInternetDestinationType(Enum): - - - DNS_NAME = 'DNS_NAME' + + DNS_NAME = "DNS_NAME" + @dataclass class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement: - dry_run_mode_product_filter: Optional[List[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter]] = None + dry_run_mode_product_filter: Optional[ + List[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter] + ] = None """When empty, it means dry run for all products. When non-empty, it means dry run for specific products and for the other products, they will run in enforced mode.""" - + enforcement_mode: Optional[EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode] = None """The mode of policy enforcement. ENFORCED blocks traffic that violates policy, while DRY_RUN only logs violations without blocking. 
When not specified, defaults to ENFORCED.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dry_run_mode_product_filter: body['dry_run_mode_product_filter'] = [v.value for v in self.dry_run_mode_product_filter] - if self.enforcement_mode is not None: body['enforcement_mode'] = self.enforcement_mode.value + if self.dry_run_mode_product_filter: + body["dry_run_mode_product_filter"] = [v.value for v in self.dry_run_mode_product_filter] + if self.enforcement_mode is not None: + body["enforcement_mode"] = self.enforcement_mode.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement into a shallow dictionary of its immediate attributes.""" body = {} - if self.dry_run_mode_product_filter: body['dry_run_mode_product_filter'] = self.dry_run_mode_product_filter - if self.enforcement_mode is not None: body['enforcement_mode'] = self.enforcement_mode + if self.dry_run_mode_product_filter: + body["dry_run_mode_product_filter"] = self.dry_run_mode_product_filter + if self.enforcement_mode is not None: + body["enforcement_mode"] = self.enforcement_mode return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement: """Deserializes the EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcement from a dictionary.""" - return cls(dry_run_mode_product_filter=_repeated_enum(d, 'dry_run_mode_product_filter', EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter), enforcement_mode=_enum(d, 'enforcement_mode', EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode)) - - + return cls( + dry_run_mode_product_filter=_repeated_enum( + d, + "dry_run_mode_product_filter", + EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter, + ), + enforcement_mode=_enum( + d, "enforcement_mode", EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode + ), + ) class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementDryRunModeProductFilter(Enum): """The values should match the list of workloads used in networkconfig.proto""" - - DBSQL = 'DBSQL' - ML_SERVING = 'ML_SERVING' + + DBSQL = "DBSQL" + ML_SERVING = "ML_SERVING" + class EgressNetworkPolicyNetworkAccessPolicyPolicyEnforcementEnforcementMode(Enum): - - - DRY_RUN = 'DRY_RUN' - ENFORCED = 'ENFORCED' + + DRY_RUN = "DRY_RUN" + ENFORCED = "ENFORCED" + class EgressNetworkPolicyNetworkAccessPolicyRestrictionMode(Enum): """The level at which Databricks and Databricks-managed compute can access the Internet. FULL_ACCESS: Databricks can access the Internet. No blocking rules will apply.
RESTRICTED_ACCESS: Databricks can only access explicitly allowed internet and storage destinations, as well as UC connections and external locations.""" - - FULL_ACCESS = 'FULL_ACCESS' - RESTRICTED_ACCESS = 'RESTRICTED_ACCESS' + + FULL_ACCESS = "FULL_ACCESS" + RESTRICTED_ACCESS = "RESTRICTED_ACCESS" + @dataclass class EgressNetworkPolicyNetworkAccessPolicyStorageDestination: """Users can specify accessible storage destinations.""" - + azure_storage_account: Optional[str] = None """The Azure storage account name.""" - + azure_storage_service: Optional[str] = None """The Azure storage service type (blob, dfs, etc.).""" - + bucket_name: Optional[str] = None - + region: Optional[str] = None """The region of the S3 bucket.""" - - storage_destination_type: Optional[EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType] = None + + storage_destination_type: Optional[ + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType + ] = None """The type of storage destination.""" - + def as_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account - if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service - if self.bucket_name is not None: body['bucket_name'] = self.bucket_name - if self.region is not None: body['region'] = self.region - if self.storage_destination_type is not None: body['storage_destination_type'] = self.storage_destination_type.value + if self.azure_storage_account is not None: + body["azure_storage_account"] = self.azure_storage_account + if self.azure_storage_service is not None: + body["azure_storage_service"] = self.azure_storage_service + if self.bucket_name is not None: + body["bucket_name"] = self.bucket_name + if self.region is not None: + body["region"] = self.region + if self.storage_destination_type is not None: + body["storage_destination_type"] = self.storage_destination_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EgressNetworkPolicyNetworkAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account - if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service - if self.bucket_name is not None: body['bucket_name'] = self.bucket_name - if self.region is not None: body['region'] = self.region - if self.storage_destination_type is not None: body['storage_destination_type'] = self.storage_destination_type + if self.azure_storage_account is not None: + body["azure_storage_account"] = self.azure_storage_account + if self.azure_storage_service is not None: + body["azure_storage_service"] = self.azure_storage_service + if self.bucket_name is not None: + body["bucket_name"] = self.bucket_name + if self.region is not None: + body["region"] = self.region + if self.storage_destination_type is not None: + body["storage_destination_type"] = self.storage_destination_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EgressNetworkPolicyNetworkAccessPolicyStorageDestination: """Deserializes the EgressNetworkPolicyNetworkAccessPolicyStorageDestination from a dictionary.""" - return cls(azure_storage_account=d.get('azure_storage_account', None), 
azure_storage_service=d.get('azure_storage_service', None), bucket_name=d.get('bucket_name', None), region=d.get('region', None), storage_destination_type=_enum(d, 'storage_destination_type', EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType)) - - + return cls( + azure_storage_account=d.get("azure_storage_account", None), + azure_storage_service=d.get("azure_storage_service", None), + bucket_name=d.get("bucket_name", None), + region=d.get("region", None), + storage_destination_type=_enum( + d, + "storage_destination_type", + EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType, + ), + ) class EgressNetworkPolicyNetworkAccessPolicyStorageDestinationStorageDestinationType(Enum): - - - AWS_S3 = 'AWS_S3' - AZURE_STORAGE = 'AZURE_STORAGE' - GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE' + + AWS_S3 = "AWS_S3" + AZURE_STORAGE = "AZURE_STORAGE" + GOOGLE_CLOUD_STORAGE = "GOOGLE_CLOUD_STORAGE" + class EgressResourceType(Enum): """The target resources that are supported by Network Connectivity Config. Note: some egress types can support general types that are not defined in EgressResourceType. E.g.: Azure private endpoint supports private link enabled Azure services.""" - - AZURE_BLOB_STORAGE = 'AZURE_BLOB_STORAGE' + + AZURE_BLOB_STORAGE = "AZURE_BLOB_STORAGE" + @dataclass class EmailConfig: addresses: Optional[List[str]] = None """Email addresses to notify.""" - + def as_dict(self) -> dict: """Serializes the EmailConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.addresses: body['addresses'] = [v for v in self.addresses] + if self.addresses: + body["addresses"] = [v for v in self.addresses] return body def as_shallow_dict(self) -> dict: """Serializes the EmailConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.addresses: body['addresses'] = self.addresses + if self.addresses: + body["addresses"] = self.addresses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmailConfig: """Deserializes the EmailConfig from a dictionary.""" - return cls(addresses=d.get('addresses', None)) - - + return cls(addresses=d.get("addresses", None)) @dataclass @@ -2352,137 +2653,141 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Empty: """Deserializes the Empty from a dictionary.""" return cls() - - @dataclass class EnableExportNotebook: boolean_val: Optional[BooleanMessage] = None - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnableExportNotebook into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableExportNotebook into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableExportNotebook: """Deserializes the EnableExportNotebook from a dictionary.""" - return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), setting_name=d.get('setting_name', None)) - - + return cls(boolean_val=_from_dict(d, "boolean_val", BooleanMessage), setting_name=d.get("setting_name", None)) @dataclass class EnableNotebookTableClipboard: boolean_val: Optional[BooleanMessage] = None - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnableNotebookTableClipboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableNotebookTableClipboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableNotebookTableClipboard: """Deserializes the EnableNotebookTableClipboard from a dictionary.""" - return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), setting_name=d.get('setting_name', None)) - - + return cls(boolean_val=_from_dict(d, "boolean_val", BooleanMessage), setting_name=d.get("setting_name", None)) @dataclass class EnableResultsDownloading: boolean_val: Optional[BooleanMessage] = None - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnableResultsDownloading into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnableResultsDownloading into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnableResultsDownloading: """Deserializes the EnableResultsDownloading from a dictionary.""" - return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), setting_name=d.get('setting_name', None)) - - + return cls(boolean_val=_from_dict(d, "boolean_val", BooleanMessage), setting_name=d.get("setting_name", None)) @dataclass class EnhancedSecurityMonitoring: """SHIELD feature: ESM""" - + is_enabled: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoring into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + if self.is_enabled is not None: + body["is_enabled"] = self.is_enabled return body def as_shallow_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoring into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_enabled is not None: body['is_enabled'] = self.is_enabled + if self.is_enabled is not None: + body["is_enabled"] = self.is_enabled return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnhancedSecurityMonitoring: """Deserializes the EnhancedSecurityMonitoring from a dictionary.""" - return cls(is_enabled=d.get('is_enabled', None)) - - + return cls(is_enabled=d.get("is_enabled", None)) @dataclass class EnhancedSecurityMonitoringSetting: enhanced_security_monitoring_workspace: EnhancedSecurityMonitoring """SHIELD feature: ESM""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -2490,68 +2795,78 @@ class EnhancedSecurityMonitoringSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoringSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enhanced_security_monitoring_workspace: body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.enhanced_security_monitoring_workspace: + body["enhanced_security_monitoring_workspace"] = self.enhanced_security_monitoring_workspace.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EnhancedSecurityMonitoringSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.enhanced_security_monitoring_workspace: body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.enhanced_security_monitoring_workspace: + body["enhanced_security_monitoring_workspace"] = self.enhanced_security_monitoring_workspace + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnhancedSecurityMonitoringSetting: """Deserializes the EnhancedSecurityMonitoringSetting from a dictionary.""" - return cls(enhanced_security_monitoring_workspace=_from_dict(d, 'enhanced_security_monitoring_workspace', EnhancedSecurityMonitoring), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + enhanced_security_monitoring_workspace=_from_dict( + d, "enhanced_security_monitoring_workspace", EnhancedSecurityMonitoring + ), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class EsmEnablementAccount: """Account level policy for ESM""" - + is_enforced: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the EsmEnablementAccount into a dictionary suitable for use as a JSON request body.""" body = {} - if self.is_enforced is not None: body['is_enforced'] = self.is_enforced + if self.is_enforced is not None: + body["is_enforced"] = self.is_enforced return body def as_shallow_dict(self) -> dict: """Serializes the EsmEnablementAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.is_enforced is not None: body['is_enforced'] = self.is_enforced + if self.is_enforced is not None: + body["is_enforced"] = self.is_enforced return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EsmEnablementAccount: """Deserializes the EsmEnablementAccount from a dictionary.""" - return cls(is_enforced=d.get('is_enforced', None)) - - + return cls(is_enforced=d.get("is_enforced", None)) @dataclass class EsmEnablementAccountSetting: esm_enablement_account: EsmEnablementAccount """Account level policy for ESM""" - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -2559,749 +2874,753 @@ class EsmEnablementAccountSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the EsmEnablementAccountSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.esm_enablement_account: + body["esm_enablement_account"] = self.esm_enablement_account.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the EsmEnablementAccountSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.esm_enablement_account: + body["esm_enablement_account"] = self.esm_enablement_account + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EsmEnablementAccountSetting: """Deserializes the EsmEnablementAccountSetting from a dictionary.""" - return cls(esm_enablement_account=_from_dict(d, 'esm_enablement_account', EsmEnablementAccount), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + esm_enablement_account=_from_dict(d, "esm_enablement_account", EsmEnablementAccount), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class ExchangeToken: """The exchange token is the result of the token exchange with the IdP""" - + credential: Optional[str] = None """The requested token.""" - + credential_eol_time: Optional[int] = None """The end-of-life timestamp of the token. 
The value is in milliseconds since the Unix epoch.""" - + owner_id: Optional[int] = None """User ID of the user that owns this token.""" - + scopes: Optional[List[str]] = None """The scopes of access granted in the token.""" - + token_type: Optional[TokenType] = None """The type of this exchange token""" - + def as_dict(self) -> dict: """Serializes the ExchangeToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential is not None: body['credential'] = self.credential - if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time - if self.owner_id is not None: body['ownerId'] = self.owner_id - if self.scopes: body['scopes'] = [v for v in self.scopes] - if self.token_type is not None: body['tokenType'] = self.token_type.value + if self.credential is not None: + body["credential"] = self.credential + if self.credential_eol_time is not None: + body["credentialEolTime"] = self.credential_eol_time + if self.owner_id is not None: + body["ownerId"] = self.owner_id + if self.scopes: + body["scopes"] = [v for v in self.scopes] + if self.token_type is not None: + body["tokenType"] = self.token_type.value return body def as_shallow_dict(self) -> dict: """Serializes the ExchangeToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential is not None: body['credential'] = self.credential - if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time - if self.owner_id is not None: body['ownerId'] = self.owner_id - if self.scopes: body['scopes'] = self.scopes - if self.token_type is not None: body['tokenType'] = self.token_type + if self.credential is not None: + body["credential"] = self.credential + if self.credential_eol_time is not None: + body["credentialEolTime"] = self.credential_eol_time + if self.owner_id is not None: + body["ownerId"] = self.owner_id + if self.scopes: + body["scopes"] = self.scopes + if self.token_type is not None: + body["tokenType"] = self.token_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeToken: """Deserializes the ExchangeToken from a dictionary.""" - return cls(credential=d.get('credential', None), credential_eol_time=d.get('credentialEolTime', None), owner_id=d.get('ownerId', None), scopes=d.get('scopes', None), token_type=_enum(d, 'tokenType', TokenType)) - - + return cls( + credential=d.get("credential", None), + credential_eol_time=d.get("credentialEolTime", None), + owner_id=d.get("ownerId", None), + scopes=d.get("scopes", None), + token_type=_enum(d, "tokenType", TokenType), + ) @dataclass class ExchangeTokenRequest: """Exchange a token with the IdP""" - + partition_id: PartitionId """The partition of Credentials store""" - + token_type: List[TokenType] """A list of token types being requested""" - + scopes: List[str] """Array of scopes for the token request.""" - + def as_dict(self) -> dict: """Serializes the ExchangeTokenRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.partition_id: body['partitionId'] = self.partition_id.as_dict() - if self.scopes: body['scopes'] = [v for v in self.scopes] - if self.token_type: body['tokenType'] = [v.value for v in self.token_type] + if self.partition_id: + body["partitionId"] = self.partition_id.as_dict() + if self.scopes: + body["scopes"] = [v for v in self.scopes] + if self.token_type: + body["tokenType"] = [v.value for v in self.token_type] return body def as_shallow_dict(self) -> dict: """Serializes the 
ExchangeTokenRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.partition_id: body['partitionId'] = self.partition_id - if self.scopes: body['scopes'] = self.scopes - if self.token_type: body['tokenType'] = self.token_type + if self.partition_id: + body["partitionId"] = self.partition_id + if self.scopes: + body["scopes"] = self.scopes + if self.token_type: + body["tokenType"] = self.token_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeTokenRequest: """Deserializes the ExchangeTokenRequest from a dictionary.""" - return cls(partition_id=_from_dict(d, 'partitionId', PartitionId), scopes=d.get('scopes', None), token_type=_repeated_enum(d, 'tokenType', TokenType)) - - + return cls( + partition_id=_from_dict(d, "partitionId", PartitionId), + scopes=d.get("scopes", None), + token_type=_repeated_enum(d, "tokenType", TokenType), + ) @dataclass class ExchangeTokenResponse: """Exchanged tokens were successfully returned.""" - + values: Optional[List[ExchangeToken]] = None - + def as_dict(self) -> dict: """Serializes the ExchangeTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.values: body['values'] = [v.as_dict() for v in self.values] + if self.values: + body["values"] = [v.as_dict() for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the ExchangeTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.values: body['values'] = self.values + if self.values: + body["values"] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExchangeTokenResponse: """Deserializes the ExchangeTokenResponse from a dictionary.""" - return cls(values=_repeated_dict(d, 'values', ExchangeToken)) - - @dataclass class FetchIpAccessListResponse: """An IP access list was successfully returned.""" - + ip_access_list: Optional[IpAccessListInfo] = None """Definition of an IP Access list""" - + def as_dict(self) -> dict: """Serializes the FetchIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the FetchIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> FetchIpAccessListResponse: """Deserializes the FetchIpAccessListResponse from a dictionary.""" - return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) - - @dataclass class GenericWebhookConfig: password: Optional[str] = None """[Input-Only][Optional] Password for webhook.""" - + password_set: Optional[bool] = None """[Output-Only] Whether password is set.""" - + url: Optional[str] = None """[Input-Only] URL for webhook.""" - + url_set: Optional[bool] = None """[Output-Only] Whether URL is set.""" - + username: Optional[str] = None """[Input-Only][Optional] Username for webhook.""" - + username_set: Optional[bool] = None """[Output-Only] Whether username is set.""" - + def as_dict(self) -> dict: """Serializes
the GenericWebhookConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.password is not None: body['password'] = self.password - if self.password_set is not None: body['password_set'] = self.password_set - if self.url is not None: body['url'] = self.url - if self.url_set is not None: body['url_set'] = self.url_set - if self.username is not None: body['username'] = self.username - if self.username_set is not None: body['username_set'] = self.username_set + if self.password is not None: + body["password"] = self.password + if self.password_set is not None: + body["password_set"] = self.password_set + if self.url is not None: + body["url"] = self.url + if self.url_set is not None: + body["url_set"] = self.url_set + if self.username is not None: + body["username"] = self.username + if self.username_set is not None: + body["username_set"] = self.username_set return body def as_shallow_dict(self) -> dict: """Serializes the GenericWebhookConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.password is not None: body['password'] = self.password - if self.password_set is not None: body['password_set'] = self.password_set - if self.url is not None: body['url'] = self.url - if self.url_set is not None: body['url_set'] = self.url_set - if self.username is not None: body['username'] = self.username - if self.username_set is not None: body['username_set'] = self.username_set + if self.password is not None: + body["password"] = self.password + if self.password_set is not None: + body["password_set"] = self.password_set + if self.url is not None: + body["url"] = self.url + if self.url_set is not None: + body["url_set"] = self.url_set + if self.username is not None: + body["username"] = self.username + if self.username_set is not None: + body["username_set"] = self.username_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GenericWebhookConfig: """Deserializes the GenericWebhookConfig from a dictionary.""" - return cls(password=d.get('password', None), password_set=d.get('password_set', None), url=d.get('url', None), url_set=d.get('url_set', None), username=d.get('username', None), username_set=d.get('username_set', None)) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + return cls( + password=d.get("password", None), + password_set=d.get("password_set", None), + url=d.get("url", None), + url_set=d.get("url_set", None), + username=d.get("username", None), + username_set=d.get("username_set", None), + ) @dataclass class GetIpAccessListResponse: ip_access_list: Optional[IpAccessListInfo] = None """Definition of an IP Access list""" - + def as_dict(self) -> dict: """Serializes the GetIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the GetIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetIpAccessListResponse: """Deserializes the GetIpAccessListResponse from a dictionary.""" - return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) - - + return 
cls(ip_access_list=_from_dict(d, "ip_access_list", IpAccessListInfo)) @dataclass class GetIpAccessListsResponse: """IP access lists were successfully returned.""" - + ip_access_lists: Optional[List[IpAccessListInfo]] = None - + def as_dict(self) -> dict: """Serializes the GetIpAccessListsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] + if self.ip_access_lists: + body["ip_access_lists"] = [v.as_dict() for v in self.ip_access_lists] return body def as_shallow_dict(self) -> dict: """Serializes the GetIpAccessListsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists + if self.ip_access_lists: + body["ip_access_lists"] = self.ip_access_lists return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetIpAccessListsResponse: """Deserializes the GetIpAccessListsResponse from a dictionary.""" - return cls(ip_access_lists=_repeated_dict(d, 'ip_access_lists', IpAccessListInfo)) - - + return cls(ip_access_lists=_repeated_dict(d, "ip_access_lists", IpAccessListInfo)) +@dataclass +class GetTokenPermissionLevelsResponse: + permission_levels: Optional[List[TokenPermissionsDescription]] = None + """Specific permission levels""" + def as_dict(self) -> dict: + """Serializes the GetTokenPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] + return body + def as_shallow_dict(self) -> dict: + """Serializes the GetTokenPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.permission_levels: + body["permission_levels"] = self.permission_levels + return body + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetTokenPermissionLevelsResponse: + """Deserializes the GetTokenPermissionLevelsResponse from a dictionary.""" + return cls(permission_levels=_repeated_dict(d, "permission_levels", TokenPermissionsDescription)) +@dataclass +class GetTokenResponse: + """Token with specified Token ID was successfully returned.""" + token_info: Optional[TokenInfo] = None + def as_dict(self) -> dict: + """Serializes the GetTokenResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.token_info: + body["token_info"] = self.token_info.as_dict() + return body - - - - - - - - - - - - - - - - - - - - - - - - - - - - -@dataclass -class GetTokenPermissionLevelsResponse: - permission_levels: Optional[List[TokenPermissionsDescription]] = None - """Specific permission levels""" - - def as_dict(self) -> dict: - """Serializes the GetTokenPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetTokenPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetTokenPermissionLevelsResponse: - """Deserializes the GetTokenPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', TokenPermissionsDescription)) - 
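Note: every dataclass in this file follows the same serialization contract: `as_dict` produces a JSON-ready dict (nested dataclasses via `.as_dict()`, enums via `.value`), `as_shallow_dict` keeps nested objects and enums as live Python values, and `from_dict` inverts `as_dict`. A minimal round-trip sketch, assuming these classes are importable from `databricks.sdk.service.settings` as in this patch (the email address is illustrative only):

    from databricks.sdk.service.settings import EmailConfig

    # Build a config, serialize it to a JSON-ready request body, and round-trip it back.
    cfg = EmailConfig(addresses=["ops@example.com"])  # illustrative address
    body = cfg.as_dict()  # {"addresses": ["ops@example.com"]}
    assert EmailConfig.from_dict(body).addresses == cfg.addresses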
- - - -@dataclass -class GetTokenResponse: - """Token with specified Token ID was successfully returned.""" - - token_info: Optional[TokenInfo] = None - - def as_dict(self) -> dict: - """Serializes the GetTokenResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.token_info: body['token_info'] = self.token_info.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetTokenResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.token_info: body['token_info'] = self.token_info - return body + def as_shallow_dict(self) -> dict: + """Serializes the GetTokenResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.token_info: + body["token_info"] = self.token_info + return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetTokenResponse: """Deserializes the GetTokenResponse from a dictionary.""" - return cls(token_info=_from_dict(d, 'token_info', TokenInfo)) - - - - - + return cls(token_info=_from_dict(d, "token_info", TokenInfo)) @dataclass class IpAccessListInfo: """Definition of an IP Access list""" - + address_count: Optional[int] = None """Total number of IP or CIDR values.""" - + created_at: Optional[int] = None """Creation timestamp in milliseconds.""" - + created_by: Optional[int] = None """User ID of the user who created this list.""" - + enabled: Optional[bool] = None """Specifies whether this IP access list is enabled.""" - + ip_addresses: Optional[List[str]] = None - + label: Optional[str] = None """Label for the IP access list. This **cannot** be empty.""" - + list_id: Optional[str] = None """Universally unique identifier (UUID) of the IP access list.""" - + list_type: Optional[ListType] = None """Type of IP access list. Valid values are as follows and are case-sensitive: * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" - + updated_at: Optional[int] = None """Update timestamp in milliseconds.""" - + updated_by: Optional[int] = None """User ID of the user who updated this list.""" - + def as_dict(self) -> dict: """Serializes the IpAccessListInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.address_count is not None: body['address_count'] = self.address_count - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.enabled is not None: body['enabled'] = self.enabled - if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] - if self.label is not None: body['label'] = self.label - if self.list_id is not None: body['list_id'] = self.list_id - if self.list_type is not None: body['list_type'] = self.list_type.value - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.address_count is not None: + body["address_count"] = self.address_count + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.enabled is not None: + body["enabled"] = self.enabled + if self.ip_addresses: + body["ip_addresses"] = [v for v in self.ip_addresses] + if self.label is not None: + body["label"] = self.label + if self.list_id is not None: + body["list_id"] = self.list_id + if self.list_type is not None: + body["list_type"] = self.list_type.value + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the IpAccessListInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.address_count is not None: body['address_count'] = self.address_count - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.enabled is not None: body['enabled'] = self.enabled - if self.ip_addresses: body['ip_addresses'] = self.ip_addresses - if self.label is not None: body['label'] = self.label - if self.list_id is not None: body['list_id'] = self.list_id - if self.list_type is not None: body['list_type'] = self.list_type - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.address_count is not None: + body["address_count"] = self.address_count + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.enabled is not None: + body["enabled"] = self.enabled + if self.ip_addresses: + body["ip_addresses"] = self.ip_addresses + if self.label is not None: + body["label"] = self.label + if self.list_id is not None: + body["list_id"] = self.list_id + if self.list_type is not None: + body["list_type"] = self.list_type + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> IpAccessListInfo: """Deserializes the IpAccessListInfo from a dictionary.""" - return cls(address_count=d.get('address_count', None), 
created_at=d.get('created_at', None), created_by=d.get('created_by', None), enabled=d.get('enabled', None), ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_id=d.get('list_id', None), list_type=_enum(d, 'list_type', ListType), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + address_count=d.get("address_count", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + enabled=d.get("enabled", None), + ip_addresses=d.get("ip_addresses", None), + label=d.get("label", None), + list_id=d.get("list_id", None), + list_type=_enum(d, "list_type", ListType), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class ListIpAccessListResponse: """IP access lists were successfully returned.""" - + ip_access_lists: Optional[List[IpAccessListInfo]] = None - + def as_dict(self) -> dict: """Serializes the ListIpAccessListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists] + if self.ip_access_lists: + body["ip_access_lists"] = [v.as_dict() for v in self.ip_access_lists] return body def as_shallow_dict(self) -> dict: """Serializes the ListIpAccessListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists + if self.ip_access_lists: + body["ip_access_lists"] = self.ip_access_lists return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListIpAccessListResponse: """Deserializes the ListIpAccessListResponse from a dictionary.""" - return cls(ip_access_lists=_repeated_dict(d, 'ip_access_lists', IpAccessListInfo)) - - - - - + return cls(ip_access_lists=_repeated_dict(d, "ip_access_lists", IpAccessListInfo)) @dataclass class ListNetworkConnectivityConfigurationsResponse: """The network connectivity configuration list was successfully retrieved.""" - + items: Optional[List[NetworkConnectivityConfiguration]] = None - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If null, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the ListNetworkConnectivityConfigurationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items: body['items'] = [v.as_dict() for v in self.items] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.items: + body["items"] = [v.as_dict() for v in self.items] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListNetworkConnectivityConfigurationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items: body['items'] = self.items - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.items: + body["items"] = self.items + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNetworkConnectivityConfigurationsResponse: """Deserializes the ListNetworkConnectivityConfigurationsResponse from a dictionary.""" - return cls(items=_repeated_dict(d, 'items', NetworkConnectivityConfiguration), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + items=_repeated_dict(d, "items", NetworkConnectivityConfiguration), + next_page_token=d.get("next_page_token", None), + ) @dataclass class ListNetworkPoliciesResponse: items: Optional[List[AccountNetworkPolicy]] = None """List of network policies.""" - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. If null, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the ListNetworkPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items: body['items'] = [v.as_dict() for v in self.items] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.items: + body["items"] = [v.as_dict() for v in self.items] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListNetworkPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items: body['items'] = self.items - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.items: + body["items"] = self.items + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNetworkPoliciesResponse: """Deserializes the ListNetworkPoliciesResponse from a dictionary.""" - return cls(items=_repeated_dict(d, 'items', AccountNetworkPolicy), next_page_token=d.get('next_page_token', None)) - - - - - + return cls( + items=_repeated_dict(d, "items", AccountNetworkPolicy), next_page_token=d.get("next_page_token", None) + ) @dataclass class ListNotificationDestinationsResponse: next_page_token: Optional[str] = None """Page token for the next page of results.""" - + results: Optional[List[ListNotificationDestinationsResult]] = None - + def as_dict(self) -> dict: """Serializes the ListNotificationDestinationsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.results: body['results'] = [v.as_dict() for v in self.results] + if
self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.results: + body["results"] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ListNotificationDestinationsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.results: body['results'] = self.results + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.results: + body["results"] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNotificationDestinationsResponse: """Deserializes the ListNotificationDestinationsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', ListNotificationDestinationsResult)) - - + return cls( + next_page_token=d.get("next_page_token", None), + results=_repeated_dict(d, "results", ListNotificationDestinationsResult), + ) @dataclass class ListNotificationDestinationsResult: destination_type: Optional[DestinationType] = None """[Output-only] The type of the notification destination. The type can not be changed once set.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + id: Optional[str] = None """UUID identifying notification destination.""" - + def as_dict(self) -> dict: """Serializes the ListNotificationDestinationsResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.destination_type is not None: body['destination_type'] = self.destination_type.value - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id + if self.destination_type is not None: + body["destination_type"] = self.destination_type.value + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the ListNotificationDestinationsResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_type is not None: body['destination_type'] = self.destination_type - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id + if self.destination_type is not None: + body["destination_type"] = self.destination_type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListNotificationDestinationsResult: """Deserializes the ListNotificationDestinationsResult from a dictionary.""" - return cls(destination_type=_enum(d, 'destination_type', DestinationType), display_name=d.get('display_name', None), id=d.get('id', None)) - - - - - + return cls( + destination_type=_enum(d, "destination_type", DestinationType), + display_name=d.get("display_name", None), + id=d.get("id", None), + )
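The list responses above all follow the same cursor convention: keep requesting pages until `next_page_token` comes back empty. A sketch of that loop, where `fetch_page` is a hypothetical stand-in for the underlying HTTP call, not an SDK method:

def iter_destinations(fetch_page):
    # Drain all pages of notification destinations, one page per request.
    token = None
    while True:
        page = ListNotificationDestinationsResponse.from_dict(fetch_page(page_token=token))
        for result in page.results or []:
            yield result
        token = page.next_page_token
        if not token:  # a null/empty token means there are no more results
            return

@dataclass class ListPrivateEndpointRulesResponse: """The private endpoint rule list was successfully retrieved.""" - + items: Optional[List[NccPrivateEndpointRule]] = None - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results.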
If null, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the ListPrivateEndpointRulesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.items: body['items'] = [v.as_dict() for v in self.items] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.items: + body["items"] = [v.as_dict() for v in self.items] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListPrivateEndpointRulesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.items: body['items'] = self.items - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.items: + body["items"] = self.items + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListPrivateEndpointRulesResponse: """Deserializes the ListPrivateEndpointRulesResponse from a dictionary.""" - return cls(items=_repeated_dict(d, 'items', NccPrivateEndpointRule), next_page_token=d.get('next_page_token', None)) - - + return cls( + items=_repeated_dict(d, "items", NccPrivateEndpointRule), next_page_token=d.get("next_page_token", None) + ) @dataclass class ListPublicTokensResponse: token_infos: Optional[List[PublicTokenInfo]] = None """The information for each token.""" - + def as_dict(self) -> dict: """Serializes the ListPublicTokensResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] + if self.token_infos: + body["token_infos"] = [v.as_dict() for v in self.token_infos] return body def as_shallow_dict(self) -> dict: """Serializes the ListPublicTokensResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_infos: body['token_infos'] = self.token_infos + if self.token_infos: + body["token_infos"] = self.token_infos return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListPublicTokensResponse: """Deserializes the ListPublicTokensResponse from a dictionary.""" - return cls(token_infos=_repeated_dict(d, 'token_infos', PublicTokenInfo)) - - - - - + return cls(token_infos=_repeated_dict(d, "token_infos", PublicTokenInfo)) @dataclass class ListTokensResponse: """Tokens were successfully returned.""" - + token_infos: Optional[List[TokenInfo]] = None """Token metadata of each user-created token in the workspace""" - + def as_dict(self) -> dict: """Serializes the ListTokensResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos] + if self.token_infos: + body["token_infos"] = [v.as_dict() for v in self.token_infos] return body def as_shallow_dict(self) -> dict: """Serializes the ListTokensResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.token_infos: body['token_infos'] = self.token_infos + if self.token_infos: + body["token_infos"] = self.token_infos return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListTokensResponse: """Deserializes the ListTokensResponse from a dictionary.""" - return cls(token_infos=_repeated_dict(d, 'token_infos', TokenInfo)) - - + return cls(token_infos=_repeated_dict(d, "token_infos", TokenInfo))
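The two serializers differ only in how they treat nesting: `as_dict` recursively serializes child dataclasses, while `as_shallow_dict` keeps them as live objects. A sketch of the distinction, using `PublicTokenInfo`, which is defined later in this module (values are illustrative):

info = PublicTokenInfo(comment="ci token", token_id="abc123")
resp = ListPublicTokensResponse(token_infos=[info])

deep = resp.as_dict()  # nested dataclasses become plain dicts
shallow = resp.as_shallow_dict()  # nested dataclasses stay as objects

assert deep == {"token_infos": [{"comment": "ci token", "token_id": "abc123"}]}
assert shallow["token_infos"][0] is info

class ListType(Enum): """Type of IP access list.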
Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list.""" - - ALLOW = 'ALLOW' - BLOCK = 'BLOCK' + + ALLOW = "ALLOW" + BLOCK = "BLOCK" + @dataclass class LlmProxyPartnerPoweredAccount: boolean_val: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -3309,41 +3628,49 @@ class LlmProxyPartnerPoweredAccount: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredAccount into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredAccount: """Deserializes the LlmProxyPartnerPoweredAccount from a dictionary.""" - return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + boolean_val=_from_dict(d, "boolean_val", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class LlmProxyPartnerPoweredEnforce: boolean_val: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -3351,41 +3678,49 @@ class LlmProxyPartnerPoweredEnforce: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. 
This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredEnforce into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredEnforce into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredEnforce: """Deserializes the LlmProxyPartnerPoweredEnforce from a dictionary.""" - return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + boolean_val=_from_dict(d, "boolean_val", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class LlmProxyPartnerPoweredWorkspace: boolean_val: BooleanMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -3393,101 +3728,111 @@ class LlmProxyPartnerPoweredWorkspace: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body. The setting name in the path parameter will be respected instead. 
Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredWorkspace into a dictionary suitable for use as a JSON request body.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict() - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val.as_dict() + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the LlmProxyPartnerPoweredWorkspace into a shallow dictionary of its immediate attributes.""" body = {} - if self.boolean_val: body['boolean_val'] = self.boolean_val - if self.etag is not None: body['etag'] = self.etag - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.boolean_val: + body["boolean_val"] = self.boolean_val + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> LlmProxyPartnerPoweredWorkspace: """Deserializes the LlmProxyPartnerPoweredWorkspace from a dictionary.""" - return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None)) - - + return cls( + boolean_val=_from_dict(d, "boolean_val", BooleanMessage), + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + ) @dataclass class MicrosoftTeamsConfig: url: Optional[str] = None """[Input-Only] URL for Microsoft Teams.""" - + url_set: Optional[bool] = None """[Output-Only] Whether URL is set.""" - + def as_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.url is not None: body['url'] = self.url - if self.url_set is not None: body['url_set'] = self.url_set + if self.url is not None: + body["url"] = self.url + if self.url_set is not None: + body["url_set"] = self.url_set return body def as_shallow_dict(self) -> dict: """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.url is not None: body['url'] = self.url - if self.url_set is not None: body['url_set'] = self.url_set + if self.url is not None: + body["url"] = self.url + if self.url_set is not None: + body["url_set"] = self.url_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MicrosoftTeamsConfig: """Deserializes the MicrosoftTeamsConfig from a dictionary.""" - return cls(url=d.get('url', None), url_set=d.get('url_set', None)) - -
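The etag docstrings on these settings classes describe a read-modify-write loop. A minimal sketch of that flow, where `get_setting`/`patch_setting` are hypothetical stand-ins for the GET and PATCH requests, and `BooleanMessage` is assumed to carry a single boolean `value` field:

# Read the current setting, keep its etag, and send the etag back with the
# update so the server can reject a write based on a stale version.
current = LlmProxyPartnerPoweredWorkspace.from_dict(get_setting())
updated = LlmProxyPartnerPoweredWorkspace(
    boolean_val=BooleanMessage(value=True),
    etag=current.etag,  # identifies the setting version being updated
)
patch_setting(updated.as_dict())

@dataclass class NccAwsStableIpRule: """The stable AWS IP CIDR blocks.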
You can use these to configure the firewall of your resources to allow traffic from your Databricks workspace.""" - + cidr_blocks: Optional[List[str]] = None """The list of stable IP CIDR blocks from which Databricks network traffic originates when accessing your resources.""" - + def as_dict(self) -> dict: """Serializes the NccAwsStableIpRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cidr_blocks: body['cidr_blocks'] = [v for v in self.cidr_blocks] + if self.cidr_blocks: + body["cidr_blocks"] = [v for v in self.cidr_blocks] return body def as_shallow_dict(self) -> dict: """Serializes the NccAwsStableIpRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.cidr_blocks: body['cidr_blocks'] = self.cidr_blocks + if self.cidr_blocks: + body["cidr_blocks"] = self.cidr_blocks return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccAwsStableIpRule: """Deserializes the NccAwsStableIpRule from a dictionary.""" - return cls(cidr_blocks=d.get('cidr_blocks', None)) - - + return cls(cidr_blocks=d.get("cidr_blocks", None)) @dataclass class NccAzurePrivateEndpointRule: """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization.""" - + connection_state: Optional[NccAzurePrivateEndpointRuleConnectionState] = None """The current status of this private endpoint. The private endpoint rules are effective only if the connection state is ESTABLISHED. Remember that you must approve new endpoints on your @@ -3498,131 +3843,171 @@ class NccAzurePrivateEndpointRule: link resource owner. - DISCONNECTED: Connection was removed by the private link resource owner, the private endpoint becomes informative and should be deleted for clean-up. - EXPIRED: If the endpoint was created but not approved in 14 days, it will be EXPIRED.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when this object was created.""" - + deactivated: Optional[bool] = None """Whether this private endpoint is deactivated.""" - + deactivated_at: Optional[int] = None """Time in epoch milliseconds when this object was deactivated.""" - + domain_names: Optional[List[str]] = None """Not used by customer-managed private endpoint services. Domain names of target private link service. When updating this field, the full list of target domain_names must be specified.""" - + endpoint_name: Optional[str] = None """The name of the Azure private endpoint resource.""" - + group_id: Optional[str] = None """Only used by private endpoints to Azure first-party services. The sub-resource type (group ID) of the target resource. 
Note that to connect to workspace root storage (root DBFS), you need two endpoints, one for blob and one for dfs.""" - + network_connectivity_config_id: Optional[str] = None """The ID of a network connectivity configuration, which is the parent resource of this private endpoint rule object.""" - + resource_id: Optional[str] = None """The Azure resource ID of the target resource.""" - + rule_id: Optional[str] = None """The ID of a private endpoint rule.""" - + updated_time: Optional[int] = None """Time in epoch milliseconds when this object was updated.""" - + def as_dict(self) -> dict: """Serializes the NccAzurePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection_state is not None: body['connection_state'] = self.connection_state.value - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.deactivated is not None: body['deactivated'] = self.deactivated - if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at - if self.domain_names: body['domain_names'] = [v for v in self.domain_names] - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.group_id is not None: body['group_id'] = self.group_id - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.rule_id is not None: body['rule_id'] = self.rule_id - if self.updated_time is not None: body['updated_time'] = self.updated_time + if self.connection_state is not None: + body["connection_state"] = self.connection_state.value + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.deactivated is not None: + body["deactivated"] = self.deactivated + if self.deactivated_at is not None: + body["deactivated_at"] = self.deactivated_at + if self.domain_names: + body["domain_names"] = [v for v in self.domain_names] + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.group_id is not None: + body["group_id"] = self.group_id + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.rule_id is not None: + body["rule_id"] = self.rule_id + if self.updated_time is not None: + body["updated_time"] = self.updated_time return body def as_shallow_dict(self) -> dict: """Serializes the NccAzurePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection_state is not None: body['connection_state'] = self.connection_state - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.deactivated is not None: body['deactivated'] = self.deactivated - if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at - if self.domain_names: body['domain_names'] = self.domain_names - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.group_id is not None: body['group_id'] = self.group_id - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.rule_id is not None: body['rule_id'] = self.rule_id - if self.updated_time is not None: body['updated_time'] = 
self.updated_time + if self.connection_state is not None: + body["connection_state"] = self.connection_state + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.deactivated is not None: + body["deactivated"] = self.deactivated + if self.deactivated_at is not None: + body["deactivated_at"] = self.deactivated_at + if self.domain_names: + body["domain_names"] = self.domain_names + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.group_id is not None: + body["group_id"] = self.group_id + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.rule_id is not None: + body["rule_id"] = self.rule_id + if self.updated_time is not None: + body["updated_time"] = self.updated_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccAzurePrivateEndpointRule: """Deserializes the NccAzurePrivateEndpointRule from a dictionary.""" - return cls(connection_state=_enum(d, 'connection_state', NccAzurePrivateEndpointRuleConnectionState), creation_time=d.get('creation_time', None), deactivated=d.get('deactivated', None), deactivated_at=d.get('deactivated_at', None), domain_names=d.get('domain_names', None), endpoint_name=d.get('endpoint_name', None), group_id=d.get('group_id', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), resource_id=d.get('resource_id', None), rule_id=d.get('rule_id', None), updated_time=d.get('updated_time', None)) - - + return cls( + connection_state=_enum(d, "connection_state", NccAzurePrivateEndpointRuleConnectionState), + creation_time=d.get("creation_time", None), + deactivated=d.get("deactivated", None), + deactivated_at=d.get("deactivated_at", None), + domain_names=d.get("domain_names", None), + endpoint_name=d.get("endpoint_name", None), + group_id=d.get("group_id", None), + network_connectivity_config_id=d.get("network_connectivity_config_id", None), + resource_id=d.get("resource_id", None), + rule_id=d.get("rule_id", None), + updated_time=d.get("updated_time", None), + ) class NccAzurePrivateEndpointRuleConnectionState(Enum): - - - DISCONNECTED = 'DISCONNECTED' - ESTABLISHED = 'ESTABLISHED' - EXPIRED = 'EXPIRED' - INIT = 'INIT' - PENDING = 'PENDING' - REJECTED = 'REJECTED' + + DISCONNECTED = "DISCONNECTED" + ESTABLISHED = "ESTABLISHED" + EXPIRED = "EXPIRED" + INIT = "INIT" + PENDING = "PENDING" + REJECTED = "REJECTED" +
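Per the docstring above, a private endpoint rule only takes effect once the connection state reaches ESTABLISHED. A sketch of splitting rules by state, where `raw_rules` is an assumed list of rule dicts as returned by the API:

rules = [NccAzurePrivateEndpointRule.from_dict(r) for r in raw_rules]
# Endpoints still awaiting approval on the Azure side.
pending_approval = [
    r.endpoint_name
    for r in rules
    if r.connection_state is NccAzurePrivateEndpointRuleConnectionState.PENDING
]
# Only these rules are actually in effect.
effective = [
    r
    for r in rules
    if r.connection_state is NccAzurePrivateEndpointRuleConnectionState.ESTABLISHED
]

@dataclass class NccAzureServiceEndpointRule: """The stable Azure service endpoints.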
You can configure the firewall of your Azure resources to allow traffic from your Databricks serverless compute resources.""" - + subnets: Optional[List[str]] = None """The list of subnets from which Databricks network traffic originates when accessing your Azure resources.""" - + target_region: Optional[str] = None """The Azure region in which this service endpoint rule applies.""" - + target_services: Optional[List[EgressResourceType]] = None """The Azure services to which this service endpoint rule applies.""" - + def as_dict(self) -> dict: """Serializes the NccAzureServiceEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.subnets: body['subnets'] = [v for v in self.subnets] - if self.target_region is not None: body['target_region'] = self.target_region - if self.target_services: body['target_services'] = [v.value for v in self.target_services] + if self.subnets: + body["subnets"] = [v for v in self.subnets] + if self.target_region is not None: + body["target_region"] = self.target_region + if self.target_services: + body["target_services"] = [v.value for v in self.target_services] return body def as_shallow_dict(self) -> dict: """Serializes the NccAzureServiceEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.subnets: body['subnets'] = self.subnets - if self.target_region is not None: body['target_region'] = self.target_region - if self.target_services: body['target_services'] = self.target_services + if self.subnets: + body["subnets"] = self.subnets + if self.target_region is not None: + body["target_region"] = self.target_region + if self.target_services: + body["target_services"] = self.target_services return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccAzureServiceEndpointRule: """Deserializes the NccAzureServiceEndpointRule from a dictionary.""" - return cls(subnets=d.get('subnets', None), target_region=d.get('target_region', None), target_services=_repeated_enum(d, 'target_services', EgressResourceType)) - - + return cls( + subnets=d.get("subnets", None), + target_region=d.get("target_region", None), + target_services=_repeated_enum(d, "target_services", EgressResourceType), + )
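The documented use of this rule is firewall allow-listing: the subnets it carries are the sources to permit on your Azure resources. An illustrative sketch (the payload values, including the target service, are made up for the example):

rule = NccAzureServiceEndpointRule.from_dict(
    {
        "subnets": ["10.0.1.0/24", "10.0.2.0/24"],
        "target_region": "eastus",
        "target_services": ["AZURE_BLOB_STORAGE"],
    }
)
# Each subnet is a stable source to allow-list on the target resource.
for subnet in rule.subnets or []:
    print(f"allow {subnet} -> {rule.target_region}")

@dataclass @@ -3631,106 +4016,123 @@ class NccEgressConfig: """The network connectivity rules that are applied by default without resource specific configurations. You can find the stable network information of your serverless compute resources here.""" - + target_rules: Optional[NccEgressTargetRules] = None """The network connectivity rules that are configured for each destination.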
These rules override default rules.""" - + def as_dict(self) -> dict: """Serializes the NccEgressConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_rules: body['default_rules'] = self.default_rules.as_dict() - if self.target_rules: body['target_rules'] = self.target_rules.as_dict() + if self.default_rules: + body["default_rules"] = self.default_rules.as_dict() + if self.target_rules: + body["target_rules"] = self.target_rules.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the NccEgressConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_rules: body['default_rules'] = self.default_rules - if self.target_rules: body['target_rules'] = self.target_rules + if self.default_rules: + body["default_rules"] = self.default_rules + if self.target_rules: + body["target_rules"] = self.target_rules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccEgressConfig: """Deserializes the NccEgressConfig from a dictionary.""" - return cls(default_rules=_from_dict(d, 'default_rules', NccEgressDefaultRules), target_rules=_from_dict(d, 'target_rules', NccEgressTargetRules)) - - + return cls( + default_rules=_from_dict(d, "default_rules", NccEgressDefaultRules), + target_rules=_from_dict(d, "target_rules", NccEgressTargetRules), + ) @dataclass class NccEgressDefaultRules: """Default rules don't have specific targets.""" - + aws_stable_ip_rule: Optional[NccAwsStableIpRule] = None """The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to allow traffic from your Databricks workspace.""" - + azure_service_endpoint_rule: Optional[NccAzureServiceEndpointRule] = None """The stable Azure service endpoints. You can configure the firewall of your Azure resources to allow traffic from your Databricks serverless compute resources.""" - + def as_dict(self) -> dict: """Serializes the NccEgressDefaultRules into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule.as_dict() - if self.azure_service_endpoint_rule: body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule.as_dict() + if self.aws_stable_ip_rule: + body["aws_stable_ip_rule"] = self.aws_stable_ip_rule.as_dict() + if self.azure_service_endpoint_rule: + body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the NccEgressDefaultRules into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule - if self.azure_service_endpoint_rule: body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule + if self.aws_stable_ip_rule: + body["aws_stable_ip_rule"] = self.aws_stable_ip_rule + if self.azure_service_endpoint_rule: + body["azure_service_endpoint_rule"] = self.azure_service_endpoint_rule return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccEgressDefaultRules: """Deserializes the NccEgressDefaultRules from a dictionary.""" - return cls(aws_stable_ip_rule=_from_dict(d, 'aws_stable_ip_rule', NccAwsStableIpRule), azure_service_endpoint_rule=_from_dict(d, 'azure_service_endpoint_rule', NccAzureServiceEndpointRule)) - - + return cls( + aws_stable_ip_rule=_from_dict(d, "aws_stable_ip_rule", NccAwsStableIpRule), + azure_service_endpoint_rule=_from_dict(d, "azure_service_endpoint_rule", 
NccAzureServiceEndpointRule), + ) @dataclass class NccEgressTargetRules: """Target rule controls the egress rules that are dedicated to specific resources.""" - + aws_private_endpoint_rules: Optional[List[CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule]] = None """AWS private endpoint rule controls the AWS private endpoint based egress rules.""" - + azure_private_endpoint_rules: Optional[List[NccAzurePrivateEndpointRule]] = None - + def as_dict(self) -> dict: """Serializes the NccEgressTargetRules into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_private_endpoint_rules: body['aws_private_endpoint_rules'] = [v.as_dict() for v in self.aws_private_endpoint_rules] - if self.azure_private_endpoint_rules: body['azure_private_endpoint_rules'] = [v.as_dict() for v in self.azure_private_endpoint_rules] + if self.aws_private_endpoint_rules: + body["aws_private_endpoint_rules"] = [v.as_dict() for v in self.aws_private_endpoint_rules] + if self.azure_private_endpoint_rules: + body["azure_private_endpoint_rules"] = [v.as_dict() for v in self.azure_private_endpoint_rules] return body def as_shallow_dict(self) -> dict: """Serializes the NccEgressTargetRules into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_private_endpoint_rules: body['aws_private_endpoint_rules'] = self.aws_private_endpoint_rules - if self.azure_private_endpoint_rules: body['azure_private_endpoint_rules'] = self.azure_private_endpoint_rules + if self.aws_private_endpoint_rules: + body["aws_private_endpoint_rules"] = self.aws_private_endpoint_rules + if self.azure_private_endpoint_rules: + body["azure_private_endpoint_rules"] = self.azure_private_endpoint_rules return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccEgressTargetRules: """Deserializes the NccEgressTargetRules from a dictionary.""" - return cls(aws_private_endpoint_rules=_repeated_dict(d, 'aws_private_endpoint_rules', CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule), azure_private_endpoint_rules=_repeated_dict(d, 'azure_private_endpoint_rules', NccAzurePrivateEndpointRule)) - - + return cls( + aws_private_endpoint_rules=_repeated_dict( + d, "aws_private_endpoint_rules", CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRule + ), + azure_private_endpoint_rules=_repeated_dict(d, "azure_private_endpoint_rules", NccAzurePrivateEndpointRule), + ) @dataclass class NccPrivateEndpointRule: """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization.""" - + account_id: Optional[str] = None """Databricks account ID. You can find your account ID from the Accounts Console.""" - + connection_state: Optional[NccPrivateEndpointRulePrivateLinkConnectionState] = None """The current status of this private endpoint. The private endpoint rules are effective only if the connection state is ESTABLISHED. Remember that you must approve new endpoints on your @@ -3740,182 +4142,249 @@ class NccPrivateEndpointRule: the private link resource owner. - DISCONNECTED: Connection was removed by the private link resource owner, the private endpoint becomes informative and should be deleted for clean-up. 
- EXPIRED: If the endpoint was created but not approved in 14 days, it will be EXPIRED.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when this object was created.""" - + deactivated: Optional[bool] = None """Whether this private endpoint is deactivated.""" - + deactivated_at: Optional[int] = None """Time in epoch milliseconds when this object was deactivated.""" - + domain_names: Optional[List[str]] = None """Only used by private endpoints to customer-managed private endpoint services. Domain names of target private link service. When updating this field, the full list of target domain_names must be specified.""" - + enabled: Optional[bool] = None """Only used by private endpoints towards an AWS S3 service. Update this field to activate/deactivate this private endpoint to allow egress access from serverless compute resources.""" - + endpoint_name: Optional[str] = None """The name of the Azure private endpoint resource.""" - + endpoint_service: Optional[str] = None """The full target AWS endpoint service name that connects to the destination resources of the private endpoint.""" - + group_id: Optional[str] = None """Not used by customer-managed private endpoint services. The sub-resource type (group ID) of the target resource. Note that to connect to workspace root storage (root DBFS), you need two endpoints, one for blob and one for dfs.""" - + network_connectivity_config_id: Optional[str] = None """The ID of a network connectivity configuration, which is the parent resource of this private endpoint rule object.""" - + resource_id: Optional[str] = None """The Azure resource ID of the target resource.""" - + resource_names: Optional[List[str]] = None """Only used by private endpoints towards an AWS S3 service. The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names must be in the same region as the NCC/endpoint service. When updating this field, we perform a full update of this field. Please ensure a full list of desired resource_names is provided.""" - + rule_id: Optional[str] = None """The ID of a private endpoint rule.""" - + updated_time: Optional[int] = None """Time in epoch milliseconds when this object was updated.""" - + vpc_endpoint_id: Optional[str] = None """The AWS VPC endpoint ID.
You can use this ID to identify the VPC endpoint created by Databricks.""" - + def as_dict(self) -> dict: """Serializes the NccPrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.connection_state is not None: body['connection_state'] = self.connection_state.value - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.deactivated is not None: body['deactivated'] = self.deactivated - if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at - if self.domain_names: body['domain_names'] = [v for v in self.domain_names] - if self.enabled is not None: body['enabled'] = self.enabled - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service - if self.group_id is not None: body['group_id'] = self.group_id - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.resource_names: body['resource_names'] = [v for v in self.resource_names] - if self.rule_id is not None: body['rule_id'] = self.rule_id - if self.updated_time is not None: body['updated_time'] = self.updated_time - if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.connection_state is not None: + body["connection_state"] = self.connection_state.value + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.deactivated is not None: + body["deactivated"] = self.deactivated + if self.deactivated_at is not None: + body["deactivated_at"] = self.deactivated_at + if self.domain_names: + body["domain_names"] = [v for v in self.domain_names] + if self.enabled is not None: + body["enabled"] = self.enabled + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.endpoint_service is not None: + body["endpoint_service"] = self.endpoint_service + if self.group_id is not None: + body["group_id"] = self.group_id + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.resource_names: + body["resource_names"] = [v for v in self.resource_names] + if self.rule_id is not None: + body["rule_id"] = self.rule_id + if self.updated_time is not None: + body["updated_time"] = self.updated_time + if self.vpc_endpoint_id is not None: + body["vpc_endpoint_id"] = self.vpc_endpoint_id return body def as_shallow_dict(self) -> dict: """Serializes the NccPrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.connection_state is not None: body['connection_state'] = self.connection_state - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.deactivated is not None: body['deactivated'] = self.deactivated - if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at - if self.domain_names: body['domain_names'] = self.domain_names - if self.enabled is not None: body['enabled'] = self.enabled - if self.endpoint_name is not None: 
body['endpoint_name'] = self.endpoint_name - if self.endpoint_service is not None: body['endpoint_service'] = self.endpoint_service - if self.group_id is not None: body['group_id'] = self.group_id - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.resource_names: body['resource_names'] = self.resource_names - if self.rule_id is not None: body['rule_id'] = self.rule_id - if self.updated_time is not None: body['updated_time'] = self.updated_time - if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id + if self.account_id is not None: + body["account_id"] = self.account_id + if self.connection_state is not None: + body["connection_state"] = self.connection_state + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.deactivated is not None: + body["deactivated"] = self.deactivated + if self.deactivated_at is not None: + body["deactivated_at"] = self.deactivated_at + if self.domain_names: + body["domain_names"] = self.domain_names + if self.enabled is not None: + body["enabled"] = self.enabled + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.endpoint_service is not None: + body["endpoint_service"] = self.endpoint_service + if self.group_id is not None: + body["group_id"] = self.group_id + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.resource_names: + body["resource_names"] = self.resource_names + if self.rule_id is not None: + body["rule_id"] = self.rule_id + if self.updated_time is not None: + body["updated_time"] = self.updated_time + if self.vpc_endpoint_id is not None: + body["vpc_endpoint_id"] = self.vpc_endpoint_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NccPrivateEndpointRule: """Deserializes the NccPrivateEndpointRule from a dictionary.""" - return cls(account_id=d.get('account_id', None), connection_state=_enum(d, 'connection_state', NccPrivateEndpointRulePrivateLinkConnectionState), creation_time=d.get('creation_time', None), deactivated=d.get('deactivated', None), deactivated_at=d.get('deactivated_at', None), domain_names=d.get('domain_names', None), enabled=d.get('enabled', None), endpoint_name=d.get('endpoint_name', None), endpoint_service=d.get('endpoint_service', None), group_id=d.get('group_id', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), resource_id=d.get('resource_id', None), resource_names=d.get('resource_names', None), rule_id=d.get('rule_id', None), updated_time=d.get('updated_time', None), vpc_endpoint_id=d.get('vpc_endpoint_id', None)) - - + return cls( + account_id=d.get("account_id", None), + connection_state=_enum(d, "connection_state", NccPrivateEndpointRulePrivateLinkConnectionState), + creation_time=d.get("creation_time", None), + deactivated=d.get("deactivated", None), + deactivated_at=d.get("deactivated_at", None), + domain_names=d.get("domain_names", None), + enabled=d.get("enabled", None), + endpoint_name=d.get("endpoint_name", None), + endpoint_service=d.get("endpoint_service", None), + group_id=d.get("group_id", None), + network_connectivity_config_id=d.get("network_connectivity_config_id", None), + resource_id=d.get("resource_id", None), + 
resource_names=d.get("resource_names", None), + rule_id=d.get("rule_id", None), + updated_time=d.get("updated_time", None), + vpc_endpoint_id=d.get("vpc_endpoint_id", None), + ) class NccPrivateEndpointRulePrivateLinkConnectionState(Enum): - - - DISCONNECTED = 'DISCONNECTED' - ESTABLISHED = 'ESTABLISHED' - EXPIRED = 'EXPIRED' - PENDING = 'PENDING' - REJECTED = 'REJECTED' + + DISCONNECTED = "DISCONNECTED" + ESTABLISHED = "ESTABLISHED" + EXPIRED = "EXPIRED" + PENDING = "PENDING" + REJECTED = "REJECTED" + @dataclass class NetworkConnectivityConfiguration: """Properties of the new network connectivity configuration.""" - + account_id: Optional[str] = None """Your Databricks account ID. You can find your account ID in your Databricks accounts console.""" - + creation_time: Optional[int] = None """Time in epoch milliseconds when this object was created.""" - + egress_config: Optional[NccEgressConfig] = None """The network connectivity rules that apply to network traffic from your serverless compute resources.""" - + name: Optional[str] = None """The name of the network connectivity configuration. The name can contain alphanumeric characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the regular expression ^[0-9a-zA-Z-_]{3,30}$""" - + network_connectivity_config_id: Optional[str] = None """Databricks network connectivity configuration ID.""" - + region: Optional[str] = None """The region for the network connectivity configuration. Only workspaces in the same region can be attached to the network connectivity configuration.""" - + updated_time: Optional[int] = None """Time in epoch milliseconds when this object was updated.""" - + def as_dict(self) -> dict: """Serializes the NetworkConnectivityConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.egress_config: body['egress_config'] = self.egress_config.as_dict() - if self.name is not None: body['name'] = self.name - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.region is not None: body['region'] = self.region - if self.updated_time is not None: body['updated_time'] = self.updated_time + if self.account_id is not None: + body["account_id"] = self.account_id + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.egress_config: + body["egress_config"] = self.egress_config.as_dict() + if self.name is not None: + body["name"] = self.name + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.region is not None: + body["region"] = self.region + if self.updated_time is not None: + body["updated_time"] = self.updated_time return body def as_shallow_dict(self) -> dict: """Serializes the NetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes.""" body = {} - if self.account_id is not None: body['account_id'] = self.account_id - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.egress_config: body['egress_config'] = self.egress_config - if self.name is not None: body['name'] = self.name - if self.network_connectivity_config_id is not None: body['network_connectivity_config_id'] = self.network_connectivity_config_id - if self.region is 
not None: body['region'] = self.region - if self.updated_time is not None: body['updated_time'] = self.updated_time + if self.account_id is not None: + body["account_id"] = self.account_id + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.egress_config: + body["egress_config"] = self.egress_config + if self.name is not None: + body["name"] = self.name + if self.network_connectivity_config_id is not None: + body["network_connectivity_config_id"] = self.network_connectivity_config_id + if self.region is not None: + body["region"] = self.region + if self.updated_time is not None: + body["updated_time"] = self.updated_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NetworkConnectivityConfiguration: """Deserializes the NetworkConnectivityConfiguration from a dictionary.""" - return cls(account_id=d.get('account_id', None), creation_time=d.get('creation_time', None), egress_config=_from_dict(d, 'egress_config', NccEgressConfig), name=d.get('name', None), network_connectivity_config_id=d.get('network_connectivity_config_id', None), region=d.get('region', None), updated_time=d.get('updated_time', None)) - - + return cls( + account_id=d.get("account_id", None), + creation_time=d.get("creation_time", None), + egress_config=_from_dict(d, "egress_config", NccEgressConfig), + name=d.get("name", None), + network_connectivity_config_id=d.get("network_connectivity_config_id", None), + region=d.get("region", None), + updated_time=d.get("updated_time", None), + ) @dataclass @@ -3925,28 +4394,28 @@ class NetworkPolicyEgress: the format expected by the dataplane, see networkconfig.textproto). This policy should be consistent with [[com.databricks.api.proto.settingspolicy.EgressNetworkPolicy]]. For details, see the API design: https://docs.google.com/document/d/1DKWO_FpZMCY4cF2O62LpwII1lx8gsnDGG-qgE3t3TOA/""" - + network_access: Optional[EgressNetworkPolicyNetworkAccessPolicy] = None """The access policy enforced for egress traffic to the internet.""" - + def as_dict(self) -> dict: """Serializes the NetworkPolicyEgress into a dictionary suitable for use as a JSON request body.""" body = {} - if self.network_access: body['network_access'] = self.network_access.as_dict() + if self.network_access: + body["network_access"] = self.network_access.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the NetworkPolicyEgress into a shallow dictionary of its immediate attributes.""" body = {} - if self.network_access: body['network_access'] = self.network_access + if self.network_access: + body["network_access"] = self.network_access return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NetworkPolicyEgress: """Deserializes the NetworkPolicyEgress from a dictionary.""" - return cls(network_access=_from_dict(d, 'network_access', EgressNetworkPolicyNetworkAccessPolicy)) - - + return cls(network_access=_from_dict(d, "network_access", EgressNetworkPolicyNetworkAccessPolicy)) @dataclass @@ -3954,97 +4423,112 @@ class NotificationDestination: config: Optional[Config] = None """The configuration for the notification destination. Will be exactly one of the nested configs. Only returns for users with workspace admin permissions.""" - + destination_type: Optional[DestinationType] = None """[Output-only] The type of the notification destination.
The type can not be changed once set.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + id: Optional[str] = None """UUID identifying notification destination.""" - + def as_dict(self) -> dict: """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: body['config'] = self.config.as_dict() - if self.destination_type is not None: body['destination_type'] = self.destination_type.value - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id + if self.config: + body["config"] = self.config.as_dict() + if self.destination_type is not None: + body["destination_type"] = self.destination_type.value + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: body['config'] = self.config - if self.destination_type is not None: body['destination_type'] = self.destination_type - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id + if self.config: + body["config"] = self.config + if self.destination_type is not None: + body["destination_type"] = self.destination_type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> NotificationDestination: """Deserializes the NotificationDestination from a dictionary.""" - return cls(config=_from_dict(d, 'config', Config), destination_type=_enum(d, 'destination_type', DestinationType), display_name=d.get('display_name', None), id=d.get('id', None)) - - + return cls( + config=_from_dict(d, "config", Config), + destination_type=_enum(d, "destination_type", DestinationType), + display_name=d.get("display_name", None), + id=d.get("id", None), + ) @dataclass class PagerdutyConfig: integration_key: Optional[str] = None """[Input-Only] Integration key for PagerDuty.""" - + integration_key_set: Optional[bool] = None """[Output-Only] Whether integration key is set.""" - + def as_dict(self) -> dict: """Serializes the PagerdutyConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.integration_key is not None: body['integration_key'] = self.integration_key - if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set + if self.integration_key is not None: + body["integration_key"] = self.integration_key + if self.integration_key_set is not None: + body["integration_key_set"] = self.integration_key_set return body def as_shallow_dict(self) -> dict: """Serializes the PagerdutyConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.integration_key is not None: body['integration_key'] = self.integration_key - if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set + if self.integration_key is not None: + body["integration_key"] = self.integration_key + if self.integration_key_set is not None: + body["integration_key_set"] = self.integration_key_set return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PagerdutyConfig: """Deserializes the PagerdutyConfig from a dictionary.""" - return 
cls(integration_key=d.get('integration_key', None), integration_key_set=d.get('integration_key_set', None)) - - + return cls( + integration_key=d.get("integration_key", None), integration_key_set=d.get("integration_key_set", None) + ) @dataclass class PartitionId: """Partition by workspace or account""" - + workspace_id: Optional[int] = None """The ID of the workspace.""" - + def as_dict(self) -> dict: """Serializes the PartitionId into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspace_id is not None: body['workspaceId'] = self.workspace_id + if self.workspace_id is not None: + body["workspaceId"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the PartitionId into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspace_id is not None: body['workspaceId'] = self.workspace_id + if self.workspace_id is not None: + body["workspaceId"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PartitionId: """Deserializes the PartitionId from a dictionary.""" - return cls(workspace_id=d.get('workspaceId', None)) - - @dataclass @@ -4055,25 +4539,25 @@ class PersonalComputeMessage: Personal Compute default policy to individual workspaces and requires a workspace’s users or groups to be added to the ACLs of that workspace’s Personal Compute default policy before they will be able to create compute resources through that policy.""" - + def as_dict(self) -> dict: """Serializes the PersonalComputeMessage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: body['value'] = self.value.value + if self.value is not None: + body["value"] = self.value.value return body def as_shallow_dict(self) -> dict: """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes.""" body = {} - if self.value is not None: body['value'] = self.value + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PersonalComputeMessage: """Deserializes the PersonalComputeMessage from a dictionary.""" - return cls(value=_enum(d, 'value', PersonalComputeMessageEnum)) - - class PersonalComputeMessageEnum(Enum): @@ -4082,14 +4566,15 @@ class PersonalComputeMessageEnum(Enum): Personal Compute default policy to individual workspaces and requires a workspace’s users or groups to be added to the ACLs of that workspace’s Personal Compute default policy before they will be able to create compute resources through that policy.""" - - DELEGATE = 'DELEGATE' - ON = 'ON' + + DELEGATE = "DELEGATE" + ON = "ON" +
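Two small round trips with illustrative values: one for the enum wrapper just defined, and one highlighting that `PartitionId` is serialized with a camelCase wire key:

# DELEGATE hands control of the Personal Compute default policy to
# individual workspaces; ON enables it account-wide.
msg = PersonalComputeMessage.from_dict({"value": "DELEGATE"})
assert msg.value is PersonalComputeMessageEnum.DELEGATE
assert msg.as_dict() == {"value": "DELEGATE"}

# Note the casing: the workspace_id field maps to the key "workspaceId".
assert PartitionId(workspace_id=12345).as_dict() == {"workspaceId": 12345}

@dataclass class PersonalComputeSetting: personal_compute: PersonalComputeMessage - + etag: Optional[str] = None """etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting @@ -4097,124 +4582,157 @@ class PersonalComputeSetting: -> update pattern to perform setting updates in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the PATCH request to identify the setting version you are updating.""" - + setting_name: Optional[str] = None """Name of the corresponding setting. This field is populated in the response, but it will not be respected even if it's set in the request body.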
The setting name in the path parameter will be respected instead. Setting name is required to be 'default' if the setting only has one instance per workspace.""" - + def as_dict(self) -> dict: """Serializes the PersonalComputeSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.personal_compute: body['personal_compute'] = self.personal_compute.as_dict() - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.etag is not None: + body["etag"] = self.etag + if self.personal_compute: + body["personal_compute"] = self.personal_compute.as_dict() + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body def as_shallow_dict(self) -> dict: """Serializes the PersonalComputeSetting into a shallow dictionary of its immediate attributes.""" body = {} - if self.etag is not None: body['etag'] = self.etag - if self.personal_compute: body['personal_compute'] = self.personal_compute - if self.setting_name is not None: body['setting_name'] = self.setting_name + if self.etag is not None: + body["etag"] = self.etag + if self.personal_compute: + body["personal_compute"] = self.personal_compute + if self.setting_name is not None: + body["setting_name"] = self.setting_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PersonalComputeSetting: """Deserializes the PersonalComputeSetting from a dictionary.""" - return cls(etag=d.get('etag', None), personal_compute=_from_dict(d, 'personal_compute', PersonalComputeMessage), setting_name=d.get('setting_name', None)) - - + return cls( + etag=d.get("etag", None), + personal_compute=_from_dict(d, "personal_compute", PersonalComputeMessage), + setting_name=d.get("setting_name", None), + ) @dataclass class PublicTokenInfo: comment: Optional[str] = None """Comment the token was created with, if applicable.""" - + creation_time: Optional[int] = None """Server time (in epoch milliseconds) when the token was created.""" - + expiry_time: Optional[int] = None """Server time (in epoch milliseconds) when the token will expire, or -1 if not applicable.""" - + token_id: Optional[str] = None """The ID of this token.""" - + def as_dict(self) -> dict: """Serializes the PublicTokenInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.expiry_time is not None: body['expiry_time'] = self.expiry_time - if self.token_id is not None: body['token_id'] = self.token_id + if self.comment is not None: + body["comment"] = self.comment + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.expiry_time is not None: + body["expiry_time"] = self.expiry_time + if self.token_id is not None: + body["token_id"] = self.token_id return body def as_shallow_dict(self) -> dict: """Serializes the PublicTokenInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.expiry_time is not None: body['expiry_time'] = self.expiry_time - if self.token_id is not None: body['token_id'] = self.token_id + if self.comment is not None: + body["comment"] = self.comment + if self.creation_time is not None: + body["creation_time"] = self.creation_time + if self.expiry_time is not None: + 
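The etag and setting_name docstrings above spell out the read-modify-write contract shared by every settings payload in this module. A sketch of that GET -> PATCH flow for the Personal Compute default policy, assuming the account-level settings accessor (treat the exact call sites and the mask path as illustrative):

    from databricks.sdk import AccountClient
    from databricks.sdk.service import settings

    a = AccountClient()

    # GET first so we hold a fresh etag for optimistic concurrency control.
    current = a.settings.personal_compute.get()

    # PATCH with that etag; a stale etag is rejected instead of silently
    # overwriting a concurrent writer's update.
    a.settings.personal_compute.update(
        allow_missing=True,
        setting=settings.PersonalComputeSetting(
            etag=current.etag,
            personal_compute=settings.PersonalComputeMessage(
                value=settings.PersonalComputeMessageEnum.DELEGATE
            ),
        ),
        field_mask="personal_compute.value",
    )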
 @dataclass
 class PublicTokenInfo:
     comment: Optional[str] = None
     """Comment the token was created with, if applicable."""
-
+
     creation_time: Optional[int] = None
     """Server time (in epoch milliseconds) when the token was created."""
-
+
     expiry_time: Optional[int] = None
     """Server time (in epoch milliseconds) when the token will expire, or -1 if not applicable."""
-
+
     token_id: Optional[str] = None
     """The ID of this token."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PublicTokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PublicTokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PublicTokenInfo:
         """Deserializes the PublicTokenInfo from a dictionary."""
-        return cls(comment=d.get('comment', None), creation_time=d.get('creation_time', None), expiry_time=d.get('expiry_time', None), token_id=d.get('token_id', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            creation_time=d.get("creation_time", None),
+            expiry_time=d.get("expiry_time", None),
+            token_id=d.get("token_id", None),
+        )


 @dataclass
 class ReplaceIpAccessList:
     """Details required to replace an IP access list."""
-
+
     label: str
     """Label for the IP access list. This **cannot** be empty."""
-
+
     list_type: ListType
     """Type of IP access list. Valid values are as follows and are case-sensitive:

     * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
     range. IP addresses in the block list are excluded even if they are included in an allow list."""
-
+
     enabled: bool
     """Specifies whether this IP access list is enabled."""
-
+
     ip_access_list_id: Optional[str] = None
     """The ID for the corresponding IP access list"""
-
+
     ip_addresses: Optional[List[str]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ReplaceIpAccessList into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
-        if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses]
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type.value
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_access_list_id is not None:
+            body["ip_access_list_id"] = self.ip_access_list_id
+        if self.ip_addresses:
+            body["ip_addresses"] = [v for v in self.ip_addresses]
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ReplaceIpAccessList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enabled is not None: body['enabled'] = self.enabled
-        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
-        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
-        if self.label is not None: body['label'] = self.label
-        if self.list_type is not None: body['list_type'] = self.list_type
+        if self.enabled is not None:
+            body["enabled"] = self.enabled
+        if self.ip_access_list_id is not None:
+            body["ip_access_list_id"] = self.ip_access_list_id
+        if self.ip_addresses:
+            body["ip_addresses"] = self.ip_addresses
+        if self.label is not None:
+            body["label"] = self.label
+        if self.list_type is not None:
+            body["list_type"] = self.list_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ReplaceIpAccessList:
         """Deserializes the ReplaceIpAccessList from a dictionary."""
-        return cls(enabled=d.get('enabled', None), ip_access_list_id=d.get('ip_access_list_id', None), ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_type=_enum(d, 'list_type', ListType))
-
-
+        return cls(
+            enabled=d.get("enabled", None),
+            ip_access_list_id=d.get("ip_access_list_id", None),
+            ip_addresses=d.get("ip_addresses", None),
+            label=d.get("label", None),
+            list_type=_enum(d, "list_type", ListType),
+        )


 @dataclass
@@ -4233,44 +4751,42 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> ReplaceResponse:
         """Deserializes the ReplaceResponse from a dictionary."""
         return cls()
-
-

 @dataclass
 class RestrictWorkspaceAdminsMessage:
     status: RestrictWorkspaceAdminsMessageStatus
-
+
     def as_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.status is not None: body['status'] = self.status.value
+        if self.status is not None:
+            body["status"] = self.status.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.status is not None: body['status'] = self.status
+        if self.status is not None:
+            body["status"] = self.status
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsMessage:
         """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary."""
-        return cls(status=_enum(d, 'status', RestrictWorkspaceAdminsMessageStatus))
-
-
+        return cls(status=_enum(d, "status", RestrictWorkspaceAdminsMessageStatus))


 class RestrictWorkspaceAdminsMessageStatus(Enum):
-
-    ALLOW_ALL = 'ALLOW_ALL'
-    RESTRICT_TOKENS_AND_JOB_RUN_AS = 'RESTRICT_TOKENS_AND_JOB_RUN_AS'
+
+    ALLOW_ALL = "ALLOW_ALL"
+    RESTRICT_TOKENS_AND_JOB_RUN_AS = "RESTRICT_TOKENS_AND_JOB_RUN_AS"
+

 @dataclass
 class RestrictWorkspaceAdminsSetting:
     restrict_workspace_admins: RestrictWorkspaceAdminsMessage
-
+
     etag: Optional[str] = None
     """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
     for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
@@ -4278,60 +4794,68 @@ class RestrictWorkspaceAdminsSetting:
     -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
     etag from a GET request, and pass it with the PATCH request to identify the setting version you
     are updating."""
-
+
     setting_name: Optional[str] = None
     """Name of the corresponding setting. This field is populated in the response, but it will not be
     respected even if it's set in the request body. The setting name in the path parameter will be
     respected instead. Setting name is required to be 'default' if the setting only has one instance
     per workspace."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsSetting into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins.as_dict()
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.restrict_workspace_admins:
+            body["restrict_workspace_admins"] = self.restrict_workspace_admins.as_dict()
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RestrictWorkspaceAdminsSetting into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.etag is not None: body['etag'] = self.etag
-        if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.restrict_workspace_admins:
+            body["restrict_workspace_admins"] = self.restrict_workspace_admins
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsSetting:
         """Deserializes the RestrictWorkspaceAdminsSetting from a dictionary."""
-        return cls(etag=d.get('etag', None), restrict_workspace_admins=_from_dict(d, 'restrict_workspace_admins', RestrictWorkspaceAdminsMessage), setting_name=d.get('setting_name', None))
-
-
+        return cls(
+            etag=d.get("etag", None),
+            restrict_workspace_admins=_from_dict(d, "restrict_workspace_admins", RestrictWorkspaceAdminsMessage),
+            setting_name=d.get("setting_name", None),
+        )


 @dataclass
 class RevokeTokenRequest:
     token_id: str
     """The ID of the token to be revoked."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RevokeTokenRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenRequest:
         """Deserializes the RevokeTokenRequest from a dictionary."""
-        return cls(token_id=d.get('token_id', None))
-
-
+        return cls(token_id=d.get("token_id", None))


 @dataclass
@@ -4350,8 +4874,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenResponse:
         """Deserializes the RevokeTokenResponse from a dictionary."""
         return cls()
-
-


 @dataclass
@@ -4370,44 +4892,44 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> SetStatusResponse:
         """Deserializes the SetStatusResponse from a dictionary."""
         return cls()
-
-

 @dataclass
 class SlackConfig:
     url: Optional[str] = None
     """[Input-Only] URL for Slack destination."""
-
+
     url_set: Optional[bool] = None
     """[Output-Only] Whether URL is set."""
-
+
     def as_dict(self) -> dict:
         """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SlackConfig into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.url is not None: body['url'] = self.url
-        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.url is not None:
+            body["url"] = self.url
+        if self.url_set is not None:
+            body["url_set"] = self.url_set
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SlackConfig:
         """Deserializes the SlackConfig from a dictionary."""
-        return cls(url=d.get('url', None), url_set=d.get('url_set', None))
-
-
+        return cls(url=d.get("url", None), url_set=d.get("url_set", None))


 @dataclass
 class SqlResultsDownload:
     boolean_val: BooleanMessage
-
+
     etag: Optional[str] = None
     """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
     for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
@@ -4415,353 +4937,436 @@ class SqlResultsDownload:
     -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
     etag from a GET request, and pass it with the PATCH request to identify the setting version you
     are updating."""
-
+
     setting_name: Optional[str] = None
     """Name of the corresponding setting. This field is populated in the response, but it will not be
     respected even if it's set in the request body. The setting name in the path parameter will be
     respected instead. Setting name is required to be 'default' if the setting only has one instance
     per workspace."""
-
+
     def as_dict(self) -> dict:
         """Serializes the SqlResultsDownload into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.boolean_val: body['boolean_val'] = self.boolean_val.as_dict()
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.boolean_val:
+            body["boolean_val"] = self.boolean_val.as_dict()
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SqlResultsDownload into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.boolean_val: body['boolean_val'] = self.boolean_val
-        if self.etag is not None: body['etag'] = self.etag
-        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        if self.boolean_val:
+            body["boolean_val"] = self.boolean_val
+        if self.etag is not None:
+            body["etag"] = self.etag
+        if self.setting_name is not None:
+            body["setting_name"] = self.setting_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SqlResultsDownload:
         """Deserializes the SqlResultsDownload from a dictionary."""
-        return cls(boolean_val=_from_dict(d, 'boolean_val', BooleanMessage), etag=d.get('etag', None), setting_name=d.get('setting_name', None))
-
-
+        return cls(
+            boolean_val=_from_dict(d, "boolean_val", BooleanMessage),
+            etag=d.get("etag", None),
+            setting_name=d.get("setting_name", None),
+        )


 @dataclass
 class StringMessage:
     value: Optional[str] = None
     """Represents a generic string value."""
-
+
     def as_dict(self) -> dict:
         """Serializes the StringMessage into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the StringMessage into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> StringMessage:
         """Deserializes the StringMessage from a dictionary."""
-        return cls(value=d.get('value', None))
-
-
+        return cls(value=d.get("value", None))


 @dataclass
 class TokenAccessControlRequest:
     group_name: Optional[str] = None
     """name of the group"""
-
+
     permission_level: Optional[TokenPermissionLevel] = None
     """Permission level"""
-
+
     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenAccessControlRequest:
         """Deserializes the TokenAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', TokenPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", TokenPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )


 @dataclass
 class TokenAccessControlResponse:
     all_permissions: Optional[List[TokenPermission]] = None
     """All permissions."""
-
+
     display_name: Optional[str] = None
     """Display name of the user or service principal."""
-
+
     group_name: Optional[str] = None
     """name of the group"""
-
+
     service_principal_name: Optional[str] = None
     """Name of the service principal."""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenAccessControlResponse:
         """Deserializes the TokenAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', TokenPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", TokenPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )


 @dataclass
 class TokenInfo:
     comment: Optional[str] = None
     """Comment that describes the purpose of the token, specified by the token creator."""
-
+
     created_by_id: Optional[int] = None
     """User ID of the user that created the token."""
-
+
     created_by_username: Optional[str] = None
     """Username of the user that created the token."""
-
+
     creation_time: Optional[int] = None
     """Timestamp when the token was created."""
-
+
     expiry_time: Optional[int] = None
     """Timestamp when the token expires."""
-
+
     last_used_day: Optional[int] = None
     """Approximate timestamp for the day the token was last used. Accurate up to 1 day."""
-
+
     owner_id: Optional[int] = None
     """User ID of the user that owns the token."""
-
+
     token_id: Optional[str] = None
     """ID of the token."""
-
+
     workspace_id: Optional[int] = None
     """If applicable, the ID of the workspace that the token was created in."""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
-        if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
-        if self.owner_id is not None: body['owner_id'] = self.owner_id
-        if self.token_id is not None: body['token_id'] = self.token_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.created_by_username is not None:
+            body["created_by_username"] = self.created_by_username
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.last_used_day is not None:
+            body["last_used_day"] = self.last_used_day
+        if self.owner_id is not None:
+            body["owner_id"] = self.owner_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
-        if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
-        if self.creation_time is not None: body['creation_time'] = self.creation_time
-        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
-        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
-        if self.owner_id is not None: body['owner_id'] = self.owner_id
-        if self.token_id is not None: body['token_id'] = self.token_id
-        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_by_id is not None:
+            body["created_by_id"] = self.created_by_id
+        if self.created_by_username is not None:
+            body["created_by_username"] = self.created_by_username
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.expiry_time is not None:
+            body["expiry_time"] = self.expiry_time
+        if self.last_used_day is not None:
+            body["last_used_day"] = self.last_used_day
+        if self.owner_id is not None:
+            body["owner_id"] = self.owner_id
+        if self.token_id is not None:
+            body["token_id"] = self.token_id
+        if self.workspace_id is not None:
+            body["workspace_id"] = self.workspace_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
-        return cls(comment=d.get('comment', None), created_by_id=d.get('created_by_id', None), created_by_username=d.get('created_by_username', None), creation_time=d.get('creation_time', None), expiry_time=d.get('expiry_time', None), last_used_day=d.get('last_used_day', None), owner_id=d.get('owner_id', None), token_id=d.get('token_id', None), workspace_id=d.get('workspace_id', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            created_by_id=d.get("created_by_id", None),
+            created_by_username=d.get("created_by_username", None),
+            creation_time=d.get("creation_time", None),
+            expiry_time=d.get("expiry_time", None),
+            last_used_day=d.get("last_used_day", None),
+            owner_id=d.get("owner_id", None),
+            token_id=d.get("token_id", None),
+            workspace_id=d.get("workspace_id", None),
+        )
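TokenInfo is the shape returned when enumerating workspace tokens. A short consumption sketch, assuming the workspace token management accessor:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Fields the server did not populate stay None.
    for t in w.token_management.list():
        print(t.token_id, t.created_by_username, t.expiry_time)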
 @dataclass
 class TokenPermission:
     inherited: Optional[bool] = None
-
+
     inherited_from_object: Optional[List[str]] = None
-
+
     permission_level: Optional[TokenPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenPermission:
         """Deserializes the TokenPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', TokenPermissionLevel))
-
-
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", TokenPermissionLevel),
+        )


 class TokenPermissionLevel(Enum):
     """Permission level"""
-
-    CAN_USE = 'CAN_USE'
+
+    CAN_USE = "CAN_USE"
+

 @dataclass
 class TokenPermissions:
     access_control_list: Optional[List[TokenAccessControlResponse]] = None
-
+
     object_id: Optional[str] = None
-
+
     object_type: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenPermissions:
         """Deserializes the TokenPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', TokenAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )


 @dataclass
 class TokenPermissionsDescription:
     description: Optional[str] = None
-
+
     permission_level: Optional[TokenPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsDescription:
         """Deserializes the TokenPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', TokenPermissionLevel))
-
-
+        return cls(
+            description=d.get("description", None), permission_level=_enum(d, "permission_level", TokenPermissionLevel)
+        )


 @dataclass
 class TokenPermissionsRequest:
     access_control_list: Optional[List[TokenAccessControlRequest]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the TokenPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsRequest:
         """Deserializes the TokenPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', TokenAccessControlRequest))
-
-
+        return cls(access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlRequest))
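TokenPermissionsRequest carries a full ACL, so a set-style call replaces every existing entry. A sketch assuming the token management permissions API (the group name is hypothetical):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()
    w.token_management.set_permissions(
        access_control_list=[
            settings.TokenAccessControlRequest(
                group_name="data-engineers",  # hypothetical group
                permission_level=settings.TokenPermissionLevel.CAN_USE,
            )
        ]
    )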


 class TokenType(Enum):
     """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported."""
-
-    ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN'
-    ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY'
-    ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN'
-    ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY'
-    AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN'
+
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_AZURE_EXCHANGE_TOKEN"
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY"
+    ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN"
+    ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = (
+        "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY"
+    )
+    AZURE_ACTIVE_DIRECTORY_TOKEN = "AZURE_ACTIVE_DIRECTORY_TOKEN"
+

 @dataclass
 class UpdateAccountIpAccessEnableRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: AccountIpAccessEnable
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -4772,40 +5377,48 @@ class UpdateAccountIpAccessEnableRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountIpAccessEnableRequest:
         """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AccountIpAccessEnable))
-
-
+        return cls(
            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AccountIpAccessEnable),
+        )
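Each Update*Request wrapper in the remainder of this hunk pairs a settings payload with allow_missing and a field_mask. How such a request serializes, sketched with the class above (the inner field names of AccountIpAccessEnable are defined earlier in this module and only assumed here):

    from databricks.sdk.service import settings

    setting = settings.AccountIpAccessEnable.from_dict(
        {"acct_ip_acl_enabled": {"value": True}}  # assumed field name
    )
    req = settings.UpdateAccountIpAccessEnableRequest(
        allow_missing=True,  # always true for the Settings API
        setting=setting,
        field_mask="acct_ip_acl_enabled.value",  # explicit paths beat "*"
    )
    print(req.as_dict())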


 @dataclass
 class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: AibiDashboardEmbeddingAccessPolicySetting
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -4816,40 +5429,48 @@ class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
         """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AibiDashboardEmbeddingAccessPolicySetting))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AibiDashboardEmbeddingAccessPolicySetting),
+        )


 @dataclass
 class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: AibiDashboardEmbeddingApprovedDomainsSetting
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -4860,40 +5481,48 @@ class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
         """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AibiDashboardEmbeddingApprovedDomainsSetting))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AibiDashboardEmbeddingApprovedDomainsSetting),
+        )


 @dataclass
 class UpdateAutomaticClusterUpdateSettingRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: AutomaticClusterUpdateSetting
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -4904,40 +5533,48 @@ class UpdateAutomaticClusterUpdateSettingRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateAutomaticClusterUpdateSettingRequest:
         """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', AutomaticClusterUpdateSetting))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", AutomaticClusterUpdateSetting),
+        )


 @dataclass
 class UpdateComplianceSecurityProfileSettingRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: ComplianceSecurityProfileSetting
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -4948,40 +5585,48 @@ class UpdateComplianceSecurityProfileSettingRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateComplianceSecurityProfileSettingRequest:
         """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', ComplianceSecurityProfileSetting))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", ComplianceSecurityProfileSetting),
+        )


 @dataclass
 class UpdateCspEnablementAccountSettingRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: CspEnablementAccountSetting
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -4992,40 +5637,48 @@ class UpdateCspEnablementAccountSettingRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCspEnablementAccountSettingRequest:
         """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', CspEnablementAccountSetting))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", CspEnablementAccountSetting),
+        )


 @dataclass
 class UpdateDashboardEmailSubscriptionsRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: DashboardEmailSubscriptions
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -5036,38 +5689,46 @@ class UpdateDashboardEmailSubscriptionsRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateDashboardEmailSubscriptionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDashboardEmailSubscriptionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateDashboardEmailSubscriptionsRequest:
         """Deserializes the UpdateDashboardEmailSubscriptionsRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DashboardEmailSubscriptions))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DashboardEmailSubscriptions),
+        )


 @dataclass
 class UpdateDefaultNamespaceSettingRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: DefaultNamespaceSetting
     """This represents the setting configuration for the default namespace in the Databricks workspace.
     Setting the default catalog for the workspace determines the catalog that is used when queries
@@ -5076,7 +5737,7 @@ class UpdateDefaultNamespaceSettingRequest:
     'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a
     restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
     applies when using Unity Catalog-enabled compute."""
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -5087,40 +5748,48 @@ class UpdateDefaultNamespaceSettingRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateDefaultNamespaceSettingRequest:
         """Deserializes the UpdateDefaultNamespaceSettingRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DefaultNamespaceSetting))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DefaultNamespaceSetting),
+        )


 @dataclass
 class UpdateDisableLegacyAccessRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: DisableLegacyAccess
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -5131,40 +5800,48 @@ class UpdateDisableLegacyAccessRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyAccessRequest:
         """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DisableLegacyAccess))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DisableLegacyAccess),
+        )


 @dataclass
 class UpdateDisableLegacyDbfsRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: DisableLegacyDbfs
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -5175,40 +5852,48 @@ class UpdateDisableLegacyDbfsRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyDbfsRequest:
         """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DisableLegacyDbfs))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DisableLegacyDbfs),
+        )


 @dataclass
 class UpdateDisableLegacyFeaturesRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: DisableLegacyFeatures
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -5219,40 +5904,48 @@ class UpdateDisableLegacyFeaturesRequest:
     A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
     fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
     API changes in the future."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting.as_dict()
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
-        if self.field_mask is not None: body['field_mask'] = self.field_mask
-        if self.setting: body['setting'] = self.setting
+        if self.allow_missing is not None:
+            body["allow_missing"] = self.allow_missing
+        if self.field_mask is not None:
+            body["field_mask"] = self.field_mask
+        if self.setting:
+            body["setting"] = self.setting
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyFeaturesRequest:
         """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary."""
-        return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', DisableLegacyFeatures))
-
-
+        return cls(
+            allow_missing=d.get("allow_missing", None),
+            field_mask=d.get("field_mask", None),
+            setting=_from_dict(d, "setting", DisableLegacyFeatures),
+        )


 @dataclass
 class UpdateEnableExportNotebookRequest:
     """Details required to update a setting."""
-
+
     allow_missing: bool
     """This should always be set to true for Settings API. Added for AIP compliance."""
-
+
     setting: EnableExportNotebook
-
+
     field_mask: str
     """The field mask must be a single string, with multiple fields separated by commas (no spaces).
     The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
@@ -5263,40 +5956,48 @@ class UpdateEnableExportNotebookRequest:
     A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnableExportNotebookRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnableExportNotebookRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableExportNotebookRequest: """Deserializes the UpdateEnableExportNotebookRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnableExportNotebook)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", EnableExportNotebook), + ) @dataclass class UpdateEnableNotebookTableClipboardRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EnableNotebookTableClipboard - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5307,40 +6008,48 @@ class UpdateEnableNotebookTableClipboardRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnableNotebookTableClipboardRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnableNotebookTableClipboardRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableNotebookTableClipboardRequest: """Deserializes the UpdateEnableNotebookTableClipboardRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnableNotebookTableClipboard)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", EnableNotebookTableClipboard), + ) @dataclass class UpdateEnableResultsDownloadingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EnableResultsDownloading - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5351,40 +6060,48 @@ class UpdateEnableResultsDownloadingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnableResultsDownloadingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnableResultsDownloadingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableResultsDownloadingRequest: """Deserializes the UpdateEnableResultsDownloadingRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnableResultsDownloading)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", EnableResultsDownloading), + ) @dataclass class UpdateEnhancedSecurityMonitoringSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EnhancedSecurityMonitoringSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5395,40 +6112,48 @@ class UpdateEnhancedSecurityMonitoringSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEnhancedSecurityMonitoringSettingRequest: """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EnhancedSecurityMonitoringSetting)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", EnhancedSecurityMonitoringSetting), + ) @dataclass class UpdateEsmEnablementAccountSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: EsmEnablementAccountSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5439,89 +6164,111 @@ class UpdateEsmEnablementAccountSettingRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEsmEnablementAccountSettingRequest: """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', EsmEnablementAccountSetting)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", EsmEnablementAccountSetting), + ) @dataclass class UpdateIpAccessList: """Details required to update an IP access list.""" - + enabled: Optional[bool] = None """Specifies whether this IP access list is enabled.""" - + ip_access_list_id: Optional[str] = None """The ID for the corresponding IP access list""" - + ip_addresses: Optional[List[str]] = None - + label: Optional[str] = None """Label for the IP access list. This **cannot** be empty.""" - + list_type: Optional[ListType] = None """Type of IP access list. Valid values are as follows and are case-sensitive: * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. 
IP addresses in the block list are excluded even if they are included in an allow list.""" - + def as_dict(self) -> dict: """Serializes the UpdateIpAccessList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id - if self.ip_addresses: body['ip_addresses'] = [v for v in self.ip_addresses] - if self.label is not None: body['label'] = self.label - if self.list_type is not None: body['list_type'] = self.list_type.value + if self.enabled is not None: + body["enabled"] = self.enabled + if self.ip_access_list_id is not None: + body["ip_access_list_id"] = self.ip_access_list_id + if self.ip_addresses: + body["ip_addresses"] = [v for v in self.ip_addresses] + if self.label is not None: + body["label"] = self.label + if self.list_type is not None: + body["list_type"] = self.list_type.value return body def as_shallow_dict(self) -> dict: """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id - if self.ip_addresses: body['ip_addresses'] = self.ip_addresses - if self.label is not None: body['label'] = self.label - if self.list_type is not None: body['list_type'] = self.list_type + if self.enabled is not None: + body["enabled"] = self.enabled + if self.ip_access_list_id is not None: + body["ip_access_list_id"] = self.ip_access_list_id + if self.ip_addresses: + body["ip_addresses"] = self.ip_addresses + if self.label is not None: + body["label"] = self.label + if self.list_type is not None: + body["list_type"] = self.list_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateIpAccessList: """Deserializes the UpdateIpAccessList from a dictionary.""" - return cls(enabled=d.get('enabled', None), ip_access_list_id=d.get('ip_access_list_id', None), ip_addresses=d.get('ip_addresses', None), label=d.get('label', None), list_type=_enum(d, 'list_type', ListType)) - - + return cls( + enabled=d.get("enabled", None), + ip_access_list_id=d.get("ip_access_list_id", None), + ip_addresses=d.get("ip_addresses", None), + label=d.get("label", None), + list_type=_enum(d, "list_type", ListType), + ) @dataclass class UpdateLlmProxyPartnerPoweredAccountRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: LlmProxyPartnerPoweredAccount - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5532,40 +6279,48 @@ class UpdateLlmProxyPartnerPoweredAccountRequest: A field mask of `*` indicates full replacement. 
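A short round-trip sketch of the serialization contract above: unset optional fields are omitted from the body rather than sent as nulls, and the enum is serialized by its value (the `ListType.BLOCK` member name is inferred from the documented `BLOCK` value):

    from databricks.sdk.service.settings import ListType, UpdateIpAccessList

    rule = UpdateIpAccessList(
        ip_access_list_id="<list-id>",  # placeholder
        label="blocked-ranges",
        list_type=ListType.BLOCK,
        ip_addresses=["192.0.2.0/24"],  # one CIDR counts as a single value
        enabled=True,
    )
    body = rule.as_dict()  # "list_type" appears as the string "BLOCK"; no null keys
    assert UpdateIpAccessList.from_dict(body) == rule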
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredAccountRequest: """Deserializes the UpdateLlmProxyPartnerPoweredAccountRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', LlmProxyPartnerPoweredAccount)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", LlmProxyPartnerPoweredAccount), + ) @dataclass class UpdateLlmProxyPartnerPoweredEnforceRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: LlmProxyPartnerPoweredEnforce - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5576,40 +6331,48 @@ class UpdateLlmProxyPartnerPoweredEnforceRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredEnforceRequest: """Deserializes the UpdateLlmProxyPartnerPoweredEnforceRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', LlmProxyPartnerPoweredEnforce)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", LlmProxyPartnerPoweredEnforce), + ) @dataclass class UpdateLlmProxyPartnerPoweredWorkspaceRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: LlmProxyPartnerPoweredWorkspace - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5620,81 +6383,89 @@ class UpdateLlmProxyPartnerPoweredWorkspaceRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredWorkspaceRequest: """Deserializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', LlmProxyPartnerPoweredWorkspace)) - - - - - - - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", LlmProxyPartnerPoweredWorkspace), + ) @dataclass class UpdateNotificationDestinationRequest: config: Optional[Config] = None """The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs.""" - + display_name: Optional[str] = None """The display name for the notification destination.""" - + id: Optional[str] = None """UUID identifying notification destination.""" - + def as_dict(self) -> dict: """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config: body['config'] = self.config.as_dict() - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id + if self.config: + body["config"] = self.config.as_dict() + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.config: body['config'] = self.config - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id + if self.config: + body["config"] = self.config + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateNotificationDestinationRequest: """Deserializes the UpdateNotificationDestinationRequest from a dictionary.""" - return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None), id=d.get('id', None)) - - + return cls( + config=_from_dict(d, "config", Config), display_name=d.get("display_name", None), id=d.get("id", None) + ) @dataclass class UpdatePersonalComputeSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: PersonalComputeSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5705,77 +6476,93 @@ class UpdatePersonalComputeSettingRequest: A field mask of `*` indicates full replacement. 
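Because `config` must wrap exactly one nested destination config, a hedged sketch of a valid update body looks as follows; `SlackConfig` and its `url` field are assumptions here, since the `Config` internals are defined elsewhere in this file:

    from databricks.sdk.service.settings import Config, SlackConfig, UpdateNotificationDestinationRequest

    req = UpdateNotificationDestinationRequest(
        id="<destination-uuid>",  # placeholder
        display_name="oncall-slack",
        # Exactly one of the nested configs is set; setting a second would be invalid.
        config=Config(slack=SlackConfig(url="https://hooks.slack.com/services/XXX")),
    )
    body = req.as_dict()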
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalComputeSettingRequest: """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', PersonalComputeSetting)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", PersonalComputeSetting), + ) @dataclass class UpdatePrivateEndpointRule: """Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization.""" - + domain_names: Optional[List[str]] = None """Only used by private endpoints to customer-managed private endpoint services. Domain names of target private link service. When updating this field, the full list of target domain_names must be specified.""" - + enabled: Optional[bool] = None """Only used by private endpoints towards an AWS S3 service. Update this field to activate/deactivate this private endpoint to allow egress access from serverless compute resources.""" - + resource_names: Optional[List[str]] = None """Only used by private endpoints towards AWS S3 service. The globally unique S3 bucket names that will be accessed via the VPC endpoint. The bucket names must be in the same region as the NCC/endpoint service. When updating this field, we perform full update on this field. 
Please ensure a full list of desired resource_names is provided.""" - + def as_dict(self) -> dict: """Serializes the UpdatePrivateEndpointRule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.domain_names: body['domain_names'] = [v for v in self.domain_names] - if self.enabled is not None: body['enabled'] = self.enabled - if self.resource_names: body['resource_names'] = [v for v in self.resource_names] + if self.domain_names: + body["domain_names"] = [v for v in self.domain_names] + if self.enabled is not None: + body["enabled"] = self.enabled + if self.resource_names: + body["resource_names"] = [v for v in self.resource_names] return body def as_shallow_dict(self) -> dict: """Serializes the UpdatePrivateEndpointRule into a shallow dictionary of its immediate attributes.""" body = {} - if self.domain_names: body['domain_names'] = self.domain_names - if self.enabled is not None: body['enabled'] = self.enabled - if self.resource_names: body['resource_names'] = self.resource_names + if self.domain_names: + body["domain_names"] = self.domain_names + if self.enabled is not None: + body["enabled"] = self.enabled + if self.resource_names: + body["resource_names"] = self.resource_names return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdatePrivateEndpointRule: """Deserializes the UpdatePrivateEndpointRule from a dictionary.""" - return cls(domain_names=d.get('domain_names', None), enabled=d.get('enabled', None), resource_names=d.get('resource_names', None)) - - + return cls( + domain_names=d.get("domain_names", None), + enabled=d.get("enabled", None), + resource_names=d.get("resource_names", None), + ) @dataclass @@ -5794,19 +6581,17 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() - - @dataclass class UpdateRestrictWorkspaceAdminsSettingRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: RestrictWorkspaceAdminsSetting - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5817,40 +6602,48 @@ class UpdateRestrictWorkspaceAdminsSettingRequest: A field mask of `*` indicates full replacement. 
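Because `domain_names` and `resource_names` are replaced wholesale rather than merged, an update that means "add one more" has to resend the entire desired list, as in this sketch (values are illustrative):

    from databricks.sdk.service.settings import UpdatePrivateEndpointRule

    # The currently configured domains, fetched out of band.
    current_domains = ["db.internal.example.com"]
    rule = UpdatePrivateEndpointRule(domain_names=current_domains + ["warehouse.internal.example.com"])
    rule.as_dict()  # {"domain_names": [...]}: any name omitted here is dropped server-side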
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRestrictWorkspaceAdminsSettingRequest: """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', RestrictWorkspaceAdminsSetting)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", RestrictWorkspaceAdminsSetting), + ) @dataclass class UpdateSqlResultsDownloadRequest: """Details required to update a setting.""" - + allow_missing: bool """This should always be set to true for Settings API. Added for AIP compliance.""" - + setting: SqlResultsDownload - + field_mask: str """The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields @@ -5861,35 +6654,40 @@ class UpdateSqlResultsDownloadRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + def as_dict(self) -> dict: """Serializes the UpdateSqlResultsDownloadRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting.as_dict() + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateSqlResultsDownloadRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_missing is not None: body['allow_missing'] = self.allow_missing - if self.field_mask is not None: body['field_mask'] = self.field_mask - if self.setting: body['setting'] = self.setting + if self.allow_missing is not None: + body["allow_missing"] = self.allow_missing + if self.field_mask is not None: + body["field_mask"] = self.field_mask + if self.setting: + body["setting"] = self.setting return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateSqlResultsDownloadRequest: """Deserializes the UpdateSqlResultsDownloadRequest from a dictionary.""" - return cls(allow_missing=d.get('allow_missing', None), field_mask=d.get('field_mask', None), setting=_from_dict(d, 'setting', SqlResultsDownload)) - - + return cls( + allow_missing=d.get("allow_missing", None), + field_mask=d.get("field_mask", None), + setting=_from_dict(d, "setting", SqlResultsDownload), + ) - - - -WorkspaceConf = Dict[str,str] +WorkspaceConf = Dict[str, str] @dataclass @@ -5898,191 +6696,170 @@ class WorkspaceNetworkOption: """The network policy ID to apply to the workspace. This controls the network access rules for all serverless compute resources in the workspace. Each workspace can only be linked to one policy at a time. 
If no policy is explicitly assigned, the workspace will use 'default-policy'.""" - + workspace_id: Optional[int] = None """The workspace ID.""" - + def as_dict(self) -> dict: """Serializes the WorkspaceNetworkOption into a dictionary suitable for use as a JSON request body.""" body = {} - if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.network_policy_id is not None: + body["network_policy_id"] = self.network_policy_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceNetworkOption into a shallow dictionary of its immediate attributes.""" body = {} - if self.network_policy_id is not None: body['network_policy_id'] = self.network_policy_id - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + if self.network_policy_id is not None: + body["network_policy_id"] = self.network_policy_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceNetworkOption: """Deserializes the WorkspaceNetworkOption from a dictionary.""" - return cls(network_policy_id=d.get('network_policy_id', None), workspace_id=d.get('workspace_id', None)) - - - - + return cls(network_policy_id=d.get("network_policy_id", None), workspace_id=d.get("workspace_id", None)) class AccountIpAccessListsAPI: """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console. - + Account IP Access Lists affect web application access and REST API access to the account console and account APIs. If the feature is disabled for the account, all access is allowed for this account. There is support for allow lists (inclusion) and block lists (exclusion). - + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the account, the connection is allowed only if the IP address matches an allow list. If there are no allow lists for the account, all IP addresses are allowed. - + For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , label: str, list_type: ListType - , * - , ip_addresses: Optional[List[str]] = None) -> CreateIpAccessListResponse: + def create( + self, label: str, list_type: ListType, *, ip_addresses: Optional[List[str]] = None + ) -> CreateIpAccessListResponse: """Create access list. - + Creates an IP access list for the account. - + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. 
* If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` """ body = {} - if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] - if label is not None: body['label'] = label - if list_type is not None: body['list_type'] = list_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists', body=body - - , headers=headers - ) + if ip_addresses is not None: + body["ip_addresses"] = [v for v in ip_addresses] + if label is not None: + body["label"] = label + if list_type is not None: + body["list_type"] = list_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists", body=body, headers=headers + ) return CreateIpAccessListResponse.from_dict(res) - - - - - def delete(self - , ip_access_list_id: str - ): + def delete(self, ip_access_list_id: str): """Delete access list. - + Deletes an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}' - - , headers=headers - ) - - - - + self._api.do( + "DELETE", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", headers=headers + ) - def get(self - , ip_access_list_id: str - ) -> GetIpAccessListResponse: + def get(self, ip_access_list_id: str) -> GetIpAccessListResponse: """Get IP access list. - + Gets an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`GetIpAccessListResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}' - - , headers=headers - ) - return GetIpAccessListResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", headers=headers + ) + return GetIpAccessListResponse.from_dict(res) def list(self) -> Iterator[IpAccessListInfo]: """Get access lists. - + Gets all IP access lists for the specified account. 
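A usage sketch for the methods above, assuming an `AccountClient` that exposes this service as `a.ip_access_lists` (that wiring lives outside this file):

    from databricks.sdk import AccountClient
    from databricks.sdk.service.settings import ListType

    a = AccountClient()  # credentials resolved from the environment
    a.ip_access_lists.create(
        label="corp-vpn",
        list_type=ListType.ALLOW,
        ip_addresses=["203.0.113.0/24"],  # counts toward the 1000 IP/CIDR limit
    )
    for acl in a.ip_access_lists.list():  # a single GET; the result is not paginated
        print(acl.label, acl.enabled)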
- + :returns: Iterator over :class:`IpAccessListInfo` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists", headers=headers) parsed = GetIpAccessListsResponse.from_dict(json).ip_access_lists return parsed if parsed is not None else [] - - - - - - def replace(self - , ip_access_list_id: str, label: str, list_type: ListType, enabled: bool - , * - , ip_addresses: Optional[List[str]] = None): + def replace( + self, + ip_access_list_id: str, + label: str, + list_type: ListType, + enabled: bool, + *, + ip_addresses: Optional[List[str]] = None, + ): """Replace access list. - + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -6090,59 +6867,67 @@ def replace(self `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) - - - """ - body = {} - if enabled is not None: body['enabled'] = enabled - if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] - if label is not None: body['label'] = label - if list_type is not None: body['list_type'] = list_type.value - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}', body=body - - , headers=headers - ) - - - - - def update(self - , ip_access_list_id: str - , * - , enabled: Optional[bool] = None, ip_addresses: Optional[List[str]] = None, label: Optional[str] = None, list_type: Optional[ListType] = None): + """ + body = {} + if enabled is not None: + body["enabled"] = enabled + if ip_addresses is not None: + body["ip_addresses"] = [v for v in ip_addresses] + if label is not None: + body["label"] = label + if list_type is not None: + body["list_type"] = list_type.value + headers = { + "Content-Type": "application/json", + } + + self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", + body=body, + headers=headers, + ) + + def update( + self, + ip_access_list_id: str, + *, + enabled: Optional[bool] = None, + ip_addresses: Optional[List[str]] = None, + label: Optional[str] = None, + list_type: Optional[ListType] = None, + ): """Update access list. - + Updates an existing IP access list, specified by its ID. - + A list can include allow lists and block lists. 
See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -6152,33 +6937,39 @@ def update(self Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. - - + + """ body = {} - if enabled is not None: body['enabled'] = enabled - if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] - if label is not None: body['label'] = label - if list_type is not None: body['list_type'] = list_type.value - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}', body=body - - , headers=headers - ) - + if enabled is not None: + body["enabled"] = enabled + if ip_addresses is not None: + body["ip_addresses"] = [v for v in ip_addresses] + if label is not None: + body["label"] = label + if list_type is not None: + body["list_type"] = list_type.value + headers = { + "Content-Type": "application/json", + } + + self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/ip-access-lists/{ip_access_list_id}", + body=body, + headers=headers, + ) + - - class AccountSettingsAPI: """Accounts Settings API allows users to manage settings at the account level.""" - + def __init__(self, api_client): self._api = api_client - + self._csp_enablement_account = CspEnablementAccountAPI(self._api) self._disable_legacy_features = DisableLegacyFeaturesAPI(self._api) self._enable_ip_access_lists = EnableIpAccessListsAPI(self._api) @@ -6187,133 +6978,114 @@ def __init__(self, api_client): self._llm_proxy_partner_powered_enforce = LlmProxyPartnerPoweredEnforceAPI(self._api) self._personal_compute = PersonalComputeAPI(self._api) - @property def csp_enablement_account(self) -> CspEnablementAccountAPI: """The compliance security profile settings at the account level control whether to enable it for new workspaces.""" return self._csp_enablement_account - + @property def disable_legacy_features(self) -> DisableLegacyFeaturesAPI: """Disable legacy features for new Databricks workspaces.""" return self._disable_legacy_features - + @property def enable_ip_access_lists(self) -> EnableIpAccessListsAPI: """Controls the enforcement of IP access lists for accessing the account console.""" return self._enable_ip_access_lists - + @property def esm_enablement_account(self) -> EsmEnablementAccountAPI: """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces.""" return self._esm_enablement_account - + @property def llm_proxy_partner_powered_account(self) -> LlmProxyPartnerPoweredAccountAPI: """Determines if partner powered models are 
enabled or not for a specific account.""" return self._llm_proxy_partner_powered_account - + @property def llm_proxy_partner_powered_enforce(self) -> LlmProxyPartnerPoweredEnforceAPI: """Determines if the account-level partner-powered setting value is enforced upon the workspace-level partner-powered setting.""" return self._llm_proxy_partner_powered_enforce - + @property def personal_compute(self) -> PersonalComputeAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources.""" return self._personal_compute - - - class AibiDashboardEmbeddingAccessPolicyAPI: """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse: """Delete the AI/BI dashboard embedding access policy. - + Delete the AI/BI dashboard embedding access policy, reverting back to the default. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default", + query=query, + headers=headers, + ) return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting: + def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting: """Retrieve the AI/BI dashboard embedding access policy. - + Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
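The etag contract above translates into a read-then-write pattern. A sketch, given a `WorkspaceClient` named `w` whose settings expose this API (the attribute path and the `etag` field on the returned setting are assumptions):

    # Read first so the etag reflects the current version, then pass it to
    # delete() so a concurrent writer surfaces as a conflict error instead of
    # a silent lost update.
    current = w.settings.aibi_dashboard_embedding_access_policy.get()
    w.settings.aibi_dashboard_embedding_access_policy.delete(etag=current.etag)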
- + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default", query=query, headers=headers + ) return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str - ) -> AibiDashboardEmbeddingAccessPolicySetting: + def update( + self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str + ) -> AibiDashboardEmbeddingAccessPolicySetting: """Update the AI/BI dashboard embedding access policy. - + Updates the AI/BI dashboard embedding access policy at the workspace level. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting` @@ -6323,115 +7095,107 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default", body=body, headers=headers + ) return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res) - - + class AibiDashboardEmbeddingApprovedDomainsAPI: """Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse: """Delete AI/BI dashboard embedding approved domains. - + Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default empty list. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default", + query=query, + headers=headers, + ) return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting: + def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting: """Retrieve the list of domains approved to host embedded AI/BI dashboards. - + Retrieves the list of domains approved to host embedded AI/BI dashboards. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default", + query=query, + headers=headers, + ) return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str - ) -> AibiDashboardEmbeddingApprovedDomainsSetting: + def update( + self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str + ) -> AibiDashboardEmbeddingApprovedDomainsSetting: """Update the list of domains approved to host embedded AI/BI dashboards. - + Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` @@ -6441,84 +7205,78 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. 
Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + "/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default", + body=body, + headers=headers, + ) return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res) - - + class AutomaticClusterUpdateAPI: """Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned off.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - - , * - , etag: Optional[str] = None) -> AutomaticClusterUpdateSetting: + def get(self, *, etag: Optional[str] = None) -> AutomaticClusterUpdateSetting: """Get the automatic cluster update setting. - + Gets the automatic cluster update setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AutomaticClusterUpdateSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/automatic_cluster_update/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/automatic_cluster_update/names/default", query=query, headers=headers + ) return AutomaticClusterUpdateSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str - ) -> AutomaticClusterUpdateSetting: + def update( + self, allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str + ) -> AutomaticClusterUpdateSetting: """Update the automatic cluster update setting. - + Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. 
If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AutomaticClusterUpdateSetting` @@ -6528,86 +7286,77 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AutomaticClusterUpdateSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/automatic_cluster_update/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/automatic_cluster_update/names/default", body=body, headers=headers + ) return AutomaticClusterUpdateSetting.from_dict(res) - - + class ComplianceSecurityProfileAPI: """Controls whether to enable the compliance security profile for the current workspace. Enabling it on a workspace is permanent. By default, it is turned off. - + This setting can NOT be disabled once it is enabled.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - - , * - , etag: Optional[str] = None) -> ComplianceSecurityProfileSetting: + def get(self, *, etag: Optional[str] = None) -> ComplianceSecurityProfileSetting: """Get the compliance security profile setting. - + Gets the compliance security profile setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.
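A hedged sketch of the read-before-`PATCH` pattern described above (assumes a `WorkspaceClient` named ``w``; accessor path assumed)::

    current = w.settings.compliance_security_profile.get()
    # current.etag can be copied into the setting passed to update(), so a
    # concurrent writer surfaces as a 409 instead of a silent overwrite.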
- + :returns: :class:`ComplianceSecurityProfileSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default", query=query, headers=headers + ) return ComplianceSecurityProfileSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str - ) -> ComplianceSecurityProfileSetting: + def update( + self, allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str + ) -> ComplianceSecurityProfileSetting: """Update the compliance security profile setting. - + Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`ComplianceSecurityProfileSetting` @@ -6617,131 +7366,119 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`ComplianceSecurityProfileSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default", body=body, headers=headers + ) return ComplianceSecurityProfileSetting.from_dict(res) - - + class CredentialsManagerAPI: """Credentials manager interacts with Identity Providers to perform token exchanges using stored credentials and refresh tokens.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def exchange_token(self - , partition_id: PartitionId, token_type: List[TokenType], scopes: List[str] - ) -> ExchangeTokenResponse: + def exchange_token( + self, partition_id: PartitionId, token_type: List[TokenType], scopes: List[str] + ) -> ExchangeTokenResponse: """Exchange token. - + Exchange tokens with an Identity Provider to get a new access token.
It allows specifying scopes to determine token permissions. - + :param partition_id: :class:`PartitionId` The partition of Credentials store :param token_type: List[:class:`TokenType`] A list of token types being requested :param scopes: List[str] Array of scopes for the token request. - + :returns: :class:`ExchangeTokenResponse` """ body = {} - if partition_id is not None: body['partitionId'] = partition_id.as_dict() - if scopes is not None: body['scopes'] = [v for v in scopes] - if token_type is not None: body['tokenType'] = [v.value for v in token_type] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/credentials-manager/exchange-tokens/token', body=body - - , headers=headers - ) + if partition_id is not None: + body["partitionId"] = partition_id.as_dict() + if scopes is not None: + body["scopes"] = [v for v in scopes] + if token_type is not None: + body["tokenType"] = [v.value for v in token_type] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/credentials-manager/exchange-tokens/token", body=body, headers=headers) return ExchangeTokenResponse.from_dict(res) - - + class CspEnablementAccountAPI: """The compliance security profile settings at the account level control whether to enable it for new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable the compliance security profile individually for each workspace. - + This setting can be disabled so that new workspaces do not have the compliance security profile enabled by default.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - - , * - , etag: Optional[str] = None) -> CspEnablementAccountSetting: + def get(self, *, etag: Optional[str] = None) -> CspEnablementAccountSetting: """Get the compliance security profile setting for new workspaces. - + Gets the compliance security profile setting for new workspaces. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.
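This is an account-level setting, so a sketch would go through the account client (accessor path assumed, not defined in this hunk)::

    from databricks.sdk import AccountClient

    a = AccountClient()
    setting = a.settings.csp_enablement_account.get()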
- + :returns: :class:`CspEnablementAccountSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default", + query=query, + headers=headers, + ) return CspEnablementAccountSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str - ) -> CspEnablementAccountSetting: + def update( + self, allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str + ) -> CspEnablementAccountSetting: """Update the compliance security profile setting for new workspaces. - + Updates the value of the compliance security profile setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`CspEnablementAccountSetting` @@ -6751,114 +7488,106 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`CspEnablementAccountSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_csp_enablement_ac/names/default", + body=body, + headers=headers, + ) return CspEnablementAccountSetting.from_dict(res) - - + class DashboardEmailSubscriptionsAPI: """Controls whether schedules or workload tasks for refreshing AI/BI Dashboards in the workspace can send subscription emails containing PDFs and/or images of the dashboard. By default, this setting is enabled (set to `true`)""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteDashboardEmailSubscriptionsResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteDashboardEmailSubscriptionsResponse: """Delete the Dashboard Email Subscriptions setting. - + Reverts the Dashboard Email Subscriptions setting to its default value. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDashboardEmailSubscriptionsResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/dashboard_email_subscriptions/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + "/api/2.0/settings/types/dashboard_email_subscriptions/names/default", + query=query, + headers=headers, + ) return DeleteDashboardEmailSubscriptionsResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> DashboardEmailSubscriptions: + def get(self, *, etag: Optional[str] = None) -> DashboardEmailSubscriptions: """Get the Dashboard Email Subscriptions setting. - + Gets the Dashboard Email Subscriptions setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DashboardEmailSubscriptions` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/dashboard_email_subscriptions/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/dashboard_email_subscriptions/names/default", query=query, headers=headers + ) return DashboardEmailSubscriptions.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: DashboardEmailSubscriptions, field_mask: str - ) -> DashboardEmailSubscriptions: + def update( + self, allow_missing: bool, setting: DashboardEmailSubscriptions, field_mask: str + ) -> DashboardEmailSubscriptions: """Update the Dashboard Email Subscriptions setting. - + Updates the Dashboard Email Subscriptions setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DashboardEmailSubscriptions` @@ -6868,129 +7597,113 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
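A hedged read-modify-write sketch; the ``boolean_val`` field name is an assumption, not confirmed by this hunk (assumes a `WorkspaceClient` named ``w``)::

    current = w.settings.dashboard_email_subscriptions.get()
    current.boolean_val.value = False  # assumed field; disables subscription emails
    updated = w.settings.dashboard_email_subscriptions.update(
        allow_missing=True,
        setting=current,
        field_mask="boolean_val.value",  # list the changed field explicitly rather than "*"
    )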
- + :returns: :class:`DashboardEmailSubscriptions` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/dashboard_email_subscriptions/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/dashboard_email_subscriptions/names/default", body=body, headers=headers + ) return DashboardEmailSubscriptions.from_dict(res) - - + class DefaultNamespaceAPI: """The default namespace setting API allows users to configure the default namespace for a Databricks workspace. - + Through this API, users can retrieve, set, or modify the default namespace used when queries do not reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the object 'retail_prod.default.myTable' (the schema 'default' is always assumed). - + This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteDefaultNamespaceSettingResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteDefaultNamespaceSettingResponse: """Delete the default namespace setting. - + Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
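A sketch of the read -> delete pattern recommended above (assumes a `WorkspaceClient` named ``w``; accessor path assumed)::

    current = w.settings.default_namespace.get()
    # Passing the fresh etag turns a concurrent update into a 409 rather than a lost write.
    w.settings.default_namespace.delete(etag=current.etag)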
- + :returns: :class:`DeleteDefaultNamespaceSettingResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/default_namespace_ws/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", "/api/2.0/settings/types/default_namespace_ws/names/default", query=query, headers=headers + ) return DeleteDefaultNamespaceSettingResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> DefaultNamespaceSetting: + def get(self, *, etag: Optional[str] = None) -> DefaultNamespaceSetting: """Get the default namespace setting. - + Gets the default namespace setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DefaultNamespaceSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/default_namespace_ws/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/default_namespace_ws/names/default", query=query, headers=headers + ) return DefaultNamespaceSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str - ) -> DefaultNamespaceSetting: + def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting: """Update the default namespace setting. - + Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the etag is present in the error response, which should be set in the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DefaultNamespaceSetting` @@ -7007,116 +7720,100 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
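A minimal update sketch using the ``retail_prod`` example from the class docstring (assumes a `WorkspaceClient` named ``w`` and the ``StringMessage`` wrapper from this module)::

    from databricks.sdk.service import settings

    updated = w.settings.default_namespace.update(
        allow_missing=True,
        setting=settings.DefaultNamespaceSetting(namespace=settings.StringMessage(value="retail_prod")),
        field_mask="namespace.value",  # explicit field list, per the guidance above
    )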
- + :returns: :class:`DefaultNamespaceSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/default_namespace_ws/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/default_namespace_ws/names/default", body=body, headers=headers + ) return DefaultNamespaceSetting.from_dict(res) - - + class DisableLegacyAccessAPI: """'Disabling legacy access' has the following impacts: - + 1. Disables direct access to Hive Metastores from the workspace. However, you can still access a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external location access from the workspace. 3. Disables Databricks Runtime versions prior to 13.3LTS.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse: """Delete Legacy Access Disablement Status. - + Deletes legacy access disablement status. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyAccessResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/disable_legacy_access/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", "/api/2.0/settings/types/disable_legacy_access/names/default", query=query, headers=headers + ) return DeleteDisableLegacyAccessResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> DisableLegacyAccess: + def get(self, *, etag: Optional[str] = None) -> DisableLegacyAccess: """Retrieve Legacy Access Disablement Status. - + Retrieves legacy access disablement Status. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- + :returns: :class:`DisableLegacyAccess` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/disable_legacy_access/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/disable_legacy_access/names/default", query=query, headers=headers + ) return DisableLegacyAccess.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: DisableLegacyAccess, field_mask: str - ) -> DisableLegacyAccess: + def update(self, allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess: """Update Legacy Access Disablement Status. - + Updates legacy access disablement status. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyAccess` @@ -7126,119 +7823,103 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyAccess` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/disable_legacy_access/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/disable_legacy_access/names/default", body=body, headers=headers + ) return DisableLegacyAccess.from_dict(res) - - + class DisableLegacyDbfsAPI: """Disabling legacy DBFS has the following implications: - + 1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). 2. Disables Databricks Runtime versions prior to 13.3LTS. - + When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a manual restart of all-purpose compute clusters and SQL warehouses.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse: """Delete the disable legacy DBFS setting. - + Deletes the disable legacy DBFS setting for a workspace, reverting back to the default. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyDbfsResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/disable_legacy_dbfs/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", "/api/2.0/settings/types/disable_legacy_dbfs/names/default", query=query, headers=headers + ) return DeleteDisableLegacyDbfsResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> DisableLegacyDbfs: + def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs: """Get the disable legacy DBFS setting. - + Gets the disable legacy DBFS setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyDbfs` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/disable_legacy_dbfs/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/disable_legacy_dbfs/names/default", query=query, headers=headers + ) return DisableLegacyDbfs.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str - ) -> DisableLegacyDbfs: + def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs: """Update the disable legacy DBFS setting. - + Updates the disable legacy DBFS setting for the workspace. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyDbfs` @@ -7248,116 +7929,106 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
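A hedged toggle sketch; the ``disable_legacy_dbfs`` field name and ``BooleanMessage`` wrapper are assumptions (assumes ``w`` and the ``settings`` import from the earlier sketch)::

    updated = w.settings.disable_legacy_dbfs.update(
        allow_missing=True,
        setting=settings.DisableLegacyDbfs(
            disable_legacy_dbfs=settings.BooleanMessage(value=True)  # assumed field name
        ),
        field_mask="disable_legacy_dbfs.value",
    )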
- + :returns: :class:`DisableLegacyDbfs` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/disable_legacy_dbfs/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/disable_legacy_dbfs/names/default", body=body, headers=headers + ) return DisableLegacyDbfs.from_dict(res) - - + class DisableLegacyFeaturesAPI: """Disable legacy features for new Databricks workspaces. - + For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions prior to 13.3LTS.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse: """Delete the disable legacy features setting. - + Deletes the disable legacy features setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyFeaturesResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default", + query=query, + headers=headers, + ) return DeleteDisableLegacyFeaturesResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> DisableLegacyFeatures: + def get(self, *, etag: Optional[str] = None) -> DisableLegacyFeatures: """Get the disable legacy features setting. - + Gets the value of the disable legacy features setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyFeatures` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default", + query=query, + headers=headers, + ) return DisableLegacyFeatures.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str - ) -> DisableLegacyFeatures: + def update(self, allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures: """Update the disable legacy features setting. - + Updates the value of the disable legacy features setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyFeatures` @@ -7367,69 +8038,64 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyFeatures` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default", + body=body, + headers=headers, + ) return DisableLegacyFeatures.from_dict(res) - - + class EnableExportNotebookAPI: """Controls whether users can export notebooks and files from the Workspace UI. By default, this setting is enabled.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def get_enable_export_notebook(self) -> EnableExportNotebook: """Get the Notebook and File exporting setting. - + Gets the Notebook and File exporting setting. 
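A minimal sketch (assumes a `WorkspaceClient` named ``w``; accessor path assumed)::

    setting = w.settings.enable_export_notebook.get_enable_export_notebook()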
- + :returns: :class:`EnableExportNotebook` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/enable-export-notebook/names/default' , headers=headers ) - return EnableExportNotebook.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/settings/types/enable-export-notebook/names/default", headers=headers) + return EnableExportNotebook.from_dict(res) - def patch_enable_export_notebook(self - , allow_missing: bool, setting: EnableExportNotebook, field_mask: str - ) -> EnableExportNotebook: + def patch_enable_export_notebook( + self, allow_missing: bool, setting: EnableExportNotebook, field_mask: str + ) -> EnableExportNotebook: """Update the Notebook and File exporting setting. - + Updates the Notebook and File exporting setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableExportNotebook` @@ -7439,113 +8105,103 @@ def patch_enable_export_notebook(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnableExportNotebook` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/enable-export-notebook/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/enable-export-notebook/names/default", body=body, headers=headers + ) return EnableExportNotebook.from_dict(res) - - + class EnableIpAccessListsAPI: """Controls the enforcement of IP access lists for accessing the account console, allowing you to enable or disable restricted access based on IP addresses.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse: """Delete the account IP access toggle setting. - + Reverts the value of the account IP access toggle setting to default (ON). - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions.
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAccountIpAccessEnableResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default", + query=query, + headers=headers, + ) return DeleteAccountIpAccessEnableResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> AccountIpAccessEnable: + def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable: """Get the account IP access toggle setting. - + Gets the value of the account IP access toggle setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AccountIpAccessEnable` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default", + query=query, + headers=headers, + ) return AccountIpAccessEnable.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str - ) -> AccountIpAccessEnable: + def update(self, allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable: """Update the account IP access toggle setting. - + Updates the value of the account IP access toggle setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AccountIpAccessEnable` @@ -7555,69 +8211,66 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
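A hedged account-level sketch; the ``acct_ip_acl_enable`` field name is an assumption (assumes ``AccountClient`` and the ``settings`` import from the earlier sketches)::

    a = AccountClient()
    updated = a.settings.enable_ip_access_lists.update(
        allow_missing=True,
        setting=settings.AccountIpAccessEnable(
            acct_ip_acl_enable=settings.BooleanMessage(value=True)  # assumed field name
        ),
        field_mask="acct_ip_acl_enable.value",
    )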
- + :returns: :class:`AccountIpAccessEnable` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default", + body=body, + headers=headers, + ) return AccountIpAccessEnable.from_dict(res) - - + class EnableNotebookTableClipboardAPI: """Controls whether users can copy tabular data to the clipboard via the UI. By default, this setting is enabled.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def get_enable_notebook_table_clipboard(self) -> EnableNotebookTableClipboard: """Get the Results Table Clipboard features setting. - + Gets the Results Table Clipboard features setting. - + :returns: :class:`EnableNotebookTableClipboard` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/enable-notebook-table-clipboard/names/default' - , headers=headers - ) - return EnableNotebookTableClipboard.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def patch_enable_notebook_table_clipboard(self - , allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str - ) -> EnableNotebookTableClipboard: + res = self._api.do( + "GET", "/api/2.0/settings/types/enable-notebook-table-clipboard/names/default", headers=headers + ) + return EnableNotebookTableClipboard.from_dict(res) + + def patch_enable_notebook_table_clipboard( + self, allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str + ) -> EnableNotebookTableClipboard: """Update the Results Table Clipboard features setting. - + Updates the Results Table Clipboard features setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableNotebookTableClipboard` @@ -7627,68 +8280,60 @@ def patch_enable_notebook_table_clipboard(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`EnableNotebookTableClipboard` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/enable-notebook-table-clipboard/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/enable-notebook-table-clipboard/names/default", body=body, headers=headers + ) return EnableNotebookTableClipboard.from_dict(res) - - + class EnableResultsDownloadingAPI: """Controls whether users can download notebook results. By default, this setting is enabled.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def get_enable_results_downloading(self) -> EnableResultsDownloading: """Get the Notebook results download setting. - + Gets the Notebook results download setting. - + :returns: :class:`EnableResultsDownloading` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/enable-results-downloading/names/default' - , headers=headers - ) - return EnableResultsDownloading.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/settings/types/enable-results-downloading/names/default", headers=headers) + return EnableResultsDownloading.from_dict(res) - def patch_enable_results_downloading(self - , allow_missing: bool, setting: EnableResultsDownloading, field_mask: str - ) -> EnableResultsDownloading: + def patch_enable_results_downloading( + self, allow_missing: bool, setting: EnableResultsDownloading, field_mask: str + ) -> EnableResultsDownloading: """Update the Notebook results download setting. - + Updates the Notebook results download setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableResultsDownloading` @@ -7698,88 +8343,79 @@ def patch_enable_results_downloading(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
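A hedged sketch; given the eventual-consistency note above, a get immediately afterwards may still return a stale value (``boolean_val`` is an assumed field name; assumes ``w`` and the ``settings`` import from the earlier sketches)::

    updated = w.settings.enable_results_downloading.patch_enable_results_downloading(
        allow_missing=True,
        setting=settings.EnableResultsDownloading(boolean_val=settings.BooleanMessage(value=False)),
        field_mask="boolean_val.value",
    )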
- + :returns: :class:`EnableResultsDownloading` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/enable-results-downloading/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/enable-results-downloading/names/default", body=body, headers=headers + ) return EnableResultsDownloading.from_dict(res) - - + class EnhancedSecurityMonitoringAPI: """Controls whether enhanced security monitoring is enabled for the current workspace. By default, it is disabled. However, if the compliance security profile is enabled, this is automatically enabled. - + If the compliance security profile is disabled, you can enable or disable this setting and it is not permanent.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - - , * - , etag: Optional[str] = None) -> EnhancedSecurityMonitoringSetting: + def get(self, *, etag: Optional[str] = None) -> EnhancedSecurityMonitoringSetting: """Get the enhanced security monitoring setting. - + Gets the enhanced security monitoring setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`EnhancedSecurityMonitoringSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default", query=query, headers=headers + ) return EnhancedSecurityMonitoringSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str - ) -> EnhancedSecurityMonitoringSetting: + def update( + self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str + ) -> EnhancedSecurityMonitoringSetting: """Update the enhanced security monitoring setting. - + Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request.
If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnhancedSecurityMonitoringSetting` @@ -7789,82 +8425,76 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnhancedSecurityMonitoringSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default", body=body, headers=headers + ) return EnhancedSecurityMonitoringSetting.from_dict(res) - - + class EsmEnablementAccountAPI: """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable enhanced security monitoring individually for each workspace.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - - , * - , etag: Optional[str] = None) -> EsmEnablementAccountSetting: + def get(self, *, etag: Optional[str] = None) -> EsmEnablementAccountSetting: """Get the enhanced security monitoring setting for new workspaces. - + Gets the enhanced security monitoring setting for new workspaces. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
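Because updates are eventually consistent and concurrent writers surface as 409s, the natural client pattern is a read-modify-write loop that refreshes the etag on conflict. A minimal sketch, assuming the workspace-level API is exposed as `w.settings.enhanced_security_monitoring`, that the payload lives under `enhanced_security_monitoring_workspace.is_enabled`, and that the SDK maps 409 to `ResourceConflict` (all assumptions):

from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import ResourceConflict

w = WorkspaceClient()

for _ in range(3):
    # GET returns the freshest etag inside the setting object.
    setting = w.settings.enhanced_security_monitoring.get()
    setting.enhanced_security_monitoring_workspace.is_enabled = True  # assumed field names
    try:
        w.settings.enhanced_security_monitoring.update(
            allow_missing=True,
            setting=setting,
            field_mask="enhanced_security_monitoring_workspace.is_enabled",
        )
        break
    except ResourceConflict:
        continue  # another writer won the race; re-read for a fresh etag and retry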
- + :returns: :class:`EsmEnablementAccountSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default", + query=query, + headers=headers, + ) return EsmEnablementAccountSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str - ) -> EsmEnablementAccountSetting: + def update( + self, allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str + ) -> EsmEnablementAccountSetting: """Update the enhanced security monitoring setting for new workspaces. - + Updates the value of the enhanced security monitoring setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EsmEnablementAccountSetting` @@ -7874,184 +8504,164 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EsmEnablementAccountSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/shield_esm_enablement_ac/names/default", + body=body, + headers=headers, + ) return EsmEnablementAccountSetting.from_dict(res) - - + class IpAccessListsAPI: """IP Access List enables admins to configure IP access lists. - + IP access lists affect web application access and REST API access to this workspace only. If the feature is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists (inclusion) and block lists (exclusion). - + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the workspace, the connection is allowed only if the IP address matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed. 
- + For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the IP access list feature, it can take a few minutes for changes to take effect.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , label: str, list_type: ListType - , * - , ip_addresses: Optional[List[str]] = None) -> CreateIpAccessListResponse: + def create( + self, label: str, list_type: ListType, *, ip_addresses: Optional[List[str]] = None + ) -> CreateIpAccessListResponse: """Create access list. - + Creates an IP access list for this workspace. - + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` """ body = {} - if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] - if label is not None: body['label'] = label - if list_type is not None: body['list_type'] = list_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/ip-access-lists', body=body - - , headers=headers - ) + if ip_addresses is not None: + body["ip_addresses"] = [v for v in ip_addresses] + if label is not None: + body["label"] = label + if list_type is not None: + body["list_type"] = list_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/ip-access-lists", body=body, headers=headers) return CreateIpAccessListResponse.from_dict(res) - - - - - def delete(self - , ip_access_list_id: str - ): + def delete(self, ip_access_list_id: str): """Delete access list. - + Deletes an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/ip-access-lists/{ip_access_list_id}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.0/ip-access-lists/{ip_access_list_id}", headers=headers) - def get(self - , ip_access_list_id: str - ) -> FetchIpAccessListResponse: + def get(self, ip_access_list_id: str) -> FetchIpAccessListResponse: """Get access list. - + Gets an IP access list, specified by its list ID. 
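As a usage sketch of the create/get pair above (the `w.ip_access_lists` accessor matches the SDK's usual naming; the label and CIDR are illustrative):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import ListType

w = WorkspaceClient()

# Create an allow list; the response wraps the new list's metadata.
created = w.ip_access_lists.create(
    label="office-egress",
    list_type=ListType.ALLOW,
    ip_addresses=["203.0.113.0/24"],
)
info = w.ip_access_lists.get(ip_access_list_id=created.ip_access_list.list_id)

# The list has no effect until the feature itself is enabled;
# see :method:workspaceconf/setStatus.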
- + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`FetchIpAccessListResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/ip-access-lists/{ip_access_list_id}' - - , headers=headers - ) - return FetchIpAccessListResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/ip-access-lists/{ip_access_list_id}", headers=headers) + return FetchIpAccessListResponse.from_dict(res) def list(self) -> Iterator[IpAccessListInfo]: """Get access lists. - + Gets all IP access lists for the specified workspace. - + :returns: Iterator over :class:`IpAccessListInfo` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/ip-access-lists' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/ip-access-lists", headers=headers) parsed = ListIpAccessListResponse.from_dict(json).ip_access_lists return parsed if parsed is not None else [] - - - - - - def replace(self - , ip_access_list_id: str, label: str, list_type: ListType, enabled: bool - , * - , ip_addresses: Optional[List[str]] = None): + def replace( + self, + ip_access_list_id: str, + label: str, + list_type: ListType, + enabled: bool, + *, + ip_addresses: Optional[List[str]] = None, + ): """Replace access list. - + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -8060,60 +8670,63 @@ def replace(self returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. 
:param ip_addresses: List[str] (optional) - - - """ - body = {} - if enabled is not None: body['enabled'] = enabled - if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] - if label is not None: body['label'] = label - if list_type is not None: body['list_type'] = list_type.value - headers = {'Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body - - , headers=headers - ) - - - - - def update(self - , ip_access_list_id: str - , * - , enabled: Optional[bool] = None, ip_addresses: Optional[List[str]] = None, label: Optional[str] = None, list_type: Optional[ListType] = None): + """ + body = {} + if enabled is not None: + body["enabled"] = enabled + if ip_addresses is not None: + body["ip_addresses"] = [v for v in ip_addresses] + if label is not None: + body["label"] = label + if list_type is not None: + body["list_type"] = list_type.value + headers = { + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/ip-access-lists/{ip_access_list_id}", body=body, headers=headers) + + def update( + self, + ip_access_list_id: str, + *, + enabled: Optional[bool] = None, + ip_addresses: Optional[List[str]] = None, + label: Optional[str] = None, + list_type: Optional[ListType] = None, + ): """Update access list. - + Updates an existing IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -8123,80 +8736,71 @@ def update(self Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. 
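Continuing the sketch above, the practical difference between `replace` and `update` is full overwrite versus partial PATCH:

# Full overwrite: every required field must be supplied again.
w.ip_access_lists.replace(
    ip_access_list_id=created.ip_access_list.list_id,
    label="office-egress",
    list_type=ListType.ALLOW,
    enabled=True,
    ip_addresses=["203.0.113.0/24", "198.51.100.7/32"],
)

# Partial update: only the fields passed are changed.
w.ip_access_lists.update(
    ip_access_list_id=created.ip_access_list.list_id,
    enabled=False,
)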
- - + + """ body = {} - if enabled is not None: body['enabled'] = enabled - if ip_addresses is not None: body['ip_addresses'] = [v for v in ip_addresses] - if label is not None: body['label'] = label - if list_type is not None: body['list_type'] = list_type.value - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/ip-access-lists/{ip_access_list_id}', body=body - - , headers=headers - ) - + if enabled is not None: + body["enabled"] = enabled + if ip_addresses is not None: + body["ip_addresses"] = [v for v in ip_addresses] + if label is not None: + body["label"] = label + if list_type is not None: + body["list_type"] = list_type.value + headers = { + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/ip-access-lists/{ip_access_list_id}", body=body, headers=headers) + - - class LlmProxyPartnerPoweredAccountAPI: """Determines if partner powered models are enabled or not for a specific account""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - - , * - , etag: Optional[str] = None) -> LlmProxyPartnerPoweredAccount: + def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredAccount: """Get the enable partner powered AI features account setting. - + Gets the enable partner powered AI features account setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredAccount` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default", + query=query, + headers=headers, + ) return LlmProxyPartnerPoweredAccount.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str - ) -> LlmProxyPartnerPoweredAccount: + def update( + self, allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str + ) -> LlmProxyPartnerPoweredAccount: """Update the enable partner powered AI features account setting. - + Updates the enable partner powered AI features account setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredAccount` @@ -8206,81 +8810,78 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`LlmProxyPartnerPoweredAccount` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered/names/default", + body=body, + headers=headers, + ) return LlmProxyPartnerPoweredAccount.from_dict(res) - - + class LlmProxyPartnerPoweredEnforceAPI: """Determines if the account-level partner-powered setting value is enforced upon the workspace-level partner-powered setting""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - - , * - , etag: Optional[str] = None) -> LlmProxyPartnerPoweredEnforce: + def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredEnforce: """Get the enforcement status of partner powered AI features account setting. - + Gets the enforcement status of partner powered AI features account setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default", + query=query, + headers=headers, + ) return LlmProxyPartnerPoweredEnforce.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str - ) -> LlmProxyPartnerPoweredEnforce: + def update( + self, allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str + ) -> LlmProxyPartnerPoweredEnforce: """Update the enforcement status of partner powered AI features account setting. - + Updates the enable enforcement status of partner powered AI features account setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredEnforce` @@ -8290,112 +8891,101 @@ def update(self `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/llm_proxy_partner_powered_enforce/names/default", + body=body, + headers=headers, + ) return LlmProxyPartnerPoweredEnforce.from_dict(res) - - + class LlmProxyPartnerPoweredWorkspaceAPI: """Determines if partner powered models are enabled or not for a specific workspace""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse: """Delete the enable partner powered AI features workspace setting. - + Reverts the enable partner powered AI features workspace setting to its default value. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteLlmProxyPartnerPoweredWorkspaceResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/llm_proxy_partner_powered/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", "/api/2.0/settings/types/llm_proxy_partner_powered/names/default", query=query, headers=headers + ) return DeleteLlmProxyPartnerPoweredWorkspaceResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> LlmProxyPartnerPoweredWorkspace: + def get(self, *, etag: Optional[str] = None) -> LlmProxyPartnerPoweredWorkspace: """Get the enable partner powered AI features workspace setting. - + Gets the enable partner powered AI features workspace setting. - + :param etag: str (optional) etag used for versioning. 
The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredWorkspace` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/llm_proxy_partner_powered/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/llm_proxy_partner_powered/names/default", query=query, headers=headers + ) return LlmProxyPartnerPoweredWorkspace.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str - ) -> LlmProxyPartnerPoweredWorkspace: + def update( + self, allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str + ) -> LlmProxyPartnerPoweredWorkspace: """Update the enable partner powered AI features workspace setting. - + Updates the enable partner powered AI features workspace setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredWorkspace` @@ -8405,311 +8995,296 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredWorkspace` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/llm_proxy_partner_powered/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/llm_proxy_partner_powered/names/default", body=body, headers=headers + ) return LlmProxyPartnerPoweredWorkspace.from_dict(res) - - + class NetworkConnectivityAPI: """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources. This API provides stable subnets for your workspace so that you can configure your firewalls on your Azure Storage accounts to allow access from Databricks. You can also use the API to provision private endpoints for Databricks to privately connect serverless compute resources to your Azure resources using Azure Private Link. 
See [configure serverless secure connectivity]. - - [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security""" - - def __init__(self, api_client): - self._api = api_client - - - - + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security + """ - - + def __init__(self, api_client): + self._api = api_client - def create_network_connectivity_configuration(self - , network_connectivity_config: CreateNetworkConnectivityConfiguration - ) -> NetworkConnectivityConfiguration: + def create_network_connectivity_configuration( + self, network_connectivity_config: CreateNetworkConnectivityConfiguration + ) -> NetworkConnectivityConfiguration: """Create a network connectivity configuration. - + Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when accessing your Azure Storage accounts. You can also use a network connectivity configuration to create Databricks managed private endpoints so that Databricks serverless compute resources privately access your resources. - + **IMPORTANT**: After you create the network connectivity configuration, you must assign one or more workspaces to the new network connectivity configuration. You can share one network connectivity configuration with multiple workspaces from the same Azure region within the same Databricks account. See [configure serverless secure connectivity]. - + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security - + :param network_connectivity_config: :class:`CreateNetworkConnectivityConfiguration` Properties of the new network connectivity configuration. - + :returns: :class:`NetworkConnectivityConfiguration` """ body = network_connectivity_config.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs", body=body, headers=headers + ) return NetworkConnectivityConfiguration.from_dict(res) - - - - - def create_private_endpoint_rule(self - , network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule - ) -> NccPrivateEndpointRule: + def create_private_endpoint_rule( + self, network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule + ) -> NccPrivateEndpointRule: """Create a private endpoint rule. - + Create a private endpoint rule for the specified network connectivity config object. Once the object is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure resource. - + **IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to complete the connection. To get the information of the private endpoint created, make a `GET` request on the new private endpoint rule. See [serverless private link]. - + [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. 
:param private_endpoint_rule: :class:`CreatePrivateEndpointRule` Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization. - + :returns: :class:`NccPrivateEndpointRule` """ body = private_endpoint_rule.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules", + body=body, + headers=headers, + ) return NccPrivateEndpointRule.from_dict(res) - - - - - def delete_network_connectivity_configuration(self - , network_connectivity_config_id: str - ): + def delete_network_connectivity_configuration(self, network_connectivity_config_id: str): """Delete a network connectivity configuration. - + Deletes a network connectivity configuration. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def delete_private_endpoint_rule(self - , network_connectivity_config_id: str, private_endpoint_rule_id: str - ) -> NccPrivateEndpointRule: + self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}", + headers=headers, + ) + + def delete_private_endpoint_rule( + self, network_connectivity_config_id: str, private_endpoint_rule_id: str + ) -> NccPrivateEndpointRule: """Delete a private endpoint rule. - + Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is set to `true` and the private endpoint is not available to your serverless compute resources. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule_id: str Your private endpoint rule ID.
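Taken together, the two create calls chain as follows. A sketch: the request dataclasses come from this module, `a.network_connectivity` is the SDK's usual accessor, and the Azure resource ID and group ID are illustrative.

from databricks.sdk import AccountClient
from databricks.sdk.service.settings import (
    CreateNetworkConnectivityConfiguration,
    CreatePrivateEndpointRule,
)

a = AccountClient()

# 1. Create the NCC for a region; workspaces are assigned to it separately.
ncc = a.network_connectivity.create_network_connectivity_configuration(
    network_connectivity_config=CreateNetworkConnectivityConfiguration(
        name="ncc-westeurope",
        region="westeurope",
    )
)

# 2. Provision a private endpoint to a storage account; it must still be
#    approved manually in the Azure portal before traffic flows.
rule = a.network_connectivity.create_private_endpoint_rule(
    network_connectivity_config_id=ncc.network_connectivity_config_id,
    private_endpoint_rule=CreatePrivateEndpointRule(
        resource_id="/subscriptions/<sub-id>/resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/example",
        group_id="blob",  # illustrative value
    ),
)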
- + :returns: :class:`NccPrivateEndpointRule` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}' - - , headers=headers - ) - return NccPrivateEndpointRule.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}", + headers=headers, + ) + return NccPrivateEndpointRule.from_dict(res) - def get_network_connectivity_configuration(self - , network_connectivity_config_id: str - ) -> NetworkConnectivityConfiguration: + def get_network_connectivity_configuration( + self, network_connectivity_config_id: str + ) -> NetworkConnectivityConfiguration: """Get a network connectivity configuration. - + Gets a network connectivity configuration. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. - + :returns: :class:`NetworkConnectivityConfiguration` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}' - - , headers=headers - ) - return NetworkConnectivityConfiguration.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}", + headers=headers, + ) + return NetworkConnectivityConfiguration.from_dict(res) - def get_private_endpoint_rule(self - , network_connectivity_config_id: str, private_endpoint_rule_id: str - ) -> NccPrivateEndpointRule: + def get_private_endpoint_rule( + self, network_connectivity_config_id: str, private_endpoint_rule_id: str + ) -> NccPrivateEndpointRule: """Gets a private endpoint rule. - + Gets the private endpoint rule. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule_id: str Your private endpoint rule ID. - + :returns: :class:`NccPrivateEndpointRule` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}' - - , headers=headers - ) - return NccPrivateEndpointRule.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list_network_connectivity_configurations(self - - , * - , page_token: Optional[str] = None) -> Iterator[NetworkConnectivityConfiguration]: + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}", + headers=headers, + ) + return NccPrivateEndpointRule.from_dict(res) + + def list_network_connectivity_configurations( + self, *, page_token: Optional[str] = None + ) -> Iterator[NetworkConnectivityConfiguration]: """List network connectivity configurations. - + Gets an array of network connectivity configurations. - + :param page_token: str (optional) Pagination token to go to next page based on previous query.
- + :returns: Iterator over :class:`NetworkConnectivityConfiguration` """ - - query = {} - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs', query=query - - , headers=headers - ) - if 'items' in json: - for v in json['items']: - yield NetworkConnectivityConfiguration.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def list_private_endpoint_rules(self - , network_connectivity_config_id: str - , * - , page_token: Optional[str] = None) -> Iterator[NccPrivateEndpointRule]: + while True: + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs", + query=query, + headers=headers, + ) + if "items" in json: + for v in json["items"]: + yield NetworkConnectivityConfiguration.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_private_endpoint_rules( + self, network_connectivity_config_id: str, *, page_token: Optional[str] = None + ) -> Iterator[NccPrivateEndpointRule]: """List private endpoint rules. - + Gets an array of private endpoint rules. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`NccPrivateEndpointRule` """ - - query = {} - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules', query=query - - , headers=headers - ) - if 'items' in json: - for v in json['items']: - yield NccPrivateEndpointRule.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update_private_endpoint_rule(self - , network_connectivity_config_id: str, private_endpoint_rule_id: str, private_endpoint_rule: UpdatePrivateEndpointRule, update_mask: str - ) -> NccPrivateEndpointRule: + while True: + json = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules", + query=query, + headers=headers, + ) + if "items" in json: + for v in json["items"]: + yield NccPrivateEndpointRule.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_private_endpoint_rule( + self, + network_connectivity_config_id: str, + private_endpoint_rule_id: str, + private_endpoint_rule: UpdatePrivateEndpointRule, + update_mask: str, + ) -> NccPrivateEndpointRule: """Update a private endpoint rule. - + Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed resources is allowed to be updated.
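Note that both list methods hide pagination behind generators: the `while True` loops above follow `next_page_token` until it is absent, so callers simply iterate. For example:

from databricks.sdk import AccountClient

a = AccountClient()

# Pages are fetched lazily; each yielded item is a typed dataclass.
for ncc in a.network_connectivity.list_network_connectivity_configurations():
    for rule in a.network_connectivity.list_private_endpoint_rules(
        network_connectivity_config_id=ncc.network_connectivity_config_id
    ):
        print(ncc.name, rule.rule_id)  # field names per this module's dataclasses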
- + :param network_connectivity_config_id: str The ID of a network connectivity configuration, which is the parent resource of this private endpoint rule object. @@ -8724,22 +9299,28 @@ def update_private_endpoint_rule(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + :returns: :class:`NccPrivateEndpointRule` """ body = private_endpoint_rule.as_dict() query = {} - if update_mask is not None: query['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}', query=query, body=body - - , headers=headers - ) + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/network-connectivity-configs/{network_connectivity_config_id}/private-endpoint-rules/{private_endpoint_rule_id}", + query=query, + body=body, + headers=headers, + ) return NccPrivateEndpointRule.from_dict(res) - - + class NetworkPoliciesAPI: """These APIs manage network policies for this account. Network policies control which network destinations can be accessed from the Databricks environment. Each Databricks account includes a default policy named @@ -8747,411 +9328,333 @@ class NetworkPoliciesAPI: assignment, and is automatically associated with each newly created workspace. 'default-policy' is reserved and cannot be deleted, but it can be updated to customize the default network access rules for your account.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_network_policy_rpc(self - , network_policy: AccountNetworkPolicy - ) -> AccountNetworkPolicy: + def create_network_policy_rpc(self, network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy: """Create a network policy. - + Creates a new network policy to manage which network destinations can be accessed from the Databricks environment. - + :param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` """ body = network_policy.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/accounts/{self._api.account_id}/network-policies', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/accounts/{self._api.account_id}/network-policies", body=body, headers=headers + ) return AccountNetworkPolicy.from_dict(res) - - - - - def delete_network_policy_rpc(self - , network_policy_id: str - ): + def delete_network_policy_rpc(self, network_policy_id: str): """Delete a network policy. - + Deletes a network policy. Cannot be called on 'default-policy'. - + :param network_policy_id: str The unique identifier of the network policy to delete. 
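Since 'default-policy' is reserved and always present, a read-only sketch needs no setup (the `a.network_policies` accessor name is an assumption; the `_rpc` method names are as defined above):

from databricks.sdk import AccountClient

a = AccountClient()

# Every account ships with a reserved default policy.
default = a.network_policies.get_network_policy_rpc(network_policy_id="default-policy")

for policy in a.network_policies.list_network_policies_rpc():
    print(policy.network_policy_id)  # assumed field name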
- - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}", headers=headers + ) - def get_network_policy_rpc(self - , network_policy_id: str - ) -> AccountNetworkPolicy: + def get_network_policy_rpc(self, network_policy_id: str) -> AccountNetworkPolicy: """Get a network policy. - + Gets a network policy. - + :param network_policy_id: str The unique identifier of the network policy to retrieve. - + :returns: :class:`AccountNetworkPolicy` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}' - - , headers=headers - ) - return AccountNetworkPolicy.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}", headers=headers + ) + return AccountNetworkPolicy.from_dict(res) - def list_network_policies_rpc(self - - , * - , page_token: Optional[str] = None) -> Iterator[AccountNetworkPolicy]: + def list_network_policies_rpc(self, *, page_token: Optional[str] = None) -> Iterator[AccountNetworkPolicy]: """List network policies. - + Gets an array of network policies. - + :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`AccountNetworkPolicy` """ - - query = {} - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/network-policies', query=query - - , headers=headers - ) - if 'items' in json: - for v in json['items']: - yield AccountNetworkPolicy.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update_network_policy_rpc(self - , network_policy_id: str, network_policy: AccountNetworkPolicy - ) -> AccountNetworkPolicy: + while True: + json = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/network-policies", query=query, headers=headers + ) + if "items" in json: + for v in json["items"]: + yield AccountNetworkPolicy.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_network_policy_rpc( + self, network_policy_id: str, network_policy: AccountNetworkPolicy + ) -> AccountNetworkPolicy: """Update a network policy. - + Updates a network policy. This allows you to modify the configuration of a network policy. - + :param network_policy_id: str The unique identifier for the network policy. 
:param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` """ body = network_policy.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/network-policies/{network_policy_id}", + body=body, + headers=headers, + ) return AccountNetworkPolicy.from_dict(res) - - + class NotificationDestinationsAPI: """The notification destinations API lets you programmatically manage a workspace's notification destinations. Notification destinations are used to send notifications for query alerts and jobs to destinations outside of Databricks. Only workspace admins can create, update, and delete notification destinations.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , config: Optional[Config] = None, display_name: Optional[str] = None) -> NotificationDestination: + def create(self, *, config: Optional[Config] = None, display_name: Optional[str] = None) -> NotificationDestination: """Create a notification destination. - + Creates a notification destination. Requires workspace admin permissions. - + :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. - + :returns: :class:`NotificationDestination` """ body = {} - if config is not None: body['config'] = config.as_dict() - if display_name is not None: body['display_name'] = display_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/notification-destinations', body=body - - , headers=headers - ) - return NotificationDestination.from_dict(res) + if config is not None: + body["config"] = config.as_dict() + if display_name is not None: + body["display_name"] = display_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/notification-destinations", body=body, headers=headers) + return NotificationDestination.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a notification destination. - + Deletes a notification destination. Requires workspace admin permissions. - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/notification-destinations/{id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/notification-destinations/{id}", headers=headers) - def get(self - , id: str - ) -> NotificationDestination: + def get(self, id: str) -> NotificationDestination: """Get a notification destination. - + Gets a notification destination. 
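For context, creating and deleting a destination with the email wrapper. A sketch: `Config` and `EmailConfig` are this module's dataclasses and `w.notification_destinations` is the SDK's usual accessor, though the exact `EmailConfig` field name is an assumption.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import Config, EmailConfig

w = WorkspaceClient()

# `config` must wrap exactly one nested config (email, Slack, webhook, ...).
dest = w.notification_destinations.create(
    display_name="oncall-email",
    config=Config(email=EmailConfig(addresses=["oncall@example.com"])),  # assumed field
)

w.notification_destinations.delete(id=dest.id)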
- + :param id: str - + :returns: :class:`NotificationDestination` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/notification-destinations/{id}' - - , headers=headers - ) - return NotificationDestination.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListNotificationDestinationsResult]: + res = self._api.do("GET", f"/api/2.0/notification-destinations/{id}", headers=headers) + return NotificationDestination.from_dict(res) + + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ListNotificationDestinationsResult]: """List notification destinations. - + Lists notification destinations. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListNotificationDestinationsResult` """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/notification-destinations', query=query - - , headers=headers - ) - if 'results' in json: - for v in json['results']: - yield ListNotificationDestinationsResult.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , id: str - , * - , config: Optional[Config] = None, display_name: Optional[str] = None) -> NotificationDestination: + while True: + json = self._api.do("GET", "/api/2.0/notification-destinations", query=query, headers=headers) + if "results" in json: + for v in json["results"]: + yield ListNotificationDestinationsResult.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, id: str, *, config: Optional[Config] = None, display_name: Optional[str] = None + ) -> NotificationDestination: """Update a notification destination. - + Updates a notification destination. Requires workspace admin permissions. At least one field is required in the request body. - + :param id: str UUID identifying notification destination. :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. 
- + :returns: :class:`NotificationDestination` """ body = {} - if config is not None: body['config'] = config.as_dict() - if display_name is not None: body['display_name'] = display_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/notification-destinations/{id}', body=body - - , headers=headers - ) + if config is not None: + body["config"] = config.as_dict() + if display_name is not None: + body["display_name"] = display_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/notification-destinations/{id}", body=body, headers=headers) return NotificationDestination.from_dict(res) - - + class PersonalComputeAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can change the setting to instead let individual workspaces configure access control (DELEGATE). - + There is only one instance of this setting per account. Since this setting has a default value, this setting is present on all accounts even though it's never set on a given account. Deletion reverts the value of the setting back to the default value.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeletePersonalComputeSettingResponse: + def delete(self, *, etag: Optional[str] = None) -> DeletePersonalComputeSettingResponse: """Delete Personal Compute setting. - + Reverts back the Personal Compute setting value to default (ON) - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeletePersonalComputeSettingResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE',f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default", + query=query, + headers=headers, + ) return DeletePersonalComputeSettingResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> PersonalComputeSetting: + def get(self, *, etag: Optional[str] = None) -> PersonalComputeSetting: """Get Personal Compute setting. - + Gets the value of the Personal Compute setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`PersonalComputeSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default", + query=query, + headers=headers, + ) return PersonalComputeSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: PersonalComputeSetting, field_mask: str - ) -> PersonalComputeSetting: + def update(self, allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting: """Update Personal Compute setting. - + Updates the value of the Personal Compute setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`PersonalComputeSetting` @@ -9161,27 +9664,34 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`PersonalComputeSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/accounts/{self._api.account_id}/settings/types/dcp_acct_enable/names/default", + body=body, + headers=headers, + ) return PersonalComputeSetting.from_dict(res) - - + class RestrictWorkspaceAdminsAPI: """The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With the setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on @@ -9192,95 +9702,77 @@ class RestrictWorkspaceAdminsAPI: service principals they have the Service Principal User role on. They can also only change a job owner to themselves. 
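The read -> delete etag pattern these docstrings recommend, sketched against the account-level client (account ID and credentials assumed to come from the environment):

from databricks.sdk import AccountClient

a = AccountClient()

# GET first to obtain a fresh etag, then pass it to DELETE so a concurrent
# writer surfaces as a 409 instead of a silent overwrite.
current = a.settings.personal_compute.get()
a.settings.personal_compute.delete(etag=current.etag)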
And they can change the job run_as setting to themselves or to a service principal on which they have the Service Principal User role.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteRestrictWorkspaceAdminsSettingResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteRestrictWorkspaceAdminsSettingResponse: """Delete the restrict workspace admins setting. - + Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/restrict_workspace_admins/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", "/api/2.0/settings/types/restrict_workspace_admins/names/default", query=query, headers=headers + ) return DeleteRestrictWorkspaceAdminsSettingResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> RestrictWorkspaceAdminsSetting: + def get(self, *, etag: Optional[str] = None) -> RestrictWorkspaceAdminsSetting: """Get the restrict workspace admins setting. - + Gets the restrict workspace admins setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. 
- + :returns: :class:`RestrictWorkspaceAdminsSetting` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/restrict_workspace_admins/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/restrict_workspace_admins/names/default", query=query, headers=headers + ) return RestrictWorkspaceAdminsSetting.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str - ) -> RestrictWorkspaceAdminsSetting: + def update( + self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str + ) -> RestrictWorkspaceAdminsSetting: """Update the restrict workspace admins setting. - + Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`RestrictWorkspaceAdminsSetting` @@ -9290,33 +9782,37 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
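An update sketch that follows the field-mask advice above, naming the one field being changed instead of using `*` (message and enum names as generated in this module):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import (
    RestrictWorkspaceAdminsMessage,
    RestrictWorkspaceAdminsMessageStatus,
    RestrictWorkspaceAdminsSetting,
)

w = WorkspaceClient()

# Fetch the current setting to carry its fresh etag into the PATCH.
current = w.settings.restrict_workspace_admins.get()
updated = w.settings.restrict_workspace_admins.update(
    allow_missing=True,
    setting=RestrictWorkspaceAdminsSetting(
        restrict_workspace_admins=RestrictWorkspaceAdminsMessage(
            status=RestrictWorkspaceAdminsMessageStatus.RESTRICT_TOKENS_AND_JOB_RUN_AS
        ),
        etag=current.etag,
    ),
    field_mask="restrict_workspace_admins.status",
)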
- + :returns: :class:`RestrictWorkspaceAdminsSetting` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/restrict_workspace_admins/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/restrict_workspace_admins/names/default", body=body, headers=headers + ) return RestrictWorkspaceAdminsSetting.from_dict(res) - - + class SettingsAPI: """Workspace Settings API allows users to manage settings at the workspace level.""" - + def __init__(self, api_client): self._api = api_client - + self._aibi_dashboard_embedding_access_policy = AibiDashboardEmbeddingAccessPolicyAPI(self._api) self._aibi_dashboard_embedding_approved_domains = AibiDashboardEmbeddingApprovedDomainsAPI(self._api) self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api) @@ -9333,172 +9829,148 @@ def __init__(self, api_client): self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api) self._sql_results_download = SqlResultsDownloadAPI(self._api) - @property def aibi_dashboard_embedding_access_policy(self) -> AibiDashboardEmbeddingAccessPolicyAPI: """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level.""" return self._aibi_dashboard_embedding_access_policy - + @property def aibi_dashboard_embedding_approved_domains(self) -> AibiDashboardEmbeddingApprovedDomainsAPI: """Controls the list of domains approved to host the embedded AI/BI dashboards.""" return self._aibi_dashboard_embedding_approved_domains - + @property def automatic_cluster_update(self) -> AutomaticClusterUpdateAPI: """Controls whether automatic cluster update is enabled for the current workspace.""" return self._automatic_cluster_update - + @property def compliance_security_profile(self) -> ComplianceSecurityProfileAPI: """Controls whether to enable the compliance security profile for the current workspace.""" return self._compliance_security_profile - + @property def dashboard_email_subscriptions(self) -> DashboardEmailSubscriptionsAPI: """Controls whether schedules or workload tasks for refreshing AI/BI Dashboards in the workspace can send subscription emails containing PDFs and/or images of the dashboard.""" return self._dashboard_email_subscriptions - + @property def default_namespace(self) -> DefaultNamespaceAPI: """The default namespace setting API allows users to configure the default namespace for a Databricks workspace.""" return self._default_namespace - + @property def disable_legacy_access(self) -> DisableLegacyAccessAPI: """'Disabling legacy access' has the following impacts: 1.""" return self._disable_legacy_access - + @property def disable_legacy_dbfs(self) -> DisableLegacyDbfsAPI: """Disabling legacy DBFS has the following implications: 1.""" return self._disable_legacy_dbfs - + @property def enable_export_notebook(self) -> EnableExportNotebookAPI: """Controls whether users can export notebooks and files from the Workspace UI.""" 
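`SettingsAPI` itself is only a namespace: each property hands back a dedicated sub-API client that shares the same underlying `api_client`. A small sketch of reaching two of them:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Each property returns a per-setting client; the reads below hit the
# corresponding /api/2.0/settings/types/... endpoints.
profile = w.settings.compliance_security_profile.get()
downloads = w.settings.sql_results_download.get()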
return self._enable_export_notebook - + @property def enable_notebook_table_clipboard(self) -> EnableNotebookTableClipboardAPI: """Controls whether users can copy tabular data to the clipboard via the UI.""" return self._enable_notebook_table_clipboard - + @property def enable_results_downloading(self) -> EnableResultsDownloadingAPI: """Controls whether users can download notebook results.""" return self._enable_results_downloading - + @property def enhanced_security_monitoring(self) -> EnhancedSecurityMonitoringAPI: """Controls whether enhanced security monitoring is enabled for the current workspace.""" return self._enhanced_security_monitoring - + @property def llm_proxy_partner_powered_workspace(self) -> LlmProxyPartnerPoweredWorkspaceAPI: """Determines if partner powered models are enabled or not for a specific workspace.""" return self._llm_proxy_partner_powered_workspace - + @property def restrict_workspace_admins(self) -> RestrictWorkspaceAdminsAPI: """The Restrict Workspace Admins setting lets you control the capabilities of workspace admins.""" return self._restrict_workspace_admins - + @property def sql_results_download(self) -> SqlResultsDownloadAPI: """Controls whether users within the workspace are allowed to download results from the SQL Editor and AI/BI Dashboards UIs.""" return self._sql_results_download - - - class SqlResultsDownloadAPI: """Controls whether users within the workspace are allowed to download results from the SQL Editor and AI/BI Dashboards UIs. By default, this setting is enabled (set to `true`)""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - - , * - , etag: Optional[str] = None) -> DeleteSqlResultsDownloadResponse: + def delete(self, *, etag: Optional[str] = None) -> DeleteSqlResultsDownloadResponse: """Delete the SQL Results Download setting. - + Reverts the SQL Results Download setting to its default value. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteSqlResultsDownloadResponse` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE','/api/2.0/settings/types/sql_results_download/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", "/api/2.0/settings/types/sql_results_download/names/default", query=query, headers=headers + ) return DeleteSqlResultsDownloadResponse.from_dict(res) - - - - - def get(self - - , * - , etag: Optional[str] = None) -> SqlResultsDownload: + def get(self, *, etag: Optional[str] = None) -> SqlResultsDownload: """Get the SQL Results Download setting. - + Gets the SQL Results Download setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. 
It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`SqlResultsDownload` """ - + query = {} - if etag is not None: query['etag'] = etag - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/settings/types/sql_results_download/names/default', query=query - - , headers=headers - ) + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/sql_results_download/names/default", query=query, headers=headers + ) return SqlResultsDownload.from_dict(res) - - - - - def update(self - , allow_missing: bool, setting: SqlResultsDownload, field_mask: str - ) -> SqlResultsDownload: + def update(self, allow_missing: bool, setting: SqlResultsDownload, field_mask: str) -> SqlResultsDownload: """Update the SQL Results Download setting. - + Updates the SQL Results Download setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`SqlResultsDownload` @@ -9508,471 +9980,378 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`SqlResultsDownload` """ body = {} - if allow_missing is not None: body['allow_missing'] = allow_missing - if field_mask is not None: body['field_mask'] = field_mask - if setting is not None: body['setting'] = setting.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/settings/types/sql_results_download/names/default', body=body - - , headers=headers - ) + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/sql_results_download/names/default", body=body, headers=headers + ) return SqlResultsDownload.from_dict(res) - - + class TokenManagementAPI: """Enables administrators to get all tokens and delete tokens for other users. Admins can either get every token, get a specific token by ID, or get all tokens for a particular user.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_obo_token(self - , application_id: str - , * - , comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateOboTokenResponse: + def create_obo_token( + self, application_id: str, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None + ) -> CreateOboTokenResponse: """Create on-behalf token. - + Creates a token on behalf of a service principal. - + :param application_id: str Application ID of the service principal. :param comment: str (optional) Comment that describes the purpose of the token. 
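A sketch of minting an on-behalf-of token with the `create_obo_token` method above; the service principal application ID is a placeholder UUID:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The token secret is only returned once, at creation time, in token_value.
obo = w.token_management.create_obo_token(
    application_id="00000000-0000-0000-0000-000000000000",  # placeholder
    comment="CI pipeline token",
    lifetime_seconds=3600,
)
print(obo.token_info.token_id)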
:param lifetime_seconds: int (optional) The number of seconds before the token expires. - + :returns: :class:`CreateOboTokenResponse` """ body = {} - if application_id is not None: body['application_id'] = application_id - if comment is not None: body['comment'] = comment - if lifetime_seconds is not None: body['lifetime_seconds'] = lifetime_seconds - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/token-management/on-behalf-of/tokens', body=body - - , headers=headers - ) + if application_id is not None: + body["application_id"] = application_id + if comment is not None: + body["comment"] = comment + if lifetime_seconds is not None: + body["lifetime_seconds"] = lifetime_seconds + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/token-management/on-behalf-of/tokens", body=body, headers=headers) return CreateOboTokenResponse.from_dict(res) - - - - - def delete(self - , token_id: str - ): + def delete(self, token_id: str): """Delete a token. - + Deletes a token, specified by its ID. - + :param token_id: str The ID of the token to revoke. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.0/token-management/tokens/{token_id}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.0/token-management/tokens/{token_id}", headers=headers) - def get(self - , token_id: str - ) -> GetTokenResponse: + def get(self, token_id: str) -> GetTokenResponse: """Get token info. - + Gets information about a token, specified by its ID. - + :param token_id: str The ID of the token to get. - + :returns: :class:`GetTokenResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/token-management/tokens/{token_id}' - - , headers=headers - ) - return GetTokenResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/token-management/tokens/{token_id}", headers=headers) + return GetTokenResponse.from_dict(res) def get_permission_levels(self) -> GetTokenPermissionLevelsResponse: """Get token permission levels. - + Gets the permission levels that a user can have on an object. - + :returns: :class:`GetTokenPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/permissions/authorization/tokens/permissionLevels' - , headers=headers - ) - return GetTokenPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/permissions/authorization/tokens/permissionLevels", headers=headers) + return GetTokenPermissionLevelsResponse.from_dict(res) def get_permissions(self) -> TokenPermissions: """Get token permissions. - + Gets the permissions of all tokens. Tokens can inherit permissions from their root object. 
- + :returns: :class:`TokenPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/permissions/authorization/tokens' - , headers=headers - ) - return TokenPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , created_by_id: Optional[int] = None, created_by_username: Optional[str] = None) -> Iterator[TokenInfo]: + res = self._api.do("GET", "/api/2.0/permissions/authorization/tokens", headers=headers) + return TokenPermissions.from_dict(res) + + def list( + self, *, created_by_id: Optional[int] = None, created_by_username: Optional[str] = None + ) -> Iterator[TokenInfo]: """List all tokens. - + Lists all tokens associated with the specified workspace or user. - + :param created_by_id: int (optional) User ID of the user that created the token. :param created_by_username: str (optional) Username of the user that created the token. - + :returns: Iterator over :class:`TokenInfo` """ - + query = {} - if created_by_id is not None: query['created_by_id'] = created_by_id - if created_by_username is not None: query['created_by_username'] = created_by_username - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/token-management/tokens', query=query - - , headers=headers - ) + if created_by_id is not None: + query["created_by_id"] = created_by_id + if created_by_username is not None: + query["created_by_username"] = created_by_username + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/token-management/tokens", query=query, headers=headers) parsed = ListTokensResponse.from_dict(json).token_infos return parsed if parsed is not None else [] - - - - - - def set_permissions(self - - , * - , access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions: + def set_permissions( + self, *, access_control_list: Optional[List[TokenAccessControlRequest]] = None + ) -> TokenPermissions: """Set token permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT','/api/2.0/permissions/authorization/tokens', body=body - - , headers=headers - ) - return TokenPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", "/api/2.0/permissions/authorization/tokens", body=body, headers=headers) + return TokenPermissions.from_dict(res) - def update_permissions(self - - , * - , access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions: + def update_permissions( + self, *, access_control_list: Optional[List[TokenAccessControlRequest]] = None + ) -> TokenPermissions: """Update token permissions. - + Updates the permissions on all tokens. Tokens can inherit permissions from their root object. 
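`set_permissions` (PUT) replaces the whole token ACL while `update_permissions` (PATCH) merges into it. A merge sketch granting a group CAN_USE; the group name is hypothetical:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import TokenAccessControlRequest, TokenPermissionLevel

w = WorkspaceClient()

# PATCH merges this entry into the existing token ACL instead of replacing it.
perms = w.token_management.update_permissions(
    access_control_list=[
        TokenAccessControlRequest(
            group_name="data-engineers",  # hypothetical group
            permission_level=TokenPermissionLevel.CAN_USE,
        )
    ]
)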
- + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH','/api/2.0/permissions/authorization/tokens', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", "/api/2.0/permissions/authorization/tokens", body=body, headers=headers) return TokenPermissions.from_dict(res) - - + class TokensAPI: """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateTokenResponse: + def create(self, *, comment: Optional[str] = None, lifetime_seconds: Optional[int] = None) -> CreateTokenResponse: """Create a user token. - + Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns an error **QUOTA_EXCEEDED**. - + :param comment: str (optional) Optional description to attach to the token. :param lifetime_seconds: int (optional) The lifetime of the token, in seconds. - + If the lifetime is not specified, this token remains valid indefinitely. - + :returns: :class:`CreateTokenResponse` """ body = {} - if comment is not None: body['comment'] = comment - if lifetime_seconds is not None: body['lifetime_seconds'] = lifetime_seconds - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/token/create', body=body - - , headers=headers - ) - return CreateTokenResponse.from_dict(res) + if comment is not None: + body["comment"] = comment + if lifetime_seconds is not None: + body["lifetime_seconds"] = lifetime_seconds + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/token/create", body=body, headers=headers) + return CreateTokenResponse.from_dict(res) - def delete(self - , token_id: str - ): + def delete(self, token_id: str): """Revoke token. - + Revokes an access token. - + If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. - + :param token_id: str The ID of the token to be revoked. - - + + """ body = {} - if token_id is not None: body['token_id'] = token_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/token/delete', body=body - - , headers=headers - ) - + if token_id is not None: + body["token_id"] = token_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/token/delete", body=body, headers=headers) def list(self) -> Iterator[PublicTokenInfo]: """List tokens. - + Lists all the valid tokens for a user-workspace pair. 
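The user-token lifecycle from the `TokensAPI` above, sketched end to end:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Create a short-lived PAT; the secret lives in token_value and is shown once.
created = w.tokens.create(comment="temporary token", lifetime_seconds=3600)

# Revoke it by ID when done; an unknown ID returns RESOURCE_DOES_NOT_EXIST.
w.tokens.delete(token_id=created.token_info.token_id)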
- + :returns: Iterator over :class:`PublicTokenInfo` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/token/list' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/token/list", headers=headers) parsed = ListPublicTokensResponse.from_dict(json).token_infos return parsed if parsed is not None else [] - - - + class WorkspaceConfAPI: """This API allows updating known workspace settings for advanced users.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get_status(self - , keys: str - ) -> WorkspaceConf: + def get_status(self, keys: str) -> WorkspaceConf: """Check configuration status. - + Gets the configuration status for a workspace. - + :param keys: str - + :returns: Dict[str,str] """ - + query = {} - if keys is not None: query['keys'] = keys - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/workspace-conf', query=query - - , headers=headers - ) - return res + if keys is not None: + query["keys"] = keys + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/workspace-conf", query=query, headers=headers) + return res - def set_status(self, contents: Dict[str,str] - - ): + def set_status(self, contents: Dict[str, str]): """Enable/disable features. - + Sets the configuration status for a workspace, including enabling or disabling it. - - - + + + """ - - headers = {'Content-Type': 'application/json',} - - self._api.do('PATCH','/api/2.0/workspace-conf', body=contents - - , headers=headers - ) - - - + headers = { + "Content-Type": "application/json", + } + + self._api.do("PATCH", "/api/2.0/workspace-conf", body=contents, headers=headers) + + class WorkspaceNetworkConfigurationAPI: """These APIs allow configuration of network settings for Databricks workspaces by selecting which network policy to associate with the workspace. Each workspace is always associated with exactly one network policy that controls which network destinations can be accessed from the Databricks environment. By default, workspaces are associated with the 'default-policy' network policy. You cannot create or delete a workspace's network option, only update it to associate the workspace with a different policy""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get_workspace_network_option_rpc(self - , workspace_id: int - ) -> WorkspaceNetworkOption: + def get_workspace_network_option_rpc(self, workspace_id: int) -> WorkspaceNetworkOption: """Get workspace network option. - + Gets the network option for a workspace. Every workspace has exactly one network policy binding, with 'default-policy' used if no explicit assignment exists. - + :param workspace_id: int The workspace ID. 
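`WorkspaceConfAPI` works on raw string key/value pairs rather than typed setting objects. A sketch with one well-known configuration key (the key name is an assumption, not defined in this patch):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Keys and values are plain strings, including booleans.
w.workspace_conf.set_status({"enableIpAccessLists": "true"})
status = w.workspace_conf.get_status(keys="enableIpAccessLists")
print(status)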
- + :returns: :class:`WorkspaceNetworkOption` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network' - - , headers=headers - ) - return WorkspaceNetworkOption.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network", headers=headers + ) + return WorkspaceNetworkOption.from_dict(res) - def update_workspace_network_option_rpc(self - , workspace_id: int, workspace_network_option: WorkspaceNetworkOption - ) -> WorkspaceNetworkOption: + def update_workspace_network_option_rpc( + self, workspace_id: int, workspace_network_option: WorkspaceNetworkOption + ) -> WorkspaceNetworkOption: """Update workspace network option. - + Updates the network option for a workspace. This operation associates the workspace with the specified network policy. To revert to the default policy, specify 'default-policy' as the network_policy_id. - + :param workspace_id: int The workspace ID. :param workspace_network_option: :class:`WorkspaceNetworkOption` - + :returns: :class:`WorkspaceNetworkOption` """ body = workspace_network_option.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PUT", + f"/api/2.0/accounts/{self._api.account_id}/workspaces/{workspace_id}/network", + body=body, + headers=headers, + ) return WorkspaceNetworkOption.from_dict(res) - - - \ No newline at end of file diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index eb2e9a734..f72682702 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -1,218 +1,245 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading +from typing import Any, Dict, Iterator, List, Optional -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum -_LOG = logging.getLogger('databricks.sdk') +_LOG = logging.getLogger("databricks.sdk") from databricks.sdk.service import catalog # all definitions in this file are in alphabetical order + class AuthenticationType(Enum): """The delta sharing authentication type.""" - - DATABRICKS = 'DATABRICKS' - OAUTH_CLIENT_CREDENTIALS = 'OAUTH_CLIENT_CREDENTIALS' - OIDC_FEDERATION = 'OIDC_FEDERATION' - TOKEN = 'TOKEN' -class ColumnTypeName(Enum): - """UC supported column types Copied from - https://src.dev.databricks.com/databricks/universe@23a85902bb58695ab9293adc9f327b0714b55e72/-/blob/managed-catalog/api/messages/table.proto?L68""" - - ARRAY = 'ARRAY' - BINARY = 'BINARY' - BOOLEAN = 'BOOLEAN' - BYTE = 'BYTE' - CHAR = 'CHAR' - DATE = 'DATE' - DECIMAL = 'DECIMAL' - DOUBLE = 'DOUBLE' - FLOAT = 'FLOAT' - INT = 'INT' - INTERVAL = 'INTERVAL' - LONG = 'LONG' - MAP = 'MAP' - NULL = 'NULL' - SHORT = 'SHORT' - STRING = 'STRING' - STRUCT = 'STRUCT' - TABLE_TYPE = 'TABLE_TYPE' - TIMESTAMP = 'TIMESTAMP' - TIMESTAMP_NTZ = 'TIMESTAMP_NTZ' - USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' - VARIANT = 'VARIANT' + DATABRICKS = "DATABRICKS" + OAUTH_CLIENT_CREDENTIALS = "OAUTH_CLIENT_CREDENTIALS" + OIDC_FEDERATION = "OIDC_FEDERATION" + TOKEN = "TOKEN" +class ColumnTypeName(Enum): + """UC supported column types Copied from + https://src.dev.databricks.com/databricks/universe@23a85902bb58695ab9293adc9f327b0714b55e72/-/blob/managed-catalog/api/messages/table.proto?L68 + """ + + ARRAY = "ARRAY" + BINARY = "BINARY" + BOOLEAN = "BOOLEAN" + BYTE = "BYTE" + CHAR = "CHAR" + DATE = "DATE" + DECIMAL = "DECIMAL" + DOUBLE = "DOUBLE" + FLOAT = "FLOAT" + INT = "INT" + INTERVAL = "INTERVAL" + LONG = "LONG" + MAP = "MAP" + NULL = "NULL" + SHORT = "SHORT" + STRING = "STRING" + STRUCT = "STRUCT" + TABLE_TYPE = "TABLE_TYPE" + TIMESTAMP = "TIMESTAMP" + TIMESTAMP_NTZ = "TIMESTAMP_NTZ" + USER_DEFINED_TYPE = "USER_DEFINED_TYPE" + VARIANT = "VARIANT" @dataclass class CreateProvider: name: str """The name of the Provider.""" - + authentication_type: AuthenticationType """The delta sharing authentication type.""" - + comment: Optional[str] = None """Description about the provider.""" - + recipient_profile_str: Optional[str] = None """This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - + def as_dict(self) -> dict: """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str + if self.authentication_type is not None: + body["authentication_type"] = self.authentication_type.value + if self.comment is not 
None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.recipient_profile_str is not None: + body["recipient_profile_str"] = self.recipient_profile_str return body def as_shallow_dict(self) -> dict: """Serializes the CreateProvider into a shallow dictionary of its immediate attributes.""" body = {} - if self.authentication_type is not None: body['authentication_type'] = self.authentication_type - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str + if self.authentication_type is not None: + body["authentication_type"] = self.authentication_type + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.recipient_profile_str is not None: + body["recipient_profile_str"] = self.recipient_profile_str return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateProvider: """Deserializes the CreateProvider from a dictionary.""" - return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), comment=d.get('comment', None), name=d.get('name', None), recipient_profile_str=d.get('recipient_profile_str', None)) - - + return cls( + authentication_type=_enum(d, "authentication_type", AuthenticationType), + comment=d.get("comment", None), + name=d.get("name", None), + recipient_profile_str=d.get("recipient_profile_str", None), + ) @dataclass class CreateRecipient: name: str """Name of Recipient.""" - + authentication_type: AuthenticationType """The delta sharing authentication type.""" - + comment: Optional[str] = None """Description about the recipient.""" - + data_recipient_global_metastore_id: Optional[str] = None """The global Unity Catalog metastore id provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token, in epoch milliseconds.""" - + ip_access_list: Optional[IpAccessList] = None """IP Access List""" - + owner: Optional[str] = None """Username of the recipient owner.""" - + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None """Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write.""" - + sharing_code: Optional[str] = None """The one-time sharing code provided by the data recipient. 
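These generated dataclasses serialize symmetrically: `as_dict()` produces the JSON request body and `from_dict()` inverts it. A round-trip sketch using the `CreateProvider` class defined above:

from databricks.sdk.service.sharing import AuthenticationType, CreateProvider

p = CreateProvider(
    name="example_provider",
    authentication_type=AuthenticationType.TOKEN,
    comment="demo",
)

# from_dict(as_dict(x)) reconstructs an equivalent instance.
assert CreateProvider.from_dict(p.as_dict()).name == p.name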
This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + def as_dict(self) -> dict: """Serializes the CreateRecipient into a dictionary suitable for use as a JSON request body.""" body = {} - if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value - if self.comment is not None: body['comment'] = self.comment - if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict() - if self.sharing_code is not None: body['sharing_code'] = self.sharing_code + if self.authentication_type is not None: + body["authentication_type"] = self.authentication_type.value + if self.comment is not None: + body["comment"] = self.comment + if self.data_recipient_global_metastore_id is not None: + body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list.as_dict() + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties_kvpairs: + body["properties_kvpairs"] = self.properties_kvpairs.as_dict() + if self.sharing_code is not None: + body["sharing_code"] = self.sharing_code return body def as_shallow_dict(self) -> dict: """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes.""" body = {} - if self.authentication_type is not None: body['authentication_type'] = self.authentication_type - if self.comment is not None: body['comment'] = self.comment - if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs - if self.sharing_code is not None: body['sharing_code'] = self.sharing_code + if self.authentication_type is not None: + body["authentication_type"] = self.authentication_type + if self.comment is not None: + body["comment"] = self.comment + if self.data_recipient_global_metastore_id is not None: + body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties_kvpairs: + body["properties_kvpairs"] = self.properties_kvpairs + if self.sharing_code is not None: + body["sharing_code"] = self.sharing_code return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRecipient: """Deserializes the CreateRecipient from a dictionary.""" - return cls(authentication_type=_enum(d, 
'authentication_type', AuthenticationType), comment=d.get('comment', None), data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), owner=d.get('owner', None), properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs), sharing_code=d.get('sharing_code', None)) - - + return cls( + authentication_type=_enum(d, "authentication_type", AuthenticationType), + comment=d.get("comment", None), + data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None), + expiration_time=d.get("expiration_time", None), + ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), + name=d.get("name", None), + owner=d.get("owner", None), + properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), + sharing_code=d.get("sharing_code", None), + ) @dataclass class CreateShare: name: str """Name of the share.""" - + comment: Optional[str] = None """User-provided free-form text description.""" - + storage_root: Optional[str] = None """Storage root URL for the share.""" - + def as_dict(self) -> dict: """Serializes the CreateShare into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.storage_root is not None: + body["storage_root"] = self.storage_root return body def as_shallow_dict(self) -> dict: """Serializes the CreateShare into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.storage_root is not None: body['storage_root'] = self.storage_root + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.storage_root is not None: + body["storage_root"] = self.storage_root return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateShare: """Deserializes the CreateShare from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), storage_root=d.get('storage_root', None)) - - - - - - - - - - - + return cls(comment=d.get("comment", None), name=d.get("name", None), storage_root=d.get("storage_root", None)) @dataclass @@ -231,405 +258,505 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - @dataclass class DeltaSharingDependency: """Represents a UC dependency.""" - + function: Optional[DeltaSharingFunctionDependency] = None """A Function in UC as a dependency.""" - + table: Optional[DeltaSharingTableDependency] = None """A Table in UC as a dependency.""" - + def as_dict(self) -> dict: """Serializes the DeltaSharingDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function: body['function'] = self.function.as_dict() - if self.table: body['table'] = self.table.as_dict() + if self.function: + body["function"] = self.function.as_dict() + if self.table: + body["table"] = self.table.as_dict() return body def as_shallow_dict(self) -> dict: 
"""Serializes the DeltaSharingDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.function: body['function'] = self.function - if self.table: body['table'] = self.table + if self.function: + body["function"] = self.function + if self.table: + body["table"] = self.table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingDependency: """Deserializes the DeltaSharingDependency from a dictionary.""" - return cls(function=_from_dict(d, 'function', DeltaSharingFunctionDependency), table=_from_dict(d, 'table', DeltaSharingTableDependency)) - - + return cls( + function=_from_dict(d, "function", DeltaSharingFunctionDependency), + table=_from_dict(d, "table", DeltaSharingTableDependency), + ) @dataclass class DeltaSharingDependencyList: """Represents a list of dependencies.""" - + dependencies: Optional[List[DeltaSharingDependency]] = None """An array of Dependency.""" - + def as_dict(self) -> dict: """Serializes the DeltaSharingDependencyList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies] + if self.dependencies: + body["dependencies"] = [v.as_dict() for v in self.dependencies] return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSharingDependencyList into a shallow dictionary of its immediate attributes.""" body = {} - if self.dependencies: body['dependencies'] = self.dependencies + if self.dependencies: + body["dependencies"] = self.dependencies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingDependencyList: """Deserializes the DeltaSharingDependencyList from a dictionary.""" - return cls(dependencies=_repeated_dict(d, 'dependencies', DeltaSharingDependency)) - - + return cls(dependencies=_repeated_dict(d, "dependencies", DeltaSharingDependency)) @dataclass class DeltaSharingFunction: aliases: Optional[List[RegisteredModelAlias]] = None """The aliass of registered model.""" - + comment: Optional[str] = None """The comment of the function.""" - + data_type: Optional[ColumnTypeName] = None """The data type of the function.""" - + dependency_list: Optional[DeltaSharingDependencyList] = None """The dependency list of the function.""" - + full_data_type: Optional[str] = None """The full data type of the function.""" - + id: Optional[str] = None """The id of the function.""" - + input_params: Optional[FunctionParameterInfos] = None """The function parameter information.""" - + name: Optional[str] = None """The name of the function.""" - + properties: Optional[str] = None """The properties of the function.""" - + routine_definition: Optional[str] = None """The routine definition of the function.""" - + schema: Optional[str] = None """The name of the schema that the function belongs to.""" - + securable_kind: Optional[SharedSecurableKind] = None """The securable kind of the function.""" - + share: Optional[str] = None """The name of the share that the function belongs to.""" - + share_id: Optional[str] = None """The id of the share that the function belongs to.""" - + storage_location: Optional[str] = None """The storage location of the function.""" - + tags: Optional[List[catalog.TagKeyValue]] = None """The tags of the function.""" - + def as_dict(self) -> dict: """Serializes the DeltaSharingFunction into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] - if self.comment is not None: 


 @dataclass
 class DeltaSharingFunction:
     aliases: Optional[List[RegisteredModelAlias]] = None
     """The aliases of the registered model."""
-
+
     comment: Optional[str] = None
     """The comment of the function."""
-
+
     data_type: Optional[ColumnTypeName] = None
     """The data type of the function."""
-
+
     dependency_list: Optional[DeltaSharingDependencyList] = None
     """The dependency list of the function."""
-
+
     full_data_type: Optional[str] = None
     """The full data type of the function."""
-
+
     id: Optional[str] = None
     """The id of the function."""
-
+
     input_params: Optional[FunctionParameterInfos] = None
     """The function parameter information."""
-
+
     name: Optional[str] = None
     """The name of the function."""
-
+
     properties: Optional[str] = None
     """The properties of the function."""
-
+
     routine_definition: Optional[str] = None
     """The routine definition of the function."""
-
+
     schema: Optional[str] = None
     """The name of the schema that the function belongs to."""
-
+
     securable_kind: Optional[SharedSecurableKind] = None
     """The securable kind of the function."""
-
+
     share: Optional[str] = None
     """The name of the share that the function belongs to."""
-
+
     share_id: Optional[str] = None
     """The id of the share that the function belongs to."""
-
+
     storage_location: Optional[str] = None
     """The storage location of the function."""
-
+
     tags: Optional[List[catalog.TagKeyValue]] = None
     """The tags of the function."""
-
+
     def as_dict(self) -> dict:
         """Serializes the DeltaSharingFunction into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases]
-        if self.comment is not None: body['comment'] = self.comment
-        if self.data_type is not None: body['data_type'] = self.data_type.value
-        if self.dependency_list: body['dependency_list'] = self.dependency_list.as_dict()
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.id is not None: body['id'] = self.id
-        if self.input_params: body['input_params'] = self.input_params.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.properties is not None: body['properties'] = self.properties
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.schema is not None: body['schema'] = self.schema
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value
-        if self.share is not None: body['share'] = self.share
-        if self.share_id is not None: body['share_id'] = self.share_id
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.aliases:
+            body["aliases"] = [v.as_dict() for v in self.aliases]
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.data_type is not None:
+            body["data_type"] = self.data_type.value
+        if self.dependency_list:
+            body["dependency_list"] = self.dependency_list.as_dict()
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.id is not None:
+            body["id"] = self.id
+        if self.input_params:
+            body["input_params"] = self.input_params.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.securable_kind is not None:
+            body["securable_kind"] = self.securable_kind.value
+        if self.share is not None:
+            body["share"] = self.share
+        if self.share_id is not None:
+            body["share_id"] = self.share_id
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DeltaSharingFunction into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.aliases: body['aliases'] = self.aliases
-        if self.comment is not None: body['comment'] = self.comment
-        if self.data_type is not None: body['data_type'] = self.data_type
-        if self.dependency_list: body['dependency_list'] = self.dependency_list
-        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
-        if self.id is not None: body['id'] = self.id
-        if self.input_params: body['input_params'] = self.input_params
-        if self.name is not None: body['name'] = self.name
-        if self.properties is not None: body['properties'] = self.properties
-        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
-        if self.schema is not None: body['schema'] = self.schema
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind
-        if self.share is not None: body['share'] = self.share
-        if self.share_id is not None: body['share_id'] = self.share_id
-        if self.storage_location is not None: body['storage_location'] = self.storage_location
-        if self.tags: body['tags'] = self.tags
+        if self.aliases:
+            body["aliases"] = self.aliases
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.data_type is not None:
+            body["data_type"] = self.data_type
+        if self.dependency_list:
+            body["dependency_list"] = self.dependency_list
+        if self.full_data_type is not None:
+            body["full_data_type"] = self.full_data_type
+        if self.id is not None:
+            body["id"] = self.id
+        if self.input_params:
+            body["input_params"] = self.input_params
+        if self.name is not None:
+            body["name"] = self.name
+        if self.properties is not None:
+            body["properties"] = self.properties
+        if self.routine_definition is not None:
+            body["routine_definition"] = self.routine_definition
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.securable_kind is not None:
+            body["securable_kind"] = self.securable_kind
+        if self.share is not None:
+            body["share"] = self.share
+        if self.share_id is not None:
+            body["share_id"] = self.share_id
+        if self.storage_location is not None:
+            body["storage_location"] = self.storage_location
+        if self.tags:
+            body["tags"] = self.tags
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingFunction:
         """Deserializes the DeltaSharingFunction from a dictionary."""
-        return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), comment=d.get('comment', None), data_type=_enum(d, 'data_type', ColumnTypeName), dependency_list=_from_dict(d, 'dependency_list', DeltaSharingDependencyList), full_data_type=d.get('full_data_type', None), id=d.get('id', None), input_params=_from_dict(d, 'input_params', FunctionParameterInfos), name=d.get('name', None), properties=d.get('properties', None), routine_definition=d.get('routine_definition', None), schema=d.get('schema', None), securable_kind=_enum(d, 'securable_kind', SharedSecurableKind), share=d.get('share', None), share_id=d.get('share_id', None), storage_location=d.get('storage_location', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue))
-
-
+        return cls(
+            aliases=_repeated_dict(d, "aliases", RegisteredModelAlias),
+            comment=d.get("comment", None),
+            data_type=_enum(d, "data_type", ColumnTypeName),
+            dependency_list=_from_dict(d, "dependency_list", DeltaSharingDependencyList),
+            full_data_type=d.get("full_data_type", None),
+            id=d.get("id", None),
+            input_params=_from_dict(d, "input_params", FunctionParameterInfos),
+            name=d.get("name", None),
+            properties=d.get("properties", None),
+            routine_definition=d.get("routine_definition", None),
+            schema=d.get("schema", None),
+            securable_kind=_enum(d, "securable_kind", SharedSecurableKind),
+            share=d.get("share", None),
+            share_id=d.get("share_id", None),
+            storage_location=d.get("storage_location", None),
+            tags=_repeated_dict(d, "tags", catalog.TagKeyValue),
+        )
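
The as_shallow_dict variant above differs from as_dict in that nested messages and enums are left
as live objects instead of being recursively serialized. A small sketch of the difference on
DeltaSharingFunction (field values are illustrative, not from the spec):

    fn = DeltaSharingFunction(
        name="revenue_by_region",
        dependency_list=DeltaSharingDependencyList(dependencies=[]),
    )
    fn.as_dict().get("dependency_list")          # a plain dict, recursively serialized
    fn.as_shallow_dict().get("dependency_list")  # still a DeltaSharingDependencyList instance
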


 @dataclass
 class DeltaSharingFunctionDependency:
     """A Function in UC as a dependency."""
-
+
     function_name: Optional[str] = None
-
+
     schema_name: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the DeltaSharingFunctionDependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DeltaSharingFunctionDependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.function_name is not None:
+            body["function_name"] = self.function_name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingFunctionDependency:
         """Deserializes the DeltaSharingFunctionDependency from a dictionary."""
-        return cls(function_name=d.get('function_name', None), schema_name=d.get('schema_name', None))
-
-
+        return cls(function_name=d.get("function_name", None), schema_name=d.get("schema_name", None))


 @dataclass
 class DeltaSharingTableDependency:
     """A Table in UC as a dependency."""
-
+
     schema_name: Optional[str] = None
-
+
     table_name: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the DeltaSharingTableDependency into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the DeltaSharingTableDependency into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        if self.table_name is not None:
+            body["table_name"] = self.table_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> DeltaSharingTableDependency:
         """Deserializes the DeltaSharingTableDependency from a dictionary."""
-        return cls(schema_name=d.get('schema_name', None), table_name=d.get('table_name', None))
+        return cls(schema_name=d.get("schema_name", None), table_name=d.get("table_name", None))


 @dataclass
 class FederationPolicy:
     comment: Optional[str] = None
     """Description of the policy. This is a user-provided description."""
-
+
     create_time: Optional[str] = None
     """System-generated timestamp indicating when the policy was created."""
-
+
     id: Optional[str] = None
     """Unique, immutable system-generated identifier for the federation policy."""
-
+
     name: Optional[str] = None
     """Name of the federation policy. A recipient can have multiple policies with different names.
     The name must contain only lowercase letters, numbers, and hyphens."""
-
+
     oidc_policy: Optional[OidcFederationPolicy] = None
     """Specifies the policy to use for validating OIDC claims in the federated tokens."""
-
+
     update_time: Optional[str] = None
     """System-generated timestamp indicating when the policy was last updated."""
-
+
     def as_dict(self) -> dict:
         """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict()
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.oidc_policy:
+            body["oidc_policy"] = self.oidc_policy.as_dict()
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.oidc_policy:
+            body["oidc_policy"] = self.oidc_policy
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FederationPolicy:
         """Deserializes the FederationPolicy from a dictionary."""
-        return cls(comment=d.get('comment', None), create_time=d.get('create_time', None), id=d.get('id', None), name=d.get('name', None), oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy), update_time=d.get('update_time', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            create_time=d.get("create_time", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+            oidc_policy=_from_dict(d, "oidc_policy", OidcFederationPolicy),
+            update_time=d.get("update_time", None),
+        )
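
Because oidc_policy is a nested message, from_dict (via the _from_dict helper) converts the inner
dictionary into an OidcFederationPolicy instance, which is defined later in this file. A minimal
sketch, assuming a payload shaped like the REST response (all values illustrative):

    policy = FederationPolicy.from_dict(
        {
            "name": "example-policy",
            "oidc_policy": {
                "issuer": "https://idp.example.com",
                "subject_claim": "sub",
                "subject": "user@example.com",
            },
        }
    )
    assert isinstance(policy.oidc_policy, OidcFederationPolicy)
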


 @dataclass
 class FunctionParameterInfo:
     """Represents a parameter of a function. The same message is used for both input and output
     columns."""
-
+
     comment: Optional[str] = None
     """The comment of the parameter."""
-
+
     name: Optional[str] = None
     """The name of the parameter."""
-
+
     parameter_default: Optional[str] = None
     """The default value of the parameter."""
-
+
     parameter_mode: Optional[FunctionParameterMode] = None
     """The mode of the function parameter."""
-
+
     parameter_type: Optional[FunctionParameterType] = None
     """The type of the function parameter."""
-
+
     position: Optional[int] = None
     """The position of the parameter."""
-
+
     type_interval_type: Optional[str] = None
     """The interval type of the parameter type."""
-
+
     type_json: Optional[str] = None
     """The type of the parameter in JSON format."""
-
+
     type_name: Optional[ColumnTypeName] = None
     """The type of the parameter in Enum format."""
-
+
     type_precision: Optional[int] = None
     """The precision of the parameter type."""
-
+
     type_scale: Optional[int] = None
     """The scale of the parameter type."""
-
+
     type_text: Optional[str] = None
     """The type of the parameter in text format."""
-
+
     def as_dict(self) -> dict:
         """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_default is not None: body['parameter_default'] = self.parameter_default
-        if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode.value
-        if self.parameter_type is not None: body['parameter_type'] = self.parameter_type.value
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name.value
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_default is not None:
+            body["parameter_default"] = self.parameter_default
+        if self.parameter_mode is not None:
+            body["parameter_mode"] = self.parameter_mode.value
+        if self.parameter_type is not None:
+            body["parameter_type"] = self.parameter_type.value
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name.value
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.parameter_default is not None: body['parameter_default'] = self.parameter_default
-        if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode
-        if self.parameter_type is not None: body['parameter_type'] = self.parameter_type
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.name is not None:
+            body["name"] = self.name
+        if self.parameter_default is not None:
+            body["parameter_default"] = self.parameter_default
+        if self.parameter_mode is not None:
+            body["parameter_mode"] = self.parameter_mode
+        if self.parameter_type is not None:
+            body["parameter_type"] = self.parameter_type
+        if self.position is not None:
+            body["position"] = self.position
+        if self.type_interval_type is not None:
+            body["type_interval_type"] = self.type_interval_type
+        if self.type_json is not None:
+            body["type_json"] = self.type_json
+        if self.type_name is not None:
+            body["type_name"] = self.type_name
+        if self.type_precision is not None:
+            body["type_precision"] = self.type_precision
+        if self.type_scale is not None:
+            body["type_scale"] = self.type_scale
+        if self.type_text is not None:
+            body["type_text"] = self.type_text
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo:
         """Deserializes the FunctionParameterInfo from a dictionary."""
-        return cls(comment=d.get('comment', None), name=d.get('name', None), parameter_default=d.get('parameter_default', None), parameter_mode=_enum(d, 'parameter_mode', FunctionParameterMode), parameter_type=_enum(d, 'parameter_type', FunctionParameterType), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_json=d.get('type_json', None), type_name=_enum(d, 'type_name', ColumnTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            name=d.get("name", None),
+            parameter_default=d.get("parameter_default", None),
+            parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode),
+            parameter_type=_enum(d, "parameter_type", FunctionParameterType),
+            position=d.get("position", None),
+            type_interval_type=d.get("type_interval_type", None),
+            type_json=d.get("type_json", None),
+            type_name=_enum(d, "type_name", ColumnTypeName),
+            type_precision=d.get("type_precision", None),
+            type_scale=d.get("type_scale", None),
+            type_text=d.get("type_text", None),
+        )


 @dataclass
 class FunctionParameterInfos:
     parameters: Optional[List[FunctionParameterInfo]] = None
     """The list of parameters of the function."""
-
+
     def as_dict(self) -> dict:
         """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.parameters: body['parameters'] = self.parameters
+        if self.parameters:
+            body["parameters"] = self.parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos:
         """Deserializes the FunctionParameterInfos from a dictionary."""
-        return cls(parameters=_repeated_dict(d, 'parameters', FunctionParameterInfo))
-
-
+        return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo))


 class FunctionParameterMode(Enum):
-
-
-    IN = 'IN'
-    INOUT = 'INOUT'
-    OUT = 'OUT'
-class FunctionParameterType(Enum):
-
-
-    COLUMN = 'COLUMN'
-    PARAM = 'PARAM'
+    IN = "IN"
+    INOUT = "INOUT"
+    OUT = "OUT"
+
+class FunctionParameterType(Enum):
+    COLUMN = "COLUMN"
+    PARAM = "PARAM"


 @dataclass
@@ -648,17 +775,6 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> GetActivationUrlInfoResponse:
         """Deserializes the GetActivationUrlInfoResponse from a dictionary."""
         return cls()
-
-
-
-
-
-
-
-
-
-
-

 @dataclass
@@ -666,30 +782,35 @@ class GetRecipientSharePermissionsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     permissions_out: Optional[List[ShareToPrivilegeAssignment]] = None
     """An array of data share permissions for a recipient."""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetRecipientSharePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.permissions_out:
+            body["permissions_out"] = [v.as_dict() for v in self.permissions_out]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetRecipientSharePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.permissions_out: body['permissions_out'] = self.permissions_out
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.permissions_out:
+            body["permissions_out"] = self.permissions_out
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetRecipientSharePermissionsResponse:
         """Deserializes the GetRecipientSharePermissionsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment))
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            permissions_out=_repeated_dict(d, "permissions_out", ShareToPrivilegeAssignment),
+        )
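
The next_page_token fields above follow the standard pagination contract described in their
docstrings. A sketch of draining all pages, assuming a caller-supplied fetch(page_token) callable
that returns a GetRecipientSharePermissionsResponse (the fetch function is hypothetical, not part
of this module):

    def iter_permissions(fetch):
        token = None
        while True:
            resp = fetch(token)
            # list fields default to None rather than [], hence the guard
            for assignment in resp.permissions_out or []:
                yield assignment
            token = resp.next_page_token
            if not token:
                break
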

 @dataclass
@@ -697,134 +818,145 @@ class GetSharePermissionsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     privilege_assignments: Optional[List[PrivilegeAssignment]] = None
     """The privileges assigned to each principal"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetSharePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.privilege_assignments:
+            body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetSharePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.privilege_assignments:
+            body["privilege_assignments"] = self.privilege_assignments
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetSharePermissionsResponse:
         """Deserializes the GetSharePermissionsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment))
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment),
+        )
Limit of 100.""" - + def as_dict(self) -> dict: """Serializes the IpAccessList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allowed_ip_addresses: body['allowed_ip_addresses'] = [v for v in self.allowed_ip_addresses] + if self.allowed_ip_addresses: + body["allowed_ip_addresses"] = [v for v in self.allowed_ip_addresses] return body def as_shallow_dict(self) -> dict: """Serializes the IpAccessList into a shallow dictionary of its immediate attributes.""" body = {} - if self.allowed_ip_addresses: body['allowed_ip_addresses'] = self.allowed_ip_addresses + if self.allowed_ip_addresses: + body["allowed_ip_addresses"] = self.allowed_ip_addresses return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> IpAccessList: """Deserializes the IpAccessList from a dictionary.""" - return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None)) - - - - - + return cls(allowed_ip_addresses=d.get("allowed_ip_addresses", None)) @dataclass class ListFederationPoliciesResponse: next_page_token: Optional[str] = None - + policies: Optional[List[FederationPolicy]] = None - + def as_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policies: body['policies'] = [v.as_dict() for v in self.policies] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = [v.as_dict() for v in self.policies] return body def as_shallow_dict(self) -> dict: """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.policies: body['policies'] = self.policies + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.policies: + body["policies"] = self.policies return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListFederationPoliciesResponse: """Deserializes the ListFederationPoliciesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), policies=_repeated_dict(d, 'policies', FederationPolicy)) - - - - - + return cls( + next_page_token=d.get("next_page_token", None), policies=_repeated_dict(d, "policies", FederationPolicy) + ) @dataclass class ListProviderShareAssetsResponse: """Response to ListProviderShareAssets, which contains the list of assets of a share.""" - + functions: Optional[List[DeltaSharingFunction]] = None """The list of functions in the share.""" - + notebooks: Optional[List[NotebookFile]] = None """The list of notebooks in the share.""" - + tables: Optional[List[Table]] = None """The list of tables in the share.""" - + volumes: Optional[List[Volume]] = None """The list of volumes in the share.""" - + def as_dict(self) -> dict: """Serializes the ListProviderShareAssetsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.functions: body['functions'] = [v.as_dict() for v in self.functions] - if self.notebooks: body['notebooks'] = [v.as_dict() for v in self.notebooks] - if self.tables: body['tables'] = [v.as_dict() for v in self.tables] - if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes] + if self.functions: + body["functions"] = [v.as_dict() for v in self.functions] + if self.notebooks: + body["notebooks"] = [v.as_dict() for v 


 @dataclass
 class ListProviderShareAssetsResponse:
     """Response to ListProviderShareAssets, which contains the list of assets of a share."""
-
+
     functions: Optional[List[DeltaSharingFunction]] = None
     """The list of functions in the share."""
-
+
     notebooks: Optional[List[NotebookFile]] = None
     """The list of notebooks in the share."""
-
+
     tables: Optional[List[Table]] = None
     """The list of tables in the share."""
-
+
     volumes: Optional[List[Volume]] = None
     """The list of volumes in the share."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProviderShareAssetsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.functions: body['functions'] = [v.as_dict() for v in self.functions]
-        if self.notebooks: body['notebooks'] = [v.as_dict() for v in self.notebooks]
-        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
-        if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes]
+        if self.functions:
+            body["functions"] = [v.as_dict() for v in self.functions]
+        if self.notebooks:
+            body["notebooks"] = [v.as_dict() for v in self.notebooks]
+        if self.tables:
+            body["tables"] = [v.as_dict() for v in self.tables]
+        if self.volumes:
+            body["volumes"] = [v.as_dict() for v in self.volumes]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderShareAssetsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.functions: body['functions'] = self.functions
-        if self.notebooks: body['notebooks'] = self.notebooks
-        if self.tables: body['tables'] = self.tables
-        if self.volumes: body['volumes'] = self.volumes
+        if self.functions:
+            body["functions"] = self.functions
+        if self.notebooks:
+            body["notebooks"] = self.notebooks
+        if self.tables:
+            body["tables"] = self.tables
+        if self.volumes:
+            body["volumes"] = self.volumes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProviderShareAssetsResponse:
         """Deserializes the ListProviderShareAssetsResponse from a dictionary."""
-        return cls(functions=_repeated_dict(d, 'functions', DeltaSharingFunction), notebooks=_repeated_dict(d, 'notebooks', NotebookFile), tables=_repeated_dict(d, 'tables', Table), volumes=_repeated_dict(d, 'volumes', Volume))
-
-
+        return cls(
+            functions=_repeated_dict(d, "functions", DeltaSharingFunction),
+            notebooks=_repeated_dict(d, "notebooks", NotebookFile),
+            tables=_repeated_dict(d, "tables", Table),
+            volumes=_repeated_dict(d, "volumes", Volume),
+        )


 @dataclass
@@ -832,33 +964,32 @@ class ListProviderSharesResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     shares: Optional[List[ProviderShare]] = None
     """An array of provider shares."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProviderSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = [v.as_dict() for v in self.shares]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProviderSharesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = self.shares
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = self.shares
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProviderSharesResponse:
         """Deserializes the ListProviderSharesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), shares=_repeated_dict(d, 'shares', ProviderShare))
-
-
-
-
-
+        return cls(next_page_token=d.get("next_page_token", None), shares=_repeated_dict(d, "shares", ProviderShare))
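
Each asset list on ListProviderShareAssetsResponse also defaults to None rather than an empty
list, so iteration needs a guard. A sketch, assuming raw is a response payload already parsed from
JSON (hypothetical):

    assets = ListProviderShareAssetsResponse.from_dict(raw)
    names = [t.name for t in assets.tables or []] + [f.name for f in assets.functions or []]
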

 @dataclass
@@ -866,33 +997,34 @@ class ListProvidersResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     providers: Optional[List[ProviderInfo]] = None
     """An array of provider information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = [v.as_dict() for v in self.providers]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.providers: body['providers'] = self.providers
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.providers:
+            body["providers"] = self.providers
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), providers=_repeated_dict(d, 'providers', ProviderInfo))
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None), providers=_repeated_dict(d, "providers", ProviderInfo)
+        )

 @dataclass
@@ -900,33 +1032,34 @@ class ListRecipientsResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     recipients: Optional[List[RecipientInfo]] = None
     """An array of recipient information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListRecipientsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.recipients:
+            body["recipients"] = [v.as_dict() for v in self.recipients]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListRecipientsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.recipients: body['recipients'] = self.recipients
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.recipients:
+            body["recipients"] = self.recipients
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListRecipientsResponse:
         """Deserializes the ListRecipientsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), recipients=_repeated_dict(d, 'recipients', RecipientInfo))
-
-
-
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None), recipients=_repeated_dict(d, "recipients", RecipientInfo)
+        )

 @dataclass
@@ -934,80 +1067,99 @@ class ListSharesResponse:
     next_page_token: Optional[str] = None
     """Opaque token to retrieve the next page of results. Absent if there are no more pages.
     __page_token__ should be set to this value for the next request (for the next page of results)."""
-
+
     shares: Optional[List[ShareInfo]] = None
     """An array of data share information objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListSharesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = [v.as_dict() for v in self.shares]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListSharesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.shares: body['shares'] = self.shares
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.shares:
+            body["shares"] = self.shares
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListSharesResponse:
         """Deserializes the ListSharesResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), shares=_repeated_dict(d, 'shares', ShareInfo))
-
-
+        return cls(next_page_token=d.get("next_page_token", None), shares=_repeated_dict(d, "shares", ShareInfo))


 @dataclass
 class NotebookFile:
     comment: Optional[str] = None
     """The comment of the notebook file."""
-
+
     id: Optional[str] = None
     """The id of the notebook file."""
-
+
     name: Optional[str] = None
     """Name of the notebook file."""
-
+
     share: Optional[str] = None
     """The name of the share that the notebook file belongs to."""
-
+
     share_id: Optional[str] = None
     """The id of the share that the notebook file belongs to."""
-
+
     tags: Optional[List[catalog.TagKeyValue]] = None
     """The tags of the notebook file."""
-
+
     def as_dict(self) -> dict:
         """Serializes the NotebookFile into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.share is not None: body['share'] = self.share
-        if self.share_id is not None: body['share_id'] = self.share_id
-        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.share is not None:
+            body["share"] = self.share
+        if self.share_id is not None:
+            body["share_id"] = self.share_id
+        if self.tags:
+            body["tags"] = [v.as_dict() for v in self.tags]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NotebookFile into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.share is not None: body['share'] = self.share
-        if self.share_id is not None: body['share_id'] = self.share_id
-        if self.tags: body['tags'] = self.tags
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.share is not None:
+            body["share"] = self.share
+        if self.share_id is not None:
+            body["share_id"] = self.share_id
+        if self.tags:
+            body["tags"] = self.tags
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NotebookFile:
         """Deserializes the NotebookFile from a dictionary."""
-        return cls(comment=d.get('comment', None), id=d.get('id', None), name=d.get('name', None), share=d.get('share', None), share_id=d.get('share_id', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue))
-
-
+        return cls(
+            comment=d.get("comment", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+            share=d.get("share", None),
+            share_id=d.get("share_id", None),
+            tags=_repeated_dict(d, "tags", catalog.TagKeyValue),
+        )

 @dataclass
@@ -1015,10 +1167,10 @@ class OidcFederationPolicy:
     """Specifies the policy to use for validating OIDC claims in your federated tokens from Delta
     Sharing Clients. Refer to https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed
     for more details."""
-
+
     issuer: str
     """The required token issuer, as specified in the 'iss' claim of federated tokens."""
-
+
     subject_claim: str
     """The claim that contains the subject of the token. Depending on the identity provider and the
     use case (U2M or M2M), this can vary: - For Entra ID (AAD): * U2M flow (group access): Use
     `groups`.
@@ -1027,7 +1179,7 @@ class OidcFederationPolicy:
     Supported `subject_claim` values are: - `oid`: Object ID of the user. - `azp`: Client ID of the
     OAuth app. - `groups`: Object ID of the group. - `sub`: Subject identifier for other use
     cases."""
-
+
     subject: str
     """The required token subject, as specified in the subject claim of federated tokens. The
     subject claim identifies the identity of the user or machine accessing the resource. Examples for Entra
@@ -1035,504 +1187,637 @@ class OidcFederationPolicy:
     ID of the group in Entra ID. - U2M flow (user access): If the subject claim is `oid`, this must
     be the Object ID of the user in Entra ID. - M2M flow (OAuth App access): If the subject claim
     is `azp`, this must be the client ID of the OAuth app registered in Entra ID."""
-
+
     audiences: Optional[List[str]] = None
     """The allowed token audiences, as specified in the 'aud' claim of federated tokens. The
     audience identifier is intended to represent the recipient of the token. Can be any non-empty
     string value. As long as the audience in the token matches at least one audience in the
     policy, the token is considered a match."""
-
+
     def as_dict(self) -> dict:
         """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.audiences: body['audiences'] = [v for v in self.audiences]
-        if self.issuer is not None: body['issuer'] = self.issuer
-        if self.subject is not None: body['subject'] = self.subject
-        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        if self.audiences:
+            body["audiences"] = [v for v in self.audiences]
+        if self.issuer is not None:
+            body["issuer"] = self.issuer
+        if self.subject is not None:
+            body["subject"] = self.subject
+        if self.subject_claim is not None:
+            body["subject_claim"] = self.subject_claim
        return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.audiences: body['audiences'] = self.audiences
-        if self.issuer is not None: body['issuer'] = self.issuer
-        if self.subject is not None: body['subject'] = self.subject
-        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        if self.audiences:
+            body["audiences"] = self.audiences
+        if self.issuer is not None:
+            body["issuer"] = self.issuer
+        if self.subject is not None:
+            body["subject"] = self.subject
+        if self.subject_claim is not None:
+            body["subject_claim"] = self.subject_claim
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OidcFederationPolicy:
         """Deserializes the OidcFederationPolicy from a dictionary."""
-        return cls(audiences=d.get('audiences', None), issuer=d.get('issuer', None), subject=d.get('subject', None), subject_claim=d.get('subject_claim', None))
-
-
+        return cls(
+            audiences=d.get("audiences", None),
+            issuer=d.get("issuer", None),
+            subject=d.get("subject", None),
+            subject_claim=d.get("subject_claim", None),
+        )
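
Putting the subject_claim guidance from the docstrings above into practice: for an Entra ID M2M
flow the subject claim is `azp` and the subject is the OAuth app's client ID. A minimal sketch
(the issuer, client ID, and audience values are placeholders, not defaults from the spec):

    oidc = OidcFederationPolicy(
        issuer="https://login.microsoftonline.com/<tenant-id>/v2.0",
        subject_claim="azp",
        subject="<oauth-app-client-id>",
        audiences=["databricks"],
    )
    policy = FederationPolicy(name="entra-m2m-policy", oidc_policy=oidc)
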


 @dataclass
 class Partition:
     values: Optional[List[PartitionValue]] = None
     """An array of partition values."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Partition into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        if self.values:
+            body["values"] = [v.as_dict() for v in self.values]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Partition into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.values: body['values'] = self.values
+        if self.values:
+            body["values"] = self.values
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Partition:
         """Deserializes the Partition from a dictionary."""
-        return cls(values=_repeated_dict(d, 'values', PartitionValue))
-
-
+        return cls(values=_repeated_dict(d, "values", PartitionValue))


 @dataclass
 class PartitionValue:
     name: Optional[str] = None
     """The name of the partition column."""
-
+
     op: Optional[PartitionValueOp] = None
     """The operator to apply for the value."""
-
+
     recipient_property_key: Optional[str] = None
     """The key of a Delta Sharing recipient's property. For example "databricks-account-id". When
     this field is set, field `value` can not be set."""
-
+
     value: Optional[str] = None
     """The value of the partition column. When this value is not set, it means `null` value. When
     this field is set, field `recipient_property_key` can not be set."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PartitionValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.op is not None: body['op'] = self.op.value
-        if self.recipient_property_key is not None: body['recipient_property_key'] = self.recipient_property_key
-        if self.value is not None: body['value'] = self.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.op is not None:
+            body["op"] = self.op.value
+        if self.recipient_property_key is not None:
+            body["recipient_property_key"] = self.recipient_property_key
+        if self.value is not None:
+            body["value"] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PartitionValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.op is not None: body['op'] = self.op
-        if self.recipient_property_key is not None: body['recipient_property_key'] = self.recipient_property_key
-        if self.value is not None: body['value'] = self.value
+        if self.name is not None:
+            body["name"] = self.name
+        if self.op is not None:
+            body["op"] = self.op
+        if self.recipient_property_key is not None:
+            body["recipient_property_key"] = self.recipient_property_key
+        if self.value is not None:
+            body["value"] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PartitionValue:
         """Deserializes the PartitionValue from a dictionary."""
-        return cls(name=d.get('name', None), op=_enum(d, 'op', PartitionValueOp), recipient_property_key=d.get('recipient_property_key', None), value=d.get('value', None))
-
-
+        return cls(
+            name=d.get("name", None),
+            op=_enum(d, "op", PartitionValueOp),
+            recipient_property_key=d.get("recipient_property_key", None),
+            value=d.get("value", None),
+        )


 class PartitionValueOp(Enum):
-
-
-    EQUAL = 'EQUAL'
-    LIKE = 'LIKE'
+
+    EQUAL = "EQUAL"
+    LIKE = "LIKE"
+
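
As the docstrings note, `value` and `recipient_property_key` are mutually exclusive on a
PartitionValue: the former pins a literal partition value, the latter resolves a recipient
property at query time. A sketch of one of each (column names illustrative):

    static = PartitionValue(name="region", op=PartitionValueOp.EQUAL, value="emea")
    dynamic = PartitionValue(
        name="tenant_id", op=PartitionValueOp.EQUAL, recipient_property_key="databricks-account-id"
    )
    Partition(values=[static, dynamic]).as_dict()
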

 @dataclass
 class PermissionsChange:
     add: Optional[List[str]] = None
     """The set of privileges to add."""
-
+
     principal: Optional[str] = None
     """The principal whose privileges we are changing."""
-
+
     remove: Optional[List[str]] = None
     """The set of privileges to remove."""
-
+
     def as_dict(self) -> dict:
         """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.add: body['add'] = [v for v in self.add]
-        if self.principal is not None: body['principal'] = self.principal
-        if self.remove: body['remove'] = [v for v in self.remove]
+        if self.add:
+            body["add"] = [v for v in self.add]
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.remove:
+            body["remove"] = [v for v in self.remove]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.add: body['add'] = self.add
-        if self.principal is not None: body['principal'] = self.principal
-        if self.remove: body['remove'] = self.remove
+        if self.add:
+            body["add"] = self.add
+        if self.principal is not None:
+            body["principal"] = self.principal
+        if self.remove:
+            body["remove"] = self.remove
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange:
         """Deserializes the PermissionsChange from a dictionary."""
-        return cls(add=d.get('add', None), principal=d.get('principal', None), remove=d.get('remove', None))
-
-
+        return cls(add=d.get("add", None), principal=d.get("principal", None), remove=d.get("remove", None))


 class Privilege(Enum):
-
-
-    ACCESS = 'ACCESS'
-    ALL_PRIVILEGES = 'ALL_PRIVILEGES'
-    APPLY_TAG = 'APPLY_TAG'
-    CREATE = 'CREATE'
-    CREATE_CATALOG = 'CREATE_CATALOG'
-    CREATE_CONNECTION = 'CREATE_CONNECTION'
-    CREATE_EXTERNAL_LOCATION = 'CREATE_EXTERNAL_LOCATION'
-    CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
-    CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
-    CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
-    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
-    CREATE_FUNCTION = 'CREATE_FUNCTION'
-    CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
-    CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
-    CREATE_MODEL = 'CREATE_MODEL'
-    CREATE_PROVIDER = 'CREATE_PROVIDER'
-    CREATE_RECIPIENT = 'CREATE_RECIPIENT'
-    CREATE_SCHEMA = 'CREATE_SCHEMA'
-    CREATE_SERVICE_CREDENTIAL = 'CREATE_SERVICE_CREDENTIAL'
-    CREATE_SHARE = 'CREATE_SHARE'
-    CREATE_STORAGE_CREDENTIAL = 'CREATE_STORAGE_CREDENTIAL'
-    CREATE_TABLE = 'CREATE_TABLE'
-    CREATE_VIEW = 'CREATE_VIEW'
-    CREATE_VOLUME = 'CREATE_VOLUME'
-    EXECUTE = 'EXECUTE'
-    MANAGE = 'MANAGE'
-    MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST'
-    MODIFY = 'MODIFY'
-    READ_FILES = 'READ_FILES'
-    READ_PRIVATE_FILES = 'READ_PRIVATE_FILES'
-    READ_VOLUME = 'READ_VOLUME'
-    REFRESH = 'REFRESH'
-    SELECT = 'SELECT'
-    SET_SHARE_PERMISSION = 'SET_SHARE_PERMISSION'
-    USAGE = 'USAGE'
-    USE_CATALOG = 'USE_CATALOG'
-    USE_CONNECTION = 'USE_CONNECTION'
-    USE_MARKETPLACE_ASSETS = 'USE_MARKETPLACE_ASSETS'
-    USE_PROVIDER = 'USE_PROVIDER'
-    USE_RECIPIENT = 'USE_RECIPIENT'
-    USE_SCHEMA = 'USE_SCHEMA'
-    USE_SHARE = 'USE_SHARE'
-    WRITE_FILES = 'WRITE_FILES'
-    WRITE_PRIVATE_FILES = 'WRITE_PRIVATE_FILES'
-    WRITE_VOLUME = 'WRITE_VOLUME'
+
+    ACCESS = "ACCESS"
+    ALL_PRIVILEGES = "ALL_PRIVILEGES"
+    APPLY_TAG = "APPLY_TAG"
+    CREATE = "CREATE"
+    CREATE_CATALOG = "CREATE_CATALOG"
+    CREATE_CONNECTION = "CREATE_CONNECTION"
+    CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION"
+    CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE"
+    CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME"
+    CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG"
+    CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE"
+    CREATE_FUNCTION = "CREATE_FUNCTION"
+    CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE"
+    CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW"
+    CREATE_MODEL = "CREATE_MODEL"
+    CREATE_PROVIDER = "CREATE_PROVIDER"
+    CREATE_RECIPIENT = "CREATE_RECIPIENT"
+    CREATE_SCHEMA = "CREATE_SCHEMA"
+    CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL"
+    CREATE_SHARE = "CREATE_SHARE"
+    CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL"
+    CREATE_TABLE = "CREATE_TABLE"
+    CREATE_VIEW = "CREATE_VIEW"
+    CREATE_VOLUME = "CREATE_VOLUME"
+    EXECUTE = "EXECUTE"
+    MANAGE = "MANAGE"
+    MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
+    MODIFY = "MODIFY"
+    READ_FILES = "READ_FILES"
+    READ_PRIVATE_FILES = "READ_PRIVATE_FILES"
+    READ_VOLUME = "READ_VOLUME"
+    REFRESH = "REFRESH"
+    SELECT = "SELECT"
+    SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION"
+    USAGE = "USAGE"
+    USE_CATALOG = "USE_CATALOG"
+    USE_CONNECTION = "USE_CONNECTION"
+    USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS"
+    USE_PROVIDER = "USE_PROVIDER"
+    USE_RECIPIENT = "USE_RECIPIENT"
+    USE_SCHEMA = "USE_SCHEMA"
+    USE_SHARE = "USE_SHARE"
+    WRITE_FILES = "WRITE_FILES"
+    WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES"
+    WRITE_VOLUME = "WRITE_VOLUME"
+
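
PermissionsChange carries privilege names as plain strings, while PrivilegeAssignment below
deserializes them into the Privilege enum; the enum's `.value` bridges the two. A sketch (the
principal name is illustrative):

    change = PermissionsChange(
        principal="data-consumers",
        add=[Privilege.SELECT.value],
        remove=[Privilege.MODIFY.value],
    )
    change.as_dict()  # {'add': ['SELECT'], 'principal': 'data-consumers', 'remove': ['MODIFY']}
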
"""The privileges assigned to the principal.""" - + def as_dict(self) -> dict: """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.privileges: body['privileges'] = [v.value for v in self.privileges] + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = [v.value for v in self.privileges] return body def as_shallow_dict(self) -> dict: """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.privileges: body['privileges'] = self.privileges + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = self.privileges return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get('principal', None), privileges=_repeated_enum(d, 'privileges', Privilege)) - - + return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) @dataclass class ProviderInfo: authentication_type: Optional[AuthenticationType] = None """The delta sharing authentication type.""" - + cloud: Optional[str] = None """Cloud vendor of the provider's UC metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + comment: Optional[str] = None """Description about the provider.""" - + created_at: Optional[int] = None """Time at which this Provider was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of Provider creator.""" - + data_provider_global_metastore_id: Optional[str] = None """The global UC metastore id of the data provider. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" - + metastore_id: Optional[str] = None """UUID of the provider's UC metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + name: Optional[str] = None """The name of the Provider.""" - + owner: Optional[str] = None """Username of Provider owner.""" - + recipient_profile: Optional[RecipientProfile] = None """The recipient profile. This field is only present when the authentication_type is `TOKEN` or `OAUTH_CLIENT_CREDENTIALS`.""" - + recipient_profile_str: Optional[str] = None """This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - + region: Optional[str] = None """Cloud region of the provider's UC metastore. 


 @dataclass
 class ProviderInfo:
     authentication_type: Optional[AuthenticationType] = None
     """The delta sharing authentication type."""
-
+
     cloud: Optional[str] = None
     """Cloud vendor of the provider's UC metastore. This field is only present when the
     __authentication_type__ is **DATABRICKS**."""
-
+
     comment: Optional[str] = None
     """Description of the provider."""
-
+
     created_at: Optional[int] = None
     """Time at which this Provider was created, in epoch milliseconds."""
-
+
     created_by: Optional[str] = None
     """Username of Provider creator."""
-
+
     data_provider_global_metastore_id: Optional[str] = None
     """The global UC metastore id of the data provider. This field is only present when the
     __authentication_type__ is **DATABRICKS**. The identifier is of format
     __cloud__:__region__:__metastore-uuid__."""
-
+
     metastore_id: Optional[str] = None
     """UUID of the provider's UC metastore. This field is only present when the
     __authentication_type__ is **DATABRICKS**."""
-
+
     name: Optional[str] = None
     """The name of the Provider."""
-
+
     owner: Optional[str] = None
     """Username of Provider owner."""
-
+
     recipient_profile: Optional[RecipientProfile] = None
     """The recipient profile. This field is only present when the authentication_type is `TOKEN` or
     `OAUTH_CLIENT_CREDENTIALS`."""
-
+
     recipient_profile_str: Optional[str] = None
     """This field is required when the __authentication_type__ is **TOKEN**,
     **OAUTH_CLIENT_CREDENTIALS** or not provided."""
-
+
     region: Optional[str] = None
     """Cloud region of the provider's UC metastore. This field is only present when the
     __authentication_type__ is **DATABRICKS**."""
-
+
     updated_at: Optional[int] = None
     """Time at which this Provider was last modified, in epoch milliseconds."""
-
+
     updated_by: Optional[str] = None
     """Username of user who last modified Provider."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.data_provider_global_metastore_id is not None: body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile.as_dict()
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
-        if self.region is not None: body['region'] = self.region
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type.value
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.data_provider_global_metastore_id is not None:
+            body["data_provider_global_metastore_id"] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.recipient_profile:
+            body["recipient_profile"] = self.recipient_profile.as_dict()
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
+        if self.region is not None:
+            body["region"] = self.region
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
-        if self.cloud is not None: body['cloud'] = self.cloud
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.data_provider_global_metastore_id is not None: body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
-        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile
-        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
-        if self.region is not None: body['region'] = self.region
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.authentication_type is not None:
+            body["authentication_type"] = self.authentication_type
+        if self.cloud is not None:
+            body["cloud"] = self.cloud
+        if self.comment is not None:
+            body["comment"] = self.comment
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.created_by is not None:
+            body["created_by"] = self.created_by
+        if self.data_provider_global_metastore_id is not None:
+            body["data_provider_global_metastore_id"] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.owner is not None:
+            body["owner"] = self.owner
+        if self.recipient_profile:
+            body["recipient_profile"] = self.recipient_profile
+        if self.recipient_profile_str is not None:
+            body["recipient_profile_str"] = self.recipient_profile_str
+        if self.region is not None:
+            body["region"] = self.region
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.updated_by is not None:
+            body["updated_by"] = self.updated_by
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
-        return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), cloud=d.get('cloud', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_provider_global_metastore_id=d.get('data_provider_global_metastore_id', None), metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), recipient_profile=_from_dict(d, 'recipient_profile', RecipientProfile), recipient_profile_str=d.get('recipient_profile_str', None), region=d.get('region', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None))
-
-
+        return cls(
+            authentication_type=_enum(d, "authentication_type", AuthenticationType),
+            cloud=d.get("cloud", None),
+            comment=d.get("comment", None),
+            created_at=d.get("created_at", None),
+            created_by=d.get("created_by", None),
+            data_provider_global_metastore_id=d.get("data_provider_global_metastore_id", None),
+            metastore_id=d.get("metastore_id", None),
+            name=d.get("name", None),
+            owner=d.get("owner", None),
+            recipient_profile=_from_dict(d, "recipient_profile", RecipientProfile),
+            recipient_profile_str=d.get("recipient_profile_str", None),
+            region=d.get("region", None),
+            updated_at=d.get("updated_at", None),
+            updated_by=d.get("updated_by", None),
+        )


 @dataclass
 class ProviderShare:
     name: Optional[str] = None
     """The name of the Provider Share."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ProviderShare into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ProviderShare into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.name is not None: body['name'] = self.name
+        if self.name is not None:
+            body["name"] = self.name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ProviderShare:
         """Deserializes the ProviderShare from a dictionary."""
-        return cls(name=d.get('name', None))
-
-
+        return cls(name=d.get("name", None))
return cls(name=d.get('name', None)) - - + return cls(name=d.get("name", None)) @dataclass class RecipientInfo: activated: Optional[bool] = None """A boolean status field showing whether the Recipient's activation URL has been exercised or not.""" - + activation_url: Optional[str] = None """Full activation url to retrieve the access token. It will be empty if the token is already retrieved.""" - + authentication_type: Optional[AuthenticationType] = None """The delta sharing authentication type.""" - + cloud: Optional[str] = None """Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + comment: Optional[str] = None """Description about the recipient.""" - + created_at: Optional[int] = None """Time at which this recipient was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of recipient creator.""" - + data_recipient_global_metastore_id: Optional[str] = None """The global Unity Catalog metastore id provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token, in epoch milliseconds.""" - + ip_access_list: Optional[IpAccessList] = None """IP Access List""" - + metastore_id: Optional[str] = None """Unique identifier of recipient's Unity Catalog Metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + name: Optional[str] = None """Name of Recipient.""" - + owner: Optional[str] = None """Username of the recipient owner.""" - + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None """Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write.""" - + region: Optional[str] = None """Cloud region of the recipient's Unity Catalog Metastore. This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + sharing_code: Optional[str] = None """The one-time sharing code provided by the data recipient. 
This field is only present when the __authentication_type__ is **DATABRICKS**.""" - + tokens: Optional[List[RecipientTokenInfo]] = None """This field is only present when the __authentication_type__ is **TOKEN**.""" - + updated_at: Optional[int] = None """Time at which the recipient was updated, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of recipient updater.""" - + def as_dict(self) -> dict: """Serializes the RecipientInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activated is not None: body['activated'] = self.activated - if self.activation_url is not None: body['activation_url'] = self.activation_url - if self.authentication_type is not None: body['authentication_type'] = self.authentication_type.value - if self.cloud is not None: body['cloud'] = self.cloud - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict() - if self.region is not None: body['region'] = self.region - if self.sharing_code is not None: body['sharing_code'] = self.sharing_code - if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens] - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.activated is not None: + body["activated"] = self.activated + if self.activation_url is not None: + body["activation_url"] = self.activation_url + if self.authentication_type is not None: + body["authentication_type"] = self.authentication_type.value + if self.cloud is not None: + body["cloud"] = self.cloud + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_recipient_global_metastore_id is not None: + body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list.as_dict() + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties_kvpairs: + body["properties_kvpairs"] = self.properties_kvpairs.as_dict() + if self.region is not None: + body["region"] = self.region + if self.sharing_code is not None: + body["sharing_code"] = self.sharing_code + if self.tokens: + body["tokens"] = [v.as_dict() for v in self.tokens] + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the RecipientInfo 
into a shallow dictionary of its immediate attributes.""" body = {} - if self.activated is not None: body['activated'] = self.activated - if self.activation_url is not None: body['activation_url'] = self.activation_url - if self.authentication_type is not None: body['authentication_type'] = self.authentication_type - if self.cloud is not None: body['cloud'] = self.cloud - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id - if self.name is not None: body['name'] = self.name - if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs - if self.region is not None: body['region'] = self.region - if self.sharing_code is not None: body['sharing_code'] = self.sharing_code - if self.tokens: body['tokens'] = self.tokens - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.activated is not None: + body["activated"] = self.activated + if self.activation_url is not None: + body["activation_url"] = self.activation_url + if self.authentication_type is not None: + body["authentication_type"] = self.authentication_type + if self.cloud is not None: + body["cloud"] = self.cloud + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_recipient_global_metastore_id is not None: + body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties_kvpairs: + body["properties_kvpairs"] = self.properties_kvpairs + if self.region is not None: + body["region"] = self.region + if self.sharing_code is not None: + body["sharing_code"] = self.sharing_code + if self.tokens: + body["tokens"] = self.tokens + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RecipientInfo: """Deserializes the RecipientInfo from a dictionary.""" - return cls(activated=d.get('activated', None), activation_url=d.get('activation_url', None), authentication_type=_enum(d, 'authentication_type', AuthenticationType), cloud=d.get('cloud', None), comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), 
metastore_id=d.get('metastore_id', None), name=d.get('name', None), owner=d.get('owner', None), properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs), region=d.get('region', None), sharing_code=d.get('sharing_code', None), tokens=_repeated_dict(d, 'tokens', RecipientTokenInfo), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + activated=d.get("activated", None), + activation_url=d.get("activation_url", None), + authentication_type=_enum(d, "authentication_type", AuthenticationType), + cloud=d.get("cloud", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None), + expiration_time=d.get("expiration_time", None), + ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), + region=d.get("region", None), + sharing_code=d.get("sharing_code", None), + tokens=_repeated_dict(d, "tokens", RecipientTokenInfo), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class RecipientProfile: bearer_token: Optional[str] = None """The token used to authorize the recipient.""" - + endpoint: Optional[str] = None """The endpoint for the share to be used by the recipient.""" - + share_credentials_version: Optional[int] = None """The version number of the recipient's credentials on a share.""" - + def as_dict(self) -> dict: """Serializes the RecipientProfile into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bearer_token is not None: body['bearer_token'] = self.bearer_token - if self.endpoint is not None: body['endpoint'] = self.endpoint - if self.share_credentials_version is not None: body['share_credentials_version'] = self.share_credentials_version + if self.bearer_token is not None: + body["bearer_token"] = self.bearer_token + if self.endpoint is not None: + body["endpoint"] = self.endpoint + if self.share_credentials_version is not None: + body["share_credentials_version"] = self.share_credentials_version return body def as_shallow_dict(self) -> dict: """Serializes the RecipientProfile into a shallow dictionary of its immediate attributes.""" body = {} - if self.bearer_token is not None: body['bearer_token'] = self.bearer_token - if self.endpoint is not None: body['endpoint'] = self.endpoint - if self.share_credentials_version is not None: body['share_credentials_version'] = self.share_credentials_version + if self.bearer_token is not None: + body["bearer_token"] = self.bearer_token + if self.endpoint is not None: + body["endpoint"] = self.endpoint + if self.share_credentials_version is not None: + body["share_credentials_version"] = self.share_credentials_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RecipientProfile: """Deserializes the RecipientProfile from a dictionary.""" - return cls(bearer_token=d.get('bearer_token', None), endpoint=d.get('endpoint', None), share_credentials_version=d.get('share_credentials_version', None)) - - + return cls( + bearer_token=d.get("bearer_token", None), + endpoint=d.get("endpoint", None), + share_credentials_version=d.get("share_credentials_version", None), + ) @dataclass @@ -1540,128 +1825,158 @@ class RecipientTokenInfo: activation_url: Optional[str] = None 
"""Full activation URL to retrieve the access token. It will be empty if the token is already retrieved.""" - + created_at: Optional[int] = None """Time at which this recipient token was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of recipient token creator.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token in epoch milliseconds.""" - + id: Optional[str] = None """Unique ID of the recipient token.""" - + updated_at: Optional[int] = None """Time at which this recipient token was updated, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of recipient token updater.""" - + def as_dict(self) -> dict: """Serializes the RecipientTokenInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.activation_url is not None: body['activation_url'] = self.activation_url - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.id is not None: body['id'] = self.id - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.activation_url is not None: + body["activation_url"] = self.activation_url + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.id is not None: + body["id"] = self.id + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the RecipientTokenInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.activation_url is not None: body['activation_url'] = self.activation_url - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.id is not None: body['id'] = self.id - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.activation_url is not None: + body["activation_url"] = self.activation_url + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.id is not None: + body["id"] = self.id + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RecipientTokenInfo: """Deserializes the RecipientTokenInfo from a dictionary.""" - return cls(activation_url=d.get('activation_url', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), expiration_time=d.get('expiration_time', None), id=d.get('id', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - + return cls( + activation_url=d.get("activation_url", None), + created_at=d.get("created_at", None), + 
created_by=d.get("created_by", None), + expiration_time=d.get("expiration_time", None), + id=d.get("id", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class RegisteredModelAlias: alias_name: Optional[str] = None """Name of the alias.""" - + version_num: Optional[int] = None """Numeric model version that alias will reference.""" - + def as_dict(self) -> dict: """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alias_name is not None: body['alias_name'] = self.alias_name - if self.version_num is not None: body['version_num'] = self.version_num + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body def as_shallow_dict(self) -> dict: """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" body = {} - if self.alias_name is not None: body['alias_name'] = self.alias_name - if self.version_num is not None: body['version_num'] = self.version_num + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: """Deserializes the RegisteredModelAlias from a dictionary.""" - return cls(alias_name=d.get('alias_name', None), version_num=d.get('version_num', None)) - - - - - + return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) @dataclass class RetrieveTokenResponse: bearer_token: Optional[str] = None """The token used to authorize the recipient.""" - + endpoint: Optional[str] = None """The endpoint for the share to be used by the recipient.""" - + expiration_time: Optional[str] = None """Expiration timestamp of the token in epoch milliseconds.""" - + share_credentials_version: Optional[int] = None """These field names must follow the delta sharing protocol.""" - + def as_dict(self) -> dict: """Serializes the RetrieveTokenResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bearer_token is not None: body['bearerToken'] = self.bearer_token - if self.endpoint is not None: body['endpoint'] = self.endpoint - if self.expiration_time is not None: body['expirationTime'] = self.expiration_time - if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version + if self.bearer_token is not None: + body["bearerToken"] = self.bearer_token + if self.endpoint is not None: + body["endpoint"] = self.endpoint + if self.expiration_time is not None: + body["expirationTime"] = self.expiration_time + if self.share_credentials_version is not None: + body["shareCredentialsVersion"] = self.share_credentials_version return body def as_shallow_dict(self) -> dict: """Serializes the RetrieveTokenResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bearer_token is not None: body['bearerToken'] = self.bearer_token - if self.endpoint is not None: body['endpoint'] = self.endpoint - if self.expiration_time is not None: body['expirationTime'] = self.expiration_time - if self.share_credentials_version is not None: body['shareCredentialsVersion'] = self.share_credentials_version + if self.bearer_token is not None: + body["bearerToken"] = self.bearer_token + if self.endpoint is not None: + body["endpoint"] = self.endpoint + if self.expiration_time is 
not None: + body["expirationTime"] = self.expiration_time + if self.share_credentials_version is not None: + body["shareCredentialsVersion"] = self.share_credentials_version return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RetrieveTokenResponse: """Deserializes the RetrieveTokenResponse from a dictionary.""" - return cls(bearer_token=d.get('bearerToken', None), endpoint=d.get('endpoint', None), expiration_time=d.get('expirationTime', None), share_credentials_version=d.get('shareCredentialsVersion', None)) - - + return cls( + bearer_token=d.get("bearerToken", None), + endpoint=d.get("endpoint", None), + expiration_time=d.get("expirationTime", None), + share_credentials_version=d.get("shareCredentialsVersion", None), + ) @dataclass @@ -1670,160 +1985,195 @@ class RotateRecipientToken: """The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire the existing token immediately, negative number will return an error.""" - + name: Optional[str] = None """The name of the Recipient.""" - + def as_dict(self) -> dict: """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.existing_token_expire_in_seconds is not None: body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds - if self.name is not None: body['name'] = self.name + if self.existing_token_expire_in_seconds is not None: + body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.existing_token_expire_in_seconds is not None: body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds - if self.name is not None: body['name'] = self.name + if self.existing_token_expire_in_seconds is not None: + body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RotateRecipientToken: """Deserializes the RotateRecipientToken from a dictionary.""" - return cls(existing_token_expire_in_seconds=d.get('existing_token_expire_in_seconds', None), name=d.get('name', None)) - - + return cls( + existing_token_expire_in_seconds=d.get("existing_token_expire_in_seconds", None), name=d.get("name", None) + ) @dataclass class SecurablePropertiesKvPairs: """An object with __properties__ containing map of key-value properties attached to the securable.""" - - properties: Dict[str,str] + + properties: Dict[str, str] """A map of key-value properties attached to the securable.""" - + def as_dict(self) -> dict: """Serializes the SecurablePropertiesKvPairs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.properties: body['properties'] = self.properties + if self.properties: + body["properties"] = self.properties return body def as_shallow_dict(self) -> dict: """Serializes the SecurablePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" body = {} - if self.properties: body['properties'] = self.properties + if self.properties: + body["properties"] = self.properties return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SecurablePropertiesKvPairs: """Deserializes the 
SecurablePropertiesKvPairs from a dictionary.""" - return cls(properties=d.get('properties', None)) - - + return cls(properties=d.get("properties", None)) @dataclass class ShareInfo: comment: Optional[str] = None """User-provided free-form text description.""" - + created_at: Optional[int] = None """Time at which this share was created, in epoch milliseconds.""" - + created_by: Optional[str] = None """Username of share creator.""" - + name: Optional[str] = None """Name of the share.""" - + objects: Optional[List[SharedDataObject]] = None """A list of shared data objects within the share.""" - + owner: Optional[str] = None """Username of current owner of share.""" - + storage_location: Optional[str] = None """Storage Location URL (full path) for the share.""" - + storage_root: Optional[str] = None """Storage root URL for the share.""" - + updated_at: Optional[int] = None """Time at which this share was updated, in epoch milliseconds.""" - + updated_by: Optional[str] = None """Username of share updater.""" - + def as_dict(self) -> dict: """Serializes the ShareInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.name is not None: body['name'] = self.name - if self.objects: body['objects'] = [v.as_dict() for v in self.objects] - if self.owner is not None: body['owner'] = self.owner - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.name is not None: + body["name"] = self.name + if self.objects: + body["objects"] = [v.as_dict() for v in self.objects] + if self.owner is not None: + body["owner"] = self.owner + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: """Serializes the ShareInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.created_at is not None: body['created_at'] = self.created_at - if self.created_by is not None: body['created_by'] = self.created_by - if self.name is not None: body['name'] = self.name - if self.objects: body['objects'] = self.objects - if self.owner is not None: body['owner'] = self.owner - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.updated_by is not None: body['updated_by'] = self.updated_by + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + 
body["created_by"] = self.created_by + if self.name is not None: + body["name"] = self.name + if self.objects: + body["objects"] = self.objects + if self.owner is not None: + body["owner"] = self.owner + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ShareInfo: """Deserializes the ShareInfo from a dictionary.""" - return cls(comment=d.get('comment', None), created_at=d.get('created_at', None), created_by=d.get('created_by', None), name=d.get('name', None), objects=_repeated_dict(d, 'objects', SharedDataObject), owner=d.get('owner', None), storage_location=d.get('storage_location', None), storage_root=d.get('storage_root', None), updated_at=d.get('updated_at', None), updated_by=d.get('updated_by', None)) - - - - - + return cls( + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + name=d.get("name", None), + objects=_repeated_dict(d, "objects", SharedDataObject), + owner=d.get("owner", None), + storage_location=d.get("storage_location", None), + storage_root=d.get("storage_root", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass class ShareToPrivilegeAssignment: privilege_assignments: Optional[List[PrivilegeAssignment]] = None """The privileges assigned to the principal.""" - + share_name: Optional[str] = None """The share name.""" - + def as_dict(self) -> dict: """Serializes the ShareToPrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] - if self.share_name is not None: body['share_name'] = self.share_name + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] + if self.share_name is not None: + body["share_name"] = self.share_name return body def as_shallow_dict(self) -> dict: """Serializes the ShareToPrivilegeAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments - if self.share_name is not None: body['share_name'] = self.share_name + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments + if self.share_name is not None: + body["share_name"] = self.share_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ShareToPrivilegeAssignment: """Deserializes the ShareToPrivilegeAssignment from a dictionary.""" - return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment), share_name=d.get('share_name', None)) - - + return cls( + privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment), + share_name=d.get("share_name", None), + ) @dataclass @@ -1831,39 +2181,39 @@ class SharedDataObject: name: str """A fully qualified name that uniquely identifies a data object. For example, a table's fully qualified name is in the format of `..
`,""" - + added_at: Optional[int] = None """The time when this data object is added to the share, in epoch milliseconds.""" - + added_by: Optional[str] = None """Username of the sharer.""" - + cdf_enabled: Optional[bool] = None """Whether to enable cdf or indicate if cdf is enabled on the shared object.""" - + comment: Optional[str] = None """A user-provided comment when adding the data object to the share.""" - + content: Optional[str] = None """The content of the notebook file when the data object type is NOTEBOOK_FILE. This should be base64 encoded. Required for adding a NOTEBOOK_FILE, optional for updating, ignored for other types.""" - + data_object_type: Optional[SharedDataObjectDataObjectType] = None """The type of the data object.""" - + history_data_sharing_status: Optional[SharedDataObjectHistoryDataSharingStatus] = None """Whether to enable or disable sharing of data history. If not specified, the default is **DISABLED**.""" - + partitions: Optional[List[Partition]] = None """Array of partitions for the shared data.""" - + shared_as: Optional[str] = None """A user-provided new name for the data object within the share. If this new name is not provided, the object's original name will be used as the `shared_as` name. The `shared_as` name must be unique within a share. For tables, the new name must follow the format of `.
`.""" - + start_version: Optional[int] = None """The start version associated with the object. This allows data providers to control the lowest object version that is accessible by clients. If specified, clients can query snapshots or @@ -1871,203 +2221,278 @@ class SharedDataObject: the version of the object at the time it was added to the share. NOTE: The start_version should be <= the `current` version of the object.""" - + status: Optional[SharedDataObjectStatus] = None """One of: **ACTIVE**, **PERMISSION_DENIED**.""" - + string_shared_as: Optional[str] = None """A user-provided new name for the shared object within the share. If this new name is not not provided, the object's original name will be used as the `string_shared_as` name. The `string_shared_as` name must be unique for objects of the same type within a Share. For notebooks, the new name should be the new notebook file name.""" - + def as_dict(self) -> dict: """Serializes the SharedDataObject into a dictionary suitable for use as a JSON request body.""" body = {} - if self.added_at is not None: body['added_at'] = self.added_at - if self.added_by is not None: body['added_by'] = self.added_by - if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled - if self.comment is not None: body['comment'] = self.comment - if self.content is not None: body['content'] = self.content - if self.data_object_type is not None: body['data_object_type'] = self.data_object_type.value - if self.history_data_sharing_status is not None: body['history_data_sharing_status'] = self.history_data_sharing_status.value - if self.name is not None: body['name'] = self.name - if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions] - if self.shared_as is not None: body['shared_as'] = self.shared_as - if self.start_version is not None: body['start_version'] = self.start_version - if self.status is not None: body['status'] = self.status.value - if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as + if self.added_at is not None: + body["added_at"] = self.added_at + if self.added_by is not None: + body["added_by"] = self.added_by + if self.cdf_enabled is not None: + body["cdf_enabled"] = self.cdf_enabled + if self.comment is not None: + body["comment"] = self.comment + if self.content is not None: + body["content"] = self.content + if self.data_object_type is not None: + body["data_object_type"] = self.data_object_type.value + if self.history_data_sharing_status is not None: + body["history_data_sharing_status"] = self.history_data_sharing_status.value + if self.name is not None: + body["name"] = self.name + if self.partitions: + body["partitions"] = [v.as_dict() for v in self.partitions] + if self.shared_as is not None: + body["shared_as"] = self.shared_as + if self.start_version is not None: + body["start_version"] = self.start_version + if self.status is not None: + body["status"] = self.status.value + if self.string_shared_as is not None: + body["string_shared_as"] = self.string_shared_as return body def as_shallow_dict(self) -> dict: """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes.""" body = {} - if self.added_at is not None: body['added_at'] = self.added_at - if self.added_by is not None: body['added_by'] = self.added_by - if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled - if self.comment is not None: body['comment'] = self.comment - if self.content is not None: body['content'] = self.content - if self.data_object_type is 
not None: body['data_object_type'] = self.data_object_type - if self.history_data_sharing_status is not None: body['history_data_sharing_status'] = self.history_data_sharing_status - if self.name is not None: body['name'] = self.name - if self.partitions: body['partitions'] = self.partitions - if self.shared_as is not None: body['shared_as'] = self.shared_as - if self.start_version is not None: body['start_version'] = self.start_version - if self.status is not None: body['status'] = self.status - if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as + if self.added_at is not None: + body["added_at"] = self.added_at + if self.added_by is not None: + body["added_by"] = self.added_by + if self.cdf_enabled is not None: + body["cdf_enabled"] = self.cdf_enabled + if self.comment is not None: + body["comment"] = self.comment + if self.content is not None: + body["content"] = self.content + if self.data_object_type is not None: + body["data_object_type"] = self.data_object_type + if self.history_data_sharing_status is not None: + body["history_data_sharing_status"] = self.history_data_sharing_status + if self.name is not None: + body["name"] = self.name + if self.partitions: + body["partitions"] = self.partitions + if self.shared_as is not None: + body["shared_as"] = self.shared_as + if self.start_version is not None: + body["start_version"] = self.start_version + if self.status is not None: + body["status"] = self.status + if self.string_shared_as is not None: + body["string_shared_as"] = self.string_shared_as return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SharedDataObject: """Deserializes the SharedDataObject from a dictionary.""" - return cls(added_at=d.get('added_at', None), added_by=d.get('added_by', None), cdf_enabled=d.get('cdf_enabled', None), comment=d.get('comment', None), content=d.get('content', None), data_object_type=_enum(d, 'data_object_type', SharedDataObjectDataObjectType), history_data_sharing_status=_enum(d, 'history_data_sharing_status', SharedDataObjectHistoryDataSharingStatus), name=d.get('name', None), partitions=_repeated_dict(d, 'partitions', Partition), shared_as=d.get('shared_as', None), start_version=d.get('start_version', None), status=_enum(d, 'status', SharedDataObjectStatus), string_shared_as=d.get('string_shared_as', None)) - - + return cls( + added_at=d.get("added_at", None), + added_by=d.get("added_by", None), + cdf_enabled=d.get("cdf_enabled", None), + comment=d.get("comment", None), + content=d.get("content", None), + data_object_type=_enum(d, "data_object_type", SharedDataObjectDataObjectType), + history_data_sharing_status=_enum( + d, "history_data_sharing_status", SharedDataObjectHistoryDataSharingStatus + ), + name=d.get("name", None), + partitions=_repeated_dict(d, "partitions", Partition), + shared_as=d.get("shared_as", None), + start_version=d.get("start_version", None), + status=_enum(d, "status", SharedDataObjectStatus), + string_shared_as=d.get("string_shared_as", None), + ) class SharedDataObjectDataObjectType(Enum): - - - FEATURE_SPEC = 'FEATURE_SPEC' - FUNCTION = 'FUNCTION' - MATERIALIZED_VIEW = 'MATERIALIZED_VIEW' - MODEL = 'MODEL' - NOTEBOOK_FILE = 'NOTEBOOK_FILE' - SCHEMA = 'SCHEMA' - STREAMING_TABLE = 'STREAMING_TABLE' - TABLE = 'TABLE' - VIEW = 'VIEW' + + FEATURE_SPEC = "FEATURE_SPEC" + FUNCTION = "FUNCTION" + MATERIALIZED_VIEW = "MATERIALIZED_VIEW" + MODEL = "MODEL" + NOTEBOOK_FILE = "NOTEBOOK_FILE" + SCHEMA = "SCHEMA" + STREAMING_TABLE = "STREAMING_TABLE" + TABLE = "TABLE" + VIEW = "VIEW" 
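Note: the enum above lists every kind of object that can be placed in a share. As a minimal usage sketch (not part of this patch; it assumes a configured WorkspaceClient, and `my_share` / `main.default.my_table` are placeholder names), adding a table to an existing share with this dataclass family and the update-action enum defined below might look like:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import (
    SharedDataObject,
    SharedDataObjectDataObjectType,
    SharedDataObjectHistoryDataSharingStatus,
    SharedDataObjectUpdate,
    SharedDataObjectUpdateAction,
)

w = WorkspaceClient()  # credentials resolved from the environment or ~/.databrickscfg

# Build an ADD update for a TABLE; history sharing defaults to DISABLED, so enable it explicitly.
update = SharedDataObjectUpdate(
    action=SharedDataObjectUpdateAction.ADD,
    data_object=SharedDataObject(
        name="main.default.my_table",  # placeholder <catalog>.<schema>.<table> name
        data_object_type=SharedDataObjectDataObjectType.TABLE,
        history_data_sharing_status=SharedDataObjectHistoryDataSharingStatus.ENABLED,
    ),
)

# Apply the change; update() returns the resulting ShareInfo.
share_info = w.shares.update(name="my_share", updates=[update])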
+ class SharedDataObjectHistoryDataSharingStatus(Enum): - - - DISABLED = 'DISABLED' - ENABLED = 'ENABLED' + + DISABLED = "DISABLED" + ENABLED = "ENABLED" + class SharedDataObjectStatus(Enum): - - - ACTIVE = 'ACTIVE' - PERMISSION_DENIED = 'PERMISSION_DENIED' + + ACTIVE = "ACTIVE" + PERMISSION_DENIED = "PERMISSION_DENIED" + @dataclass class SharedDataObjectUpdate: action: Optional[SharedDataObjectUpdateAction] = None """One of: **ADD**, **REMOVE**, **UPDATE**.""" - + data_object: Optional[SharedDataObject] = None """The data object that is being added, removed, or updated.""" - + def as_dict(self) -> dict: """Serializes the SharedDataObjectUpdate into a dictionary suitable for use as a JSON request body.""" body = {} - if self.action is not None: body['action'] = self.action.value - if self.data_object: body['data_object'] = self.data_object.as_dict() + if self.action is not None: + body["action"] = self.action.value + if self.data_object: + body["data_object"] = self.data_object.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SharedDataObjectUpdate into a shallow dictionary of its immediate attributes.""" body = {} - if self.action is not None: body['action'] = self.action - if self.data_object: body['data_object'] = self.data_object + if self.action is not None: + body["action"] = self.action + if self.data_object: + body["data_object"] = self.data_object return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SharedDataObjectUpdate: """Deserializes the SharedDataObjectUpdate from a dictionary.""" - return cls(action=_enum(d, 'action', SharedDataObjectUpdateAction), data_object=_from_dict(d, 'data_object', SharedDataObject)) - - + return cls( + action=_enum(d, "action", SharedDataObjectUpdateAction), + data_object=_from_dict(d, "data_object", SharedDataObject), + ) class SharedDataObjectUpdateAction(Enum): - - - ADD = 'ADD' - REMOVE = 'REMOVE' - UPDATE = 'UPDATE' + + ADD = "ADD" + REMOVE = "REMOVE" + UPDATE = "UPDATE" + class SharedSecurableKind(Enum): """The SecurableKind of a delta-shared object.""" - - FUNCTION_FEATURE_SPEC = 'FUNCTION_FEATURE_SPEC' - FUNCTION_REGISTERED_MODEL = 'FUNCTION_REGISTERED_MODEL' - FUNCTION_STANDARD = 'FUNCTION_STANDARD' + + FUNCTION_FEATURE_SPEC = "FUNCTION_FEATURE_SPEC" + FUNCTION_REGISTERED_MODEL = "FUNCTION_REGISTERED_MODEL" + FUNCTION_STANDARD = "FUNCTION_STANDARD" + @dataclass class Table: comment: Optional[str] = None """The comment of the table.""" - + id: Optional[str] = None """The id of the table.""" - + internal_attributes: Optional[TableInternalAttributes] = None """Internal information for D2D sharing that should not be disclosed to external users.""" - + materialization_namespace: Optional[str] = None """The catalog and schema of the materialized table""" - + materialized_table_name: Optional[str] = None """The name of a materialized table.""" - + name: Optional[str] = None """The name of the table.""" - + schema: Optional[str] = None """The name of the schema that the table belongs to.""" - + share: Optional[str] = None """The name of the share that the table belongs to.""" - + share_id: Optional[str] = None """The id of the share that the table belongs to.""" - + tags: Optional[List[catalog.TagKeyValue]] = None """The Tags of the table.""" - + def as_dict(self) -> dict: """Serializes the Table into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.id is not None: body['id'] = self.id - if self.internal_attributes: 
body['internal_attributes'] = self.internal_attributes.as_dict() - if self.materialization_namespace is not None: body['materialization_namespace'] = self.materialization_namespace - if self.materialized_table_name is not None: body['materialized_table_name'] = self.materialized_table_name - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema - if self.share is not None: body['share'] = self.share - if self.share_id is not None: body['share_id'] = self.share_id - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.comment is not None: + body["comment"] = self.comment + if self.id is not None: + body["id"] = self.id + if self.internal_attributes: + body["internal_attributes"] = self.internal_attributes.as_dict() + if self.materialization_namespace is not None: + body["materialization_namespace"] = self.materialization_namespace + if self.materialized_table_name is not None: + body["materialized_table_name"] = self.materialized_table_name + if self.name is not None: + body["name"] = self.name + if self.schema is not None: + body["schema"] = self.schema + if self.share is not None: + body["share"] = self.share + if self.share_id is not None: + body["share_id"] = self.share_id + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Table into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.id is not None: body['id'] = self.id - if self.internal_attributes: body['internal_attributes'] = self.internal_attributes - if self.materialization_namespace is not None: body['materialization_namespace'] = self.materialization_namespace - if self.materialized_table_name is not None: body['materialized_table_name'] = self.materialized_table_name - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema - if self.share is not None: body['share'] = self.share - if self.share_id is not None: body['share_id'] = self.share_id - if self.tags: body['tags'] = self.tags + if self.comment is not None: + body["comment"] = self.comment + if self.id is not None: + body["id"] = self.id + if self.internal_attributes: + body["internal_attributes"] = self.internal_attributes + if self.materialization_namespace is not None: + body["materialization_namespace"] = self.materialization_namespace + if self.materialized_table_name is not None: + body["materialized_table_name"] = self.materialized_table_name + if self.name is not None: + body["name"] = self.name + if self.schema is not None: + body["schema"] = self.schema + if self.share is not None: + body["share"] = self.share + if self.share_id is not None: + body["share_id"] = self.share_id + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Table: """Deserializes the Table from a dictionary.""" - return cls(comment=d.get('comment', None), id=d.get('id', None), internal_attributes=_from_dict(d, 'internal_attributes', TableInternalAttributes), materialization_namespace=d.get('materialization_namespace', None), materialized_table_name=d.get('materialized_table_name', None), name=d.get('name', None), schema=d.get('schema', None), share=d.get('share', None), share_id=d.get('share_id', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue)) - - + return cls( + comment=d.get("comment", None), + id=d.get("id", None), + 
internal_attributes=_from_dict(d, "internal_attributes", TableInternalAttributes), + materialization_namespace=d.get("materialization_namespace", None), + materialized_table_name=d.get("materialized_table_name", None), + name=d.get("name", None), + schema=d.get("schema", None), + share=d.get("share", None), + share_id=d.get("share_id", None), + tags=_repeated_dict(d, "tags", catalog.TagKeyValue), + ) @dataclass class TableInternalAttributes: """Internal information for D2D sharing that should not be disclosed to external users.""" - + parent_storage_location: Optional[str] = None """Will be populated in the reconciliation response for VIEW and FOREIGN_TABLE, with the value of the parent UC entity's storage_location, following the same logic as getManagedEntityPath in @@ -2075,387 +2500,473 @@ class TableInternalAttributes: VIEW/FOREIGN_TABLE for D2O queries. The value will be used on the recipient side to be whitelisted when SEG is enabled on the workspace of the recipient, to allow the recipient users to query this shared VIEW/FOREIGN_TABLE.""" - + storage_location: Optional[str] = None """The cloud storage location of a shard table with DIRECTORY_BASED_TABLE type.""" - + type: Optional[TableInternalAttributesSharedTableType] = None """The type of the shared table.""" - + view_definition: Optional[str] = None """The view definition of a shared view. DEPRECATED.""" - + def as_dict(self) -> dict: """Serializes the TableInternalAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parent_storage_location is not None: body['parent_storage_location'] = self.parent_storage_location - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.type is not None: body['type'] = self.type.value - if self.view_definition is not None: body['view_definition'] = self.view_definition + if self.parent_storage_location is not None: + body["parent_storage_location"] = self.parent_storage_location + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.type is not None: + body["type"] = self.type.value + if self.view_definition is not None: + body["view_definition"] = self.view_definition return body def as_shallow_dict(self) -> dict: """Serializes the TableInternalAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.parent_storage_location is not None: body['parent_storage_location'] = self.parent_storage_location - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.type is not None: body['type'] = self.type - if self.view_definition is not None: body['view_definition'] = self.view_definition + if self.parent_storage_location is not None: + body["parent_storage_location"] = self.parent_storage_location + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.type is not None: + body["type"] = self.type + if self.view_definition is not None: + body["view_definition"] = self.view_definition return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TableInternalAttributes: """Deserializes the TableInternalAttributes from a dictionary.""" - return cls(parent_storage_location=d.get('parent_storage_location', None), storage_location=d.get('storage_location', None), type=_enum(d, 'type', TableInternalAttributesSharedTableType), view_definition=d.get('view_definition', None)) - - + return cls( + parent_storage_location=d.get("parent_storage_location", None), + 
storage_location=d.get("storage_location", None), + type=_enum(d, "type", TableInternalAttributesSharedTableType), + view_definition=d.get("view_definition", None), + ) class TableInternalAttributesSharedTableType(Enum): - - - DIRECTORY_BASED_TABLE = 'DIRECTORY_BASED_TABLE' - FILE_BASED_TABLE = 'FILE_BASED_TABLE' - FOREIGN_TABLE = 'FOREIGN_TABLE' - MATERIALIZED_VIEW = 'MATERIALIZED_VIEW' - STREAMING_TABLE = 'STREAMING_TABLE' - VIEW = 'VIEW' - + DIRECTORY_BASED_TABLE = "DIRECTORY_BASED_TABLE" + FILE_BASED_TABLE = "FILE_BASED_TABLE" + FOREIGN_TABLE = "FOREIGN_TABLE" + MATERIALIZED_VIEW = "MATERIALIZED_VIEW" + STREAMING_TABLE = "STREAMING_TABLE" + VIEW = "VIEW" @dataclass class UpdateProvider: comment: Optional[str] = None """Description about the provider.""" - + name: Optional[str] = None """Name of the provider.""" - + new_name: Optional[str] = None """New name for the provider.""" - + owner: Optional[str] = None """Username of Provider owner.""" - + recipient_profile_str: Optional[str] = None """This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - + def as_dict(self) -> dict: """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.recipient_profile_str is not None: + body["recipient_profile_str"] = self.recipient_profile_str return body def as_shallow_dict(self) -> dict: """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.recipient_profile_str is not None: + body["recipient_profile_str"] = self.recipient_profile_str return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateProvider: """Deserializes the UpdateProvider from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), recipient_profile_str=d.get('recipient_profile_str', None)) - - + return cls( + comment=d.get("comment", None), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + recipient_profile_str=d.get("recipient_profile_str", None), + ) @dataclass class UpdateRecipient: comment: Optional[str] = None """Description about the recipient.""" - + expiration_time: Optional[int] = None """Expiration timestamp of the token, in epoch milliseconds.""" - + 
ip_access_list: Optional[IpAccessList] = None """IP Access List""" - + name: Optional[str] = None """Name of the recipient.""" - + new_name: Optional[str] = None """New name for the recipient.""" - + owner: Optional[str] = None """Username of the recipient owner.""" - + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None """Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write.""" - + def as_dict(self) -> dict: """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list.as_dict() + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.properties_kvpairs: + body["properties_kvpairs"] = self.properties_kvpairs.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.expiration_time is not None: body['expiration_time'] = self.expiration_time - if self.ip_access_list: body['ip_access_list'] = self.ip_access_list - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs + if self.comment is not None: + body["comment"] = self.comment + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.ip_access_list: + body["ip_access_list"] = self.ip_access_list + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.properties_kvpairs: + body["properties_kvpairs"] = self.properties_kvpairs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateRecipient: """Deserializes the UpdateRecipient from a dictionary.""" - return cls(comment=d.get('comment', None), expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs)) - - + return cls( + comment=d.get("comment", None), + expiration_time=d.get("expiration_time", None), + ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", 
None), + properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), + ) @dataclass class UpdateShare: comment: Optional[str] = None """User-provided free-form text description.""" - + name: Optional[str] = None """The name of the share.""" - + new_name: Optional[str] = None """New name for the share.""" - + owner: Optional[str] = None """Username of current owner of share.""" - + storage_root: Optional[str] = None """Storage root URL for the share.""" - + updates: Optional[List[SharedDataObjectUpdate]] = None """Array of shared data object updates.""" - + def as_dict(self) -> dict: """Serializes the UpdateShare into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.updates: body['updates'] = [v.as_dict() for v in self.updates] + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updates: + body["updates"] = [v.as_dict() for v in self.updates] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateShare into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.name is not None: body['name'] = self.name - if self.new_name is not None: body['new_name'] = self.new_name - if self.owner is not None: body['owner'] = self.owner - if self.storage_root is not None: body['storage_root'] = self.storage_root - if self.updates: body['updates'] = self.updates + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updates: + body["updates"] = self.updates return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateShare: """Deserializes the UpdateShare from a dictionary.""" - return cls(comment=d.get('comment', None), name=d.get('name', None), new_name=d.get('new_name', None), owner=d.get('owner', None), storage_root=d.get('storage_root', None), updates=_repeated_dict(d, 'updates', SharedDataObjectUpdate)) - - + return cls( + comment=d.get("comment", None), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), + storage_root=d.get("storage_root", None), + updates=_repeated_dict(d, "updates", SharedDataObjectUpdate), + ) @dataclass class UpdateSharePermissions: changes: Optional[List[PermissionsChange]] = None """Array of permissions change objects.""" - + name: Optional[str] = None """The name of the share.""" - + omit_permissions_list: Optional[bool] = None """Optional. 
Whether to return the latest permissions list of the share in the response.""" - + def as_dict(self) -> dict: """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.changes: body['changes'] = [v.as_dict() for v in self.changes] - if self.name is not None: body['name'] = self.name - if self.omit_permissions_list is not None: body['omit_permissions_list'] = self.omit_permissions_list + if self.changes: + body["changes"] = [v.as_dict() for v in self.changes] + if self.name is not None: + body["name"] = self.name + if self.omit_permissions_list is not None: + body["omit_permissions_list"] = self.omit_permissions_list return body def as_shallow_dict(self) -> dict: """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.changes: body['changes'] = self.changes - if self.name is not None: body['name'] = self.name - if self.omit_permissions_list is not None: body['omit_permissions_list'] = self.omit_permissions_list + if self.changes: + body["changes"] = self.changes + if self.name is not None: + body["name"] = self.name + if self.omit_permissions_list is not None: + body["omit_permissions_list"] = self.omit_permissions_list return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateSharePermissions: """Deserializes the UpdateSharePermissions from a dictionary.""" - return cls(changes=_repeated_dict(d, 'changes', PermissionsChange), name=d.get('name', None), omit_permissions_list=d.get('omit_permissions_list', None)) - - + return cls( + changes=_repeated_dict(d, "changes", PermissionsChange), + name=d.get("name", None), + omit_permissions_list=d.get("omit_permissions_list", None), + ) @dataclass class UpdateSharePermissionsResponse: privilege_assignments: Optional[List[PrivilegeAssignment]] = None """The privileges assigned to each principal""" - + def as_dict(self) -> dict: """Serializes the UpdateSharePermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.privilege_assignments: body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments] + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: """Serializes the UpdateSharePermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateSharePermissionsResponse: """Deserializes the UpdateSharePermissionsResponse from a dictionary.""" - return cls(privilege_assignments=_repeated_dict(d, 'privilege_assignments', PrivilegeAssignment)) - - + return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment)) @dataclass class Volume: comment: Optional[str] = None """The comment of the volume.""" - + id: Optional[str] = None """This id maps to the shared_volume_id in the database. The recipient needs the shared_volume_id for reconciliation, to check whether this volume is already present in the recipient's DB.""" - + internal_attributes: Optional[VolumeInternalAttributes] = None """Internal attributes for D2D sharing that should not be disclosed to external users.""" - + name: Optional[str] = None """The name of the volume.""" - + schema: Optional[str] = None """The
name of the schema that the volume belongs to.""" - + share: Optional[str] = None """The name of the share that the volume belongs to.""" - + share_id: Optional[str] = None """The id of the share that the volume belongs to.""" - + tags: Optional[List[catalog.TagKeyValue]] = None """The tags of the volume.""" - + def as_dict(self) -> dict: """Serializes the Volume into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.id is not None: body['id'] = self.id - if self.internal_attributes: body['internal_attributes'] = self.internal_attributes.as_dict() - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema - if self.share is not None: body['share'] = self.share - if self.share_id is not None: body['share_id'] = self.share_id - if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.comment is not None: + body["comment"] = self.comment + if self.id is not None: + body["id"] = self.id + if self.internal_attributes: + body["internal_attributes"] = self.internal_attributes.as_dict() + if self.name is not None: + body["name"] = self.name + if self.schema is not None: + body["schema"] = self.schema + if self.share is not None: + body["share"] = self.share + if self.share_id is not None: + body["share_id"] = self.share_id + if self.tags: + body["tags"] = [v.as_dict() for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the Volume into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: body['comment'] = self.comment - if self.id is not None: body['id'] = self.id - if self.internal_attributes: body['internal_attributes'] = self.internal_attributes - if self.name is not None: body['name'] = self.name - if self.schema is not None: body['schema'] = self.schema - if self.share is not None: body['share'] = self.share - if self.share_id is not None: body['share_id'] = self.share_id - if self.tags: body['tags'] = self.tags + if self.comment is not None: + body["comment"] = self.comment + if self.id is not None: + body["id"] = self.id + if self.internal_attributes: + body["internal_attributes"] = self.internal_attributes + if self.name is not None: + body["name"] = self.name + if self.schema is not None: + body["schema"] = self.schema + if self.share is not None: + body["share"] = self.share + if self.share_id is not None: + body["share_id"] = self.share_id + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Volume: """Deserializes the Volume from a dictionary.""" - return cls(comment=d.get('comment', None), id=d.get('id', None), internal_attributes=_from_dict(d, 'internal_attributes', VolumeInternalAttributes), name=d.get('name', None), schema=d.get('schema', None), share=d.get('share', None), share_id=d.get('share_id', None), tags=_repeated_dict(d, 'tags', catalog.TagKeyValue)) - - + return cls( + comment=d.get("comment", None), + id=d.get("id", None), + internal_attributes=_from_dict(d, "internal_attributes", VolumeInternalAttributes), + name=d.get("name", None), + schema=d.get("schema", None), + share=d.get("share", None), + share_id=d.get("share_id", None), + tags=_repeated_dict(d, "tags", catalog.TagKeyValue), + ) @dataclass class VolumeInternalAttributes: """Internal information for D2D sharing that should not be disclosed to external users.""" - + storage_location: Optional[str] = None """The cloud storage location
of the volume""" - + type: Optional[str] = None """The type of the shared volume.""" - + def as_dict(self) -> dict: """Serializes the VolumeInternalAttributes into a dictionary suitable for use as a JSON request body.""" body = {} - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.type is not None: body['type'] = self.type + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.type is not None: + body["type"] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the VolumeInternalAttributes into a shallow dictionary of its immediate attributes.""" body = {} - if self.storage_location is not None: body['storage_location'] = self.storage_location - if self.type is not None: body['type'] = self.type + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VolumeInternalAttributes: """Deserializes the VolumeInternalAttributes from a dictionary.""" - return cls(storage_location=d.get('storage_location', None), type=d.get('type', None)) - - - - + return cls(storage_location=d.get("storage_location", None), type=d.get("type", None)) class ProvidersAPI: """A data provider is an object representing the organization in the real world who shares the data. A provider contains shares which further contain the shared data.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str, authentication_type: AuthenticationType - , * - , comment: Optional[str] = None, recipient_profile_str: Optional[str] = None) -> ProviderInfo: + def create( + self, + name: str, + authentication_type: AuthenticationType, + *, + comment: Optional[str] = None, + recipient_profile_str: Optional[str] = None, + ) -> ProviderInfo: """Create an auth provider. - + Creates a new authentication provider minimally based on a name and authentication type. The caller must be an admin on the metastore. - + :param name: str The name of the Provider. :param authentication_type: :class:`AuthenticationType` @@ -2465,88 +2976,74 @@ def create(self :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. - + :returns: :class:`ProviderInfo` """ body = {} - if authentication_type is not None: body['authentication_type'] = authentication_type.value - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if recipient_profile_str is not None: body['recipient_profile_str'] = recipient_profile_str - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/providers', body=body - - , headers=headers - ) + if authentication_type is not None: + body["authentication_type"] = authentication_type.value + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if recipient_profile_str is not None: + body["recipient_profile_str"] = recipient_profile_str + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/providers", body=body, headers=headers) return ProviderInfo.from_dict(res) - - - - - def delete(self - , name: str - ): + def delete(self, name: str): """Delete a provider. 
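A minimal usage sketch for the create call above, assuming a configured `WorkspaceClient` (here `w`) that mounts this service as `w.providers`; the profile keys follow the Delta Sharing profile-file format and every value is a placeholder:

    import json

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.sharing import AuthenticationType

    w = WorkspaceClient()
    # recipient_profile_str is required for TOKEN (and OAUTH_CLIENT_CREDENTIALS) providers.
    profile = json.dumps(
        {
            "shareCredentialsVersion": 1,
            "endpoint": "https://sharing.example.com/delta-sharing/",
            "bearerToken": "<token>",
        }
    )
    provider = w.providers.create(
        name="my_provider",
        authentication_type=AuthenticationType.TOKEN,
        comment="created from the SDK",
        recipient_profile_str=profile,
    )
    print(provider.name)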
- + Deletes an authentication provider, if the caller is a metastore admin or is the owner of the provider. - + :param name: str Name of the provider. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/providers/{name}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/providers/{name}", headers=headers) - def get(self - , name: str - ) -> ProviderInfo: + def get(self, name: str) -> ProviderInfo: """Get a provider. - + Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the provider. - + :param name: str Name of the provider. - + :returns: :class:`ProviderInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/providers/{name}' - - , headers=headers - ) - return ProviderInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , data_provider_global_metastore_id: Optional[str] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderInfo]: + res = self._api.do("GET", f"/api/2.1/unity-catalog/providers/{name}", headers=headers) + return ProviderInfo.from_dict(res) + + def list( + self, + *, + data_provider_global_metastore_id: Optional[str] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[ProviderInfo]: """List providers. - + Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There is no guarantee of a specific ordering of the elements in the array. - + :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. @@ -2560,44 +3057,47 @@ def list(self from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
- + :returns: Iterator over :class:`ProviderInfo` """ - + query = {} - if data_provider_global_metastore_id is not None: query['data_provider_global_metastore_id'] = data_provider_global_metastore_id - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - if "max_results" not in query: query['max_results'] = 0 + if data_provider_global_metastore_id is not None: + query["data_provider_global_metastore_id"] = data_provider_global_metastore_id + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + if "max_results" not in query: + query["max_results"] = 0 while True: - json = self._api.do('GET','/api/2.1/unity-catalog/providers', query=query - - , headers=headers - ) - if 'providers' in json: - for v in json['providers']: - yield ProviderInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def list_provider_share_assets(self - , provider_name: str, share_name: str - , * - , function_max_results: Optional[int] = None, notebook_max_results: Optional[int] = None, table_max_results: Optional[int] = None, volume_max_results: Optional[int] = None) -> ListProviderShareAssetsResponse: + json = self._api.do("GET", "/api/2.1/unity-catalog/providers", query=query, headers=headers) + if "providers" in json: + for v in json["providers"]: + yield ProviderInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_provider_share_assets( + self, + provider_name: str, + share_name: str, + *, + function_max_results: Optional[int] = None, + notebook_max_results: Optional[int] = None, + table_max_results: Optional[int] = None, + volume_max_results: Optional[int] = None, + ) -> ListProviderShareAssetsResponse: """List assets by provider share. - + Get arrays of assets associated with a specified provider's share. The caller is the recipient of the share. - + :param provider_name: str The name of the provider who owns the share. :param share_name: str @@ -2610,37 +3110,37 @@ def list_provider_share_assets(self Maximum number of tables to return. :param volume_max_results: int (optional) Maximum number of volumes to return. 
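The `page_token` loop above is internal to the generated `list` methods: callers just iterate, and pages are fetched lazily. A sketch, assuming the same `w` client as before:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Omitting max_results lets the server choose the page size (the loop injects max_results=0).
    for p in w.providers.list():
        print(p.name, p.authentication_type)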
- + :returns: :class:`ListProviderShareAssetsResponse` """ - + query = {} - if function_max_results is not None: query['function_max_results'] = function_max_results - if notebook_max_results is not None: query['notebook_max_results'] = notebook_max_results - if table_max_results is not None: query['table_max_results'] = table_max_results - if volume_max_results is not None: query['volume_max_results'] = volume_max_results - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/data-sharing/providers/{provider_name}/shares/{share_name}', query=query - - , headers=headers - ) + if function_max_results is not None: + query["function_max_results"] = function_max_results + if notebook_max_results is not None: + query["notebook_max_results"] = notebook_max_results + if table_max_results is not None: + query["table_max_results"] = table_max_results + if volume_max_results is not None: + query["volume_max_results"] = volume_max_results + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/data-sharing/providers/{provider_name}/shares/{share_name}", query=query, headers=headers + ) return ListProviderShareAssetsResponse.from_dict(res) - - - - - def list_shares(self - , name: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ProviderShare]: + def list_shares( + self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ProviderShare]: """List shares by Provider. - + Gets an array of a specified provider's shares within the metastore where: - + * the caller is a metastore admin, or * the caller is the owner. - + :param name: str Name of the provider in which to list shares. :param max_results: int (optional) @@ -2653,44 +3153,45 @@ def list_shares(self response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
- + :returns: Iterator over :class:`ProviderShare` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - if "max_results" not in query: query['max_results'] = 0 + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + if "max_results" not in query: + query["max_results"] = 0 while True: - json = self._api.do('GET',f'/api/2.1/unity-catalog/providers/{name}/shares', query=query - - , headers=headers - ) - if 'shares' in json: - for v in json['shares']: - yield ProviderShare.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def update(self - , name: str - , * - , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, recipient_profile_str: Optional[str] = None) -> ProviderInfo: + json = self._api.do("GET", f"/api/2.1/unity-catalog/providers/{name}/shares", query=query, headers=headers) + if "shares" in json: + for v in json["shares"]: + yield ProviderShare.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + name: str, + *, + comment: Optional[str] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + recipient_profile_str: Optional[str] = None, + ) -> ProviderInfo: """Update a provider. - + Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. If the update changes the provider name, the caller must be both a metastore admin and the owner of the provider. - + :param name: str Name of the provider. :param comment: str (optional) @@ -2702,92 +3203,79 @@ def update(self :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. - + :returns: :class:`ProviderInfo` """ body = {} - if comment is not None: body['comment'] = comment - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if recipient_profile_str is not None: body['recipient_profile_str'] = recipient_profile_str - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/providers/{name}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if recipient_profile_str is not None: + body["recipient_profile_str"] = recipient_profile_str + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/providers/{name}", body=body, headers=headers) return ProviderInfo.from_dict(res) - - + class RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data provider to download the credential file that includes the access token. 
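A sketch of that activation flow from the recipient's side, assuming the service is mounted as `w.recipient_activation` and using a placeholder for the link received from the provider:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    activation_url = "<one-time-activation-url>"  # shared out of band by the data provider
    # Validate the link, then redeem it; the credential can be downloaded only once.
    w.recipient_activation.get_activation_url_info(activation_url)
    token = w.recipient_activation.retrieve_token(activation_url)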
The recipient will then use the credential file to establish a secure connection with the provider to receive the shared data. - + Note that you can download the credential file only once. Recipients should treat the downloaded credential as a secret and must not share it outside of their organization.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get_activation_url_info(self - , activation_url: str - ): + def get_activation_url_info(self, activation_url: str): """Get a share activation URL. - + Gets an activation URL for a share. - + :param activation_url: str The one-time activation URL. It also accepts an activation token. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('GET',f'/api/2.1/unity-catalog/public/data_sharing_activation_info/{activation_url}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def retrieve_token(self - , activation_url: str - ) -> RetrieveTokenResponse: + self._api.do( + "GET", f"/api/2.1/unity-catalog/public/data_sharing_activation_info/{activation_url}", headers=headers + ) + + def retrieve_token(self, activation_url: str) -> RetrieveTokenResponse: """Get an access token. - + Retrieve an access token with an activation URL. This is a public API without any authentication. - + :param activation_url: str The one-time activation URL. It also accepts an activation token. - + :returns: :class:`RetrieveTokenResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/public/data_sharing_activation/{activation_url}' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/unity-catalog/public/data_sharing_activation/{activation_url}", headers=headers + ) return RetrieveTokenResponse.from_dict(res) - - + class RecipientFederationPoliciesAPI: """The Recipient Federation Policies APIs are only applicable in the open sharing model where the recipient object has the authentication type of `OIDC_RECIPIENT`, enabling data sharing from Databricks to @@ -2804,175 +3292,148 @@ class RecipientFederationPoliciesAPI: Multi-Factor Authentication (MFA), and enhances security by minimizing the risk of credential leakage through the use of short-lived, expiring tokens. It is designed for strong identity governance, secure cross-platform data sharing, and reduced operational overhead for credential management. - + For more information, see https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security and https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , recipient_name: str, policy: FederationPolicy - ) -> FederationPolicy: + def create(self, recipient_name: str, policy: FederationPolicy) -> FederationPolicy: """Create recipient federation policy. - + Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must be the owner of the recipient. When sharing data from Databricks to non-Databricks clients, you can define a federation policy to authenticate non-Databricks recipients. The federation policy validates OIDC claims in federated tokens and is defined at the recipient level. This enables secretless sharing clients to authenticate using OIDC tokens. - + Supported scenarios for federation policies: 1.
**User-to-Machine (U2M) flow** (e.g., PowerBI): A user accesses a resource using their own identity. 2. **Machine-to-Machine (M2M) flow** (e.g., OAuth App): An OAuth App accesses a resource using its own identity, typically for tasks like running nightly jobs. - + For an overview, refer to: - Blog post: Overview of feature: https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security - + For detailed configuration guides based on your use case: - Creating a Federation Policy as a provider: https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed - Configuration and usage for Machine-to-Machine (M2M) applications (e.g., Python Delta Sharing Client): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-m2m - Configuration and usage for User-to-Machine (U2M) applications (e.g., PowerBI): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-u2m - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being created. :param policy: :class:`FederationPolicy` - + :returns: :class:`FederationPolicy` """ body = policy.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies', body=body - - , headers=headers - ) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies", body=body, headers=headers + ) return FederationPolicy.from_dict(res) - - - - - def delete(self - , recipient_name: str, name: str - ): + def delete(self, recipient_name: str, name: str): """Delete recipient federation policy. - + Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being deleted. :param name: str Name of the policy. This is the name of the policy to be deleted. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do( + "DELETE", f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}", headers=headers + ) - def get_federation_policy(self - , recipient_name: str, name: str - ) -> FederationPolicy: + def get_federation_policy(self, recipient_name: str, name: str) -> FederationPolicy: """Get recipient federation policy. - + Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being retrieved. :param name: str Name of the policy. This is the name of the policy to be retrieved. 
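A sketch of the M2M scenario for the create call above, assuming the service is mounted as `w.recipient_federation_policies` and that the `FederationPolicy`/`OidcFederationPolicy` dataclasses in this module carry issuer, audiences, subject_claim, and subject fields; the identity-provider values are illustrative:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.sharing import FederationPolicy, OidcFederationPolicy

    w = WorkspaceClient()
    policy = w.recipient_federation_policies.create(
        recipient_name="my_oidc_recipient",
        policy=FederationPolicy(
            comment="nightly sync job",
            oidc_policy=OidcFederationPolicy(
                issuer="https://login.example.com",  # illustrative IdP issuer
                audiences=["databricks-delta-sharing"],
                subject_claim="sub",
                subject="nightly-job-client-id",
            ),
        ),
    )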
- + :returns: :class:`FederationPolicy` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}' - - , headers=headers - ) - return FederationPolicy.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - , recipient_name: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[FederationPolicy]: + res = self._api.do( + "GET", f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}", headers=headers + ) + return FederationPolicy.from_dict(res) + + def list( + self, recipient_name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[FederationPolicy]: """List recipient federation policies. - + Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policies are being listed. :param max_results: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` """ - - query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies', query=query - - , headers=headers - ) - if 'policies' in json: - for v in json['policies']: - yield FederationPolicy.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , recipient_name: str, name: str, policy: FederationPolicy - , * - , update_mask: Optional[str] = None) -> FederationPolicy: + while True: + json = self._api.do( + "GET", + f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies", + query=query, + headers=headers, + ) + if "policies" in json: + for v in json["policies"]: + yield FederationPolicy.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, recipient_name: str, name: str, policy: FederationPolicy, *, update_mask: Optional[str] = None + ) -> FederationPolicy: """Update recipient federation policy. - + Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being updated. :param name: str @@ -2984,57 +3445,64 @@ def update(self should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'comment,oidc_policy.audiences'. 
- + :returns: :class:`FederationPolicy` """ body = policy.as_dict() query = {} - if update_mask is not None: query['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}', query=query, body=body - - , headers=headers - ) + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", + f"/api/2.0/data-sharing/recipients/{recipient_name}/federation-policies/{name}", + query=query, + body=body, + headers=headers, + ) return FederationPolicy.from_dict(res) - - + class RecipientsAPI: """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares. The way how sharing works differs depending on whether or not your recipient has access to a Databricks workspace that is enabled for Unity Catalog: - + - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier is the key identifier that enables the secure connection. This sharing mode is called **Databricks-to-Databricks sharing**. - + - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you create a recipient object, Databricks generates an activation link you can send to the recipient. The recipient follows the activation link to download the credential file, and then uses the credential file to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str, authentication_type: AuthenticationType - , * - , comment: Optional[str] = None, data_recipient_global_metastore_id: Optional[str] = None, expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, sharing_code: Optional[str] = None) -> RecipientInfo: + def create( + self, + name: str, + authentication_type: AuthenticationType, + *, + comment: Optional[str] = None, + data_recipient_global_metastore_id: Optional[str] = None, + expiration_time: Optional[int] = None, + ip_access_list: Optional[IpAccessList] = None, + owner: Optional[str] = None, + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, + sharing_code: Optional[str] = None, + ) -> RecipientInfo: """Create a share recipient. - + Creates a new recipient with the delta sharing authentication type in the metastore. The caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. - + :param name: str Name of Recipient. :param authentication_type: :class:`AuthenticationType` @@ -3058,94 +3526,85 @@ def create(self :param sharing_code: str (optional) The one-time sharing code provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. 
- + :returns: :class:`RecipientInfo` """ body = {} - if authentication_type is not None: body['authentication_type'] = authentication_type.value - if comment is not None: body['comment'] = comment - if data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id - if expiration_time is not None: body['expiration_time'] = expiration_time - if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() - if name is not None: body['name'] = name - if owner is not None: body['owner'] = owner - if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict() - if sharing_code is not None: body['sharing_code'] = sharing_code - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/recipients', body=body - - , headers=headers - ) + if authentication_type is not None: + body["authentication_type"] = authentication_type.value + if comment is not None: + body["comment"] = comment + if data_recipient_global_metastore_id is not None: + body["data_recipient_global_metastore_id"] = data_recipient_global_metastore_id + if expiration_time is not None: + body["expiration_time"] = expiration_time + if ip_access_list is not None: + body["ip_access_list"] = ip_access_list.as_dict() + if name is not None: + body["name"] = name + if owner is not None: + body["owner"] = owner + if properties_kvpairs is not None: + body["properties_kvpairs"] = properties_kvpairs.as_dict() + if sharing_code is not None: + body["sharing_code"] = sharing_code + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/recipients", body=body, headers=headers) return RecipientInfo.from_dict(res) - - - - - def delete(self - , name: str - ): + def delete(self, name: str): """Delete a share recipient. - + Deletes the specified recipient from the metastore. The caller must be the owner of the recipient. - + :param name: str Name of the recipient. - - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/recipients/{name}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/recipients/{name}", headers=headers) - def get(self - , name: str - ) -> RecipientInfo: + def get(self, name: str) -> RecipientInfo: """Get a share recipient. - + Gets a share recipient from the metastore if: - + * the caller is the owner of the share recipient, or: * is a metastore admin - + :param name: str Name of the recipient. - + :returns: :class:`RecipientInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/recipients/{name}' - - , headers=headers - ) - return RecipientInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/recipients/{name}", headers=headers) + return RecipientInfo.from_dict(res) - def list(self - - , * - , data_recipient_global_metastore_id: Optional[str] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[RecipientInfo]: + def list( + self, + *, + data_recipient_global_metastore_id: Optional[str] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> Iterator[RecipientInfo]: """List share recipients. 
- + Gets an array of all share recipients within the current metastore where: - + * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. @@ -3159,75 +3618,66 @@ def list(self from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`RecipientInfo` """ - + query = {} - if data_recipient_global_metastore_id is not None: query['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - if "max_results" not in query: query['max_results'] = 0 + if data_recipient_global_metastore_id is not None: + query["data_recipient_global_metastore_id"] = data_recipient_global_metastore_id + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + if "max_results" not in query: + query["max_results"] = 0 while True: - json = self._api.do('GET','/api/2.1/unity-catalog/recipients', query=query - - , headers=headers - ) - if 'recipients' in json: - for v in json['recipients']: - yield RecipientInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def rotate_token(self - , name: str, existing_token_expire_in_seconds: int - ) -> RecipientInfo: + json = self._api.do("GET", "/api/2.1/unity-catalog/recipients", query=query, headers=headers) + if "recipients" in json: + for v in json["recipients"]: + yield RecipientInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo: """Rotate a token. - + Refreshes the specified recipient's delta sharing authentication token with the provided token info. The caller must be the owner of the recipient. - + :param name: str The name of the Recipient. :param existing_token_expire_in_seconds: int The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of the existing token only to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire the existing token immediately; a negative number will return an error.
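A sketch of an immediate rotation using the semantics above (0 expires the current token right away and mints a new one):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    rotated = w.recipients.rotate_token(
        name="analytics_partner",
        existing_token_expire_in_seconds=0,  # expire the existing token immediately
    )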
- + :returns: :class:`RecipientInfo` """ body = {} - if existing_token_expire_in_seconds is not None: body['existing_token_expire_in_seconds'] = existing_token_expire_in_seconds - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.1/unity-catalog/recipients/{name}/rotate-token', body=body - - , headers=headers - ) - return RecipientInfo.from_dict(res) + if existing_token_expire_in_seconds is not None: + body["existing_token_expire_in_seconds"] = existing_token_expire_in_seconds + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.1/unity-catalog/recipients/{name}/rotate-token", body=body, headers=headers) + return RecipientInfo.from_dict(res) - def share_permissions(self - , name: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> GetRecipientSharePermissionsResponse: + def share_permissions( + self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> GetRecipientSharePermissionsResponse: """Get recipient share permissions. - + Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the owner of the Recipient. - + :param name: str The name of the Recipient. :param max_results: int (optional) @@ -3240,35 +3690,41 @@ def share_permissions(self unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetRecipientSharePermissionsResponse` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/recipients/{name}/share-permissions', query=query - - , headers=headers - ) + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.1/unity-catalog/recipients/{name}/share-permissions", query=query, headers=headers + ) return GetRecipientSharePermissionsResponse.from_dict(res) - - - - - def update(self - , name: str - , * - , comment: Optional[str] = None, expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, new_name: Optional[str] = None, owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None) -> RecipientInfo: + def update( + self, + name: str, + *, + comment: Optional[str] = None, + expiration_time: Optional[int] = None, + ip_access_list: Optional[IpAccessList] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, + ) -> RecipientInfo: """Update a share recipient. - + Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. If the recipient name will be updated, the user must be both a metastore admin and the owner of the recipient. - + :param name: str Name of the recipient. :param comment: str (optional) @@ -3285,142 +3741,115 @@ def update(self Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write. 
- + :returns: :class:`RecipientInfo` """ body = {} - if comment is not None: body['comment'] = comment - if expiration_time is not None: body['expiration_time'] = expiration_time - if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/recipients/{name}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if expiration_time is not None: + body["expiration_time"] = expiration_time + if ip_access_list is not None: + body["ip_access_list"] = ip_access_list.as_dict() + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if properties_kvpairs is not None: + body["properties_kvpairs"] = properties_kvpairs.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/recipients/{name}", body=body, headers=headers) return RecipientInfo.from_dict(res) - - + class SharesAPI: """A share is a container instantiated with :method:shares/create. Once created you can iteratively register a collection of existing data assets defined within the metastore using :method:shares/update. You can register data assets under their original name, qualified by their original schema, or provide alternate exposed names.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str - , * - , comment: Optional[str] = None, storage_root: Optional[str] = None) -> ShareInfo: + def create(self, name: str, *, comment: Optional[str] = None, storage_root: Optional[str] = None) -> ShareInfo: """Create a share. - + Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. - + :param name: str Name of the share. :param comment: str (optional) User-provided free-form text description. :param storage_root: str (optional) Storage root URL for the share. - + :returns: :class:`ShareInfo` """ body = {} - if comment is not None: body['comment'] = comment - if name is not None: body['name'] = name - if storage_root is not None: body['storage_root'] = storage_root - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.1/unity-catalog/shares', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if name is not None: + body["name"] = name + if storage_root is not None: + body["storage_root"] = storage_root + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.1/unity-catalog/shares", body=body, headers=headers) return ShareInfo.from_dict(res) - - - - - def delete(self - , name: str - ): + def delete(self, name: str): """Delete a share. - + Deletes a data object share from the metastore. The caller must be an owner of the share. - + :param name: str The name of the share. 
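A sketch of the create-then-register workflow described above, assuming `SharedDataObject`, `SharedDataObjectUpdate`, and `SharedDataObjectUpdateAction` from this module and a placeholder three-level table name:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.sharing import (
        SharedDataObject,
        SharedDataObjectUpdate,
        SharedDataObjectUpdateAction,
    )

    w = WorkspaceClient()
    share = w.shares.create(name="quarterly_metrics", comment="curated metrics tables")
    # Attach an existing table; the share owner must hold SELECT on it indefinitely.
    w.shares.update(
        name=share.name,
        updates=[
            SharedDataObjectUpdate(
                action=SharedDataObjectUpdateAction.ADD,
                data_object=SharedDataObject(name="main.metrics.revenue"),
            )
        ],
    )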
- - + + """ - + headers = {} - - self._api.do('DELETE',f'/api/2.1/unity-catalog/shares/{name}' - - , headers=headers - ) - - - - + self._api.do("DELETE", f"/api/2.1/unity-catalog/shares/{name}", headers=headers) - def get(self - , name: str - , * - , include_shared_data: Optional[bool] = None) -> ShareInfo: + def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> ShareInfo: """Get a share. - + Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param include_shared_data: bool (optional) Query for data to include in the share. - + :returns: :class:`ShareInfo` """ - + query = {} - if include_shared_data is not None: query['include_shared_data'] = include_shared_data - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/shares/{name}', query=query - - , headers=headers - ) - return ShareInfo.from_dict(res) + if include_shared_data is not None: + query["include_shared_data"] = include_shared_data + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", f"/api/2.1/unity-catalog/shares/{name}", query=query, headers=headers) + return ShareInfo.from_dict(res) - def list(self - - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ShareInfo]: + def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ShareInfo]: """List shares. - + Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of shares to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum of this @@ -3431,43 +3860,38 @@ def list(self response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. 
- + :returns: Iterator over :class:`ShareInfo` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - if "max_results" not in query: query['max_results'] = 0 + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + if "max_results" not in query: + query["max_results"] = 0 while True: - json = self._api.do('GET','/api/2.1/unity-catalog/shares', query=query - - , headers=headers - ) - if 'shares' in json: - for v in json['shares']: - yield ShareInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - - - - def share_permissions(self - , name: str - , * - , max_results: Optional[int] = None, page_token: Optional[str] = None) -> GetSharePermissionsResponse: + json = self._api.do("GET", "/api/2.1/unity-catalog/shares", query=query, headers=headers) + if "shares" in json: + for v in json["shares"]: + yield ShareInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def share_permissions( + self, name: str, *, max_results: Optional[int] = None, page_token: Optional[str] = None + ) -> GetSharePermissionsResponse: """Get permissions. - + Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param max_results: int (optional) @@ -3480,47 +3904,50 @@ def share_permissions(self unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetSharePermissionsResponse` """ - + query = {} - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.1/unity-catalog/shares/{name}/permissions', query=query - - , headers=headers - ) + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.1/unity-catalog/shares/{name}/permissions", query=query, headers=headers) return GetSharePermissionsResponse.from_dict(res) - - - - - def update(self - , name: str - , * - , comment: Optional[str] = None, new_name: Optional[str] = None, owner: Optional[str] = None, storage_root: Optional[str] = None, updates: Optional[List[SharedDataObjectUpdate]] = None) -> ShareInfo: + def update( + self, + name: str, + *, + comment: Optional[str] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, + storage_root: Optional[str] = None, + updates: Optional[List[SharedDataObjectUpdate]] = None, + ) -> ShareInfo: """Update a share. - + Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + In the case that the share name is changed, **updateShare** requires that the caller is both the share owner and a metastore admin. - + If there are notebook files in the share, the __storage_root__ field cannot be updated. 
- + For each table that is added through this method, the share owner must also have **SELECT** privilege on the table. This privilege must be maintained indefinitely for recipients to be able to access the table. Typically, you should use a group as the share owner. - + Table removals through **update** do not require additional privileges. - + :param name: str The name of the share. :param comment: str (optional) @@ -3533,58 +3960,61 @@ def update( Storage root URL for the share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) Array of shared data object updates. - + :returns: :class:`ShareInfo` """ body = {} - if comment is not None: body['comment'] = comment - if new_name is not None: body['new_name'] = new_name - if owner is not None: body['owner'] = owner - if storage_root is not None: body['storage_root'] = storage_root - if updates is not None: body['updates'] = [v.as_dict() for v in updates] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/shares/{name}', body=body - - , headers=headers - ) + if comment is not None: + body["comment"] = comment + if new_name is not None: + body["new_name"] = new_name + if owner is not None: + body["owner"] = owner + if storage_root is not None: + body["storage_root"] = storage_root + if updates is not None: + body["updates"] = [v.as_dict() for v in updates] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/shares/{name}", body=body, headers=headers) return ShareInfo.from_dict(res) - - - - - def update_permissions(self - , name: str - , * - , changes: Optional[List[PermissionsChange]] = None, omit_permissions_list: Optional[bool] = None) -> UpdateSharePermissionsResponse: + def update_permissions( + self, + name: str, + *, + changes: Optional[List[PermissionsChange]] = None, + omit_permissions_list: Optional[bool] = None, + ) -> UpdateSharePermissionsResponse: + """Update permissions. - + Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. - + For new recipient grants, the user must also be the recipient owner or metastore admin. Recipient revocations do not require additional privileges. - + :param name: str The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) Array of permissions change objects. :param omit_permissions_list: bool (optional) Optional. Whether to return the latest permissions list of the share in the response.
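A sketch of granting a recipient access on the share, assuming `PermissionsChange` here mirrors the JSON body (a `principal` plus `add`/`remove` privilege lists):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.sharing import PermissionsChange

    w = WorkspaceClient()
    resp = w.shares.update_permissions(
        name="quarterly_metrics",
        changes=[PermissionsChange(principal="analytics_partner", add=["SELECT"])],
    )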
- + :returns: :class:`UpdateSharePermissionsResponse` """ body = {} - if changes is not None: body['changes'] = [v.as_dict() for v in changes] - if omit_permissions_list is not None: body['omit_permissions_list'] = omit_permissions_list - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.1/unity-catalog/shares/{name}/permissions', body=body - - , headers=headers - ) - return UpdateSharePermissionsResponse.from_dict(res) + if changes is not None: + body["changes"] = [v.as_dict() for v in changes] + if omit_permissions_list is not None: + body["omit_permissions_list"] = omit_permissions_list + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - \ No newline at end of file + res = self._api.do("PATCH", f"/api/2.1/unity-catalog/shares/{name}/permissions", body=body, headers=headers) + return UpdateSharePermissionsResponse.from_dict(res) diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index cf3dc44bf..0e23b7a47 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -1,852 +1,1078 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Callable, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AccessControl: group_name: Optional[str] = None - + permission_level: Optional[PermissionLevel] = None """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query * `CAN_MANAGE`: Can manage the query""" - + user_name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AccessControl into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the AccessControl into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if 
self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AccessControl: """Deserializes the AccessControl from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', PermissionLevel), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", PermissionLevel), + user_name=d.get("user_name", None), + ) class Aggregation(Enum): - - - AVG = 'AVG' - COUNT = 'COUNT' - COUNT_DISTINCT = 'COUNT_DISTINCT' - MAX = 'MAX' - MEDIAN = 'MEDIAN' - MIN = 'MIN' - STDDEV = 'STDDEV' - SUM = 'SUM' + + AVG = "AVG" + COUNT = "COUNT" + COUNT_DISTINCT = "COUNT_DISTINCT" + MAX = "MAX" + MEDIAN = "MEDIAN" + MIN = "MIN" + STDDEV = "STDDEV" + SUM = "SUM" + @dataclass class Alert: condition: Optional[AlertCondition] = None """Trigger conditions of the alert.""" - + create_time: Optional[str] = None """The timestamp indicating when the alert was created.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + id: Optional[str] = None """UUID identifying the alert.""" - + lifecycle_state: Optional[LifecycleState] = None """The workspace state of the alert. Used for tracking trashed status.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when alert returns back to normal.""" - + owner_user_name: Optional[str] = None """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the alert.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - + state: Optional[AlertState] = None """Current state of the alert's trigger status. 
This field is set to UNKNOWN if the alert has not yet been evaluated or if it ran into an error during the last evaluation.""" - + trigger_time: Optional[str] = None """Timestamp when the alert was last triggered, if the alert has been triggered before.""" - + update_time: Optional[str] = None """The timestamp indicating when the alert was updated.""" - + def as_dict(self) -> dict: """Serializes the Alert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition: body['condition'] = self.condition.as_dict() - if self.create_time is not None: body['create_time'] = self.create_time - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id - if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_id is not None: body['query_id'] = self.query_id - if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger - if self.state is not None: body['state'] = self.state.value - if self.trigger_time is not None: body['trigger_time'] = self.trigger_time - if self.update_time is not None: body['update_time'] = self.update_time + if self.condition: + body["condition"] = self.condition.as_dict() + if self.create_time is not None: + body["create_time"] = self.create_time + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id + if self.lifecycle_state is not None: + body["lifecycle_state"] = self.lifecycle_state.value + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_id is not None: + body["query_id"] = self.query_id + if self.seconds_to_retrigger is not None: + body["seconds_to_retrigger"] = self.seconds_to_retrigger + if self.state is not None: + body["state"] = self.state.value + if self.trigger_time is not None: + body["trigger_time"] = self.trigger_time + if self.update_time is not None: + body["update_time"] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the Alert into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition: body['condition'] = self.condition - if self.create_time is not None: body['create_time'] = self.create_time - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id - if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.owner_user_name is not None: body['owner_user_name'] = 
self.owner_user_name - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_id is not None: body['query_id'] = self.query_id - if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger - if self.state is not None: body['state'] = self.state - if self.trigger_time is not None: body['trigger_time'] = self.trigger_time - if self.update_time is not None: body['update_time'] = self.update_time + if self.condition: + body["condition"] = self.condition + if self.create_time is not None: + body["create_time"] = self.create_time + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id + if self.lifecycle_state is not None: + body["lifecycle_state"] = self.lifecycle_state + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_id is not None: + body["query_id"] = self.query_id + if self.seconds_to_retrigger is not None: + body["seconds_to_retrigger"] = self.seconds_to_retrigger + if self.state is not None: + body["state"] = self.state + if self.trigger_time is not None: + body["trigger_time"] = self.trigger_time + if self.update_time is not None: + body["update_time"] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Alert: """Deserializes the Alert from a dictionary.""" - return cls(condition=_from_dict(d, 'condition', AlertCondition), create_time=d.get('create_time', None), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), display_name=d.get('display_name', None), id=d.get('id', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), notify_on_ok=d.get('notify_on_ok', None), owner_user_name=d.get('owner_user_name', None), parent_path=d.get('parent_path', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None), state=_enum(d, 'state', AlertState), trigger_time=d.get('trigger_time', None), update_time=d.get('update_time', None)) - - + return cls( + condition=_from_dict(d, "condition", AlertCondition), + create_time=d.get("create_time", None), + custom_body=d.get("custom_body", None), + custom_subject=d.get("custom_subject", None), + display_name=d.get("display_name", None), + id=d.get("id", None), + lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), + notify_on_ok=d.get("notify_on_ok", None), + owner_user_name=d.get("owner_user_name", None), + parent_path=d.get("parent_path", None), + query_id=d.get("query_id", None), + seconds_to_retrigger=d.get("seconds_to_retrigger", None), + state=_enum(d, "state", AlertState), + trigger_time=d.get("trigger_time", None), + update_time=d.get("update_time", None), + ) @dataclass class AlertCondition: empty_result_state: Optional[AlertState] = None """Alert state if result is empty.""" - + op: Optional[AlertOperator] = None """Operator used for comparison in alert evaluation.""" - + operand: Optional[AlertConditionOperand] = None """Name of the column from the query result to use for comparison in alert evaluation.""" - + threshold: Optional[AlertConditionThreshold] = None """Threshold value used for comparison in alert 
evaluation.""" - + def as_dict(self) -> dict: """Serializes the AlertCondition into a dictionary suitable for use as a JSON request body.""" body = {} - if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value - if self.op is not None: body['op'] = self.op.value - if self.operand: body['operand'] = self.operand.as_dict() - if self.threshold: body['threshold'] = self.threshold.as_dict() + if self.empty_result_state is not None: + body["empty_result_state"] = self.empty_result_state.value + if self.op is not None: + body["op"] = self.op.value + if self.operand: + body["operand"] = self.operand.as_dict() + if self.threshold: + body["threshold"] = self.threshold.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertCondition into a shallow dictionary of its immediate attributes.""" body = {} - if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state - if self.op is not None: body['op'] = self.op - if self.operand: body['operand'] = self.operand - if self.threshold: body['threshold'] = self.threshold + if self.empty_result_state is not None: + body["empty_result_state"] = self.empty_result_state + if self.op is not None: + body["op"] = self.op + if self.operand: + body["operand"] = self.operand + if self.threshold: + body["threshold"] = self.threshold return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertCondition: """Deserializes the AlertCondition from a dictionary.""" - return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState), op=_enum(d, 'op', AlertOperator), operand=_from_dict(d, 'operand', AlertConditionOperand), threshold=_from_dict(d, 'threshold', AlertConditionThreshold)) - - + return cls( + empty_result_state=_enum(d, "empty_result_state", AlertState), + op=_enum(d, "op", AlertOperator), + operand=_from_dict(d, "operand", AlertConditionOperand), + threshold=_from_dict(d, "threshold", AlertConditionThreshold), + ) @dataclass class AlertConditionOperand: column: Optional[AlertOperandColumn] = None - + def as_dict(self) -> dict: """Serializes the AlertConditionOperand into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column: body['column'] = self.column.as_dict() + if self.column: + body["column"] = self.column.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertConditionOperand into a shallow dictionary of its immediate attributes.""" body = {} - if self.column: body['column'] = self.column + if self.column: + body["column"] = self.column return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertConditionOperand: """Deserializes the AlertConditionOperand from a dictionary.""" - return cls(column=_from_dict(d, 'column', AlertOperandColumn)) - - + return cls(column=_from_dict(d, "column", AlertOperandColumn)) @dataclass class AlertConditionThreshold: value: Optional[AlertOperandValue] = None - + def as_dict(self) -> dict: """Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value: body['value'] = self.value.as_dict() + if self.value: + body["value"] = self.value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertConditionThreshold into a shallow dictionary of its immediate attributes.""" body = {} - if self.value: body['value'] = self.value + if self.value: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertConditionThreshold: 
"""Deserializes the AlertConditionThreshold from a dictionary.""" - return cls(value=_from_dict(d, 'value', AlertOperandValue)) - - + return cls(value=_from_dict(d, "value", AlertOperandValue)) class AlertEvaluationState(Enum): """UNSPECIFIED - default unspecify value for proto enum, do not use it in the code UNKNOWN - alert not yet evaluated TRIGGERED - alert is triggered OK - alert is not triggered ERROR - alert evaluation failed""" - - ERROR = 'ERROR' - OK = 'OK' - TRIGGERED = 'TRIGGERED' - UNKNOWN = 'UNKNOWN' + + ERROR = "ERROR" + OK = "OK" + TRIGGERED = "TRIGGERED" + UNKNOWN = "UNKNOWN" + @dataclass class AlertOperandColumn: name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the AlertOperandColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertOperandColumn: """Deserializes the AlertOperandColumn from a dictionary.""" - return cls(name=d.get('name', None)) - - + return cls(name=d.get("name", None)) @dataclass class AlertOperandValue: bool_value: Optional[bool] = None - + double_value: Optional[float] = None - + string_value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bool_value is not None: body['bool_value'] = self.bool_value - if self.double_value is not None: body['double_value'] = self.double_value - if self.string_value is not None: body['string_value'] = self.string_value + if self.bool_value is not None: + body["bool_value"] = self.bool_value + if self.double_value is not None: + body["double_value"] = self.double_value + if self.string_value is not None: + body["string_value"] = self.string_value return body def as_shallow_dict(self) -> dict: """Serializes the AlertOperandValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.bool_value is not None: body['bool_value'] = self.bool_value - if self.double_value is not None: body['double_value'] = self.double_value - if self.string_value is not None: body['string_value'] = self.string_value + if self.bool_value is not None: + body["bool_value"] = self.bool_value + if self.double_value is not None: + body["double_value"] = self.double_value + if self.string_value is not None: + body["string_value"] = self.string_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertOperandValue: """Deserializes the AlertOperandValue from a dictionary.""" - return cls(bool_value=d.get('bool_value', None), double_value=d.get('double_value', None), string_value=d.get('string_value', None)) - - + return cls( + bool_value=d.get("bool_value", None), + double_value=d.get("double_value", None), + string_value=d.get("string_value", None), + ) class AlertOperator(Enum): - - - EQUAL = 'EQUAL' - GREATER_THAN = 'GREATER_THAN' - GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' - IS_NULL = 'IS_NULL' - LESS_THAN = 'LESS_THAN' - LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' - NOT_EQUAL = 'NOT_EQUAL' + + EQUAL = "EQUAL" + GREATER_THAN = "GREATER_THAN" + GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL" + IS_NULL = 
"IS_NULL" + LESS_THAN = "LESS_THAN" + LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL" + NOT_EQUAL = "NOT_EQUAL" + @dataclass class AlertOptions: """Alert configuration options.""" - + column: str """Name of column in the query result to compare in alert evaluation.""" - + op: str """Operator used to compare in alert evaluation: `>`, `>=`, `<`, `<=`, `==`, `!=`""" - + value: Any """Value used to compare in alert evaluation. Supported types include strings (eg. 'foobar'), floats (eg. 123.4), and booleans (true).""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This includes email subject, Slack notification header, etc. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + empty_result_state: Optional[AlertOptionsEmptyResultState] = None """State that alert evaluates to when query result is empty.""" - + muted: Optional[bool] = None """Whether or not the alert is muted. If an alert is muted, it will not notify users and notification destinations when triggered.""" - + def as_dict(self) -> dict: """Serializes the AlertOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column is not None: body['column'] = self.column - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value - if self.muted is not None: body['muted'] = self.muted - if self.op is not None: body['op'] = self.op - if self.value: body['value'] = self.value + if self.column is not None: + body["column"] = self.column + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.empty_result_state is not None: + body["empty_result_state"] = self.empty_result_state.value + if self.muted is not None: + body["muted"] = self.muted + if self.op is not None: + body["op"] = self.op + if self.value: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the AlertOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.column is not None: body['column'] = self.column - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state - if self.muted is not None: body['muted'] = self.muted - if self.op is not None: body['op'] = self.op - if self.value: body['value'] = self.value + if self.column is not None: + body["column"] = self.column + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.empty_result_state is not None: + body["empty_result_state"] = self.empty_result_state + if self.muted is not None: + body["muted"] = self.muted + if self.op is not None: + body["op"] = self.op + if self.value: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertOptions: """Deserializes the AlertOptions 
from a dictionary.""" - return cls(column=d.get('column', None), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), empty_result_state=_enum(d, 'empty_result_state', AlertOptionsEmptyResultState), muted=d.get('muted', None), op=d.get('op', None), value=d.get('value', None)) - - + return cls( + column=d.get("column", None), + custom_body=d.get("custom_body", None), + custom_subject=d.get("custom_subject", None), + empty_result_state=_enum(d, "empty_result_state", AlertOptionsEmptyResultState), + muted=d.get("muted", None), + op=d.get("op", None), + value=d.get("value", None), + ) class AlertOptionsEmptyResultState(Enum): """State that alert evaluates to when query result is empty.""" - - OK = 'ok' - TRIGGERED = 'triggered' - UNKNOWN = 'unknown' + + OK = "ok" + TRIGGERED = "triggered" + UNKNOWN = "unknown" + @dataclass class AlertQuery: created_at: Optional[str] = None """The timestamp when this query was created.""" - + data_source_id: Optional[str] = None """Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + id: Optional[str] = None """Query ID.""" - + is_archived: Optional[bool] = None """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear in search results. If this boolean is `true`, the `options` property for this query includes a `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" - + is_draft: Optional[bool] = None """Whether the query is a draft. Draft queries only appear in list views for their owners. Visualizations from draft queries cannot appear on dashboards.""" - + is_safe: Optional[bool] = None """Text parameter types are not safe from SQL injection for all types of data source. 
Set this Boolean parameter to `true` if a query either does not use any text type parameters or uses a data source type where text type parameters are handled safely.""" - + name: Optional[str] = None """The title of this query that appears in list views, widget headings, and on the query page.""" - + options: Optional[QueryOptions] = None - + query: Optional[str] = None """The text of the query to be run.""" - + tags: Optional[List[str]] = None - + updated_at: Optional[str] = None """The timestamp at which this query was last updated.""" - + user_id: Optional[int] = None """The ID of the user who owns the query.""" - + def as_dict(self) -> dict: """Serializes the AlertQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.data_source_id is not None: body['data_source_id'] = self.data_source_id - if self.description is not None: body['description'] = self.description - if self.id is not None: body['id'] = self.id - if self.is_archived is not None: body['is_archived'] = self.is_archived - if self.is_draft is not None: body['is_draft'] = self.is_draft - if self.is_safe is not None: body['is_safe'] = self.is_safe - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.query is not None: body['query'] = self.query - if self.tags: body['tags'] = [v for v in self.tags] - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.user_id is not None: body['user_id'] = self.user_id + if self.created_at is not None: + body["created_at"] = self.created_at + if self.data_source_id is not None: + body["data_source_id"] = self.data_source_id + if self.description is not None: + body["description"] = self.description + if self.id is not None: + body["id"] = self.id + if self.is_archived is not None: + body["is_archived"] = self.is_archived + if self.is_draft is not None: + body["is_draft"] = self.is_draft + if self.is_safe is not None: + body["is_safe"] = self.is_safe + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options.as_dict() + if self.query is not None: + body["query"] = self.query + if self.tags: + body["tags"] = [v for v in self.tags] + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.user_id is not None: + body["user_id"] = self.user_id return body def as_shallow_dict(self) -> dict: """Serializes the AlertQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.data_source_id is not None: body['data_source_id'] = self.data_source_id - if self.description is not None: body['description'] = self.description - if self.id is not None: body['id'] = self.id - if self.is_archived is not None: body['is_archived'] = self.is_archived - if self.is_draft is not None: body['is_draft'] = self.is_draft - if self.is_safe is not None: body['is_safe'] = self.is_safe - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.query is not None: body['query'] = self.query - if self.tags: body['tags'] = self.tags - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.user_id is not None: body['user_id'] = self.user_id + if self.created_at is not None: + body["created_at"] = self.created_at + if self.data_source_id is not None: + body["data_source_id"] = 
self.data_source_id + if self.description is not None: + body["description"] = self.description + if self.id is not None: + body["id"] = self.id + if self.is_archived is not None: + body["is_archived"] = self.is_archived + if self.is_draft is not None: + body["is_draft"] = self.is_draft + if self.is_safe is not None: + body["is_safe"] = self.is_safe + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.query is not None: + body["query"] = self.query + if self.tags: + body["tags"] = self.tags + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.user_id is not None: + body["user_id"] = self.user_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertQuery: """Deserializes the AlertQuery from a dictionary.""" - return cls(created_at=d.get('created_at', None), data_source_id=d.get('data_source_id', None), description=d.get('description', None), id=d.get('id', None), is_archived=d.get('is_archived', None), is_draft=d.get('is_draft', None), is_safe=d.get('is_safe', None), name=d.get('name', None), options=_from_dict(d, 'options', QueryOptions), query=d.get('query', None), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user_id=d.get('user_id', None)) - - + return cls( + created_at=d.get("created_at", None), + data_source_id=d.get("data_source_id", None), + description=d.get("description", None), + id=d.get("id", None), + is_archived=d.get("is_archived", None), + is_draft=d.get("is_draft", None), + is_safe=d.get("is_safe", None), + name=d.get("name", None), + options=_from_dict(d, "options", QueryOptions), + query=d.get("query", None), + tags=d.get("tags", None), + updated_at=d.get("updated_at", None), + user_id=d.get("user_id", None), + ) class AlertState(Enum): - - - OK = 'OK' - TRIGGERED = 'TRIGGERED' - UNKNOWN = 'UNKNOWN' + + OK = "OK" + TRIGGERED = "TRIGGERED" + UNKNOWN = "UNKNOWN" + @dataclass class AlertV2: create_time: Optional[str] = None """The timestamp indicating when the alert was created.""" - + custom_description: Optional[str] = None """Custom description for the alert. Supports mustache templates.""" - + custom_summary: Optional[str] = None """Custom summary for the alert. Supports mustache templates.""" - + display_name: Optional[str] = None """The display name of the alert.""" - + evaluation: Optional[AlertV2Evaluation] = None - + id: Optional[str] = None """UUID identifying the alert.""" - + lifecycle_state: Optional[LifecycleState] = None """Indicates whether the alert is trashed.""" - + owner_user_name: Optional[str] = None """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the alert. Can only be set on create, and cannot be updated.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_user_name: Optional[str] = None """The run as username.
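# --- Editorial sketch (not part of the generated diff): the custom_description
# and custom_summary fields of AlertV2 above accept mustache templates. The
# template placeholder and field values below are illustrative, not documented
# variable names.
alert = AlertV2(
    display_name="daily-revenue-check",
    custom_summary="Alert {{ALERT_NAME}} changed state",
    query_text="SELECT sum(revenue) AS revenue FROM sales",
)
# --- End editorial sketch ---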
This field is set to "Unavailable" if the user has been deleted.""" - + schedule: Optional[CronSchedule] = None - + update_time: Optional[str] = None """The timestamp indicating when the alert was updated.""" - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the alert.""" - + def as_dict(self) -> dict: """Serializes the AlertV2 into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.custom_description is not None: body['custom_description'] = self.custom_description - if self.custom_summary is not None: body['custom_summary'] = self.custom_summary - if self.display_name is not None: body['display_name'] = self.display_name - if self.evaluation: body['evaluation'] = self.evaluation.as_dict() - if self.id is not None: body['id'] = self.id - if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_text is not None: body['query_text'] = self.query_text - if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name - if self.schedule: body['schedule'] = self.schedule.as_dict() - if self.update_time is not None: body['update_time'] = self.update_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.custom_description is not None: + body["custom_description"] = self.custom_description + if self.custom_summary is not None: + body["custom_summary"] = self.custom_summary + if self.display_name is not None: + body["display_name"] = self.display_name + if self.evaluation: + body["evaluation"] = self.evaluation.as_dict() + if self.id is not None: + body["id"] = self.id + if self.lifecycle_state is not None: + body["lifecycle_state"] = self.lifecycle_state.value + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_text is not None: + body["query_text"] = self.query_text + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.update_time is not None: + body["update_time"] = self.update_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2 into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.custom_description is not None: body['custom_description'] = self.custom_description - if self.custom_summary is not None: body['custom_summary'] = self.custom_summary - if self.display_name is not None: body['display_name'] = self.display_name - if self.evaluation: body['evaluation'] = self.evaluation - if self.id is not None: body['id'] = self.id - if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_text is not None: body['query_text'] = self.query_text - if self.run_as_user_name is not None: 
body['run_as_user_name'] = self.run_as_user_name - if self.schedule: body['schedule'] = self.schedule - if self.update_time is not None: body['update_time'] = self.update_time - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.create_time is not None: + body["create_time"] = self.create_time + if self.custom_description is not None: + body["custom_description"] = self.custom_description + if self.custom_summary is not None: + body["custom_summary"] = self.custom_summary + if self.display_name is not None: + body["display_name"] = self.display_name + if self.evaluation: + body["evaluation"] = self.evaluation + if self.id is not None: + body["id"] = self.id + if self.lifecycle_state is not None: + body["lifecycle_state"] = self.lifecycle_state + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_text is not None: + body["query_text"] = self.query_text + if self.run_as_user_name is not None: + body["run_as_user_name"] = self.run_as_user_name + if self.schedule: + body["schedule"] = self.schedule + if self.update_time is not None: + body["update_time"] = self.update_time + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2: """Deserializes the AlertV2 from a dictionary.""" - return cls(create_time=d.get('create_time', None), custom_description=d.get('custom_description', None), custom_summary=d.get('custom_summary', None), display_name=d.get('display_name', None), evaluation=_from_dict(d, 'evaluation', AlertV2Evaluation), id=d.get('id', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), owner_user_name=d.get('owner_user_name', None), parent_path=d.get('parent_path', None), query_text=d.get('query_text', None), run_as_user_name=d.get('run_as_user_name', None), schedule=_from_dict(d, 'schedule', CronSchedule), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + create_time=d.get("create_time", None), + custom_description=d.get("custom_description", None), + custom_summary=d.get("custom_summary", None), + display_name=d.get("display_name", None), + evaluation=_from_dict(d, "evaluation", AlertV2Evaluation), + id=d.get("id", None), + lifecycle_state=_enum(d, "lifecycle_state", LifecycleState), + owner_user_name=d.get("owner_user_name", None), + parent_path=d.get("parent_path", None), + query_text=d.get("query_text", None), + run_as_user_name=d.get("run_as_user_name", None), + schedule=_from_dict(d, "schedule", CronSchedule), + update_time=d.get("update_time", None), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class AlertV2Evaluation: comparison_operator: Optional[ComparisonOperator] = None """Operator used for comparison in alert evaluation.""" - + empty_result_state: Optional[AlertEvaluationState] = None """Alert state if result is empty.""" - + last_evaluated_at: Optional[str] = None """Timestamp of the last evaluation.""" - + notification: Optional[AlertV2Notification] = None """User or Notification Destination to notify when alert is triggered.""" - + source: Optional[AlertV2OperandColumn] = None """Source column from the result to use to evaluate the alert""" - + state: Optional[AlertEvaluationState] = None """Latest state of alert evaluation.""" - + threshold: Optional[AlertV2Operand] = None """Threshold to use for alert evaluation; can be a column or a
value.""" - + def as_dict(self) -> dict: """Serializes the AlertV2Evaluation into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comparison_operator is not None: body['comparison_operator'] = self.comparison_operator.value - if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value - if self.last_evaluated_at is not None: body['last_evaluated_at'] = self.last_evaluated_at - if self.notification: body['notification'] = self.notification.as_dict() - if self.source: body['source'] = self.source.as_dict() - if self.state is not None: body['state'] = self.state.value - if self.threshold: body['threshold'] = self.threshold.as_dict() + if self.comparison_operator is not None: + body["comparison_operator"] = self.comparison_operator.value + if self.empty_result_state is not None: + body["empty_result_state"] = self.empty_result_state.value + if self.last_evaluated_at is not None: + body["last_evaluated_at"] = self.last_evaluated_at + if self.notification: + body["notification"] = self.notification.as_dict() + if self.source: + body["source"] = self.source.as_dict() + if self.state is not None: + body["state"] = self.state.value + if self.threshold: + body["threshold"] = self.threshold.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Evaluation into a shallow dictionary of its immediate attributes.""" body = {} - if self.comparison_operator is not None: body['comparison_operator'] = self.comparison_operator - if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state - if self.last_evaluated_at is not None: body['last_evaluated_at'] = self.last_evaluated_at - if self.notification: body['notification'] = self.notification - if self.source: body['source'] = self.source - if self.state is not None: body['state'] = self.state - if self.threshold: body['threshold'] = self.threshold + if self.comparison_operator is not None: + body["comparison_operator"] = self.comparison_operator + if self.empty_result_state is not None: + body["empty_result_state"] = self.empty_result_state + if self.last_evaluated_at is not None: + body["last_evaluated_at"] = self.last_evaluated_at + if self.notification: + body["notification"] = self.notification + if self.source: + body["source"] = self.source + if self.state is not None: + body["state"] = self.state + if self.threshold: + body["threshold"] = self.threshold return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Evaluation: """Deserializes the AlertV2Evaluation from a dictionary.""" - return cls(comparison_operator=_enum(d, 'comparison_operator', ComparisonOperator), empty_result_state=_enum(d, 'empty_result_state', AlertEvaluationState), last_evaluated_at=d.get('last_evaluated_at', None), notification=_from_dict(d, 'notification', AlertV2Notification), source=_from_dict(d, 'source', AlertV2OperandColumn), state=_enum(d, 'state', AlertEvaluationState), threshold=_from_dict(d, 'threshold', AlertV2Operand)) - - + return cls( + comparison_operator=_enum(d, "comparison_operator", ComparisonOperator), + empty_result_state=_enum(d, "empty_result_state", AlertEvaluationState), + last_evaluated_at=d.get("last_evaluated_at", None), + notification=_from_dict(d, "notification", AlertV2Notification), + source=_from_dict(d, "source", AlertV2OperandColumn), + state=_enum(d, "state", AlertEvaluationState), + threshold=_from_dict(d, "threshold", AlertV2Operand), + ) @dataclass class AlertV2Notification: notify_on_ok: 
Optional[bool] = None """Whether to notify alert subscribers when alert returns back to normal.""" - + retrigger_seconds: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - + subscriptions: Optional[List[AlertV2Subscription]] = None - + def as_dict(self) -> dict: """Serializes the AlertV2Notification into a dictionary suitable for use as a JSON request body.""" body = {} - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.retrigger_seconds is not None: body['retrigger_seconds'] = self.retrigger_seconds - if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions] + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.retrigger_seconds is not None: + body["retrigger_seconds"] = self.retrigger_seconds + if self.subscriptions: + body["subscriptions"] = [v.as_dict() for v in self.subscriptions] return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Notification into a shallow dictionary of its immediate attributes.""" body = {} - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.retrigger_seconds is not None: body['retrigger_seconds'] = self.retrigger_seconds - if self.subscriptions: body['subscriptions'] = self.subscriptions + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.retrigger_seconds is not None: + body["retrigger_seconds"] = self.retrigger_seconds + if self.subscriptions: + body["subscriptions"] = self.subscriptions return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Notification: """Deserializes the AlertV2Notification from a dictionary.""" - return cls(notify_on_ok=d.get('notify_on_ok', None), retrigger_seconds=d.get('retrigger_seconds', None), subscriptions=_repeated_dict(d, 'subscriptions', AlertV2Subscription)) - - + return cls( + notify_on_ok=d.get("notify_on_ok", None), + retrigger_seconds=d.get("retrigger_seconds", None), + subscriptions=_repeated_dict(d, "subscriptions", AlertV2Subscription), + ) @dataclass class AlertV2Operand: column: Optional[AlertV2OperandColumn] = None - + value: Optional[AlertV2OperandValue] = None - + def as_dict(self) -> dict: """Serializes the AlertV2Operand into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column: body['column'] = self.column.as_dict() - if self.value: body['value'] = self.value.as_dict() + if self.column: + body["column"] = self.column.as_dict() + if self.value: + body["value"] = self.value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Operand into a shallow dictionary of its immediate attributes.""" body = {} - if self.column: body['column'] = self.column - if self.value: body['value'] = self.value + if self.column: + body["column"] = self.column + if self.value: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Operand: """Deserializes the AlertV2Operand from a dictionary.""" - return cls(column=_from_dict(d, 'column', AlertV2OperandColumn), value=_from_dict(d, 'value', AlertV2OperandValue)) - - + return cls( + column=_from_dict(d, "column", AlertV2OperandColumn), value=_from_dict(d, "value", AlertV2OperandValue) + ) @dataclass class AlertV2OperandColumn: aggregation: Optional[Aggregation] = None - + display: Optional[str] = None - + name: 
Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertV2OperandColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aggregation is not None: body['aggregation'] = self.aggregation.value - if self.display is not None: body['display'] = self.display - if self.name is not None: body['name'] = self.name + if self.aggregation is not None: + body["aggregation"] = self.aggregation.value + if self.display is not None: + body["display"] = self.display + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2OperandColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.aggregation is not None: body['aggregation'] = self.aggregation - if self.display is not None: body['display'] = self.display - if self.name is not None: body['name'] = self.name + if self.aggregation is not None: + body["aggregation"] = self.aggregation + if self.display is not None: + body["display"] = self.display + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2OperandColumn: """Deserializes the AlertV2OperandColumn from a dictionary.""" - return cls(aggregation=_enum(d, 'aggregation', Aggregation), display=d.get('display', None), name=d.get('name', None)) - - + return cls( + aggregation=_enum(d, "aggregation", Aggregation), display=d.get("display", None), name=d.get("name", None) + ) @dataclass class AlertV2OperandValue: bool_value: Optional[bool] = None - + double_value: Optional[float] = None - + string_value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertV2OperandValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bool_value is not None: body['bool_value'] = self.bool_value - if self.double_value is not None: body['double_value'] = self.double_value - if self.string_value is not None: body['string_value'] = self.string_value + if self.bool_value is not None: + body["bool_value"] = self.bool_value + if self.double_value is not None: + body["double_value"] = self.double_value + if self.string_value is not None: + body["string_value"] = self.string_value return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2OperandValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.bool_value is not None: body['bool_value'] = self.bool_value - if self.double_value is not None: body['double_value'] = self.double_value - if self.string_value is not None: body['string_value'] = self.string_value + if self.bool_value is not None: + body["bool_value"] = self.bool_value + if self.double_value is not None: + body["double_value"] = self.double_value + if self.string_value is not None: + body["string_value"] = self.string_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2OperandValue: """Deserializes the AlertV2OperandValue from a dictionary.""" - return cls(bool_value=d.get('bool_value', None), double_value=d.get('double_value', None), string_value=d.get('string_value', None)) - - + return cls( + bool_value=d.get("bool_value", None), + double_value=d.get("double_value", None), + string_value=d.get("string_value", None), + ) @dataclass class AlertV2Subscription: destination_id: Optional[str] = None - + user_email: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the AlertV2Subscription into a dictionary suitable for use as a JSON request body.""" body = {} - if 
self.destination_id is not None: body['destination_id'] = self.destination_id - if self.user_email is not None: body['user_email'] = self.user_email + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.user_email is not None: + body["user_email"] = self.user_email return body def as_shallow_dict(self) -> dict: """Serializes the AlertV2Subscription into a shallow dictionary of its immediate attributes.""" body = {} - if self.destination_id is not None: body['destination_id'] = self.destination_id - if self.user_email is not None: body['user_email'] = self.user_email + if self.destination_id is not None: + body["destination_id"] = self.destination_id + if self.user_email is not None: + body["user_email"] = self.user_email return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AlertV2Subscription: """Deserializes the AlertV2Subscription from a dictionary.""" - return cls(destination_id=d.get('destination_id', None), user_email=d.get('user_email', None)) - - + return cls(destination_id=d.get("destination_id", None), user_email=d.get("user_email", None)) @dataclass class BaseChunkInfo: """Describes metadata for a particular chunk, within a result set; this structure is used both within a manifest, and when fetching individual chunk data or links.""" - + byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" - + chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" - + row_count: Optional[int] = None """The number of rows within the result chunk.""" - + row_offset: Optional[int] = None """The starting row offset within the result set.""" - + def as_dict(self) -> dict: """Serializes the BaseChunkInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.byte_count is not None: body['byte_count'] = self.byte_count - if self.chunk_index is not None: body['chunk_index'] = self.chunk_index - if self.row_count is not None: body['row_count'] = self.row_count - if self.row_offset is not None: body['row_offset'] = self.row_offset + if self.byte_count is not None: + body["byte_count"] = self.byte_count + if self.chunk_index is not None: + body["chunk_index"] = self.chunk_index + if self.row_count is not None: + body["row_count"] = self.row_count + if self.row_offset is not None: + body["row_offset"] = self.row_offset return body def as_shallow_dict(self) -> dict: """Serializes the BaseChunkInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.byte_count is not None: body['byte_count'] = self.byte_count - if self.chunk_index is not None: body['chunk_index'] = self.chunk_index - if self.row_count is not None: body['row_count'] = self.row_count - if self.row_offset is not None: body['row_offset'] = self.row_offset + if self.byte_count is not None: + body["byte_count"] = self.byte_count + if self.chunk_index is not None: + body["chunk_index"] = self.chunk_index + if self.row_count is not None: + body["row_count"] = self.row_count + if self.row_offset is not None: + body["row_offset"] = self.row_offset return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> BaseChunkInfo: """Deserializes the BaseChunkInfo from a dictionary.""" - return cls(byte_count=d.get('byte_count', None), chunk_index=d.get('chunk_index', None), row_count=d.get('row_count', None), row_offset=d.get('row_offset', None)) - - - - - + return cls( + byte_count=d.get("byte_count", None), + 
chunk_index=d.get("chunk_index", None), + row_count=d.get("row_count", None), + row_offset=d.get("row_offset", None), + ) @dataclass @@ -865,377 +1091,468 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CancelExecutionResponse: """Deserializes the CancelExecutionResponse from a dictionary.""" return cls() - - @dataclass class Channel: """Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be chosen only when `dbsql_version` is specified.""" - + dbsql_version: Optional[str] = None - + name: Optional[ChannelName] = None - + def as_dict(self) -> dict: """Serializes the Channel into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version - if self.name is not None: body['name'] = self.name.value + if self.dbsql_version is not None: + body["dbsql_version"] = self.dbsql_version + if self.name is not None: + body["name"] = self.name.value return body def as_shallow_dict(self) -> dict: """Serializes the Channel into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version - if self.name is not None: body['name'] = self.name + if self.dbsql_version is not None: + body["dbsql_version"] = self.dbsql_version + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Channel: """Deserializes the Channel from a dictionary.""" - return cls(dbsql_version=d.get('dbsql_version', None), name=_enum(d, 'name', ChannelName)) - - + return cls(dbsql_version=d.get("dbsql_version", None), name=_enum(d, "name", ChannelName)) @dataclass class ChannelInfo: """Details about a Channel.""" - + dbsql_version: Optional[str] = None """DB SQL Version the Channel is mapped to.""" - + name: Optional[ChannelName] = None """Name of the channel""" - + def as_dict(self) -> dict: """Serializes the ChannelInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version - if self.name is not None: body['name'] = self.name.value + if self.dbsql_version is not None: + body["dbsql_version"] = self.dbsql_version + if self.name is not None: + body["name"] = self.name.value return body def as_shallow_dict(self) -> dict: """Serializes the ChannelInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version - if self.name is not None: body['name'] = self.name + if self.dbsql_version is not None: + body["dbsql_version"] = self.dbsql_version + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ChannelInfo: """Deserializes the ChannelInfo from a dictionary.""" - return cls(dbsql_version=d.get('dbsql_version', None), name=_enum(d, 'name', ChannelName)) - - + return cls(dbsql_version=d.get("dbsql_version", None), name=_enum(d, "name", ChannelName)) class ChannelName(Enum): - - - CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' - CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' - CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW' - CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS' + + CHANNEL_NAME_CURRENT = "CHANNEL_NAME_CURRENT" + CHANNEL_NAME_CUSTOM = "CHANNEL_NAME_CUSTOM" + CHANNEL_NAME_PREVIEW = "CHANNEL_NAME_PREVIEW" + CHANNEL_NAME_PREVIOUS = "CHANNEL_NAME_PREVIOUS" + @dataclass class ClientConfig: 
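# --- Editorial sketch (not part of the generated diff): per the Channel
# docstring above, CHANNEL_NAME_CUSTOM should be chosen only when an explicit
# dbsql_version is supplied, while the other channel names stand alone. The
# version string is illustrative.
pinned = Channel(name=ChannelName.CHANNEL_NAME_CUSTOM, dbsql_version="2025.15")
tracking = Channel(name=ChannelName.CHANNEL_NAME_CURRENT)  # no version pin
# --- End editorial sketch ---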
allow_custom_js_visualizations: Optional[bool] = None - + allow_downloads: Optional[bool] = None - + allow_external_shares: Optional[bool] = None - + allow_subscriptions: Optional[bool] = None - + date_format: Optional[str] = None - + date_time_format: Optional[str] = None - + disable_publish: Optional[bool] = None - + enable_legacy_autodetect_types: Optional[bool] = None - + feature_show_permissions_control: Optional[bool] = None - + hide_plotly_mode_bar: Optional[bool] = None - + def as_dict(self) -> dict: """Serializes the ClientConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.allow_custom_js_visualizations is not None: body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations - if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads - if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares - if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions - if self.date_format is not None: body['date_format'] = self.date_format - if self.date_time_format is not None: body['date_time_format'] = self.date_time_format - if self.disable_publish is not None: body['disable_publish'] = self.disable_publish - if self.enable_legacy_autodetect_types is not None: body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types - if self.feature_show_permissions_control is not None: body['feature_show_permissions_control'] = self.feature_show_permissions_control - if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar + if self.allow_custom_js_visualizations is not None: + body["allow_custom_js_visualizations"] = self.allow_custom_js_visualizations + if self.allow_downloads is not None: + body["allow_downloads"] = self.allow_downloads + if self.allow_external_shares is not None: + body["allow_external_shares"] = self.allow_external_shares + if self.allow_subscriptions is not None: + body["allow_subscriptions"] = self.allow_subscriptions + if self.date_format is not None: + body["date_format"] = self.date_format + if self.date_time_format is not None: + body["date_time_format"] = self.date_time_format + if self.disable_publish is not None: + body["disable_publish"] = self.disable_publish + if self.enable_legacy_autodetect_types is not None: + body["enable_legacy_autodetect_types"] = self.enable_legacy_autodetect_types + if self.feature_show_permissions_control is not None: + body["feature_show_permissions_control"] = self.feature_show_permissions_control + if self.hide_plotly_mode_bar is not None: + body["hide_plotly_mode_bar"] = self.hide_plotly_mode_bar return body def as_shallow_dict(self) -> dict: """Serializes the ClientConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.allow_custom_js_visualizations is not None: body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations - if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads - if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares - if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions - if self.date_format is not None: body['date_format'] = self.date_format - if self.date_time_format is not None: body['date_time_format'] = self.date_time_format - if self.disable_publish is not None: body['disable_publish'] = self.disable_publish - if 
self.enable_legacy_autodetect_types is not None: body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types - if self.feature_show_permissions_control is not None: body['feature_show_permissions_control'] = self.feature_show_permissions_control - if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar + if self.allow_custom_js_visualizations is not None: + body["allow_custom_js_visualizations"] = self.allow_custom_js_visualizations + if self.allow_downloads is not None: + body["allow_downloads"] = self.allow_downloads + if self.allow_external_shares is not None: + body["allow_external_shares"] = self.allow_external_shares + if self.allow_subscriptions is not None: + body["allow_subscriptions"] = self.allow_subscriptions + if self.date_format is not None: + body["date_format"] = self.date_format + if self.date_time_format is not None: + body["date_time_format"] = self.date_time_format + if self.disable_publish is not None: + body["disable_publish"] = self.disable_publish + if self.enable_legacy_autodetect_types is not None: + body["enable_legacy_autodetect_types"] = self.enable_legacy_autodetect_types + if self.feature_show_permissions_control is not None: + body["feature_show_permissions_control"] = self.feature_show_permissions_control + if self.hide_plotly_mode_bar is not None: + body["hide_plotly_mode_bar"] = self.hide_plotly_mode_bar return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ClientConfig: """Deserializes the ClientConfig from a dictionary.""" - return cls(allow_custom_js_visualizations=d.get('allow_custom_js_visualizations', None), allow_downloads=d.get('allow_downloads', None), allow_external_shares=d.get('allow_external_shares', None), allow_subscriptions=d.get('allow_subscriptions', None), date_format=d.get('date_format', None), date_time_format=d.get('date_time_format', None), disable_publish=d.get('disable_publish', None), enable_legacy_autodetect_types=d.get('enable_legacy_autodetect_types', None), feature_show_permissions_control=d.get('feature_show_permissions_control', None), hide_plotly_mode_bar=d.get('hide_plotly_mode_bar', None)) - - + return cls( + allow_custom_js_visualizations=d.get("allow_custom_js_visualizations", None), + allow_downloads=d.get("allow_downloads", None), + allow_external_shares=d.get("allow_external_shares", None), + allow_subscriptions=d.get("allow_subscriptions", None), + date_format=d.get("date_format", None), + date_time_format=d.get("date_time_format", None), + disable_publish=d.get("disable_publish", None), + enable_legacy_autodetect_types=d.get("enable_legacy_autodetect_types", None), + feature_show_permissions_control=d.get("feature_show_permissions_control", None), + hide_plotly_mode_bar=d.get("hide_plotly_mode_bar", None), + ) @dataclass class ColumnInfo: name: Optional[str] = None """The name of the column.""" - + position: Optional[int] = None """The ordinal position of the column (starting at position 0).""" - + type_interval_type: Optional[str] = None """The format of the interval type.""" - + type_name: Optional[ColumnInfoTypeName] = None """The name of the base data type. This doesn't include details for complex types such as STRUCT, MAP or ARRAY.""" - + type_precision: Optional[int] = None """Specifies the number of digits in a number. This applies to the DECIMAL type.""" - + type_scale: Optional[int] = None """Specifies the number of digits to the right of the decimal point in a number. 
This applies to the DECIMAL type.""" - + type_text: Optional[str] = None """The full SQL type specification.""" - + def as_dict(self) -> dict: """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type - if self.type_name is not None: body['type_name'] = self.type_name.value - if self.type_precision is not None: body['type_precision'] = self.type_precision - if self.type_scale is not None: body['type_scale'] = self.type_scale - if self.type_text is not None: body['type_text'] = self.type_text + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] = self.type_interval_type + if self.type_name is not None: + body["type_name"] = self.type_name.value + if self.type_precision is not None: + body["type_precision"] = self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text return body def as_shallow_dict(self) -> dict: """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.position is not None: body['position'] = self.position - if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type - if self.type_name is not None: body['type_name'] = self.type_name - if self.type_precision is not None: body['type_precision'] = self.type_precision - if self.type_scale is not None: body['type_scale'] = self.type_scale - if self.type_text is not None: body['type_text'] = self.type_text + if self.name is not None: + body["name"] = self.name + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] = self.type_interval_type + if self.type_name is not None: + body["type_name"] = self.type_name + if self.type_precision is not None: + body["type_precision"] = self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" - return cls(name=d.get('name', None), position=d.get('position', None), type_interval_type=d.get('type_interval_type', None), type_name=_enum(d, 'type_name', ColumnInfoTypeName), type_precision=d.get('type_precision', None), type_scale=d.get('type_scale', None), type_text=d.get('type_text', None)) - - + return cls( + name=d.get("name", None), + position=d.get("position", None), + type_interval_type=d.get("type_interval_type", None), + type_name=_enum(d, "type_name", ColumnInfoTypeName), + type_precision=d.get("type_precision", None), + type_scale=d.get("type_scale", None), + type_text=d.get("type_text", None), + ) class ColumnInfoTypeName(Enum): """The name of the base data type. 
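# Usage sketch for the DECIMAL-specific fields documented above: `type_precision`
# is the total digit count and `type_scale` the digits right of the decimal
# point, so DECIMAL(10,2) looks like this. All values are illustrative.
from databricks.sdk.service.sql import ColumnInfo, ColumnInfoTypeName

col = ColumnInfo(
    name="amount",
    position=0,
    type_name=ColumnInfoTypeName.DECIMAL,
    type_precision=10,
    type_scale=2,
    type_text="DECIMAL(10,2)",
)
assert col.as_dict()["type_name"] == "DECIMAL"  # as_dict() emits the enum's string value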
This doesn't include details for complex types such as STRUCT, MAP or ARRAY.""" - - ARRAY = 'ARRAY' - BINARY = 'BINARY' - BOOLEAN = 'BOOLEAN' - BYTE = 'BYTE' - CHAR = 'CHAR' - DATE = 'DATE' - DECIMAL = 'DECIMAL' - DOUBLE = 'DOUBLE' - FLOAT = 'FLOAT' - INT = 'INT' - INTERVAL = 'INTERVAL' - LONG = 'LONG' - MAP = 'MAP' - NULL = 'NULL' - SHORT = 'SHORT' - STRING = 'STRING' - STRUCT = 'STRUCT' - TIMESTAMP = 'TIMESTAMP' - USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' + + ARRAY = "ARRAY" + BINARY = "BINARY" + BOOLEAN = "BOOLEAN" + BYTE = "BYTE" + CHAR = "CHAR" + DATE = "DATE" + DECIMAL = "DECIMAL" + DOUBLE = "DOUBLE" + FLOAT = "FLOAT" + INT = "INT" + INTERVAL = "INTERVAL" + LONG = "LONG" + MAP = "MAP" + NULL = "NULL" + SHORT = "SHORT" + STRING = "STRING" + STRUCT = "STRUCT" + TIMESTAMP = "TIMESTAMP" + USER_DEFINED_TYPE = "USER_DEFINED_TYPE" + class ComparisonOperator(Enum): - - - EQUAL = 'EQUAL' - GREATER_THAN = 'GREATER_THAN' - GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' - IS_NOT_NULL = 'IS_NOT_NULL' - IS_NULL = 'IS_NULL' - LESS_THAN = 'LESS_THAN' - LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' - NOT_EQUAL = 'NOT_EQUAL' + + EQUAL = "EQUAL" + GREATER_THAN = "GREATER_THAN" + GREATER_THAN_OR_EQUAL = "GREATER_THAN_OR_EQUAL" + IS_NOT_NULL = "IS_NOT_NULL" + IS_NULL = "IS_NULL" + LESS_THAN = "LESS_THAN" + LESS_THAN_OR_EQUAL = "LESS_THAN_OR_EQUAL" + NOT_EQUAL = "NOT_EQUAL" + @dataclass class CreateAlert: name: str """Name of the alert.""" - + options: AlertOptions """Alert configuration options.""" - + query_id: str """Query ID.""" - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + rearm: Optional[int] = None """Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, the alert will never be triggered again.""" - + def as_dict(self) -> dict: """Serializes the CreateAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.parent is not None: body['parent'] = self.parent - if self.query_id is not None: body['query_id'] = self.query_id - if self.rearm is not None: body['rearm'] = self.rearm + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options.as_dict() + if self.parent is not None: + body["parent"] = self.parent + if self.query_id is not None: + body["query_id"] = self.query_id + if self.rearm is not None: + body["rearm"] = self.rearm return body def as_shallow_dict(self) -> dict: """Serializes the CreateAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.parent is not None: body['parent'] = self.parent - if self.query_id is not None: body['query_id'] = self.query_id - if self.rearm is not None: body['rearm'] = self.rearm + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.parent is not None: + body["parent"] = self.parent + if self.query_id is not None: + body["query_id"] = self.query_id + if self.rearm is not None: + body["rearm"] = self.rearm return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAlert: """Deserializes the CreateAlert from a dictionary.""" - return cls(name=d.get('name', None), options=_from_dict(d, 'options', AlertOptions), parent=d.get('parent', None), query_id=d.get('query_id',
None), rearm=d.get('rearm', None)) - - + return cls( + name=d.get("name", None), + options=_from_dict(d, "options", AlertOptions), + parent=d.get("parent", None), + query_id=d.get("query_id", None), + rearm=d.get("rearm", None), + ) @dataclass class CreateAlertRequest: alert: Optional[CreateAlertRequestAlert] = None - + auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name.""" - + def as_dict(self) -> dict: """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert: body['alert'] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.alert: + body["alert"] = self.alert.as_dict() + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name return body def as_shallow_dict(self) -> dict: """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert: body['alert'] = self.alert - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name + if self.alert: + body["alert"] = self.alert + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequest: """Deserializes the CreateAlertRequest from a dictionary.""" - return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert), auto_resolve_display_name=d.get('auto_resolve_display_name', None)) - - + return cls( + alert=_from_dict(d, "alert", CreateAlertRequestAlert), + auto_resolve_display_name=d.get("auto_resolve_display_name", None), + ) @dataclass class CreateAlertRequestAlert: condition: Optional[AlertCondition] = None """Trigger conditions of the alert.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when the alert returns to normal.""" - + parent_path: Optional[str] = None """The workspace path of the folder containing the alert.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again.
If 0 or not specified, the alert will not be triggered again.""" - + def as_dict(self) -> dict: """Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition: body['condition'] = self.condition.as_dict() - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.display_name is not None: body['display_name'] = self.display_name - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_id is not None: body['query_id'] = self.query_id - if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.condition: + body["condition"] = self.condition.as_dict() + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.display_name is not None: + body["display_name"] = self.display_name + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_id is not None: + body["query_id"] = self.query_id + if self.seconds_to_retrigger is not None: + body["seconds_to_retrigger"] = self.seconds_to_retrigger return body def as_shallow_dict(self) -> dict: """Serializes the CreateAlertRequestAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition: body['condition'] = self.condition - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.display_name is not None: body['display_name'] = self.display_name - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_id is not None: body['query_id'] = self.query_id - if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.condition: + body["condition"] = self.condition + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.display_name is not None: + body["display_name"] = self.display_name + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_id is not None: + body["query_id"] = self.query_id + if self.seconds_to_retrigger is not None: + body["seconds_to_retrigger"] = self.seconds_to_retrigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequestAlert: """Deserializes the CreateAlertRequestAlert from a dictionary.""" - return cls(condition=_from_dict(d, 'condition', AlertCondition), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), display_name=d.get('display_name', None), notify_on_ok=d.get('notify_on_ok', None), parent_path=d.get('parent_path', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None)) - - - - - + return cls( + condition=_from_dict(d, "condition", AlertCondition), + custom_body=d.get("custom_body", None), + custom_subject=d.get("custom_subject", None), + 
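# Usage sketch of the alert request types above; the query UUID and retrigger
# window are placeholders, and the `condition` (an AlertCondition, defined
# elsewhere in this module) is omitted for brevity.
from databricks.sdk.service.sql import CreateAlertRequest, CreateAlertRequestAlert

alert = CreateAlertRequestAlert(
    display_name="Daily revenue below threshold",
    query_id="00000000-0000-0000-0000-000000000000",
    notify_on_ok=True,  # also notify when the alert returns to normal
    seconds_to_retrigger=300,  # 0 or unset: never trigger again after firing
)
request = CreateAlertRequest(alert=alert, auto_resolve_display_name=True)
body = request.as_dict()  # nested dataclasses serialize via their own as_dict()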
display_name=d.get("display_name", None), + notify_on_ok=d.get("notify_on_ok", None), + parent_path=d.get("parent_path", None), + query_id=d.get("query_id", None), + seconds_to_retrigger=d.get("seconds_to_retrigger", None), + ) @dataclass @@ -1243,222 +1560,287 @@ class CreateQueryRequest: auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve query display name conflicts. Otherwise, fail the request if the query's display name conflicts with an existing query's display name.""" - + query: Optional[CreateQueryRequestQuery] = None - + def as_dict(self) -> dict: """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name - if self.query: body['query'] = self.query.as_dict() + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.query: + body["query"] = self.query.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name - if self.query: body['query'] = self.query + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.query: + body["query"] = self.query return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequest: """Deserializes the CreateQueryRequest from a dictionary.""" - return cls(auto_resolve_display_name=d.get('auto_resolve_display_name', None), query=_from_dict(d, 'query', CreateQueryRequestQuery)) - - + return cls( + auto_resolve_display_name=d.get("auto_resolve_display_name", None), + query=_from_dict(d, "query", CreateQueryRequestQuery), + ) @dataclass class CreateQueryRequestQuery: apply_auto_limit: Optional[bool] = None """Whether to apply a 1000 row limit to the query result.""" - + catalog: Optional[str] = None """Name of the catalog where this query will be executed.""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + display_name: Optional[str] = None """Display name of the query that appears in list views, widget headings, and on the query page.""" - + parameters: Optional[List[QueryParameter]] = None """List of query parameter definitions.""" - + parent_path: Optional[str] = None """Workspace path of the workspace folder containing the object.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_mode: Optional[RunAsMode] = None """Sets the "Run as" role for the object.""" - + schema: Optional[str] = None """Name of the schema where this query will be executed.""" - + tags: Optional[List[str]] = None - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the query.""" - + def as_dict(self) -> dict: """Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit - if self.catalog is not None: body['catalog'] = self.catalog - if self.description is not None: body['description'] = self.description - if self.display_name is not None: body['display_name'] = self.display_name - if self.parameters: body['parameters'] = 
[v.as_dict() for v in self.parameters] - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_text is not None: body['query_text'] = self.query_text - if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value - if self.schema is not None: body['schema'] = self.schema - if self.tags: body['tags'] = [v for v in self.tags] - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.apply_auto_limit is not None: + body["apply_auto_limit"] = self.apply_auto_limit + if self.catalog is not None: + body["catalog"] = self.catalog + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_text is not None: + body["query_text"] = self.query_text + if self.run_as_mode is not None: + body["run_as_mode"] = self.run_as_mode.value + if self.schema is not None: + body["schema"] = self.schema + if self.tags: + body["tags"] = [v for v in self.tags] + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the CreateQueryRequestQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit - if self.catalog is not None: body['catalog'] = self.catalog - if self.description is not None: body['description'] = self.description - if self.display_name is not None: body['display_name'] = self.display_name - if self.parameters: body['parameters'] = self.parameters - if self.parent_path is not None: body['parent_path'] = self.parent_path - if self.query_text is not None: body['query_text'] = self.query_text - if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode - if self.schema is not None: body['schema'] = self.schema - if self.tags: body['tags'] = self.tags - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.apply_auto_limit is not None: + body["apply_auto_limit"] = self.apply_auto_limit + if self.catalog is not None: + body["catalog"] = self.catalog + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.parameters: + body["parameters"] = self.parameters + if self.parent_path is not None: + body["parent_path"] = self.parent_path + if self.query_text is not None: + body["query_text"] = self.query_text + if self.run_as_mode is not None: + body["run_as_mode"] = self.run_as_mode + if self.schema is not None: + body["schema"] = self.schema + if self.tags: + body["tags"] = self.tags + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequestQuery: """Deserializes the CreateQueryRequestQuery from a dictionary.""" - return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), description=d.get('description', None), display_name=d.get('display_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), parent_path=d.get('parent_path', None), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), 
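# Usage sketch of CreateQueryRequestQuery with the fields documented above;
# the warehouse ID and workspace path are placeholders.
from databricks.sdk.service.sql import CreateQueryRequest, CreateQueryRequestQuery

query = CreateQueryRequestQuery(
    display_name="Orders per day",
    query_text="SELECT order_date, COUNT(*) FROM orders GROUP BY order_date",
    warehouse_id="1234567890abcdef",
    parent_path="/Workspace/Users/someone@example.com",
    apply_auto_limit=True,  # cap results at 1000 rows, per the docstring
)
request = CreateQueryRequest(query=query, auto_resolve_display_name=True)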
warehouse_id=d.get('warehouse_id', None)) - - + return cls( + apply_auto_limit=d.get("apply_auto_limit", None), + catalog=d.get("catalog", None), + description=d.get("description", None), + display_name=d.get("display_name", None), + parameters=_repeated_dict(d, "parameters", QueryParameter), + parent_path=d.get("parent_path", None), + query_text=d.get("query_text", None), + run_as_mode=_enum(d, "run_as_mode", RunAsMode), + schema=d.get("schema", None), + tags=d.get("tags", None), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class CreateQueryVisualizationsLegacyRequest: """Add visualization to a query""" - + query_id: str """The identifier returned by :method:queries/create""" - + type: str """The type of visualization: chart, table, pivot table, and so on.""" - + options: Any """The options object varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization settings in JSON.""" - + description: Optional[str] = None """A short description of this visualization. This is not displayed in the UI.""" - + name: Optional[str] = None """The name of the visualization that appears on dashboards and the query screen.""" - + def as_dict(self) -> dict: """Serializes the CreateQueryVisualizationsLegacyRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.query_id is not None: body['query_id'] = self.query_id - if self.type is not None: body['type'] = self.type + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.query_id is not None: + body["query_id"] = self.query_id + if self.type is not None: + body["type"] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the CreateQueryVisualizationsLegacyRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.query_id is not None: body['query_id'] = self.query_id - if self.type is not None: body['type'] = self.type + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.query_id is not None: + body["query_id"] = self.query_id + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateQueryVisualizationsLegacyRequest: """Deserializes the CreateQueryVisualizationsLegacyRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None), options=d.get('options', None), query_id=d.get('query_id', None), type=d.get('type', None)) - - + return cls( + description=d.get("description", None), + name=d.get("name", None), + options=d.get("options", None), + query_id=d.get("query_id", None), + type=d.get("type", None), + ) @dataclass class CreateVisualizationRequest: visualization: Optional[CreateVisualizationRequestVisualization] = None - + def as_dict(self) -> dict: """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" body = {} 
- if self.visualization: body['visualization'] = self.visualization.as_dict() + if self.visualization: + body["visualization"] = self.visualization.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.visualization: body['visualization'] = self.visualization + if self.visualization: + body["visualization"] = self.visualization return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequest: """Deserializes the CreateVisualizationRequest from a dictionary.""" - return cls(visualization=_from_dict(d, 'visualization', CreateVisualizationRequestVisualization)) - - + return cls(visualization=_from_dict(d, "visualization", CreateVisualizationRequestVisualization)) @dataclass class CreateVisualizationRequestVisualization: display_name: Optional[str] = None """The display name of the visualization.""" - + query_id: Optional[str] = None """UUID of the query that the visualization is attached to.""" - + serialized_options: Optional[str] = None """The visualization options vary widely from one visualization type to the next and are unsupported. Databricks does not recommend modifying visualization options directly.""" - + serialized_query_plan: Optional[str] = None """The visualization query plan varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying the visualization query plan directly.""" - + type: Optional[str] = None """The type of visualization: counter, table, funnel, and so on.""" - + def as_dict(self) -> dict: """Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.query_id is not None: body['query_id'] = self.query_id - if self.serialized_options is not None: body['serialized_options'] = self.serialized_options - if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan - if self.type is not None: body['type'] = self.type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.query_id is not None: + body["query_id"] = self.query_id + if self.serialized_options is not None: + body["serialized_options"] = self.serialized_options + if self.serialized_query_plan is not None: + body["serialized_query_plan"] = self.serialized_query_plan + if self.type is not None: + body["type"] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the CreateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.query_id is not None: body['query_id'] = self.query_id - if self.serialized_options is not None: body['serialized_options'] = self.serialized_options - if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan - if self.type is not None: body['type'] = self.type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.query_id is not None: + body["query_id"] = self.query_id + if self.serialized_options is not None: + body["serialized_options"] = self.serialized_options + if self.serialized_query_plan is not None: + body["serialized_query_plan"] = self.serialized_query_plan + if self.type is not None: + body["type"] = self.type return
body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequestVisualization: """Deserializes the CreateVisualizationRequestVisualization from a dictionary.""" - return cls(display_name=d.get('display_name', None), query_id=d.get('query_id', None), serialized_options=d.get('serialized_options', None), serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None)) - - + return cls( + display_name=d.get("display_name", None), + query_id=d.get("query_id", None), + serialized_options=d.get("serialized_options", None), + serialized_query_plan=d.get("serialized_query_plan", None), + type=d.get("type", None), + ) @dataclass @@ -1471,10 +1853,10 @@ class CreateWarehouseRequest: non-serverless warehouses - 0 indicates no autostop. Defaults to 120 mins""" - + channel: Optional[Channel] = None """Channel Details""" - + cluster_size: Optional[str] = None """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, @@ -1482,28 +1864,28 @@ class CreateWarehouseRequest: Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large""" - + creator_name: Optional[str] = None """warehouse creator name""" - + enable_photon: Optional[bool] = None """Configures whether the warehouse should use Photon optimized clusters. Defaults to false.""" - + enable_serverless_compute: Optional[bool] = None """Configures whether the warehouse should use serverless compute""" - + instance_profile_arn: Optional[str] = None """Deprecated. Instance profile used to pass IAM role to the cluster""" - + max_num_clusters: Optional[int] = None """Maximum number of clusters that the autoscaler will create to handle concurrent queries. Supported values: - Must be >= min_num_clusters - Must be <= 30. Defaults to min_num_clusters if unset.""" - + min_num_clusters: Optional[int] = None """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce @@ -1513,338 +1895,463 @@ class CreateWarehouseRequest: Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) Defaults to 1""" - + name: Optional[str] = None """Logical name for the cluster. Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - + spot_instance_policy: Optional[SpotInstancePolicy] = None """Configures whether the warehouse should use spot instances.""" - + tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45.""" - + warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`.
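# Usage sketch of the visualization request types above. `serialized_options`
# and `serialized_query_plan` are documented as unsupported for manual edits,
# so they are left unset; the query UUID and type are placeholders.
from databricks.sdk.service.sql import (
    CreateVisualizationRequest,
    CreateVisualizationRequestVisualization,
)

viz = CreateVisualizationRequestVisualization(
    display_name="Orders table",
    query_id="00000000-0000-0000-0000-000000000000",
    type="table",  # e.g. counter, table, funnel, and so on
)
request = CreateVisualizationRequest(visualization=viz)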
If you want to use serverless compute, you must set this field to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - + def as_dict(self) -> dict: """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel.as_dict() - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value - if self.tags: body['tags'] = self.tags.as_dict() - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + if self.auto_stop_mins is not None: + body["auto_stop_mins"] = self.auto_stop_mins + if self.channel: + body["channel"] = self.channel.as_dict() + if self.cluster_size is not None: + body["cluster_size"] = self.cluster_size + if self.creator_name is not None: + body["creator_name"] = self.creator_name + if self.enable_photon is not None: + body["enable_photon"] = self.enable_photon + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_num_clusters is not None: + body["max_num_clusters"] = self.max_num_clusters + if self.min_num_clusters is not None: + body["min_num_clusters"] = self.min_num_clusters + if self.name is not None: + body["name"] = self.name + if self.spot_instance_policy is not None: + body["spot_instance_policy"] = self.spot_instance_policy.value + if self.tags: + body["tags"] = self.tags.as_dict() + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy - if self.tags: body['tags'] = self.tags -
if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + if self.auto_stop_mins is not None: + body["auto_stop_mins"] = self.auto_stop_mins + if self.channel: + body["channel"] = self.channel + if self.cluster_size is not None: + body["cluster_size"] = self.cluster_size + if self.creator_name is not None: + body["creator_name"] = self.creator_name + if self.enable_photon is not None: + body["enable_photon"] = self.enable_photon + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_num_clusters is not None: + body["max_num_clusters"] = self.max_num_clusters + if self.min_num_clusters is not None: + body["min_num_clusters"] = self.min_num_clusters + if self.name is not None: + body["name"] = self.name + if self.spot_instance_policy is not None: + body["spot_instance_policy"] = self.spot_instance_policy + if self.tags: + body["tags"] = self.tags + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseRequest: """Deserializes the CreateWarehouseRequest from a dictionary.""" - return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), instance_profile_arn=d.get('instance_profile_arn', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', None), name=d.get('name', None), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', CreateWarehouseRequestWarehouseType)) - - + return cls( + auto_stop_mins=d.get("auto_stop_mins", None), + channel=_from_dict(d, "channel", Channel), + cluster_size=d.get("cluster_size", None), + creator_name=d.get("creator_name", None), + enable_photon=d.get("enable_photon", None), + enable_serverless_compute=d.get("enable_serverless_compute", None), + instance_profile_arn=d.get("instance_profile_arn", None), + max_num_clusters=d.get("max_num_clusters", None), + min_num_clusters=d.get("min_num_clusters", None), + name=d.get("name", None), + spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), + tags=_from_dict(d, "tags", EndpointTags), + warehouse_type=_enum(d, "warehouse_type", CreateWarehouseRequestWarehouseType), + ) class CreateWarehouseRequestWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set this to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - - CLASSIC = 'CLASSIC' - PRO = 'PRO' - TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' + + CLASSIC = "CLASSIC" + PRO = "PRO" + TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" + @dataclass class CreateWarehouseResponse: id: Optional[str] = None """Id for the SQL warehouse.
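# Usage sketch exercising the constraints documented above: serverless needs
# warehouse_type=PRO plus enable_serverless_compute=True, and
# min_num_clusters <= max_num_clusters <= 30. Name and sizes are illustrative.
from databricks.sdk.service.sql import (
    CreateWarehouseRequest,
    CreateWarehouseRequestWarehouseType,
)

req = CreateWarehouseRequest(
    name="analytics-wh",
    cluster_size="Small",
    min_num_clusters=1,
    max_num_clusters=4,
    auto_stop_mins=120,  # idle minutes before autostop; 0 disables it
    warehouse_type=CreateWarehouseRequestWarehouseType.PRO,
    enable_serverless_compute=True,
)
body = req.as_dict()  # enums serialize to their string values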
This value is unique across all SQL warehouses.""" - + def as_dict(self) -> dict: """Serializes the CreateWarehouseResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body def as_shallow_dict(self) -> dict: """Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id + if self.id is not None: + body["id"] = self.id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseResponse: """Deserializes the CreateWarehouseResponse from a dictionary.""" - return cls(id=d.get('id', None)) - - + return cls(id=d.get("id", None)) @dataclass class CreateWidget: dashboard_id: str """Dashboard ID returned by :method:dashboards/create.""" - + options: WidgetOptions - + width: int """Width of a widget""" - + id: Optional[str] = None """Widget ID returned by :method:dashboardwidgets/create""" - + text: Optional[str] = None """If this is a textbox widget, the application displays this text. This field is ignored if the widget contains a visualization in the `visualization` field.""" - + visualization_id: Optional[str] = None """Query Visualization ID returned by :method:queryvisualizations/create.""" - + def as_dict(self) -> dict: """Serializes the CreateWidget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.id is not None: body['id'] = self.id - if self.options: body['options'] = self.options.as_dict() - if self.text is not None: body['text'] = self.text - if self.visualization_id is not None: body['visualization_id'] = self.visualization_id - if self.width is not None: body['width'] = self.width + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.id is not None: + body["id"] = self.id + if self.options: + body["options"] = self.options.as_dict() + if self.text is not None: + body["text"] = self.text + if self.visualization_id is not None: + body["visualization_id"] = self.visualization_id + if self.width is not None: + body["width"] = self.width return body def as_shallow_dict(self) -> dict: """Serializes the CreateWidget into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.id is not None: body['id'] = self.id - if self.options: body['options'] = self.options - if self.text is not None: body['text'] = self.text - if self.visualization_id is not None: body['visualization_id'] = self.visualization_id - if self.width is not None: body['width'] = self.width + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.id is not None: + body["id"] = self.id + if self.options: + body["options"] = self.options + if self.text is not None: + body["text"] = self.text + if self.visualization_id is not None: + body["visualization_id"] = self.visualization_id + if self.width is not None: + body["width"] = self.width return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateWidget: """Deserializes the CreateWidget from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), id=d.get('id', None), options=_from_dict(d, 'options', WidgetOptions), text=d.get('text', None), visualization_id=d.get('visualization_id', None), width=d.get('width', None)) - - + return cls( +
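# Usage sketch of a textbox widget: per the docstring above, `text` is only
# shown when no visualization is attached. The dashboard ID is a placeholder,
# and WidgetOptions is assumed here to have only optional fields.
from databricks.sdk.service.sql import CreateWidget, WidgetOptions

widget = CreateWidget(
    dashboard_id="00000000-0000-0000-0000-000000000000",
    options=WidgetOptions(),
    width=1,
    text="Revenue figures refresh hourly.",
)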
dashboard_id=d.get("dashboard_id", None), + id=d.get("id", None), + options=_from_dict(d, "options", WidgetOptions), + text=d.get("text", None), + visualization_id=d.get("visualization_id", None), + width=d.get("width", None), + ) @dataclass class CronSchedule: pause_status: Optional[SchedulePauseStatus] = None """Indicate whether this schedule is paused or not.""" - + quartz_cron_schedule: Optional[str] = None """A cron expression using quartz syntax that specifies the schedule for this pipeline. Should use the quartz format described here: http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html""" - + timezone_id: Optional[str] = None """A Java timezone id. The schedule will be resolved using this timezone. This will be combined with the quartz_cron_schedule to determine the schedule. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.""" - + def as_dict(self) -> dict: """Serializes the CronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.pause_status is not None: body['pause_status'] = self.pause_status.value - if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value + if self.quartz_cron_schedule is not None: + body["quartz_cron_schedule"] = self.quartz_cron_schedule + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body def as_shallow_dict(self) -> dict: """Serializes the CronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.pause_status is not None: body['pause_status'] = self.pause_status - if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule - if self.timezone_id is not None: body['timezone_id'] = self.timezone_id + if self.pause_status is not None: + body["pause_status"] = self.pause_status + if self.quartz_cron_schedule is not None: + body["quartz_cron_schedule"] = self.quartz_cron_schedule + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CronSchedule: """Deserializes the CronSchedule from a dictionary.""" - return cls(pause_status=_enum(d, 'pause_status', SchedulePauseStatus), quartz_cron_schedule=d.get('quartz_cron_schedule', None), timezone_id=d.get('timezone_id', None)) - - + return cls( + pause_status=_enum(d, "pause_status", SchedulePauseStatus), + quartz_cron_schedule=d.get("quartz_cron_schedule", None), + timezone_id=d.get("timezone_id", None), + ) @dataclass class Dashboard: """A JSON representing a dashboard containing widgets of visualizations and text boxes.""" - + can_edit: Optional[bool] = None """Whether the authenticated user can edit the query definition.""" - + created_at: Optional[str] = None """Timestamp when this dashboard was created.""" - + dashboard_filters_enabled: Optional[bool] = None """In the web application, query filters that share a name are coupled to a single selection box if this value is `true`.""" - + id: Optional[str] = None """The ID for this dashboard.""" - + is_archived: Optional[bool] = None """Indicates whether a dashboard is trashed. Trashed dashboards won't appear in list views. If this boolean is `true`, the `options` property for this dashboard includes a `moved_to_trash_at` timestamp. 
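# Usage sketch of CronSchedule: quartz syntax is seconds, minutes, hours,
# day-of-month, month, day-of-week, resolved in a Java timezone ID. The
# expression below fires daily at 09:00; both values are illustrative.
from databricks.sdk.service.sql import CronSchedule

schedule = CronSchedule(
    quartz_cron_schedule="0 0 9 * * ?",
    timezone_id="Europe/Amsterdam",
)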
Items in trash are permanently deleted after 30 days.""" - + is_draft: Optional[bool] = None """Whether a dashboard is a draft. Draft dashboards only appear in list views for their owners.""" - + is_favorite: Optional[bool] = None """Indicates whether this dashboard appears in the current user's favorites list. This flag determines whether the star icon for favorites is selected.""" - + name: Optional[str] = None """The title of the dashboard that appears in list views and at the top of the dashboard page.""" - + options: Optional[DashboardOptions] = None - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + permission_tier: Optional[PermissionLevel] = None """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query * `CAN_MANAGE`: Can manage the query""" - + slug: Optional[str] = None """URL slug. Usually mirrors the query name with dashes (`-`) instead of spaces. Appears in the URL for this query.""" - + tags: Optional[List[str]] = None - + updated_at: Optional[str] = None """Timestamp when this dashboard was last updated.""" - + user: Optional[User] = None - + user_id: Optional[int] = None """The ID of the user who owns the dashboard.""" - + widgets: Optional[List[Widget]] = None - + def as_dict(self) -> dict: """Serializes the Dashboard into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_edit is not None: body['can_edit'] = self.can_edit - if self.created_at is not None: body['created_at'] = self.created_at - if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled - if self.id is not None: body['id'] = self.id - if self.is_archived is not None: body['is_archived'] = self.is_archived - if self.is_draft is not None: body['is_draft'] = self.is_draft - if self.is_favorite is not None: body['is_favorite'] = self.is_favorite - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.parent is not None: body['parent'] = self.parent - if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value - if self.slug is not None: body['slug'] = self.slug - if self.tags: body['tags'] = [v for v in self.tags] - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.user: body['user'] = self.user.as_dict() - if self.user_id is not None: body['user_id'] = self.user_id - if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets] + if self.can_edit is not None: + body["can_edit"] = self.can_edit + if self.created_at is not None: + body["created_at"] = self.created_at + if self.dashboard_filters_enabled is not None: + body["dashboard_filters_enabled"] = self.dashboard_filters_enabled + if self.id is not None: + body["id"] = self.id + if self.is_archived is not None: + body["is_archived"] = self.is_archived + if self.is_draft is not None: + body["is_draft"] = self.is_draft + if self.is_favorite is not None: + body["is_favorite"] = self.is_favorite + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options.as_dict() + if self.parent is not None: + body["parent"] = self.parent + if self.permission_tier is not None: + body["permission_tier"] = self.permission_tier.value + if self.slug is not None: + body["slug"] = self.slug + if self.tags: + body["tags"] = [v for v in self.tags] + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if
self.user: + body["user"] = self.user.as_dict() + if self.user_id is not None: + body["user_id"] = self.user_id + if self.widgets: + body["widgets"] = [v.as_dict() for v in self.widgets] return body def as_shallow_dict(self) -> dict: """Serializes the Dashboard into a shallow dictionary of its immediate attributes.""" body = {} - if self.can_edit is not None: body['can_edit'] = self.can_edit - if self.created_at is not None: body['created_at'] = self.created_at - if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled - if self.id is not None: body['id'] = self.id - if self.is_archived is not None: body['is_archived'] = self.is_archived - if self.is_draft is not None: body['is_draft'] = self.is_draft - if self.is_favorite is not None: body['is_favorite'] = self.is_favorite - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.parent is not None: body['parent'] = self.parent - if self.permission_tier is not None: body['permission_tier'] = self.permission_tier - if self.slug is not None: body['slug'] = self.slug - if self.tags: body['tags'] = self.tags - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.user: body['user'] = self.user - if self.user_id is not None: body['user_id'] = self.user_id - if self.widgets: body['widgets'] = self.widgets + if self.can_edit is not None: + body["can_edit"] = self.can_edit + if self.created_at is not None: + body["created_at"] = self.created_at + if self.dashboard_filters_enabled is not None: + body["dashboard_filters_enabled"] = self.dashboard_filters_enabled + if self.id is not None: + body["id"] = self.id + if self.is_archived is not None: + body["is_archived"] = self.is_archived + if self.is_draft is not None: + body["is_draft"] = self.is_draft + if self.is_favorite is not None: + body["is_favorite"] = self.is_favorite + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.parent is not None: + body["parent"] = self.parent + if self.permission_tier is not None: + body["permission_tier"] = self.permission_tier + if self.slug is not None: + body["slug"] = self.slug + if self.tags: + body["tags"] = self.tags + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.user: + body["user"] = self.user + if self.user_id is not None: + body["user_id"] = self.user_id + if self.widgets: + body["widgets"] = self.widgets return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Dashboard: """Deserializes the Dashboard from a dictionary.""" - return cls(can_edit=d.get('can_edit', None), created_at=d.get('created_at', None), dashboard_filters_enabled=d.get('dashboard_filters_enabled', None), id=d.get('id', None), is_archived=d.get('is_archived', None), is_draft=d.get('is_draft', None), is_favorite=d.get('is_favorite', None), name=d.get('name', None), options=_from_dict(d, 'options', DashboardOptions), parent=d.get('parent', None), permission_tier=_enum(d, 'permission_tier', PermissionLevel), slug=d.get('slug', None), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User), user_id=d.get('user_id', None), widgets=_repeated_dict(d, 'widgets', Widget)) - - + return cls( + can_edit=d.get("can_edit", None), + created_at=d.get("created_at", None), + dashboard_filters_enabled=d.get("dashboard_filters_enabled", None), + id=d.get("id", None), + is_archived=d.get("is_archived", None), + 
is_draft=d.get("is_draft", None), + is_favorite=d.get("is_favorite", None), + name=d.get("name", None), + options=_from_dict(d, "options", DashboardOptions), + parent=d.get("parent", None), + permission_tier=_enum(d, "permission_tier", PermissionLevel), + slug=d.get("slug", None), + tags=d.get("tags", None), + updated_at=d.get("updated_at", None), + user=_from_dict(d, "user", User), + user_id=d.get("user_id", None), + widgets=_repeated_dict(d, "widgets", Widget), + ) @dataclass class DashboardEditContent: dashboard_id: Optional[str] = None - + name: Optional[str] = None """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the DashboardEditContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.name is not None: body['name'] = self.name - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value - if self.tags: body['tags'] = [v for v in self.tags] + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.name is not None: + body["name"] = self.name + if self.run_as_role is not None: + body["run_as_role"] = self.run_as_role.value + if self.tags: + body["tags"] = [v for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id - if self.name is not None: body['name'] = self.name - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role - if self.tags: body['tags'] = self.tags + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.name is not None: + body["name"] = self.name + if self.run_as_role is not None: + body["run_as_role"] = self.run_as_role + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardEditContent: """Deserializes the DashboardEditContent from a dictionary.""" - return cls(dashboard_id=d.get('dashboard_id', None), name=d.get('name', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None)) - - + return cls( + dashboard_id=d.get("dashboard_id", None), + name=d.get("name", None), + run_as_role=_enum(d, "run_as_role", RunAsRole), + tags=d.get("tags", None), + ) @dataclass @@ -1852,299 +2359,348 @@ class DashboardOptions: moved_to_trash_at: Optional[str] = None """The timestamp when this dashboard was moved to trash. Only present when the `is_archived` property is `true`. 
Trashed items are deleted after thirty days.""" - + def as_dict(self) -> dict: """Serializes the DashboardOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at + if self.moved_to_trash_at is not None: + body["moved_to_trash_at"] = self.moved_to_trash_at return body def as_shallow_dict(self) -> dict: """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at + if self.moved_to_trash_at is not None: + body["moved_to_trash_at"] = self.moved_to_trash_at return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardOptions: """Deserializes the DashboardOptions from a dictionary.""" - return cls(moved_to_trash_at=d.get('moved_to_trash_at', None)) - - + return cls(moved_to_trash_at=d.get("moved_to_trash_at", None)) @dataclass class DashboardPostContent: name: str """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - + dashboard_filters_enabled: Optional[bool] = None """Indicates whether the dashboard filters are enabled""" - + is_favorite: Optional[bool] = None """Indicates whether this dashboard object should appear in the current user's favorites list.""" - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the DashboardPostContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled - if self.is_favorite is not None: body['is_favorite'] = self.is_favorite - if self.name is not None: body['name'] = self.name - if self.parent is not None: body['parent'] = self.parent - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value - if self.tags: body['tags'] = [v for v in self.tags] + if self.dashboard_filters_enabled is not None: + body["dashboard_filters_enabled"] = self.dashboard_filters_enabled + if self.is_favorite is not None: + body["is_favorite"] = self.is_favorite + if self.name is not None: + body["name"] = self.name + if self.parent is not None: + body["parent"] = self.parent + if self.run_as_role is not None: + body["run_as_role"] = self.run_as_role.value + if self.tags: + body["tags"] = [v for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the DashboardPostContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = self.dashboard_filters_enabled - if self.is_favorite is not None: body['is_favorite'] = self.is_favorite - if self.name is not None: body['name'] = self.name - if self.parent is not None: body['parent'] = self.parent - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role - if self.tags: body['tags'] = self.tags + if self.dashboard_filters_enabled is not None: + body["dashboard_filters_enabled"] = self.dashboard_filters_enabled + if self.is_favorite is not None: + body["is_favorite"] = self.is_favorite + if self.name is not None: + 
body["name"] = self.name + if self.parent is not None: + body["parent"] = self.parent + if self.run_as_role is not None: + body["run_as_role"] = self.run_as_role + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DashboardPostContent: """Deserializes the DashboardPostContent from a dictionary.""" - return cls(dashboard_filters_enabled=d.get('dashboard_filters_enabled', None), is_favorite=d.get('is_favorite', None), name=d.get('name', None), parent=d.get('parent', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None)) - - + return cls( + dashboard_filters_enabled=d.get("dashboard_filters_enabled", None), + is_favorite=d.get("is_favorite", None), + name=d.get("name", None), + parent=d.get("parent", None), + run_as_role=_enum(d, "run_as_role", RunAsRole), + tags=d.get("tags", None), + ) @dataclass class DataSource: """A JSON object representing a DBSQL data source / SQL warehouse.""" - + id: Optional[str] = None """Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + name: Optional[str] = None """The string name of this data source / SQL warehouse as it appears in the Databricks SQL web application.""" - + pause_reason: Optional[str] = None """Reserved for internal use.""" - + paused: Optional[int] = None """Reserved for internal use.""" - + supports_auto_limit: Optional[bool] = None """Reserved for internal use.""" - + syntax: Optional[str] = None """Reserved for internal use.""" - + type: Optional[str] = None """The type of data source. For SQL warehouses, this will be `databricks_internal`.""" - + view_only: Optional[bool] = None """Reserved for internal use.""" - + warehouse_id: Optional[str] = None """The ID of the associated SQL warehouse, if this data source is backed by a SQL warehouse.""" - + def as_dict(self) -> dict: """Serializes the DataSource into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.name is not None: body['name'] = self.name - if self.pause_reason is not None: body['pause_reason'] = self.pause_reason - if self.paused is not None: body['paused'] = self.paused - if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit - if self.syntax is not None: body['syntax'] = self.syntax - if self.type is not None: body['type'] = self.type - if self.view_only is not None: body['view_only'] = self.view_only - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + if self.pause_reason is not None: + body["pause_reason"] = self.pause_reason + if self.paused is not None: + body["paused"] = self.paused + if self.supports_auto_limit is not None: + body["supports_auto_limit"] = self.supports_auto_limit + if self.syntax is not None: + body["syntax"] = self.syntax + if self.type is not None: + body["type"] = self.type + if self.view_only is not None: + body["view_only"] = self.view_only + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the DataSource into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.name is not None: body['name'] = self.name - if 
self.pause_reason is not None: body['pause_reason'] = self.pause_reason - if self.paused is not None: body['paused'] = self.paused - if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit - if self.syntax is not None: body['syntax'] = self.syntax - if self.type is not None: body['type'] = self.type - if self.view_only is not None: body['view_only'] = self.view_only - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name + if self.pause_reason is not None: + body["pause_reason"] = self.pause_reason + if self.paused is not None: + body["paused"] = self.paused + if self.supports_auto_limit is not None: + body["supports_auto_limit"] = self.supports_auto_limit + if self.syntax is not None: + body["syntax"] = self.syntax + if self.type is not None: + body["type"] = self.type + if self.view_only is not None: + body["view_only"] = self.view_only + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DataSource: """Deserializes the DataSource from a dictionary.""" - return cls(id=d.get('id', None), name=d.get('name', None), pause_reason=d.get('pause_reason', None), paused=d.get('paused', None), supports_auto_limit=d.get('supports_auto_limit', None), syntax=d.get('syntax', None), type=d.get('type', None), view_only=d.get('view_only', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + id=d.get("id", None), + name=d.get("name", None), + pause_reason=d.get("pause_reason", None), + paused=d.get("paused", None), + supports_auto_limit=d.get("supports_auto_limit", None), + syntax=d.get("syntax", None), + type=d.get("type", None), + view_only=d.get("view_only", None), + warehouse_id=d.get("warehouse_id", None), + ) class DatePrecision(Enum): - - - DAY_PRECISION = 'DAY_PRECISION' - MINUTE_PRECISION = 'MINUTE_PRECISION' - SECOND_PRECISION = 'SECOND_PRECISION' + + DAY_PRECISION = "DAY_PRECISION" + MINUTE_PRECISION = "MINUTE_PRECISION" + SECOND_PRECISION = "SECOND_PRECISION" + @dataclass class DateRange: start: str - + end: str - + def as_dict(self) -> dict: """Serializes the DateRange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end is not None: body['end'] = self.end - if self.start is not None: body['start'] = self.start + if self.end is not None: + body["end"] = self.end + if self.start is not None: + body["start"] = self.start return body def as_shallow_dict(self) -> dict: """Serializes the DateRange into a shallow dictionary of its immediate attributes.""" body = {} - if self.end is not None: body['end'] = self.end - if self.start is not None: body['start'] = self.start + if self.end is not None: + body["end"] = self.end + if self.start is not None: + body["start"] = self.start return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DateRange: """Deserializes the DateRange from a dictionary.""" - return cls(end=d.get('end', None), start=d.get('start', None)) - - + return cls(end=d.get("end", None), start=d.get("start", None)) @dataclass class DateRangeValue: date_range_value: Optional[DateRange] = None """Manually specified date-time range value.""" - + dynamic_date_range_value: Optional[DateRangeValueDynamicDateRange] = None """Dynamic date-time range value based on current date-time.""" - + precision: Optional[DatePrecision] = None """Date-time precision to format the value into when the 
query is run. Defaults to DAY_PRECISION (YYYY-MM-DD).""" - + start_day_of_week: Optional[int] = None - + def as_dict(self) -> dict: """Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() - if self.dynamic_date_range_value is not None: body['dynamic_date_range_value'] = self.dynamic_date_range_value.value - if self.precision is not None: body['precision'] = self.precision.value - if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week + if self.date_range_value: + body["date_range_value"] = self.date_range_value.as_dict() + if self.dynamic_date_range_value is not None: + body["dynamic_date_range_value"] = self.dynamic_date_range_value.value + if self.precision is not None: + body["precision"] = self.precision.value + if self.start_day_of_week is not None: + body["start_day_of_week"] = self.start_day_of_week return body def as_shallow_dict(self) -> dict: """Serializes the DateRangeValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.date_range_value: body['date_range_value'] = self.date_range_value - if self.dynamic_date_range_value is not None: body['dynamic_date_range_value'] = self.dynamic_date_range_value - if self.precision is not None: body['precision'] = self.precision - if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week + if self.date_range_value: + body["date_range_value"] = self.date_range_value + if self.dynamic_date_range_value is not None: + body["dynamic_date_range_value"] = self.dynamic_date_range_value + if self.precision is not None: + body["precision"] = self.precision + if self.start_day_of_week is not None: + body["start_day_of_week"] = self.start_day_of_week return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DateRangeValue: """Deserializes the DateRangeValue from a dictionary.""" - return cls(date_range_value=_from_dict(d, 'date_range_value', DateRange), dynamic_date_range_value=_enum(d, 'dynamic_date_range_value', DateRangeValueDynamicDateRange), precision=_enum(d, 'precision', DatePrecision), start_day_of_week=d.get('start_day_of_week', None)) - - + return cls( + date_range_value=_from_dict(d, "date_range_value", DateRange), + dynamic_date_range_value=_enum(d, "dynamic_date_range_value", DateRangeValueDynamicDateRange), + precision=_enum(d, "precision", DatePrecision), + start_day_of_week=d.get("start_day_of_week", None), + ) class DateRangeValueDynamicDateRange(Enum): - - - LAST_12_MONTHS = 'LAST_12_MONTHS' - LAST_14_DAYS = 'LAST_14_DAYS' - LAST_24_HOURS = 'LAST_24_HOURS' - LAST_30_DAYS = 'LAST_30_DAYS' - LAST_60_DAYS = 'LAST_60_DAYS' - LAST_7_DAYS = 'LAST_7_DAYS' - LAST_8_HOURS = 'LAST_8_HOURS' - LAST_90_DAYS = 'LAST_90_DAYS' - LAST_HOUR = 'LAST_HOUR' - LAST_MONTH = 'LAST_MONTH' - LAST_WEEK = 'LAST_WEEK' - LAST_YEAR = 'LAST_YEAR' - THIS_MONTH = 'THIS_MONTH' - THIS_WEEK = 'THIS_WEEK' - THIS_YEAR = 'THIS_YEAR' - TODAY = 'TODAY' - YESTERDAY = 'YESTERDAY' + + LAST_12_MONTHS = "LAST_12_MONTHS" + LAST_14_DAYS = "LAST_14_DAYS" + LAST_24_HOURS = "LAST_24_HOURS" + LAST_30_DAYS = "LAST_30_DAYS" + LAST_60_DAYS = "LAST_60_DAYS" + LAST_7_DAYS = "LAST_7_DAYS" + LAST_8_HOURS = "LAST_8_HOURS" + LAST_90_DAYS = "LAST_90_DAYS" + LAST_HOUR = "LAST_HOUR" + LAST_MONTH = "LAST_MONTH" + LAST_WEEK = "LAST_WEEK" + LAST_YEAR = "LAST_YEAR" + THIS_MONTH = "THIS_MONTH" + THIS_WEEK = "THIS_WEEK" + THIS_YEAR = "THIS_YEAR" + TODAY = "TODAY" + 
YESTERDAY = "YESTERDAY" + @dataclass class DateValue: date_value: Optional[str] = None """Manually specified date-time value.""" - + dynamic_date_value: Optional[DateValueDynamicDate] = None """Dynamic date-time value based on current date-time.""" - + precision: Optional[DatePrecision] = None """Date-time precision to format the value into when the query is run. Defaults to DAY_PRECISION (YYYY-MM-DD).""" - + def as_dict(self) -> dict: """Serializes the DateValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.date_value is not None: body['date_value'] = self.date_value - if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value.value - if self.precision is not None: body['precision'] = self.precision.value + if self.date_value is not None: + body["date_value"] = self.date_value + if self.dynamic_date_value is not None: + body["dynamic_date_value"] = self.dynamic_date_value.value + if self.precision is not None: + body["precision"] = self.precision.value return body def as_shallow_dict(self) -> dict: """Serializes the DateValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.date_value is not None: body['date_value'] = self.date_value - if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value - if self.precision is not None: body['precision'] = self.precision + if self.date_value is not None: + body["date_value"] = self.date_value + if self.dynamic_date_value is not None: + body["dynamic_date_value"] = self.dynamic_date_value + if self.precision is not None: + body["precision"] = self.precision return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DateValue: """Deserializes the DateValue from a dictionary.""" - return cls(date_value=d.get('date_value', None), dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate), precision=_enum(d, 'precision', DatePrecision)) - - + return cls( + date_value=d.get("date_value", None), + dynamic_date_value=_enum(d, "dynamic_date_value", DateValueDynamicDate), + precision=_enum(d, "precision", DatePrecision), + ) class DateValueDynamicDate(Enum): - - - NOW = 'NOW' - YESTERDAY = 'YESTERDAY' - - - - - - - - - - - - - + NOW = "NOW" + YESTERDAY = "YESTERDAY" @dataclass @@ -2163,14 +2719,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - - - - - - - @dataclass @@ -2189,59 +2737,71 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteWarehouseResponse: """Deserializes the DeleteWarehouseResponse from a dictionary.""" return cls() - - class Disposition(Enum): - - - EXTERNAL_LINKS = 'EXTERNAL_LINKS' - INLINE = 'INLINE' + + EXTERNAL_LINKS = "EXTERNAL_LINKS" + INLINE = "INLINE" + @dataclass class EditAlert: name: str """Name of the alert.""" - + options: AlertOptions """Alert configuration options.""" - + query_id: str """Query ID.""" - + alert_id: Optional[str] = None - + rearm: Optional[int] = None """Number of seconds after being triggered before the alert rearms itself and can be triggered again. 
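
The date parameter types above pair a `precision` with either a fixed value (`DateRange` / `date_value`) or a dynamic range resolved when the query runs. A minimal sketch, assuming this module's enums (the chosen values are illustrative):

    v = DateRangeValue(
        dynamic_date_range_value=DateRangeValueDynamicDateRange.LAST_7_DAYS,
        precision=DatePrecision.DAY_PRECISION,
    )
    v.as_dict()  # {'dynamic_date_range_value': 'LAST_7_DAYS', 'precision': 'DAY_PRECISION'}
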
If `null`, alert will never be triggered again.""" - + def as_dict(self) -> dict: """Serializes the EditAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert_id is not None: body['alert_id'] = self.alert_id - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.query_id is not None: body['query_id'] = self.query_id - if self.rearm is not None: body['rearm'] = self.rearm + if self.alert_id is not None: + body["alert_id"] = self.alert_id + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options.as_dict() + if self.query_id is not None: + body["query_id"] = self.query_id + if self.rearm is not None: + body["rearm"] = self.rearm return body def as_shallow_dict(self) -> dict: """Serializes the EditAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert_id is not None: body['alert_id'] = self.alert_id - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.query_id is not None: body['query_id'] = self.query_id - if self.rearm is not None: body['rearm'] = self.rearm + if self.alert_id is not None: + body["alert_id"] = self.alert_id + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.query_id is not None: + body["query_id"] = self.query_id + if self.rearm is not None: + body["rearm"] = self.rearm return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditAlert: """Deserializes the EditAlert from a dictionary.""" - return cls(alert_id=d.get('alert_id', None), name=d.get('name', None), options=_from_dict(d, 'options', AlertOptions), query_id=d.get('query_id', None), rearm=d.get('rearm', None)) - - + return cls( + alert_id=d.get("alert_id", None), + name=d.get("name", None), + options=_from_dict(d, "options", AlertOptions), + query_id=d.get("query_id", None), + rearm=d.get("rearm", None), + ) @dataclass @@ -2253,10 +2813,10 @@ class EditWarehouseRequest: Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. Defaults to 120 mins""" - + channel: Optional[Channel] = None """Channel Details""" - + cluster_size: Optional[str] = None """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, @@ -2264,31 +2824,31 @@ class EditWarehouseRequest: Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large""" - + creator_name: Optional[str] = None """warehouse creator name""" - + enable_photon: Optional[bool] = None """Configures whether the warehouse should use Photon optimized clusters. Defaults to false.""" - + enable_serverless_compute: Optional[bool] = None """Configures whether the warehouse should use serverless compute.""" - + id: Optional[str] = None """Required. Id of the warehouse to configure.""" - + instance_profile_arn: Optional[str] = None """Deprecated. Instance profile used to pass IAM role to the cluster""" - + max_num_clusters: Optional[int] = None """Maximum number of clusters that the autoscaler will create to handle concurrent queries. Supported values: - Must be >= min_num_clusters - Must be <= 30. Defaults to min_clusters if unset.""" - + min_num_clusters: Optional[int] = None """Minimum number of available clusters that will be maintained for this SQL warehouse. 
Increasing this will ensure that a larger number of clusters are always running and therefore may reduce @@ -2298,78 +2858,120 @@ class EditWarehouseRequest: Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) Defaults to 1""" - + name: Optional[str] = None """Logical name for the cluster. Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - + spot_instance_policy: Optional[SpotInstancePolicy] = None """Configures whether the warehouse should use spot instances.""" - + tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45.""" - + warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - + def as_dict(self) -> dict: """Serializes the EditWarehouseRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel.as_dict() - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute - if self.id is not None: body['id'] = self.id - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value - if self.tags: body['tags'] = self.tags.as_dict() - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + if self.auto_stop_mins is not None: + body["auto_stop_mins"] = self.auto_stop_mins + if self.channel: + body["channel"] = self.channel.as_dict() + if self.cluster_size is not None: + body["cluster_size"] = self.cluster_size + if self.creator_name is not None: + body["creator_name"] = self.creator_name + if self.enable_photon is not None: + body["enable_photon"] = self.enable_photon + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute + if self.id is not None: + body["id"] = self.id + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_num_clusters is not None: + body["max_num_clusters"] = self.max_num_clusters + if self.min_num_clusters is not None: + body["min_num_clusters"] = self.min_num_clusters + if self.name is not None: + body["name"] = self.name + if self.spot_instance_policy is not None: + body["spot_instance_policy"] = self.spot_instance_policy.value + if self.tags: + body["tags"] = self.tags.as_dict() + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes.""" body = {} -
if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute - if self.id is not None: body['id'] = self.id - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy - if self.tags: body['tags'] = self.tags - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + if self.auto_stop_mins is not None: + body["auto_stop_mins"] = self.auto_stop_mins + if self.channel: + body["channel"] = self.channel + if self.cluster_size is not None: + body["cluster_size"] = self.cluster_size + if self.creator_name is not None: + body["creator_name"] = self.creator_name + if self.enable_photon is not None: + body["enable_photon"] = self.enable_photon + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute + if self.id is not None: + body["id"] = self.id + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.max_num_clusters is not None: + body["max_num_clusters"] = self.max_num_clusters + if self.min_num_clusters is not None: + body["min_num_clusters"] = self.min_num_clusters + if self.name is not None: + body["name"] = self.name + if self.spot_instance_policy is not None: + body["spot_instance_policy"] = self.spot_instance_policy + if self.tags: + body["tags"] = self.tags + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EditWarehouseRequest: """Deserializes the EditWarehouseRequest from a dictionary.""" - return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), id=d.get('id', None), instance_profile_arn=d.get('instance_profile_arn', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', None), name=d.get('name', None), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', EditWarehouseRequestWarehouseType)) - - + return cls( + auto_stop_mins=d.get("auto_stop_mins", None), + channel=_from_dict(d, "channel", Channel), + cluster_size=d.get("cluster_size", None), + creator_name=d.get("creator_name", None), + enable_photon=d.get("enable_photon", None), + enable_serverless_compute=d.get("enable_serverless_compute", None), + id=d.get("id", None), + instance_profile_arn=d.get("instance_profile_arn", None), + max_num_clusters=d.get("max_num_clusters", None), + min_num_clusters=d.get("min_num_clusters", None), + 
name=d.get("name", None), + spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), + tags=_from_dict(d, "tags", EndpointTags), + warehouse_type=_enum(d, "warehouse_type", EditWarehouseRequestWarehouseType), + ) class EditWarehouseRequestWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - - CLASSIC = 'CLASSIC' - PRO = 'PRO' - TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' + + CLASSIC = "CLASSIC" + PRO = "PRO" + TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" + @dataclass class EditWarehouseResponse: @@ -2387,15 +2989,13 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> EditWarehouseResponse: """Deserializes the EditWarehouseResponse from a dictionary.""" return cls() - - @dataclass class Empty: """Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm right now.""" - + def as_dict(self) -> dict: """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" body = {} @@ -2410,82 +3010,96 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> Empty: """Deserializes the Empty from a dictionary.""" return cls() - - @dataclass class EndpointConfPair: key: Optional[str] = None - + value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the EndpointConfPair into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointConfPair into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointConfPair: """Deserializes the EndpointConfPair from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class EndpointHealth: details: Optional[str] = None """Details about errors that are causing current degraded/failed status.""" - + failure_reason: Optional[TerminationReason] = None """The reason for failure to bring up clusters for this warehouse. This is available when status is 'FAILED' and sometimes when it is DEGRADED.""" - + message: Optional[str] = None """Deprecated. 
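
For the edit request above, enum fields serialize through `.value` and the nested `channel`/`tags` objects through `as_dict`. A minimal sketch of a request body (the id and sizing are placeholders, not defaults):

    req = EditWarehouseRequest(
        id="1234567890abcdef",  # placeholder warehouse id
        cluster_size="Small",
        auto_stop_mins=30,
        enable_serverless_compute=True,
        warehouse_type=EditWarehouseRequestWarehouseType.PRO,
    )
    req.as_dict()["warehouse_type"]  # -> "PRO"
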
split into summary and details for security""" - + status: Optional[Status] = None """Health status of the warehouse.""" - + summary: Optional[str] = None """A short summary of the health status in case of degraded/failed warehouses.""" - + def as_dict(self) -> dict: """Serializes the EndpointHealth into a dictionary suitable for use as a JSON request body.""" body = {} - if self.details is not None: body['details'] = self.details - if self.failure_reason: body['failure_reason'] = self.failure_reason.as_dict() - if self.message is not None: body['message'] = self.message - if self.status is not None: body['status'] = self.status.value - if self.summary is not None: body['summary'] = self.summary + if self.details is not None: + body["details"] = self.details + if self.failure_reason: + body["failure_reason"] = self.failure_reason.as_dict() + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status.value + if self.summary is not None: + body["summary"] = self.summary return body def as_shallow_dict(self) -> dict: """Serializes the EndpointHealth into a shallow dictionary of its immediate attributes.""" body = {} - if self.details is not None: body['details'] = self.details - if self.failure_reason: body['failure_reason'] = self.failure_reason - if self.message is not None: body['message'] = self.message - if self.status is not None: body['status'] = self.status - if self.summary is not None: body['summary'] = self.summary + if self.details is not None: + body["details"] = self.details + if self.failure_reason: + body["failure_reason"] = self.failure_reason + if self.message is not None: + body["message"] = self.message + if self.status is not None: + body["status"] = self.status + if self.summary is not None: + body["summary"] = self.summary return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointHealth: """Deserializes the EndpointHealth from a dictionary.""" - return cls(details=d.get('details', None), failure_reason=_from_dict(d, 'failure_reason', TerminationReason), message=d.get('message', None), status=_enum(d, 'status', Status), summary=d.get('summary', None)) - - + return cls( + details=d.get("details", None), + failure_reason=_from_dict(d, "failure_reason", TerminationReason), + message=d.get("message", None), + status=_enum(d, "status", Status), + summary=d.get("summary", None), + ) @dataclass @@ -2497,10 +3111,10 @@ class EndpointInfo: Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. Defaults to 120 mins""" - + channel: Optional[Channel] = None """Channel Details""" - + cluster_size: Optional[str] = None """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, @@ -2508,37 +3122,37 @@ class EndpointInfo: Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large""" - + creator_name: Optional[str] = None """warehouse creator name""" - + enable_photon: Optional[bool] = None """Configures whether the warehouse should use Photon optimized clusters. Defaults to false.""" - + enable_serverless_compute: Optional[bool] = None """Configures whether the warehouse should use serverless compute""" - + health: Optional[EndpointHealth] = None """Optional health status. 
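
Per the docstrings above, `failure_reason` is populated when the status is `FAILED` and sometimes when it is `DEGRADED`, and a healthy warehouse may omit the health field entirely, so reads should be defensive. A minimal sketch, assuming `resp` holds a decoded API response dict and this module's `Status` enum:

    health = _from_dict(resp, "health", EndpointHealth)  # None when the field is absent
    if health is not None and health.status in (Status.DEGRADED, Status.FAILED):
        print(health.summary, health.details)
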
Assume the warehouse is healthy if this field is not set.""" - + id: Optional[str] = None """unique identifier for warehouse""" - + instance_profile_arn: Optional[str] = None """Deprecated. Instance profile used to pass IAM role to the cluster""" - + jdbc_url: Optional[str] = None """the jdbc connection string for this warehouse""" - + max_num_clusters: Optional[int] = None """Maximum number of clusters that the autoscaler will create to handle concurrent queries. Supported values: - Must be >= min_num_clusters - Must be <= 30. Defaults to min_clusters if unset.""" - + min_num_clusters: Optional[int] = None """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce @@ -2548,214 +3162,284 @@ class EndpointInfo: Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) Defaults to 1""" - + name: Optional[str] = None """Logical name for the cluster. Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - + num_active_sessions: Optional[int] = None """Deprecated. current number of active sessions for the warehouse""" - + num_clusters: Optional[int] = None """current number of clusters running for the service""" - + odbc_params: Optional[OdbcParams] = None """ODBC parameters for the SQL warehouse""" - + spot_instance_policy: Optional[SpotInstancePolicy] = None """Configures whether the warehouse should use spot instances.""" - + state: Optional[State] = None """State of the warehouse""" - + tags: Optional[EndpointTags] = None """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. Supported values: - Number of tags < 45.""" - + warehouse_type: Optional[EndpointInfoWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`.
If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - + def as_dict(self) -> dict: """Serializes the EndpointInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel.as_dict() - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute - if self.health: body['health'] = self.health.as_dict() - if self.id is not None: body['id'] = self.id - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions - if self.num_clusters is not None: body['num_clusters'] = self.num_clusters - if self.odbc_params: body['odbc_params'] = self.odbc_params.as_dict() - if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value - if self.state is not None: body['state'] = self.state.value - if self.tags: body['tags'] = self.tags.as_dict() - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + if self.auto_stop_mins is not None: + body["auto_stop_mins"] = self.auto_stop_mins + if self.channel: + body["channel"] = self.channel.as_dict() + if self.cluster_size is not None: + body["cluster_size"] = self.cluster_size + if self.creator_name is not None: + body["creator_name"] = self.creator_name + if self.enable_photon is not None: + body["enable_photon"] = self.enable_photon + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute + if self.health: + body["health"] = self.health.as_dict() + if self.id is not None: + body["id"] = self.id + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.jdbc_url is not None: + body["jdbc_url"] = self.jdbc_url + if self.max_num_clusters is not None: + body["max_num_clusters"] = self.max_num_clusters + if self.min_num_clusters is not None: + body["min_num_clusters"] = self.min_num_clusters + if self.name is not None: + body["name"] = self.name + if self.num_active_sessions is not None: + body["num_active_sessions"] = self.num_active_sessions + if self.num_clusters is not None: + body["num_clusters"] = self.num_clusters + if self.odbc_params: + body["odbc_params"] = self.odbc_params.as_dict() + if self.spot_instance_policy is not None: + body["spot_instance_policy"] = self.spot_instance_policy.value + if self.state is not None: + body["state"] = self.state.value + if self.tags: + body["tags"] = self.tags.as_dict() + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute - if self.health: body['health'] = self.health - if self.id is not None: body['id'] = self.id - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions - if self.num_clusters is not None: body['num_clusters'] = self.num_clusters - if self.odbc_params: body['odbc_params'] = self.odbc_params - if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy - if self.state is not None: body['state'] = self.state - if self.tags: body['tags'] = self.tags - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + if self.auto_stop_mins is not None: + body["auto_stop_mins"] = self.auto_stop_mins + if self.channel: + body["channel"] = self.channel + if self.cluster_size is not None: + body["cluster_size"] = self.cluster_size + if self.creator_name is not None: + body["creator_name"] = self.creator_name + if self.enable_photon is not None: + body["enable_photon"] = self.enable_photon + if self.enable_serverless_compute is not None: + body["enable_serverless_compute"] = self.enable_serverless_compute + if self.health: + body["health"] = self.health + if self.id is not None: + body["id"] = self.id + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.jdbc_url is not None: + body["jdbc_url"] = self.jdbc_url + if self.max_num_clusters is not None: + body["max_num_clusters"] = self.max_num_clusters + if self.min_num_clusters is not None: + body["min_num_clusters"] = self.min_num_clusters + if self.name is not None: + body["name"] = self.name + if self.num_active_sessions is not None: + body["num_active_sessions"] = self.num_active_sessions + if self.num_clusters is not None: + body["num_clusters"] = self.num_clusters + if self.odbc_params: + body["odbc_params"] = self.odbc_params + if self.spot_instance_policy is not None: + body["spot_instance_policy"] = self.spot_instance_policy + if self.state is not None: + body["state"] = self.state + if self.tags: + body["tags"] = self.tags + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointInfo: """Deserializes the EndpointInfo from a dictionary.""" - return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), health=_from_dict(d, 'health', EndpointHealth), id=d.get('id', None), 
instance_profile_arn=d.get('instance_profile_arn', None), jdbc_url=d.get('jdbc_url', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', None), name=d.get('name', None), num_active_sessions=d.get('num_active_sessions', None), num_clusters=d.get('num_clusters', None), odbc_params=_from_dict(d, 'odbc_params', OdbcParams), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), state=_enum(d, 'state', State), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', EndpointInfoWarehouseType)) - - + return cls( + auto_stop_mins=d.get("auto_stop_mins", None), + channel=_from_dict(d, "channel", Channel), + cluster_size=d.get("cluster_size", None), + creator_name=d.get("creator_name", None), + enable_photon=d.get("enable_photon", None), + enable_serverless_compute=d.get("enable_serverless_compute", None), + health=_from_dict(d, "health", EndpointHealth), + id=d.get("id", None), + instance_profile_arn=d.get("instance_profile_arn", None), + jdbc_url=d.get("jdbc_url", None), + max_num_clusters=d.get("max_num_clusters", None), + min_num_clusters=d.get("min_num_clusters", None), + name=d.get("name", None), + num_active_sessions=d.get("num_active_sessions", None), + num_clusters=d.get("num_clusters", None), + odbc_params=_from_dict(d, "odbc_params", OdbcParams), + spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), + state=_enum(d, "state", State), + tags=_from_dict(d, "tags", EndpointTags), + warehouse_type=_enum(d, "warehouse_type", EndpointInfoWarehouseType), + ) class EndpointInfoWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" - - CLASSIC = 'CLASSIC' - PRO = 'PRO' - TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' + + CLASSIC = "CLASSIC" + PRO = "PRO" + TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" + @dataclass class EndpointTagPair: key: Optional[str] = None - + value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the EndpointTagPair into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointTagPair into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTagPair: """Deserializes the EndpointTagPair from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class EndpointTags: custom_tags: Optional[List[EndpointTagPair]] = None - + def as_dict(self) -> dict: """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] return body def as_shallow_dict(self) -> dict: """Serializes the EndpointTags into a shallow dictionary of its immediate 
attributes.""" body = {} - if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.custom_tags: + body["custom_tags"] = self.custom_tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointTags: """Deserializes the EndpointTags from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair)) - - + return cls(custom_tags=_repeated_dict(d, "custom_tags", EndpointTagPair)) @dataclass class EnumValue: enum_options: Optional[str] = None """List of valid query parameter values, newline delimited.""" - + multi_values_options: Optional[MultiValuesOptions] = None """If specified, allows multiple values to be selected for this parameter.""" - + values: Optional[List[str]] = None """List of selected query parameter values.""" - + def as_dict(self) -> dict: """Serializes the EnumValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enum_options is not None: body['enum_options'] = self.enum_options - if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict() - if self.values: body['values'] = [v for v in self.values] + if self.enum_options is not None: + body["enum_options"] = self.enum_options + if self.multi_values_options: + body["multi_values_options"] = self.multi_values_options.as_dict() + if self.values: + body["values"] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the EnumValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.enum_options is not None: body['enum_options'] = self.enum_options - if self.multi_values_options: body['multi_values_options'] = self.multi_values_options - if self.values: body['values'] = self.values + if self.enum_options is not None: + body["enum_options"] = self.enum_options + if self.multi_values_options: + body["multi_values_options"] = self.multi_values_options + if self.values: + body["values"] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EnumValue: """Deserializes the EnumValue from a dictionary.""" - return cls(enum_options=d.get('enum_options', None), multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), values=d.get('values', None)) - - + return cls( + enum_options=d.get("enum_options", None), + multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions), + values=d.get("values", None), + ) @dataclass class ExecuteStatementRequest: statement: str """The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.""" - + warehouse_id: str """Warehouse upon which to execute a statement. See also [What are SQL warehouses?] [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html""" - + byte_limit: Optional[int] = None """Applies the given byte limit to the statement's result size. Byte counts are based on internal data representations and might not match the final size in the requested `format`. If the result was truncated due to the byte limit, then `truncated` in the response is set to `true`. When using `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not explcitly set.""" - + catalog: Optional[str] = None """Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL. 
[`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html""" - + disposition: Optional[Disposition] = None - + format: Optional[Format] = None """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. @@ -2787,7 +3471,7 @@ class ExecuteStatementRequest: [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180""" - + on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None """When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution doesn't finish within this time, `on_wait_timeout` determines whether the execution @@ -2795,7 +3479,7 @@ class ExecuteStatementRequest: asynchronously and the call returns a statement ID which can be used for polling with :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled and the call returns with a `CANCELED` state.""" - + parameters: Optional[List[StatementParameterListItem]] = None """A list of parameters to pass into a SQL statement containing parameter markers. A parameter consists of a name, a value, and optionally a type. To represent a NULL value, the `value` field @@ -2824,17 +3508,17 @@ class ExecuteStatementRequest: [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html""" - + row_limit: Optional[int] = None """Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, it also sets the `truncated` field in the response to indicate whether the result was trimmed due to the limit or not.""" - + schema: Optional[str] = None """Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html""" - + wait_timeout: Optional[str] = None """The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set to 0 or to a value between 5 and 50. @@ -2848,45 +3532,77 @@ class ExecuteStatementRequest: returns immediately with a manifest and result data (or a `FAILED` state in case of an execution error). 
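
The `parameters` contract above (a name plus an optional type, with a missing `value` meaning SQL NULL and `STRING` as the default type) looks like this in practice; the names and values are invented:

    params = [
        StatementParameterListItem(name="city", value="Berlin"),  # defaults to STRING
        StatementParameterListItem(name="since", value="2024-01-01", type="DATE"),
        StatementParameterListItem(name="limit", value=None),  # serialized without a value -> SQL NULL
    ]
    body = [p.as_dict() for p in params]
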
If the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached.""" - + def as_dict(self) -> dict: """Serializes the ExecuteStatementRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.byte_limit is not None: body['byte_limit'] = self.byte_limit - if self.catalog is not None: body['catalog'] = self.catalog - if self.disposition is not None: body['disposition'] = self.disposition.value - if self.format is not None: body['format'] = self.format.value - if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout.value - if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] - if self.row_limit is not None: body['row_limit'] = self.row_limit - if self.schema is not None: body['schema'] = self.schema - if self.statement is not None: body['statement'] = self.statement - if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.byte_limit is not None: + body["byte_limit"] = self.byte_limit + if self.catalog is not None: + body["catalog"] = self.catalog + if self.disposition is not None: + body["disposition"] = self.disposition.value + if self.format is not None: + body["format"] = self.format.value + if self.on_wait_timeout is not None: + body["on_wait_timeout"] = self.on_wait_timeout.value + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] + if self.row_limit is not None: + body["row_limit"] = self.row_limit + if self.schema is not None: + body["schema"] = self.schema + if self.statement is not None: + body["statement"] = self.statement + if self.wait_timeout is not None: + body["wait_timeout"] = self.wait_timeout + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.byte_limit is not None: body['byte_limit'] = self.byte_limit - if self.catalog is not None: body['catalog'] = self.catalog - if self.disposition is not None: body['disposition'] = self.disposition - if self.format is not None: body['format'] = self.format - if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout - if self.parameters: body['parameters'] = self.parameters - if self.row_limit is not None: body['row_limit'] = self.row_limit - if self.schema is not None: body['schema'] = self.schema - if self.statement is not None: body['statement'] = self.statement - if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.byte_limit is not None: + body["byte_limit"] = self.byte_limit + if self.catalog is not None: + body["catalog"] = self.catalog + if self.disposition is not None: + body["disposition"] = self.disposition + if self.format is not None: + body["format"] = self.format + if self.on_wait_timeout is not None: + body["on_wait_timeout"] = self.on_wait_timeout + if self.parameters: + body["parameters"] = self.parameters + if self.row_limit is not None: + body["row_limit"] = self.row_limit + if self.schema is not None: + body["schema"] = self.schema + if self.statement is not None: + body["statement"] = self.statement + if self.wait_timeout is not None: + body["wait_timeout"] = self.wait_timeout + if self.warehouse_id is not None: + 
body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExecuteStatementRequest: """Deserializes the ExecuteStatementRequest from a dictionary.""" - return cls(byte_limit=d.get('byte_limit', None), catalog=d.get('catalog', None), disposition=_enum(d, 'disposition', Disposition), format=_enum(d, 'format', Format), on_wait_timeout=_enum(d, 'on_wait_timeout', ExecuteStatementRequestOnWaitTimeout), parameters=_repeated_dict(d, 'parameters', StatementParameterListItem), row_limit=d.get('row_limit', None), schema=d.get('schema', None), statement=d.get('statement', None), wait_timeout=d.get('wait_timeout', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + byte_limit=d.get("byte_limit", None), + catalog=d.get("catalog", None), + disposition=_enum(d, "disposition", Disposition), + format=_enum(d, "format", Format), + on_wait_timeout=_enum(d, "on_wait_timeout", ExecuteStatementRequestOnWaitTimeout), + parameters=_repeated_dict(d, "parameters", StatementParameterListItem), + row_limit=d.get("row_limit", None), + schema=d.get("schema", None), + statement=d.get("statement", None), + wait_timeout=d.get("wait_timeout", None), + warehouse_id=d.get("warehouse_id", None), + ) class ExecuteStatementRequestOnWaitTimeout(Enum): @@ -2896,272 +3612,299 @@ class ExecuteStatementRequestOnWaitTimeout(Enum): asynchronously and the call returns a statement ID which can be used for polling with :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled and the call returns with a `CANCELED` state.""" - - CANCEL = 'CANCEL' - CONTINUE = 'CONTINUE' + + CANCEL = "CANCEL" + CONTINUE = "CONTINUE" + @dataclass class ExternalLink: byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" - + chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" - + expiration: Optional[str] = None """Indicates the date-time that the given external link will expire and becomes invalid, after which point a new `external_link` must be requested.""" - + external_link: Optional[str] = None - - http_headers: Optional[Dict[str,str]] = None + + http_headers: Optional[Dict[str, str]] = None """HTTP headers that must be included with a GET request to the `external_link`. Each header is provided as a key-value pair. Headers are typically used to pass a decryption key to the external service. The values of these headers should be considered sensitive and the client should not expose these values in a log.""" - + next_chunk_index: Optional[int] = None """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no more chunks. The next chunk can be fetched with a :method:statementexecution/getStatementResultChunkN request.""" - + next_chunk_internal_link: Optional[str] = None """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no more chunks. This link is an absolute `path` to be joined with your `$DATABRICKS_HOST`, and should be treated as an opaque link. 

 @dataclass
 class ExternalLink:
     byte_count: Optional[int] = None
     """The number of bytes in the result chunk. This field is not available when using `INLINE`
     disposition."""
-    
+
     chunk_index: Optional[int] = None
     """The position within the sequence of result set chunks."""
-    
+
     expiration: Optional[str] = None
     """Indicates the date-time that the given external link will expire and becomes invalid, after
     which point a new `external_link` must be requested."""
-    
+
     external_link: Optional[str] = None
-    
-    http_headers: Optional[Dict[str,str]] = None
+
+    http_headers: Optional[Dict[str, str]] = None
     """HTTP headers that must be included with a GET request to the `external_link`. Each header is
     provided as a key-value pair. Headers are typically used to pass a decryption key to the external
     service. The values of these headers should be considered sensitive and the client should not
     expose these values in a log."""
-    
+
     next_chunk_index: Optional[int] = None
     """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are
     no more chunks. The next chunk can be fetched with a
     :method:statementexecution/getStatementResultChunkN request."""
-    
+
     next_chunk_internal_link: Optional[str] = None
     """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no
     more chunks. This link is an absolute `path` to be joined with your `$DATABRICKS_HOST`, and should
     be treated as an opaque link. This is an alternative to using `next_chunk_index`."""
-    
+
     row_count: Optional[int] = None
     """The number of rows within the result chunk."""
-    
+
     row_offset: Optional[int] = None
     """The starting row offset within the result set."""
-    
+
     def as_dict(self) -> dict:
         """Serializes the ExternalLink into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.expiration is not None: body['expiration'] = self.expiration
-        if self.external_link is not None: body['external_link'] = self.external_link
-        if self.http_headers: body['http_headers'] = self.http_headers
-        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
-        if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.expiration is not None:
+            body["expiration"] = self.expiration
+        if self.external_link is not None:
+            body["external_link"] = self.external_link
+        if self.http_headers:
+            body["http_headers"] = self.http_headers
+        if self.next_chunk_index is not None:
+            body["next_chunk_index"] = self.next_chunk_index
+        if self.next_chunk_internal_link is not None:
+            body["next_chunk_internal_link"] = self.next_chunk_internal_link
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalLink into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.byte_count is not None: body['byte_count'] = self.byte_count
-        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
-        if self.expiration is not None: body['expiration'] = self.expiration
-        if self.external_link is not None: body['external_link'] = self.external_link
-        if self.http_headers: body['http_headers'] = self.http_headers
-        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
-        if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link
-        if self.row_count is not None: body['row_count'] = self.row_count
-        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        if self.byte_count is not None:
+            body["byte_count"] = self.byte_count
+        if self.chunk_index is not None:
+            body["chunk_index"] = self.chunk_index
+        if self.expiration is not None:
+            body["expiration"] = self.expiration
+        if self.external_link is not None:
+            body["external_link"] = self.external_link
+        if self.http_headers:
+            body["http_headers"] = self.http_headers
+        if self.next_chunk_index is not None:
+            body["next_chunk_index"] = self.next_chunk_index
+        if self.next_chunk_internal_link is not None:
+            body["next_chunk_internal_link"] = self.next_chunk_internal_link
+        if self.row_count is not None:
+            body["row_count"] = self.row_count
+        if self.row_offset is not None:
+            body["row_offset"] = self.row_offset
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ExternalLink:
         """Deserializes the ExternalLink from a dictionary."""
-        return cls(byte_count=d.get('byte_count', None), chunk_index=d.get('chunk_index', None), expiration=d.get('expiration', None), external_link=d.get('external_link', None), http_headers=d.get('http_headers', None), next_chunk_index=d.get('next_chunk_index', None), next_chunk_internal_link=d.get('next_chunk_internal_link', None), row_count=d.get('row_count', None), row_offset=d.get('row_offset', None))
-    
-    
+        return cls(
+            byte_count=d.get("byte_count", None),
+            chunk_index=d.get("chunk_index", None),
+            expiration=d.get("expiration", None),
+            external_link=d.get("external_link", None),
+            http_headers=d.get("http_headers", None),
+            next_chunk_index=d.get("next_chunk_index", None),
+            next_chunk_internal_link=d.get("next_chunk_internal_link", None),
+            row_count=d.get("row_count", None),
+            row_offset=d.get("row_offset", None),
+        )
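# A small sketch of walking result chunks via the fields documented on `ExternalLink`,
# assuming an authenticated `WorkspaceClient` named `w` and an initial `StatementResponse`
# called `resp` (hypothetical setup; `process` is a placeholder consumer):
#
#     import requests
#
#     chunk = resp.result
#     while chunk is not None:
#         for link in chunk.external_links or []:
#             # `http_headers` may carry a decryption key; never write them to a log.
#             data = requests.get(link.external_link, headers=link.http_headers or {})
#             process(data.content)
#         last = (chunk.external_links or [None])[-1]
#         if last is not None and last.next_chunk_index is not None:
#             chunk = w.statement_execution.get_statement_result_chunk_n(
#                 resp.statement_id, last.next_chunk_index
#             )
#         else:
#             chunk = None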

 @dataclass
 class ExternalQuerySource:
     alert_id: Optional[str] = None
     """The canonical identifier for this SQL alert"""
-    
+
     dashboard_id: Optional[str] = None
     """The canonical identifier for this Lakeview dashboard"""
-    
+
     genie_space_id: Optional[str] = None
     """The canonical identifier for this Genie space"""
-    
+
     job_info: Optional[ExternalQuerySourceJobInfo] = None
-    
+
     legacy_dashboard_id: Optional[str] = None
     """The canonical identifier for this legacy dashboard"""
-    
+
     notebook_id: Optional[str] = None
     """The canonical identifier for this notebook"""
-    
+
     sql_query_id: Optional[str] = None
     """The canonical identifier for this SQL query"""
-    
+
     def as_dict(self) -> dict:
         """Serializes the ExternalQuerySource into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.alert_id is not None: body['alert_id'] = self.alert_id
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id
-        if self.job_info: body['job_info'] = self.job_info.as_dict()
-        if self.legacy_dashboard_id is not None: body['legacy_dashboard_id'] = self.legacy_dashboard_id
-        if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
-        if self.sql_query_id is not None: body['sql_query_id'] = self.sql_query_id
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.genie_space_id is not None:
+            body["genie_space_id"] = self.genie_space_id
+        if self.job_info:
+            body["job_info"] = self.job_info.as_dict()
+        if self.legacy_dashboard_id is not None:
+            body["legacy_dashboard_id"] = self.legacy_dashboard_id
+        if self.notebook_id is not None:
+            body["notebook_id"] = self.notebook_id
+        if self.sql_query_id is not None:
+            body["sql_query_id"] = self.sql_query_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalQuerySource into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.alert_id is not None: body['alert_id'] = self.alert_id
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id
-        if self.job_info: body['job_info'] = self.job_info
-        if self.legacy_dashboard_id is not None: body['legacy_dashboard_id'] = self.legacy_dashboard_id
-        if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
-        if self.sql_query_id is not None: body['sql_query_id'] = self.sql_query_id
+        if self.alert_id is not None:
+            body["alert_id"] = self.alert_id
+        if self.dashboard_id is not None:
+            body["dashboard_id"] = self.dashboard_id
+        if self.genie_space_id is not None:
+            body["genie_space_id"] = self.genie_space_id
+        if self.job_info:
+            body["job_info"] = self.job_info
+        if self.legacy_dashboard_id is not None:
+            body["legacy_dashboard_id"] = self.legacy_dashboard_id
+        if self.notebook_id is not None:
+            body["notebook_id"] = self.notebook_id
+        if self.sql_query_id is not None:
+            body["sql_query_id"] = self.sql_query_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ExternalQuerySource:
         """Deserializes the ExternalQuerySource from a dictionary."""
-        return cls(alert_id=d.get('alert_id', None), dashboard_id=d.get('dashboard_id', None), genie_space_id=d.get('genie_space_id', None), job_info=_from_dict(d, 'job_info', ExternalQuerySourceJobInfo), legacy_dashboard_id=d.get('legacy_dashboard_id', None), notebook_id=d.get('notebook_id', None), sql_query_id=d.get('sql_query_id', None))
-    
-    
+        return cls(
+            alert_id=d.get("alert_id", None),
+            dashboard_id=d.get("dashboard_id", None),
+            genie_space_id=d.get("genie_space_id", None),
+            job_info=_from_dict(d, "job_info", ExternalQuerySourceJobInfo),
+            legacy_dashboard_id=d.get("legacy_dashboard_id", None),
+            notebook_id=d.get("notebook_id", None),
+            sql_query_id=d.get("sql_query_id", None),
+        )


 @dataclass
 class ExternalQuerySourceJobInfo:
     job_id: Optional[str] = None
     """The canonical identifier for this job."""
-    
+
     job_run_id: Optional[str] = None
     """The canonical identifier of the run. This ID is unique across all runs of all jobs."""
-    
+
     job_task_run_id: Optional[str] = None
     """The canonical identifier of the task run."""
-    
+
     def as_dict(self) -> dict:
         """Serializes the ExternalQuerySourceJobInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
-        if self.job_task_run_id is not None: body['job_task_run_id'] = self.job_task_run_id
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.job_task_run_id is not None:
+            body["job_task_run_id"] = self.job_task_run_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalQuerySourceJobInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
-        if self.job_task_run_id is not None: body['job_task_run_id'] = self.job_task_run_id
+        if self.job_id is not None:
+            body["job_id"] = self.job_id
+        if self.job_run_id is not None:
+            body["job_run_id"] = self.job_run_id
+        if self.job_task_run_id is not None:
+            body["job_task_run_id"] = self.job_task_run_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ExternalQuerySourceJobInfo:
         """Deserializes the ExternalQuerySourceJobInfo from a dictionary."""
-        return cls(job_id=d.get('job_id', None), job_run_id=d.get('job_run_id', None), job_task_run_id=d.get('job_task_run_id', None))
-    
-    
+        return cls(
+            job_id=d.get("job_id", None),
+            job_run_id=d.get("job_run_id", None),
+            job_task_run_id=d.get("job_task_run_id", None),
+        )


 class Format(Enum):

-    
-    
-    ARROW_STREAM = 'ARROW_STREAM'
-    CSV = 'CSV'
-    JSON_ARRAY = 'JSON_ARRAY'
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    ARROW_STREAM = "ARROW_STREAM"
+    CSV = "CSV"
+    JSON_ARRAY = "JSON_ARRAY"


 @dataclass
 class GetResponse:
     access_control_list: Optional[List[AccessControl]] = None
-    
+
     object_id: Optional[str] = None
     """An object's type and UUID, separated by a forward slash (/) character."""
-    
+
     object_type: Optional[ObjectType] = None
     """A singular noun object type."""
-    
+
     def as_dict(self) -> dict:
         """Serializes the GetResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type.value
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetResponse:
         """Deserializes the GetResponse from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl), object_id=d.get('object_id', None), object_type=_enum(d, 'object_type', ObjectType))
-
-
-
-
-
-
-
-
-
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", AccessControl),
+            object_id=d.get("object_id", None),
+            object_type=_enum(d, "object_type", ObjectType),
+        )


 @dataclass
 class GetWarehousePermissionLevelsResponse:
     permission_levels: Optional[List[WarehousePermissionsDescription]] = None
     """Specific permission levels"""
-    
+
     def as_dict(self) -> dict:
         """Serializes the GetWarehousePermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetWarehousePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetWarehousePermissionLevelsResponse:
         """Deserializes the GetWarehousePermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', WarehousePermissionsDescription))
-
-
-
-
-
-
-
-
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", WarehousePermissionsDescription))


 @dataclass
@@ -3173,10 +3916,10 @@ class GetWarehouseResponse:

     Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. Defaults to 120 mins"""
-    
+
     channel: Optional[Channel] = None
     """Channel Details"""
-    
+
     cluster_size: Optional[str] = None
     """Size of the clusters allocated for this warehouse. Increasing the size of a Spark cluster
     allows you to run larger queries on it. If you want to increase the number of concurrent queries,
@@ -3184,37 +3927,37 @@ class GetWarehouseResponse:

     Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
     4X-Large"""
-    
+
     creator_name: Optional[str] = None
     """warehouse creator name"""
-    
+
     enable_photon: Optional[bool] = None
     """Configures whether the warehouse should use Photon optimized clusters. Defaults to false."""
-    
+
     enable_serverless_compute: Optional[bool] = None
     """Configures whether the warehouse should use serverless compute"""
-    
+
     health: Optional[EndpointHealth] = None
     """Optional health status. Assume the warehouse is healthy if this field is not set."""
-    
+
     id: Optional[str] = None
     """unique identifier for warehouse"""
-    
+
     instance_profile_arn: Optional[str] = None
     """Deprecated. Instance profile used to pass IAM role to the cluster"""
-    
+
     jdbc_url: Optional[str] = None
     """the jdbc connection string for this warehouse"""
-    
+
     max_num_clusters: Optional[int] = None
     """Maximum number of clusters that the autoscaler will create to handle concurrent queries.

     Supported values: - Must be >= min_num_clusters - Must be <= 30.

     Defaults to min_num_clusters if unset."""
-    
+
     min_num_clusters: Optional[int] = None
     """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing
     this will ensure that a larger number of clusters are always running and therefore may reduce
@@ -3224,408 +3967,599 @@ class GetWarehouseResponse:

     Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)

     Defaults to 1"""
-    
+
     name: Optional[str] = None
     """Logical name for the cluster.

     Supported values: - Must be unique within an org. - Must be less than 100 characters."""
-    
+
     num_active_sessions: Optional[int] = None
     """Deprecated. current number of active sessions for the warehouse"""
-    
+
     num_clusters: Optional[int] = None
     """current number of clusters running for the service"""
-    
+
     odbc_params: Optional[OdbcParams] = None
     """ODBC parameters for the SQL warehouse"""
-    
+
     spot_instance_policy: Optional[SpotInstancePolicy] = None
     """Configures whether the warehouse should use spot instances."""
-    
+
     state: Optional[State] = None
     """State of the warehouse"""
-    
+
     tags: Optional[EndpointTags] = None
     """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS
     volumes) associated with this SQL warehouse.

     Supported values: - Number of tags < 45."""
-    
+
     warehouse_type: Optional[GetWarehouseResponseWarehouseType] = None
     """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to
     `PRO` and also set the field `enable_serverless_compute` to `true`."""
-    
+
     def as_dict(self) -> dict:
         """Serializes the GetWarehouseResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
-        if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.health: body['health'] = self.health.as_dict()
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
-        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
-        if self.odbc_params: body['odbc_params'] = self.odbc_params.as_dict()
-        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy.value
-        if self.state is not None: body['state'] = self.state.value
-        if self.tags: body['tags'] = self.tags.as_dict()
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.health:
+            body["health"] = self.health.as_dict()
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.jdbc_url is not None:
+            body["jdbc_url"] = self.jdbc_url
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_active_sessions is not None:
+            body["num_active_sessions"] = self.num_active_sessions
+        if self.num_clusters is not None:
+            body["num_clusters"] = self.num_clusters
+        if self.odbc_params:
+            body["odbc_params"] = self.odbc_params.as_dict()
+        if self.spot_instance_policy is not None:
+            body["spot_instance_policy"] = self.spot_instance_policy.value
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.tags:
+            body["tags"] = self.tags.as_dict()
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetWarehouseResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
-        if self.channel: body['channel'] = self.channel
-        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
-        if self.creator_name is not None: body['creator_name'] = self.creator_name
-        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
-        if self.enable_serverless_compute is not None: body['enable_serverless_compute'] = self.enable_serverless_compute
-        if self.health: body['health'] = self.health
-        if self.id is not None: body['id'] = self.id
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
-        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
-        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
-        if self.name is not None: body['name'] = self.name
-        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
-        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
-        if self.odbc_params: body['odbc_params'] = self.odbc_params
-        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
-        if self.state is not None: body['state'] = self.state
-        if self.tags: body['tags'] = self.tags
-        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        if self.auto_stop_mins is not None:
+            body["auto_stop_mins"] = self.auto_stop_mins
+        if self.channel:
+            body["channel"] = self.channel
+        if self.cluster_size is not None:
+            body["cluster_size"] = self.cluster_size
+        if self.creator_name is not None:
+            body["creator_name"] = self.creator_name
+        if self.enable_photon is not None:
+            body["enable_photon"] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body["enable_serverless_compute"] = self.enable_serverless_compute
+        if self.health:
+            body["health"] = self.health
+        if self.id is not None:
+            body["id"] = self.id
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.jdbc_url is not None:
+            body["jdbc_url"] = self.jdbc_url
+        if self.max_num_clusters is not None:
+            body["max_num_clusters"] = self.max_num_clusters
+        if self.min_num_clusters is not None:
+            body["min_num_clusters"] = self.min_num_clusters
+        if self.name is not None:
+            body["name"] = self.name
+        if self.num_active_sessions is not None:
+            body["num_active_sessions"] = self.num_active_sessions
+        if self.num_clusters is not None:
+            body["num_clusters"] = self.num_clusters
+        if self.odbc_params:
+            body["odbc_params"] = self.odbc_params
+        if self.spot_instance_policy is not None:
+            body["spot_instance_policy"] = self.spot_instance_policy
+        if self.state is not None:
+            body["state"] = self.state
+        if self.tags:
+            body["tags"] = self.tags
+        if self.warehouse_type is not None:
+            body["warehouse_type"] = self.warehouse_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetWarehouseResponse:
         """Deserializes the GetWarehouseResponse from a dictionary."""
-        return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), creator_name=d.get('creator_name', None), enable_photon=d.get('enable_photon', None), enable_serverless_compute=d.get('enable_serverless_compute', None), health=_from_dict(d, 'health', EndpointHealth), id=d.get('id', None), instance_profile_arn=d.get('instance_profile_arn', None), jdbc_url=d.get('jdbc_url', None), max_num_clusters=d.get('max_num_clusters', None), min_num_clusters=d.get('min_num_clusters', None), name=d.get('name', None), num_active_sessions=d.get('num_active_sessions', None), num_clusters=d.get('num_clusters', None), odbc_params=_from_dict(d, 'odbc_params', OdbcParams), spot_instance_policy=_enum(d, 'spot_instance_policy', SpotInstancePolicy), state=_enum(d, 'state', State), tags=_from_dict(d, 'tags', EndpointTags), warehouse_type=_enum(d, 'warehouse_type', GetWarehouseResponseWarehouseType))
-    
-    
+        return cls(
+            auto_stop_mins=d.get("auto_stop_mins", None),
+            channel=_from_dict(d, "channel", Channel),
+            cluster_size=d.get("cluster_size", None),
+            creator_name=d.get("creator_name", None),
+            enable_photon=d.get("enable_photon", None),
+            enable_serverless_compute=d.get("enable_serverless_compute", None),
+            health=_from_dict(d, "health", EndpointHealth),
+            id=d.get("id", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            jdbc_url=d.get("jdbc_url", None),
+            max_num_clusters=d.get("max_num_clusters", None),
+            min_num_clusters=d.get("min_num_clusters", None),
+            name=d.get("name", None),
+            num_active_sessions=d.get("num_active_sessions", None),
+            num_clusters=d.get("num_clusters", None),
+            odbc_params=_from_dict(d, "odbc_params", OdbcParams),
+            spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy),
+            state=_enum(d, "state", State),
+            tags=_from_dict(d, "tags", EndpointTags),
+            warehouse_type=_enum(d, "warehouse_type", GetWarehouseResponseWarehouseType),
+        )
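# The two serializers above differ in how they treat nested values; a brief sketch,
# where `resp` is a populated `GetWarehouseResponse` (hypothetical variable, shown
# for illustration only):
#
#     resp.as_dict()["channel"]          # nested Channel rendered as a plain dict
#     resp.as_dict()["state"]            # State enum rendered as its string value
#     resp.as_shallow_dict()["channel"]  # the live Channel object, left unserialized
#     resp.as_shallow_dict()["state"]    # the State enum member itself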

 class GetWarehouseResponseWarehouseType(Enum):
     """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to
     `PRO` and also set the field `enable_serverless_compute` to `true`."""
-    
-    CLASSIC = 'CLASSIC'
-    PRO = 'PRO'
-    TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED'
+
+    CLASSIC = "CLASSIC"
+    PRO = "PRO"
+    TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED"
+

 @dataclass
 class GetWorkspaceWarehouseConfigResponse:
     channel: Optional[Channel] = None
     """Optional: Channel selection details"""
-    
+
     config_param: Optional[RepeatedEndpointConfPairs] = None
     """Deprecated: Use sql_configuration_parameters"""
-    
+
     data_access_config: Optional[List[EndpointConfPair]] = None
     """Spark confs for external hive metastore configuration JSON serialized size must be less than
     <= 512K"""
-    
+
     enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None
     """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in
     CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be
     specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to
     be converted to another type. Used by the frontend to save specific type availability in the
     warehouse create and edit form UI."""
-    
+
     global_param: Optional[RepeatedEndpointConfPairs] = None
     """Deprecated: Use sql_configuration_parameters"""
-    
+
     google_service_account: Optional[str] = None
     """GCP only: Google Service Account passed to the cluster to access Google Cloud Storage"""
-    
+
     instance_profile_arn: Optional[str] = None
     """AWS Only: Instance profile used to pass IAM role to the cluster"""
-    
+
     security_policy: Optional[GetWorkspaceWarehouseConfigResponseSecurityPolicy] = None
     """Security policy for warehouses"""
-    
+
     sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None
     """SQL configuration parameters"""
-    
+
     def as_dict(self) -> dict:
         """Serializes the GetWorkspaceWarehouseConfigResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.channel: body['channel'] = self.channel.as_dict()
-        if self.config_param: body['config_param'] = self.config_param.as_dict()
-        if self.data_access_config: body['data_access_config'] = [v.as_dict() for v in self.data_access_config]
-        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = [v.as_dict() for v in self.enabled_warehouse_types]
-        if self.global_param: body['global_param'] = self.global_param.as_dict()
-        if self.google_service_account is not None: body['google_service_account'] = self.google_service_account
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.security_policy is not None: body['security_policy'] = self.security_policy.value
-        if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict()
+        if self.channel:
+            body["channel"] = self.channel.as_dict()
+        if self.config_param:
+            body["config_param"] = self.config_param.as_dict()
+        if self.data_access_config:
+            body["data_access_config"] = [v.as_dict() for v in self.data_access_config]
+        if self.enabled_warehouse_types:
+            body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types]
+        if self.global_param:
+            body["global_param"] = self.global_param.as_dict()
+        if self.google_service_account is not None:
+            body["google_service_account"] = self.google_service_account
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.security_policy is not None:
+            body["security_policy"] = self.security_policy.value
+        if self.sql_configuration_parameters:
+            body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.channel: body['channel'] = self.channel
-        if self.config_param: body['config_param'] = self.config_param
-        if self.data_access_config: body['data_access_config'] = self.data_access_config
-        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types
-        if self.global_param: body['global_param'] = self.global_param
-        if self.google_service_account is not None: body['google_service_account'] = self.google_service_account
-        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
-        if self.security_policy is not None: body['security_policy'] = self.security_policy
-        if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters
+        if self.channel:
+            body["channel"] = self.channel
+        if self.config_param:
+            body["config_param"] = self.config_param
+        if self.data_access_config:
+            body["data_access_config"] = self.data_access_config
+        if self.enabled_warehouse_types:
+            body["enabled_warehouse_types"] = self.enabled_warehouse_types
+        if self.global_param:
+            body["global_param"] = self.global_param
+        if self.google_service_account is not None:
+            body["google_service_account"] = self.google_service_account
+        if self.instance_profile_arn is not None:
+            body["instance_profile_arn"] = self.instance_profile_arn
+        if self.security_policy is not None:
+            body["security_policy"] = self.security_policy
+        if self.sql_configuration_parameters:
+            body["sql_configuration_parameters"] = self.sql_configuration_parameters
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceWarehouseConfigResponse:
         """Deserializes the GetWorkspaceWarehouseConfigResponse from a dictionary."""
-        return cls(channel=_from_dict(d, 'channel', Channel), config_param=_from_dict(d, 'config_param', RepeatedEndpointConfPairs), data_access_config=_repeated_dict(d, 'data_access_config', EndpointConfPair), enabled_warehouse_types=_repeated_dict(d, 'enabled_warehouse_types', WarehouseTypePair), global_param=_from_dict(d, 'global_param', RepeatedEndpointConfPairs), google_service_account=d.get('google_service_account', None), instance_profile_arn=d.get('instance_profile_arn', None), security_policy=_enum(d, 'security_policy', GetWorkspaceWarehouseConfigResponseSecurityPolicy), sql_configuration_parameters=_from_dict(d, 'sql_configuration_parameters', RepeatedEndpointConfPairs))
-    
-    
+        return cls(
+            channel=_from_dict(d, "channel", Channel),
+            config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs),
+            data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair),
+            enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair),
+            global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs),
+            google_service_account=d.get("google_service_account", None),
+            instance_profile_arn=d.get("instance_profile_arn", None),
+            security_policy=_enum(d, "security_policy", GetWorkspaceWarehouseConfigResponseSecurityPolicy),
+            sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", RepeatedEndpointConfPairs),
+        )


 class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum):
     """Security policy for warehouses"""
-    
-    DATA_ACCESS_CONTROL = 'DATA_ACCESS_CONTROL'
-    NONE = 'NONE'
-    PASSTHROUGH = 'PASSTHROUGH'
+
+    DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL"
+    NONE = "NONE"
+    PASSTHROUGH = "PASSTHROUGH"
+

 @dataclass
 class LegacyAlert:
     created_at: Optional[str] = None
     """Timestamp when the alert was created."""
-    
+
     id: Optional[str] = None
     """Alert ID."""
-    
+
     last_triggered_at: Optional[str] = None
     """Timestamp when the alert was last triggered."""
-    
+
     name: Optional[str] = None
     """Name of the alert."""
-    
+
     options: Optional[AlertOptions] = None
     """Alert configuration options."""
-    
+
     parent: Optional[str] = None
     """The identifier of the workspace folder containing the object."""
-    
+
     query: Optional[AlertQuery] = None
-    
+
     rearm: Optional[int] = None
     """Number of seconds after being triggered before the alert rearms itself and can be triggered
     again. If `null`, alert will never be triggered again."""
-    
+
     state: Optional[LegacyAlertState] = None
     """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered`
     (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger
     conditions)."""
-    
+
     updated_at: Optional[str] = None
     """Timestamp when the alert was last updated."""
-    
+
     user: Optional[User] = None
-    
+
     def as_dict(self) -> dict:
         """Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query: body['query'] = self.query.as_dict()
-        if self.rearm is not None: body['rearm'] = self.rearm
-        if self.state is not None: body['state'] = self.state.value
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_triggered_at is not None:
+            body["last_triggered_at"] = self.last_triggered_at
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query:
+            body["query"] = self.query.as_dict()
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user.as_dict()
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LegacyAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.id is not None: body['id'] = self.id
-        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.query: body['query'] = self.query
-        if self.rearm is not None: body['rearm'] = self.rearm
-        if self.state is not None: body['state'] = self.state
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_triggered_at is not None:
+            body["last_triggered_at"] = self.last_triggered_at
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.query:
+            body["query"] = self.query
+        if self.rearm is not None:
+            body["rearm"] = self.rearm
+        if self.state is not None:
+            body["state"] = self.state
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LegacyAlert:
         """Deserializes the LegacyAlert from a dictionary."""
-        return cls(created_at=d.get('created_at', None), id=d.get('id', None), last_triggered_at=d.get('last_triggered_at', None), name=d.get('name', None), options=_from_dict(d, 'options', AlertOptions), parent=d.get('parent', None), query=_from_dict(d, 'query', AlertQuery), rearm=d.get('rearm', None), state=_enum(d, 'state', LegacyAlertState), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User))
-    
-    
+        return cls(
+            created_at=d.get("created_at", None),
+            id=d.get("id", None),
+            last_triggered_at=d.get("last_triggered_at", None),
+            name=d.get("name", None),
+            options=_from_dict(d, "options", AlertOptions),
+            parent=d.get("parent", None),
+            query=_from_dict(d, "query", AlertQuery),
+            rearm=d.get("rearm", None),
+            state=_enum(d, "state", LegacyAlertState),
+            updated_at=d.get("updated_at", None),
+            user=_from_dict(d, "user", User),
+        )


 class LegacyAlertState(Enum):
     """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered`
     (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger
     conditions)."""
-    
-    OK = 'ok'
-    TRIGGERED = 'triggered'
-    UNKNOWN = 'unknown'
+
+    OK = "ok"
+    TRIGGERED = "triggered"
+    UNKNOWN = "unknown"
+

 @dataclass
 class LegacyQuery:
     can_edit: Optional[bool] = None
     """Describes whether the authenticated user is allowed to edit the definition of this query."""
-    
+
     created_at: Optional[str] = None
     """The timestamp when this query was created."""
-    
+
     data_source_id: Optional[str] = None
     """Data source ID maps to the ID of the data source used by the resource and is distinct from the
     warehouse ID. [Learn more]

     [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
-    
+
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
-    
+
     id: Optional[str] = None
     """Query ID."""
-    
+
     is_archived: Optional[bool] = None
     """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
     in search results. If this boolean is `true`, the `options` property for this query includes a
     `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
-    
+
     is_draft: Optional[bool] = None
     """Whether the query is a draft. Draft queries only appear in list views for their owners.
     Visualizations from draft queries cannot appear on dashboards."""
-    
+
     is_favorite: Optional[bool] = None
     """Whether this query object appears in the current user's favorites list. This flag determines
     whether the star icon for favorites is selected."""
-    
+
     is_safe: Optional[bool] = None
     """Text parameter types are not safe from SQL injection for all types of data source. Set this
     Boolean parameter to `true` if a query either does not use any text type parameters or uses a
     data source type where text type parameters are handled safely."""
-    
+
     last_modified_by: Optional[User] = None
-    
+
     last_modified_by_id: Optional[int] = None
     """The ID of the user who last saved changes to this query."""
-    
+
     latest_query_data_id: Optional[str] = None
     """If there is a cached result for this query and user, this field includes the query result ID.
     If this query uses parameters, this field is always null."""
-    
+
     name: Optional[str] = None
     """The title of this query that appears in list views, widget headings, and on the query page."""
-    
+
     options: Optional[QueryOptions] = None
-    
+
     parent: Optional[str] = None
     """The identifier of the workspace folder containing the object."""
-    
+
     permission_tier: Optional[PermissionLevel] = None
     """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the
     query * `CAN_MANAGE`: Can manage the query"""
-    
+
     query: Optional[str] = None
     """The text of the query to be run."""
-    
+
     query_hash: Optional[str] = None
     """A SHA-256 hash of the query text along with the authenticated user ID."""
-    
+
     run_as_role: Optional[RunAsRole] = None
     """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
     viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
-    
+
     tags: Optional[List[str]] = None
-    
+
     updated_at: Optional[str] = None
     """The timestamp at which this query was last updated."""
-    
+
     user: Optional[User] = None
-    
+
     user_id: Optional[int] = None
     """The ID of the user who owns the query."""
-    
+
     visualizations: Optional[List[LegacyVisualization]] = None
-    
+
     def as_dict(self) -> dict:
         """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
-        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
-        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
-        if self.query is not None: body['query'] = self.query
-        if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        if self.can_edit is not None:
+            body["can_edit"] = self.can_edit
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.is_safe is not None:
+            body["is_safe"] = self.is_safe
+        if self.last_modified_by:
+            body["last_modified_by"] = self.last_modified_by.as_dict()
+        if self.last_modified_by_id is not None:
+            body["last_modified_by_id"] = self.last_modified_by_id
+        if self.latest_query_data_id is not None:
+            body["latest_query_data_id"] = self.latest_query_data_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options.as_dict()
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.permission_tier is not None:
+            body["permission_tier"] = self.permission_tier.value
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_hash is not None:
+            body["query_hash"] = self.query_hash
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role.value
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user.as_dict()
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.visualizations:
+            body["visualizations"] = [v.as_dict() for v in self.visualizations]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LegacyQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by
-        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
-        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier
-        if self.query is not None: body['query'] = self.query
-        if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
-        if self.tags: body['tags'] = self.tags
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = self.visualizations
+        if self.can_edit is not None:
+            body["can_edit"] = self.can_edit
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.is_archived is not None:
+            body["is_archived"] = self.is_archived
+        if self.is_draft is not None:
+            body["is_draft"] = self.is_draft
+        if self.is_favorite is not None:
+            body["is_favorite"] = self.is_favorite
+        if self.is_safe is not None:
+            body["is_safe"] = self.is_safe
+        if self.last_modified_by:
+            body["last_modified_by"] = self.last_modified_by
+        if self.last_modified_by_id is not None:
+            body["last_modified_by_id"] = self.last_modified_by_id
+        if self.latest_query_data_id is not None:
+            body["latest_query_data_id"] = self.latest_query_data_id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.parent is not None:
+            body["parent"] = self.parent
+        if self.permission_tier is not None:
+            body["permission_tier"] = self.permission_tier
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_hash is not None:
+            body["query_hash"] = self.query_hash
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role
+        if self.tags:
+            body["tags"] = self.tags
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
+        if self.user:
+            body["user"] = self.user
+        if self.user_id is not None:
+            body["user_id"] = self.user_id
+        if self.visualizations:
+            body["visualizations"] = self.visualizations
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LegacyQuery:
         """Deserializes the LegacyQuery from a dictionary."""
-        return cls(can_edit=d.get('can_edit', None), created_at=d.get('created_at', None), data_source_id=d.get('data_source_id', None), description=d.get('description', None), id=d.get('id', None), is_archived=d.get('is_archived', None), is_draft=d.get('is_draft', None), is_favorite=d.get('is_favorite', None), is_safe=d.get('is_safe', None), last_modified_by=_from_dict(d, 'last_modified_by', User), last_modified_by_id=d.get('last_modified_by_id', None), latest_query_data_id=d.get('latest_query_data_id', None), name=d.get('name', None), options=_from_dict(d, 'options', QueryOptions), parent=d.get('parent', None), permission_tier=_enum(d, 'permission_tier', PermissionLevel), query=d.get('query', None), query_hash=d.get('query_hash', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None), updated_at=d.get('updated_at', None), user=_from_dict(d, 'user', User), user_id=d.get('user_id', None), visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization))
-    
-    
+        return cls(
+            can_edit=d.get("can_edit", None),
+            created_at=d.get("created_at", None),
+            data_source_id=d.get("data_source_id", None),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            is_archived=d.get("is_archived", None),
+            is_draft=d.get("is_draft", None),
+            is_favorite=d.get("is_favorite", None),
+            is_safe=d.get("is_safe", None),
+            last_modified_by=_from_dict(d, "last_modified_by", User),
+            last_modified_by_id=d.get("last_modified_by_id", None),
+            latest_query_data_id=d.get("latest_query_data_id", None),
+            name=d.get("name", None),
+            options=_from_dict(d, "options", QueryOptions),
+            parent=d.get("parent", None),
+            permission_tier=_enum(d, "permission_tier", PermissionLevel),
+            query=d.get("query", None),
+            query_hash=d.get("query_hash", None),
+            run_as_role=_enum(d, "run_as_role", RunAsRole),
+            tags=d.get("tags", None),
+            updated_at=d.get("updated_at", None),
+            user=_from_dict(d, "user", User),
+            user_id=d.get("user_id", None),
+            visualizations=_repeated_dict(d, "visualizations", LegacyVisualization),
+        )


 @dataclass
@@ -3634,846 +4568,1065 @@ class LegacyVisualization:
     visualization by copying description objects received _from the API_ and then using them to
     create a new one with a POST request to the same endpoint. Databricks does not recommend
     constructing ad-hoc visualizations entirely in JSON."""
-    
+
     created_at: Optional[str] = None
-    
+
     description: Optional[str] = None
     """A short description of this visualization. This is not displayed in the UI."""
-    
+
     id: Optional[str] = None
     """The UUID for this visualization."""
-    
+
     name: Optional[str] = None
     """The name of the visualization that appears on dashboards and the query screen."""
-    
+
     options: Optional[Any] = None
     """The options object varies widely from one visualization type to the next and is unsupported.
     Databricks does not recommend modifying visualization settings in JSON."""
-    
+
     query: Optional[LegacyQuery] = None
-    
+
     type: Optional[str] = None
     """The type of visualization: chart, table, pivot table, and so on."""
-    
+
     updated_at: Optional[str] = None
-    
+
     def as_dict(self) -> dict:
         """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query: body['query'] = self.query.as_dict()
-        if self.type is not None: body['type'] = self.type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query:
+            body["query"] = self.query.as_dict()
+        if self.type is not None:
+            body["type"] = self.type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the LegacyVisualization into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.description is not None: body['description'] = self.description
-        if self.id is not None: body['id'] = self.id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query: body['query'] = self.query
-        if self.type is not None: body['type'] = self.type
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.created_at is not None:
+            body["created_at"] = self.created_at
+        if self.description is not None:
+            body["description"] = self.description
+        if self.id is not None:
+            body["id"] = self.id
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query:
+            body["query"] = self.query
+        if self.type is not None:
+            body["type"] = self.type
+        if self.updated_at is not None:
+            body["updated_at"] = self.updated_at
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> LegacyVisualization:
         """Deserializes the LegacyVisualization from a dictionary."""
-        return cls(created_at=d.get('created_at', None), description=d.get('description', None), id=d.get('id', None), name=d.get('name', None), options=d.get('options', None), query=_from_dict(d, 'query', LegacyQuery), type=d.get('type', None), updated_at=d.get('updated_at', None))
-    
-    
+        return cls(
+            created_at=d.get("created_at", None),
+            description=d.get("description", None),
+            id=d.get("id", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            query=_from_dict(d, "query", LegacyQuery),
+            type=d.get("type", None),
+            updated_at=d.get("updated_at", None),
+        )
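# These generated dataclasses round-trip through plain dictionaries; a minimal sketch
# using `LegacyVisualization` (the literal values below are illustrative only):
#
#     viz = LegacyVisualization.from_dict({"id": "v1", "type": "chart", "query": {"id": "q1"}})
#     viz.query               # deserialized into a nested LegacyQuery instance
#     viz.as_dict()["query"]  # re-serialized back into a plain dict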
id=d.get("id", None), + name=d.get("name", None), + options=d.get("options", None), + query=_from_dict(d, "query", LegacyQuery), + type=d.get("type", None), + updated_at=d.get("updated_at", None), + ) class LifecycleState(Enum): - - - ACTIVE = 'ACTIVE' - TRASHED = 'TRASHED' - + ACTIVE = "ACTIVE" + TRASHED = "TRASHED" @dataclass class ListAlertsResponse: next_page_token: Optional[str] = None - + results: Optional[List[ListAlertsResponseAlert]] = None - + def as_dict(self) -> dict: """Serializes the ListAlertsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.results: body['results'] = [v.as_dict() for v in self.results] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.results: + body["results"] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the ListAlertsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.results: body['results'] = self.results + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.results: + body["results"] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListAlertsResponse: """Deserializes the ListAlertsResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', ListAlertsResponseAlert)) - - + return cls( + next_page_token=d.get("next_page_token", None), + results=_repeated_dict(d, "results", ListAlertsResponseAlert), + ) @dataclass class ListAlertsResponseAlert: condition: Optional[AlertCondition] = None """Trigger conditions of the alert.""" - + create_time: Optional[str] = None """The timestamp indicating when the alert was created.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + id: Optional[str] = None """UUID identifying the alert.""" - + lifecycle_state: Optional[LifecycleState] = None """The workspace state of the alert. Used for tracking trashed status.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when alert returns back to normal.""" - + owner_user_name: Optional[str] = None """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - + state: Optional[AlertState] = None """Current state of the alert's trigger status. 
 @dataclass
 class ListAlertsResponseAlert:
     condition: Optional[AlertCondition] = None
     """Trigger conditions of the alert."""
-
+
     create_time: Optional[str] = None
     """The timestamp indicating when the alert was created."""
-
+
     custom_body: Optional[str] = None
     """Custom body of alert notification, if it exists. See [here] for custom templating instructions.

     [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
-
+
     custom_subject: Optional[str] = None
     """Custom subject of alert notification, if it exists. This can include email subject entries and
     Slack notification headers, for example. See [here] for custom templating instructions.

     [here]: https://docs.databricks.com/sql/user/alerts/index.html"""
-
+
     display_name: Optional[str] = None
     """The display name of the alert."""
-
+
     id: Optional[str] = None
     """UUID identifying the alert."""
-
+
     lifecycle_state: Optional[LifecycleState] = None
     """The workspace state of the alert. Used for tracking trashed status."""
-
+
     notify_on_ok: Optional[bool] = None
     """Whether to notify alert subscribers when alert returns back to normal."""
-
+
     owner_user_name: Optional[str] = None
     """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
-
+
     query_id: Optional[str] = None
     """UUID of the query attached to the alert."""
-
+
     seconds_to_retrigger: Optional[int] = None
     """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
     can be triggered again. If 0 or not specified, the alert will not be triggered again."""
-
+
     state: Optional[AlertState] = None
     """Current state of the alert's trigger status.
     This field is set to UNKNOWN if the alert has not yet been evaluated or ran into an error during
     the last evaluation."""
-
+
     trigger_time: Optional[str] = None
     """Timestamp when the alert was last triggered, if the alert has been triggered before."""
-
+
     update_time: Optional[str] = None
     """The timestamp indicating when the alert was updated."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.condition: body['condition'] = self.condition.as_dict()
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
-        if self.state is not None: body['state'] = self.state.value
-        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.condition:
+            body["condition"] = self.condition.as_dict()
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.trigger_time is not None:
+            body["trigger_time"] = self.trigger_time
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListAlertsResponseAlert into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.condition: body['condition'] = self.condition
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.custom_body is not None: body['custom_body'] = self.custom_body
-        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
-        if self.state is not None: body['state'] = self.state
-        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
-        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.condition:
+            body["condition"] = self.condition
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.custom_body is not None:
+            body["custom_body"] = self.custom_body
+        if self.custom_subject is not None:
+            body["custom_subject"] = self.custom_subject
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.notify_on_ok is not None:
+            body["notify_on_ok"] = self.notify_on_ok
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.seconds_to_retrigger is not None:
+            body["seconds_to_retrigger"] = self.seconds_to_retrigger
+        if self.state is not None:
+            body["state"] = self.state
+        if self.trigger_time is not None:
+            body["trigger_time"] = self.trigger_time
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAlertsResponseAlert:
         """Deserializes the ListAlertsResponseAlert from a dictionary."""
-        return cls(condition=_from_dict(d, 'condition', AlertCondition), create_time=d.get('create_time', None), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), display_name=d.get('display_name', None), id=d.get('id', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), notify_on_ok=d.get('notify_on_ok', None), owner_user_name=d.get('owner_user_name', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None), state=_enum(d, 'state', AlertState), trigger_time=d.get('trigger_time', None), update_time=d.get('update_time', None))
-
-
-
-
+        return cls(
+            condition=_from_dict(d, "condition", AlertCondition),
+            create_time=d.get("create_time", None),
+            custom_body=d.get("custom_body", None),
+            custom_subject=d.get("custom_subject", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            notify_on_ok=d.get("notify_on_ok", None),
+            owner_user_name=d.get("owner_user_name", None),
+            query_id=d.get("query_id", None),
+            seconds_to_retrigger=d.get("seconds_to_retrigger", None),
+            state=_enum(d, "state", AlertState),
+            trigger_time=d.get("trigger_time", None),
+            update_time=d.get("update_time", None),
+        )
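The `state` and `seconds_to_retrigger` fields together determine whether an alert can fire again. A sketch, assuming `AlertState` exposes a `TRIGGERED` member as it does elsewhere in this module:

```python
from databricks.sdk.service.sql import AlertState

def can_rearm(alert) -> bool:
    # Per the docstring above: 0 or unset means the alert will not be
    # triggered again until it returns to normal.
    return alert.state == AlertState.TRIGGERED and bool(alert.seconds_to_retrigger)
```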
 @dataclass
 class ListAlertsV2Response:
     next_page_token: Optional[str] = None
-
+
     results: Optional[List[AlertV2]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListAlertsV2Response into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListAlertsV2Response into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = self.results
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = self.results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAlertsV2Response:
         """Deserializes the ListAlertsV2Response from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', AlertV2))
-
-
-
-
+        return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", AlertV2))


 class ListOrder(Enum):
-
-
-    CREATED_AT = 'created_at'
-    NAME = 'name'
-
-
-
-
+    CREATED_AT = "created_at"
+    NAME = "name"


 @dataclass
 class ListQueriesResponse:
     has_next_page: Optional[bool] = None
     """Whether there is another page of results."""
-
+
     next_page_token: Optional[str] = None
     """A token that can be used to get the next page of results."""
-
+
     res: Optional[List[QueryInfo]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListQueriesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.res: body['res'] = [v.as_dict() for v in self.res]
+        if self.has_next_page is not None:
+            body["has_next_page"] = self.has_next_page
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.res:
+            body["res"] = [v.as_dict() for v in self.res]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListQueriesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.res: body['res'] = self.res
+        if self.has_next_page is not None:
+            body["has_next_page"] = self.has_next_page
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.res:
+            body["res"] = self.res
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListQueriesResponse:
         """Deserializes the ListQueriesResponse from a dictionary."""
-        return cls(has_next_page=d.get('has_next_page', None), next_page_token=d.get('next_page_token', None), res=_repeated_dict(d, 'res', QueryInfo))
-
-
-
-
+        return cls(
+            has_next_page=d.get("has_next_page", None),
+            next_page_token=d.get("next_page_token", None),
+            res=_repeated_dict(d, "res", QueryInfo),
+        )
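`ListQueriesResponse` pages the query-history API and carries both `has_next_page` and `next_page_token`. A sketch of collecting every page, with `list_history` standing in for the generated call:

```python
def all_history(list_history):
    token, out = None, []
    while True:
        page = list_history(page_token=token)  # returns a ListQueriesResponse
        out.extend(page.res or [])
        if not page.has_next_page:
            return out
        token = page.next_page_token
```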
 @dataclass
 class ListQueryObjectsResponse:
     next_page_token: Optional[str] = None
-
+
     results: Optional[List[ListQueryObjectsResponseQuery]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = self.results
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = self.results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListQueryObjectsResponse:
         """Deserializes the ListQueryObjectsResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery))
-
-
+        return cls(
+            next_page_token=d.get("next_page_token", None),
+            results=_repeated_dict(d, "results", ListQueryObjectsResponseQuery),
+        )


 @dataclass
 class ListQueryObjectsResponseQuery:
     apply_auto_limit: Optional[bool] = None
     """Whether to apply a 1000 row limit to the query result."""
-
+
     catalog: Optional[str] = None
     """Name of the catalog where this query will be executed."""
-
+
     create_time: Optional[str] = None
     """Timestamp when this query was created."""
-
+
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
-
+
     display_name: Optional[str] = None
     """Display name of the query that appears in list views, widget headings, and on the query page."""
-
+
     id: Optional[str] = None
     """UUID identifying the query."""
-
+
     last_modifier_user_name: Optional[str] = None
     """Username of the user who last saved changes to this query."""
-
+
     lifecycle_state: Optional[LifecycleState] = None
     """Indicates whether the query is trashed."""
-
+
     owner_user_name: Optional[str] = None
     """Username of the user that owns the query."""
-
+
     parameters: Optional[List[QueryParameter]] = None
     """List of query parameter definitions."""
-
+
     query_text: Optional[str] = None
     """Text of the query to be run."""
-
+
     run_as_mode: Optional[RunAsMode] = None
     """Sets the "Run as" role for the object."""
-
+
     schema: Optional[str] = None
     """Name of the schema where this query will be executed."""
-
+
     tags: Optional[List[str]] = None
-
+
     update_time: Optional[str] = None
     """Timestamp when this query was last updated."""
-
+
     warehouse_id: Optional[str] = None
     """ID of the SQL warehouse attached to the query."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_modifier_user_name is not None:
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode.value
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListQueryObjectsResponseQuery into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = self.parameters
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = self.tags
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_modifier_user_name is not None:
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = self.tags
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListQueryObjectsResponseQuery:
         """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
-        return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), create_time=d.get('create_time', None), description=d.get('description', None), display_name=d.get('display_name', None), id=d.get('id', None), last_modifier_user_name=d.get('last_modifier_user_name', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), owner_user_name=d.get('owner_user_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None))
-
-
+        return cls(
+            apply_auto_limit=d.get("apply_auto_limit", None),
+            catalog=d.get("catalog", None),
+            create_time=d.get("create_time", None),
+            description=d.get("description", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            last_modifier_user_name=d.get("last_modifier_user_name", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            owner_user_name=d.get("owner_user_name", None),
+            parameters=_repeated_dict(d, "parameters", QueryParameter),
+            query_text=d.get("query_text", None),
+            run_as_mode=_enum(d, "run_as_mode", RunAsMode),
+            schema=d.get("schema", None),
+            tags=d.get("tags", None),
+            update_time=d.get("update_time", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )


 @dataclass
 class ListResponse:
     count: Optional[int] = None
     """The total number of dashboards."""
-
+
     page: Optional[int] = None
     """The current page being displayed."""
-
+
     page_size: Optional[int] = None
     """The number of dashboards per page."""
-
+
     results: Optional[List[Dashboard]] = None
     """List of dashboards returned."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.page is not None: body['page'] = self.page
-        if self.page_size is not None: body['page_size'] = self.page_size
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.count is not None:
+            body["count"] = self.count
+        if self.page is not None:
+            body["page"] = self.page
+        if self.page_size is not None:
+            body["page_size"] = self.page_size
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.count is not None: body['count'] = self.count
-        if self.page is not None: body['page'] = self.page
-        if self.page_size is not None: body['page_size'] = self.page_size
-        if self.results: body['results'] = self.results
+        if self.count is not None:
+            body["count"] = self.count
+        if self.page is not None:
+            body["page"] = self.page
+        if self.page_size is not None:
+            body["page_size"] = self.page_size
+        if self.results:
+            body["results"] = self.results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
-        return cls(count=d.get('count', None), page=d.get('page', None), page_size=d.get('page_size', None), results=_repeated_dict(d, 'results', Dashboard))
-
-
-
-
+        return cls(
+            count=d.get("count", None),
+            page=d.get("page", None),
+            page_size=d.get("page_size", None),
+            results=_repeated_dict(d, "results", Dashboard),
+        )
 @dataclass
 class ListVisualizationsForQueryResponse:
     next_page_token: Optional[str] = None
-
+
     results: Optional[List[Visualization]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = [v.as_dict() for v in self.results]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListVisualizationsForQueryResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.results: body['results'] = self.results
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.results:
+            body["results"] = self.results
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListVisualizationsForQueryResponse:
         """Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), results=_repeated_dict(d, 'results', Visualization))
-
-
-
-
+        return cls(next_page_token=d.get("next_page_token", None), results=_repeated_dict(d, "results", Visualization))


 @dataclass
 class ListWarehousesResponse:
     warehouses: Optional[List[EndpointInfo]] = None
     """A list of warehouses and their configurations."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses]
+        if self.warehouses:
+            body["warehouses"] = [v.as_dict() for v in self.warehouses]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.warehouses: body['warehouses'] = self.warehouses
+        if self.warehouses:
+            body["warehouses"] = self.warehouses
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListWarehousesResponse:
         """Deserializes the ListWarehousesResponse from a dictionary."""
-        return cls(warehouses=_repeated_dict(d, 'warehouses', EndpointInfo))
-
-
+        return cls(warehouses=_repeated_dict(d, "warehouses", EndpointInfo))
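`ListWarehousesResponse` is unpaginated: one call returns every warehouse. A sketch of summarizing it; the `name` attribute on `EndpointInfo` is an assumption for illustration:

```python
def warehouse_names(resp) -> list:
    # EndpointInfo.name is assumed here; adjust if the field differs.
    return [w.name for w in resp.warehouses or []]
```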
 @dataclass
 class MultiValuesOptions:
     prefix: Optional[str] = None
     """Character that prefixes each selected parameter value."""
-
+
     separator: Optional[str] = None
     """Character that separates each selected parameter value. Defaults to a comma."""
-
+
     suffix: Optional[str] = None
     """Character that suffixes each selected parameter value."""
-
+
     def as_dict(self) -> dict:
         """Serializes the MultiValuesOptions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.prefix is not None: body['prefix'] = self.prefix
-        if self.separator is not None: body['separator'] = self.separator
-        if self.suffix is not None: body['suffix'] = self.suffix
+        if self.prefix is not None:
+            body["prefix"] = self.prefix
+        if self.separator is not None:
+            body["separator"] = self.separator
+        if self.suffix is not None:
+            body["suffix"] = self.suffix
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the MultiValuesOptions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.prefix is not None: body['prefix'] = self.prefix
-        if self.separator is not None: body['separator'] = self.separator
-        if self.suffix is not None: body['suffix'] = self.suffix
+        if self.prefix is not None:
+            body["prefix"] = self.prefix
+        if self.separator is not None:
+            body["separator"] = self.separator
+        if self.suffix is not None:
+            body["suffix"] = self.suffix
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> MultiValuesOptions:
         """Deserializes the MultiValuesOptions from a dictionary."""
-        return cls(prefix=d.get('prefix', None), separator=d.get('separator', None), suffix=d.get('suffix', None))
-
-
+        return cls(prefix=d.get("prefix", None), separator=d.get("separator", None), suffix=d.get("suffix", None))
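`MultiValuesOptions` describes how multiple selected values are stitched into query text. A sketch of the rendering it implies (the helper is hypothetical, not SDK API):

```python
def render_multi(opts, values) -> str:
    """Wrap each value in prefix/suffix and join with the separator."""
    sep = opts.separator or ","  # per the docstring: defaults to a comma
    pre = opts.prefix or ""
    suf = opts.suffix or ""
    return sep.join(f"{pre}{v}{suf}" for v in values)

# MultiValuesOptions(prefix="'", suffix="'") over ["a", "b"] -> "'a','b'"
```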
 @dataclass
 class NumericValue:
     value: Optional[float] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the NumericValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the NumericValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.value is not None: body['value'] = self.value
+        if self.value is not None:
+            body["value"] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> NumericValue:
         """Deserializes the NumericValue from a dictionary."""
-        return cls(value=d.get('value', None))
-
-
+        return cls(value=d.get("value", None))


 class ObjectType(Enum):
     """A singular noun object type."""
-
-    ALERT = 'alert'
-    DASHBOARD = 'dashboard'
-    DATA_SOURCE = 'data_source'
-    QUERY = 'query'
+
+    ALERT = "alert"
+    DASHBOARD = "dashboard"
+    DATA_SOURCE = "data_source"
+    QUERY = "query"
+

 class ObjectTypePlural(Enum):
     """Always a plural of the object type."""
-
-    ALERTS = 'alerts'
-    DASHBOARDS = 'dashboards'
-    DATA_SOURCES = 'data_sources'
-    QUERIES = 'queries'
+
+    ALERTS = "alerts"
+    DASHBOARDS = "dashboards"
+    DATA_SOURCES = "data_sources"
+    QUERIES = "queries"
+

 @dataclass
 class OdbcParams:
     hostname: Optional[str] = None
-
+
     path: Optional[str] = None
-
+
     port: Optional[int] = None
-
+
     protocol: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the OdbcParams into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.hostname is not None: body['hostname'] = self.hostname
-        if self.path is not None: body['path'] = self.path
-        if self.port is not None: body['port'] = self.port
-        if self.protocol is not None: body['protocol'] = self.protocol
+        if self.hostname is not None:
+            body["hostname"] = self.hostname
+        if self.path is not None:
+            body["path"] = self.path
+        if self.port is not None:
+            body["port"] = self.port
+        if self.protocol is not None:
+            body["protocol"] = self.protocol
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the OdbcParams into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.hostname is not None: body['hostname'] = self.hostname
-        if self.path is not None: body['path'] = self.path
-        if self.port is not None: body['port'] = self.port
-        if self.protocol is not None: body['protocol'] = self.protocol
+        if self.hostname is not None:
+            body["hostname"] = self.hostname
+        if self.path is not None:
+            body["path"] = self.path
+        if self.port is not None:
+            body["port"] = self.port
+        if self.protocol is not None:
+            body["protocol"] = self.protocol
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> OdbcParams:
         """Deserializes the OdbcParams from a dictionary."""
-        return cls(hostname=d.get('hostname', None), path=d.get('path', None), port=d.get('port', None), protocol=d.get('protocol', None))
-
-
+        return cls(
+            hostname=d.get("hostname", None),
+            path=d.get("path", None),
+            port=d.get("port", None),
+            protocol=d.get("protocol", None),
+        )
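`OdbcParams` only carries the raw connection coordinates; assembling them into a URL is left to the client. A sketch of one plausible composition (the URL shape is illustrative, not an official connection-string format):

```python
def odbc_url(p) -> str:
    # e.g. "https://host:443/sql/1.0/warehouses/abc" given matching fields
    return f"{p.protocol}://{p.hostname}:{p.port}{p.path}"
```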
 class OwnableObjectType(Enum):
     """The singular form of the type of object which can be owned."""
-
-    ALERT = 'alert'
-    DASHBOARD = 'dashboard'
-    QUERY = 'query'
+
+    ALERT = "alert"
+    DASHBOARD = "dashboard"
+    QUERY = "query"
+

 @dataclass
 class Parameter:
     enum_options: Optional[str] = None
     """List of valid parameter values, newline delimited. Only applies for dropdown list parameters."""
-
+
     multi_values_options: Optional[MultiValuesOptions] = None
     """If specified, allows multiple values to be selected for this parameter. Only applies to
     dropdown list and query-based dropdown list parameters."""
-
+
     name: Optional[str] = None
     """The literal parameter marker that appears between double curly braces in the query text."""
-
+
     query_id: Optional[str] = None
     """The UUID of the query that provides the parameter values. Only applies for query-based dropdown
     list parameters."""
-
+
     title: Optional[str] = None
     """The text displayed in a parameter picking widget."""
-
+
     type: Optional[ParameterType] = None
     """Parameters can have several different types."""
-
+
     value: Optional[Any] = None
     """The default value for this parameter."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Parameter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.enum_options is not None: body['enumOptions'] = self.enum_options
-        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.query_id is not None: body['queryId'] = self.query_id
-        if self.title is not None: body['title'] = self.title
-        if self.type is not None: body['type'] = self.type.value
-        if self.value: body['value'] = self.value
+        if self.enum_options is not None:
+            body["enumOptions"] = self.enum_options
+        if self.multi_values_options:
+            body["multiValuesOptions"] = self.multi_values_options.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        if self.query_id is not None:
+            body["queryId"] = self.query_id
+        if self.title is not None:
+            body["title"] = self.title
+        if self.type is not None:
+            body["type"] = self.type.value
+        if self.value:
+            body["value"] = self.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Parameter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.enum_options is not None: body['enumOptions'] = self.enum_options
-        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options
-        if self.name is not None: body['name'] = self.name
-        if self.query_id is not None: body['queryId'] = self.query_id
-        if self.title is not None: body['title'] = self.title
-        if self.type is not None: body['type'] = self.type
-        if self.value: body['value'] = self.value
+        if self.enum_options is not None:
+            body["enumOptions"] = self.enum_options
+        if self.multi_values_options:
+            body["multiValuesOptions"] = self.multi_values_options
+        if self.name is not None:
+            body["name"] = self.name
+        if self.query_id is not None:
+            body["queryId"] = self.query_id
+        if self.title is not None:
+            body["title"] = self.title
+        if self.type is not None:
+            body["type"] = self.type
+        if self.value:
+            body["value"] = self.value
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Parameter:
         """Deserializes the Parameter from a dictionary."""
-        return cls(enum_options=d.get('enumOptions', None), multi_values_options=_from_dict(d, 'multiValuesOptions', MultiValuesOptions), name=d.get('name', None), query_id=d.get('queryId', None), title=d.get('title', None), type=_enum(d, 'type', ParameterType), value=d.get('value', None))
-
-
+        return cls(
+            enum_options=d.get("enumOptions", None),
+            multi_values_options=_from_dict(d, "multiValuesOptions", MultiValuesOptions),
+            name=d.get("name", None),
+            query_id=d.get("queryId", None),
+            title=d.get("title", None),
+            type=_enum(d, "type", ParameterType),
+            value=d.get("value", None),
+        )
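A `Parameter.name` is the literal marker that appears between double curly braces in the query text. A sketch of the substitution this implies (the `bind` helper is hypothetical, not SDK API):

```python
def bind(query_text: str, name: str, rendered_value: str) -> str:
    return query_text.replace("{{ " + name + " }}", rendered_value)

sql = "SELECT * FROM orders WHERE region = {{ region }}"
print(bind(sql, "region", "'EMEA'"))
# SELECT * FROM orders WHERE region = 'EMEA'
```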
 class ParameterType(Enum):
     """Parameters can have several different types."""
-
-    DATETIME = 'datetime'
-    ENUM = 'enum'
-    NUMBER = 'number'
-    QUERY = 'query'
-    TEXT = 'text'
+
+    DATETIME = "datetime"
+    ENUM = "enum"
+    NUMBER = "number"
+    QUERY = "query"
+    TEXT = "text"
+

 class PermissionLevel(Enum):
     """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the
     query * `CAN_MANAGE`: Can manage the query"""
-
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_RUN = 'CAN_RUN'
-    CAN_VIEW = 'CAN_VIEW'
+
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_RUN = "CAN_RUN"
+    CAN_VIEW = "CAN_VIEW"
+

 class PlansState(Enum):
     """Possible Reasons for which we have not saved plans in the database"""
-
-    EMPTY = 'EMPTY'
-    EXISTS = 'EXISTS'
-    IGNORED_LARGE_PLANS_SIZE = 'IGNORED_LARGE_PLANS_SIZE'
-    IGNORED_SMALL_DURATION = 'IGNORED_SMALL_DURATION'
-    IGNORED_SPARK_PLAN_TYPE = 'IGNORED_SPARK_PLAN_TYPE'
-    UNKNOWN = 'UNKNOWN'
+
+    EMPTY = "EMPTY"
+    EXISTS = "EXISTS"
+    IGNORED_LARGE_PLANS_SIZE = "IGNORED_LARGE_PLANS_SIZE"
+    IGNORED_SMALL_DURATION = "IGNORED_SMALL_DURATION"
+    IGNORED_SPARK_PLAN_TYPE = "IGNORED_SPARK_PLAN_TYPE"
+    UNKNOWN = "UNKNOWN"
+

 @dataclass
 class Query:
     apply_auto_limit: Optional[bool] = None
     """Whether to apply a 1000 row limit to the query result."""
-
+
     catalog: Optional[str] = None
     """Name of the catalog where this query will be executed."""
-
+
     create_time: Optional[str] = None
     """Timestamp when this query was created."""
-
+
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
-
+
     display_name: Optional[str] = None
     """Display name of the query that appears in list views, widget headings, and on the query page."""
-
+
     id: Optional[str] = None
     """UUID identifying the query."""
-
+
     last_modifier_user_name: Optional[str] = None
     """Username of the user who last saved changes to this query."""
-
+
     lifecycle_state: Optional[LifecycleState] = None
     """Indicates whether the query is trashed."""
-
+
     owner_user_name: Optional[str] = None
     """Username of the user that owns the query."""
-
+
     parameters: Optional[List[QueryParameter]] = None
     """List of query parameter definitions."""
-
+
     parent_path: Optional[str] = None
     """Workspace path of the workspace folder containing the object."""
-
+
     query_text: Optional[str] = None
     """Text of the query to be run."""
-
+
     run_as_mode: Optional[RunAsMode] = None
     """Sets the "Run as" role for the object."""
-
+
     schema: Optional[str] = None
     """Name of the schema where this query will be executed."""
-
+
     tags: Optional[List[str]] = None
-
+
     update_time: Optional[str] = None
     """Timestamp when this query was last updated."""
-
+
     warehouse_id: Optional[str] = None
     """ID of the SQL warehouse attached to the query."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Query into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = [v for v in self.tags]
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_modifier_user_name is not None:
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state.value
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode.value
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Query into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
-        if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.id is not None: body['id'] = self.id
-        if self.last_modifier_user_name is not None: body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = self.parameters
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
-        if self.schema is not None: body['schema'] = self.schema
-        if self.tags: body['tags'] = self.tags
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.apply_auto_limit is not None:
+            body["apply_auto_limit"] = self.apply_auto_limit
+        if self.catalog is not None:
+            body["catalog"] = self.catalog
+        if self.create_time is not None:
+            body["create_time"] = self.create_time
+        if self.description is not None:
+            body["description"] = self.description
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.id is not None:
+            body["id"] = self.id
+        if self.last_modifier_user_name is not None:
+            body["last_modifier_user_name"] = self.last_modifier_user_name
+        if self.lifecycle_state is not None:
+            body["lifecycle_state"] = self.lifecycle_state
+        if self.owner_user_name is not None:
+            body["owner_user_name"] = self.owner_user_name
+        if self.parameters:
+            body["parameters"] = self.parameters
+        if self.parent_path is not None:
+            body["parent_path"] = self.parent_path
+        if self.query_text is not None:
+            body["query_text"] = self.query_text
+        if self.run_as_mode is not None:
+            body["run_as_mode"] = self.run_as_mode
+        if self.schema is not None:
+            body["schema"] = self.schema
+        if self.tags:
+            body["tags"] = self.tags
+        if self.update_time is not None:
+            body["update_time"] = self.update_time
+        if self.warehouse_id is not None:
+            body["warehouse_id"] = self.warehouse_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Query:
         """Deserializes the Query from a dictionary."""
-        return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), create_time=d.get('create_time', None), description=d.get('description', None), display_name=d.get('display_name', None), id=d.get('id', None), last_modifier_user_name=d.get('last_modifier_user_name', None), lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), owner_user_name=d.get('owner_user_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), parent_path=d.get('parent_path', None), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), update_time=d.get('update_time', None), warehouse_id=d.get('warehouse_id', None))
-
-
+        return cls(
+            apply_auto_limit=d.get("apply_auto_limit", None),
+            catalog=d.get("catalog", None),
+            create_time=d.get("create_time", None),
+            description=d.get("description", None),
+            display_name=d.get("display_name", None),
+            id=d.get("id", None),
+            last_modifier_user_name=d.get("last_modifier_user_name", None),
+            lifecycle_state=_enum(d, "lifecycle_state", LifecycleState),
+            owner_user_name=d.get("owner_user_name", None),
+            parameters=_repeated_dict(d, "parameters", QueryParameter),
+            parent_path=d.get("parent_path", None),
+            query_text=d.get("query_text", None),
+            run_as_mode=_enum(d, "run_as_mode", RunAsMode),
+            schema=d.get("schema", None),
+            tags=d.get("tags", None),
+            update_time=d.get("update_time", None),
+            warehouse_id=d.get("warehouse_id", None),
+        )
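`Query` doubles as the request payload for the create/update endpoints, so only explicitly set fields are serialized. A minimal sketch with invented values:

```python
from databricks.sdk.service.sql import Query

q = Query(
    display_name="Daily active users",
    query_text="SELECT count(DISTINCT user_id) FROM events",
    warehouse_id="abc123",
    apply_auto_limit=True,
)
# Only the four fields set above appear in the request body.
body = q.as_dict()
```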
 @dataclass
 class QueryBackedValue:
     multi_values_options: Optional[MultiValuesOptions] = None
     """If specified, allows multiple values to be selected for this parameter."""
-
+
     query_id: Optional[str] = None
     """UUID of the query that provides the parameter values."""
-
+
     values: Optional[List[str]] = None
     """List of selected query parameter values."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict()
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.values: body['values'] = [v for v in self.values]
+        if self.multi_values_options:
+            body["multi_values_options"] = self.multi_values_options.as_dict()
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.values:
+            body["values"] = [v for v in self.values]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.values: body['values'] = self.values
+        if self.multi_values_options:
+            body["multi_values_options"] = self.multi_values_options
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.values:
+            body["values"] = self.values
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueryBackedValue:
         """Deserializes the QueryBackedValue from a dictionary."""
-        return cls(multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), query_id=d.get('query_id', None), values=d.get('values', None))
-
-
+        return cls(
+            multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions),
+            query_id=d.get("query_id", None),
+            values=d.get("values", None),
+        )


 @dataclass
@@ -4483,475 +5636,673 @@ class QueryEditContent:
     warehouse ID. [Learn more]

     [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
-
+
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
-
+
     name: Optional[str] = None
     """The title of this query that appears in list views, widget headings, and on the query page."""
-
+
     options: Optional[Any] = None
     """Exclusively used for storing a list parameter definitions. A parameter is an object with
     `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It
     can be overridden at runtime."""
-
+
     query: Optional[str] = None
     """The text of the query to be run."""
-
+
     query_id: Optional[str] = None
-
+
     run_as_role: Optional[RunAsRole] = None
     """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
     viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
-
+
     tags: Optional[List[str]] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the QueryEditContent into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query is not None: body['query'] = self.query
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role.value
+        if self.tags:
+            body["tags"] = [v for v in self.tags]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options
-        if self.query is not None: body['query'] = self.query
-        if self.query_id is not None: body['query_id'] = self.query_id
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
-        if self.tags: body['tags'] = self.tags
+        if self.data_source_id is not None:
+            body["data_source_id"] = self.data_source_id
+        if self.description is not None:
+            body["description"] = self.description
+        if self.name is not None:
+            body["name"] = self.name
+        if self.options:
+            body["options"] = self.options
+        if self.query is not None:
+            body["query"] = self.query
+        if self.query_id is not None:
+            body["query_id"] = self.query_id
+        if self.run_as_role is not None:
+            body["run_as_role"] = self.run_as_role
+        if self.tags:
+            body["tags"] = self.tags
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueryEditContent:
         """Deserializes the QueryEditContent from a dictionary."""
-        return cls(data_source_id=d.get('data_source_id', None), description=d.get('description', None), name=d.get('name', None), options=d.get('options', None), query=d.get('query', None), query_id=d.get('query_id', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None))
-
-
+        return cls(
+            data_source_id=d.get("data_source_id", None),
+            description=d.get("description", None),
+            name=d.get("name", None),
+            options=d.get("options", None),
+            query=d.get("query", None),
+            query_id=d.get("query_id", None),
+            run_as_role=_enum(d, "run_as_role", RunAsRole),
+            tags=d.get("tags", None),
+        )
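`QueryEditContent` is the legacy edit payload; per the docstring above, `run_as_role` must be `"viewer"` or `"owner"`. A sketch with invented IDs, assuming `RunAsRole` exposes an `OWNER` member matching that value:

```python
from databricks.sdk.service.sql import QueryEditContent, RunAsRole

edit = QueryEditContent(query_id="q-123", name="Renamed query", run_as_role=RunAsRole.OWNER)
print(edit.as_dict())
# {'name': 'Renamed query', 'query_id': 'q-123', 'run_as_role': 'owner'}
```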
 @dataclass
 class QueryFilter:
     query_start_time_range: Optional[TimeRange] = None
     """A range filter for query submitted time. The time range must be <= 30 days."""
-
+
     statement_ids: Optional[List[str]] = None
     """A list of statement IDs."""
-
+
     statuses: Optional[List[QueryStatus]] = None
-
+
     user_ids: Optional[List[int]] = None
     """A list of user IDs who ran the queries."""
-
+
     warehouse_ids: Optional[List[str]] = None
     """A list of warehouse IDs."""
-
+
     def as_dict(self) -> dict:
         """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict()
-        if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids]
-        if self.statuses: body['statuses'] = [v.value for v in self.statuses]
-        if self.user_ids: body['user_ids'] = [v for v in self.user_ids]
-        if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids]
+        if self.query_start_time_range:
+            body["query_start_time_range"] = self.query_start_time_range.as_dict()
+        if self.statement_ids:
+            body["statement_ids"] = [v for v in self.statement_ids]
+        if self.statuses:
+            body["statuses"] = [v.value for v in self.statuses]
+        if self.user_ids:
+            body["user_ids"] = [v for v in self.user_ids]
+        if self.warehouse_ids:
+            body["warehouse_ids"] = [v for v in self.warehouse_ids]
        return body

     def as_shallow_dict(self) -> dict:
         """Serializes the QueryFilter into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range
-        if self.statement_ids: body['statement_ids'] = self.statement_ids
-        if self.statuses: body['statuses'] = self.statuses
-        if self.user_ids: body['user_ids'] = self.user_ids
-        if self.warehouse_ids: body['warehouse_ids'] = self.warehouse_ids
+        if self.query_start_time_range:
+            body["query_start_time_range"] = self.query_start_time_range
+        if self.statement_ids:
+            body["statement_ids"] = self.statement_ids
+        if self.statuses:
+            body["statuses"] = self.statuses
+        if self.user_ids:
+            body["user_ids"] = self.user_ids
+        if self.warehouse_ids:
+            body["warehouse_ids"] = self.warehouse_ids
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> QueryFilter:
         """Deserializes the QueryFilter from a dictionary."""
-        return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), statement_ids=d.get('statement_ids', None), statuses=_repeated_enum(d, 'statuses', QueryStatus), user_ids=d.get('user_ids', None), warehouse_ids=d.get('warehouse_ids', None))
-
-
+        return cls(
+            query_start_time_range=_from_dict(d, "query_start_time_range", TimeRange),
+            statement_ids=d.get("statement_ids", None),
+            statuses=_repeated_enum(d, "statuses", QueryStatus),
+            user_ids=d.get("user_ids", None),
+            warehouse_ids=d.get("warehouse_ids", None),
+        )
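`QueryFilter.query_start_time_range` must span at most 30 days. A sketch of a last-24-hours filter; the `start_time_ms`/`end_time_ms` field names on `TimeRange` are assumptions for illustration:

```python
import time

from databricks.sdk.service.sql import QueryFilter, TimeRange

now_ms = int(time.time() * 1000)
flt = QueryFilter(
    query_start_time_range=TimeRange(start_time_ms=now_ms - 24 * 3600 * 1000, end_time_ms=now_ms),
    warehouse_ids=["abc123"],
)
```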
- `FINISHED`: Query has completed.""" - + user_id: Optional[int] = None """The ID of the user who ran the query.""" - + user_name: Optional[str] = None """The email address or username of the user who ran the query.""" - + warehouse_id: Optional[str] = None """Warehouse ID.""" - + def as_dict(self) -> dict: """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.channel_used: body['channel_used'] = self.channel_used.as_dict() - if self.client_application is not None: body['client_application'] = self.client_application - if self.duration is not None: body['duration'] = self.duration - if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id - if self.error_message is not None: body['error_message'] = self.error_message - if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id - if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name - if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms - if self.is_final is not None: body['is_final'] = self.is_final - if self.lookup_key is not None: body['lookup_key'] = self.lookup_key - if self.metrics: body['metrics'] = self.metrics.as_dict() - if self.plans_state is not None: body['plans_state'] = self.plans_state.value - if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms - if self.query_id is not None: body['query_id'] = self.query_id - if self.query_source: body['query_source'] = self.query_source.as_dict() - if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms - if self.query_text is not None: body['query_text'] = self.query_text - if self.rows_produced is not None: body['rows_produced'] = self.rows_produced - if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url - if self.statement_type is not None: body['statement_type'] = self.statement_type.value - if self.status is not None: body['status'] = self.status.value - if self.user_id is not None: body['user_id'] = self.user_id - if self.user_name is not None: body['user_name'] = self.user_name - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.channel_used: + body["channel_used"] = self.channel_used.as_dict() + if self.client_application is not None: + body["client_application"] = self.client_application + if self.duration is not None: + body["duration"] = self.duration + if self.endpoint_id is not None: + body["endpoint_id"] = self.endpoint_id + if self.error_message is not None: + body["error_message"] = self.error_message + if self.executed_as_user_id is not None: + body["executed_as_user_id"] = self.executed_as_user_id + if self.executed_as_user_name is not None: + body["executed_as_user_name"] = self.executed_as_user_name + if self.execution_end_time_ms is not None: + body["execution_end_time_ms"] = self.execution_end_time_ms + if self.is_final is not None: + body["is_final"] = self.is_final + if self.lookup_key is not None: + body["lookup_key"] = self.lookup_key + if self.metrics: + body["metrics"] = self.metrics.as_dict() + if self.plans_state is not None: + body["plans_state"] = self.plans_state.value + if self.query_end_time_ms is not None: + body["query_end_time_ms"] = self.query_end_time_ms + if self.query_id is not None: + body["query_id"] = self.query_id + if self.query_source: + body["query_source"] = self.query_source.as_dict() + if 
self.query_start_time_ms is not None: + body["query_start_time_ms"] = self.query_start_time_ms + if self.query_text is not None: + body["query_text"] = self.query_text + if self.rows_produced is not None: + body["rows_produced"] = self.rows_produced + if self.spark_ui_url is not None: + body["spark_ui_url"] = self.spark_ui_url + if self.statement_type is not None: + body["statement_type"] = self.statement_type.value + if self.status is not None: + body["status"] = self.status.value + if self.user_id is not None: + body["user_id"] = self.user_id + if self.user_name is not None: + body["user_name"] = self.user_name + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the QueryInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.channel_used: body['channel_used'] = self.channel_used - if self.client_application is not None: body['client_application'] = self.client_application - if self.duration is not None: body['duration'] = self.duration - if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id - if self.error_message is not None: body['error_message'] = self.error_message - if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id - if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name - if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms - if self.is_final is not None: body['is_final'] = self.is_final - if self.lookup_key is not None: body['lookup_key'] = self.lookup_key - if self.metrics: body['metrics'] = self.metrics - if self.plans_state is not None: body['plans_state'] = self.plans_state - if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms - if self.query_id is not None: body['query_id'] = self.query_id - if self.query_source: body['query_source'] = self.query_source - if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms - if self.query_text is not None: body['query_text'] = self.query_text - if self.rows_produced is not None: body['rows_produced'] = self.rows_produced - if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url - if self.statement_type is not None: body['statement_type'] = self.statement_type - if self.status is not None: body['status'] = self.status - if self.user_id is not None: body['user_id'] = self.user_id - if self.user_name is not None: body['user_name'] = self.user_name - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.channel_used: + body["channel_used"] = self.channel_used + if self.client_application is not None: + body["client_application"] = self.client_application + if self.duration is not None: + body["duration"] = self.duration + if self.endpoint_id is not None: + body["endpoint_id"] = self.endpoint_id + if self.error_message is not None: + body["error_message"] = self.error_message + if self.executed_as_user_id is not None: + body["executed_as_user_id"] = self.executed_as_user_id + if self.executed_as_user_name is not None: + body["executed_as_user_name"] = self.executed_as_user_name + if self.execution_end_time_ms is not None: + body["execution_end_time_ms"] = self.execution_end_time_ms + if self.is_final is not None: + body["is_final"] = self.is_final + if self.lookup_key is not None: + body["lookup_key"] = self.lookup_key + if self.metrics: + 
body["metrics"] = self.metrics + if self.plans_state is not None: + body["plans_state"] = self.plans_state + if self.query_end_time_ms is not None: + body["query_end_time_ms"] = self.query_end_time_ms + if self.query_id is not None: + body["query_id"] = self.query_id + if self.query_source: + body["query_source"] = self.query_source + if self.query_start_time_ms is not None: + body["query_start_time_ms"] = self.query_start_time_ms + if self.query_text is not None: + body["query_text"] = self.query_text + if self.rows_produced is not None: + body["rows_produced"] = self.rows_produced + if self.spark_ui_url is not None: + body["spark_ui_url"] = self.spark_ui_url + if self.statement_type is not None: + body["statement_type"] = self.statement_type + if self.status is not None: + body["status"] = self.status + if self.user_id is not None: + body["user_id"] = self.user_id + if self.user_name is not None: + body["user_name"] = self.user_name + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryInfo: """Deserializes the QueryInfo from a dictionary.""" - return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo), client_application=d.get('client_application', None), duration=d.get('duration', None), endpoint_id=d.get('endpoint_id', None), error_message=d.get('error_message', None), executed_as_user_id=d.get('executed_as_user_id', None), executed_as_user_name=d.get('executed_as_user_name', None), execution_end_time_ms=d.get('execution_end_time_ms', None), is_final=d.get('is_final', None), lookup_key=d.get('lookup_key', None), metrics=_from_dict(d, 'metrics', QueryMetrics), plans_state=_enum(d, 'plans_state', PlansState), query_end_time_ms=d.get('query_end_time_ms', None), query_id=d.get('query_id', None), query_source=_from_dict(d, 'query_source', ExternalQuerySource), query_start_time_ms=d.get('query_start_time_ms', None), query_text=d.get('query_text', None), rows_produced=d.get('rows_produced', None), spark_ui_url=d.get('spark_ui_url', None), statement_type=_enum(d, 'statement_type', QueryStatementType), status=_enum(d, 'status', QueryStatus), user_id=d.get('user_id', None), user_name=d.get('user_name', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + channel_used=_from_dict(d, "channel_used", ChannelInfo), + client_application=d.get("client_application", None), + duration=d.get("duration", None), + endpoint_id=d.get("endpoint_id", None), + error_message=d.get("error_message", None), + executed_as_user_id=d.get("executed_as_user_id", None), + executed_as_user_name=d.get("executed_as_user_name", None), + execution_end_time_ms=d.get("execution_end_time_ms", None), + is_final=d.get("is_final", None), + lookup_key=d.get("lookup_key", None), + metrics=_from_dict(d, "metrics", QueryMetrics), + plans_state=_enum(d, "plans_state", PlansState), + query_end_time_ms=d.get("query_end_time_ms", None), + query_id=d.get("query_id", None), + query_source=_from_dict(d, "query_source", ExternalQuerySource), + query_start_time_ms=d.get("query_start_time_ms", None), + query_text=d.get("query_text", None), + rows_produced=d.get("rows_produced", None), + spark_ui_url=d.get("spark_ui_url", None), + statement_type=_enum(d, "statement_type", QueryStatementType), + status=_enum(d, "status", QueryStatus), + user_id=d.get("user_id", None), + user_name=d.get("user_name", None), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass class QueryList: count: Optional[int] = None """The total number of 
queries.""" - + page: Optional[int] = None """The page number that is currently displayed.""" - + page_size: Optional[int] = None """The number of queries per page.""" - + results: Optional[List[LegacyQuery]] = None """List of queries returned.""" - + def as_dict(self) -> dict: """Serializes the QueryList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.count is not None: body['count'] = self.count - if self.page is not None: body['page'] = self.page - if self.page_size is not None: body['page_size'] = self.page_size - if self.results: body['results'] = [v.as_dict() for v in self.results] + if self.count is not None: + body["count"] = self.count + if self.page is not None: + body["page"] = self.page + if self.page_size is not None: + body["page_size"] = self.page_size + if self.results: + body["results"] = [v.as_dict() for v in self.results] return body def as_shallow_dict(self) -> dict: """Serializes the QueryList into a shallow dictionary of its immediate attributes.""" body = {} - if self.count is not None: body['count'] = self.count - if self.page is not None: body['page'] = self.page - if self.page_size is not None: body['page_size'] = self.page_size - if self.results: body['results'] = self.results + if self.count is not None: + body["count"] = self.count + if self.page is not None: + body["page"] = self.page + if self.page_size is not None: + body["page_size"] = self.page_size + if self.results: + body["results"] = self.results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryList: """Deserializes the QueryList from a dictionary.""" - return cls(count=d.get('count', None), page=d.get('page', None), page_size=d.get('page_size', None), results=_repeated_dict(d, 'results', LegacyQuery)) - - + return cls( + count=d.get("count", None), + page=d.get("page", None), + page_size=d.get("page_size", None), + results=_repeated_dict(d, "results", LegacyQuery), + ) @dataclass class QueryMetrics: """A query metric that encapsulates a set of measurements for a single query. Metrics come from the driver and are stored in the history service database.""" - + compilation_time_ms: Optional[int] = None """Time spent loading metadata and optimizing the query, in milliseconds.""" - + execution_time_ms: Optional[int] = None """Time spent executing the query, in milliseconds.""" - + network_sent_bytes: Optional[int] = None """Total amount of data sent over the network between executor nodes during shuffle, in bytes.""" - + overloading_queue_start_timestamp: Optional[int] = None """Timestamp of when the query was enqueued waiting while the warehouse was at max load. This field is optional and will not appear if the query skipped the overloading queue.""" - + photon_total_time_ms: Optional[int] = None """Total execution time for all individual Photon query engine tasks in the query, in milliseconds.""" - + provisioning_queue_start_timestamp: Optional[int] = None """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the warehouse. 
This field is optional and will not appear if the query skipped the provisioning queue.""" - + pruned_bytes: Optional[int] = None """Total number of bytes in all tables not read due to pruning.""" - + pruned_files_count: Optional[int] = None """Total number of files from all tables not read due to pruning.""" - + query_compilation_start_timestamp: Optional[int] = None """Timestamp of when the underlying compute started compilation of the query.""" - + read_bytes: Optional[int] = None """Total size of data read by the query, in bytes.""" - + read_cache_bytes: Optional[int] = None """Size of persistent data read from the cache, in bytes.""" - + read_files_count: Optional[int] = None """Number of files read after pruning.""" - + read_partitions_count: Optional[int] = None """Number of partitions read after pruning.""" - + read_remote_bytes: Optional[int] = None """Size of persistent data read from cloud object storage on your cloud tenant, in bytes.""" - + result_fetch_time_ms: Optional[int] = None """Time spent fetching the query results after the execution finished, in milliseconds.""" - + result_from_cache: Optional[bool] = None """`true` if the query result was fetched from cache, `false` otherwise.""" - + rows_produced_count: Optional[int] = None """Total number of rows returned by the query.""" - + rows_read_count: Optional[int] = None """Total number of rows read by the query.""" - + spill_to_disk_bytes: Optional[int] = None """Size of data temporarily written to disk while executing the query, in bytes.""" - + task_time_over_time_range: Optional[TaskTimeOverRange] = None """Sum of task times completed in a range of wall clock time, approximated to a configurable number of points aggregated over all stages and jobs in the query (based on task_total_time_ms).""" - + task_total_time_ms: Optional[int] = None """Sum of execution time for all of the query’s tasks, in milliseconds.""" - + total_time_ms: Optional[int] = None """Total execution time of the query from the client’s point of view, in milliseconds.""" - + write_remote_bytes: Optional[int] = None """Size of persistent data written to cloud object storage in your cloud tenant, in bytes.""" - + def as_dict(self) -> dict: """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body.""" body = {} - if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms - if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms - if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes - if self.overloading_queue_start_timestamp is not None: body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp - if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms - if self.provisioning_queue_start_timestamp is not None: body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp - if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes - if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count - if self.query_compilation_start_timestamp is not None: body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp - if self.read_bytes is not None: body['read_bytes'] = self.read_bytes - if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes - if self.read_files_count is not None: body['read_files_count'] = self.read_files_count - if
self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count - if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes - if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms - if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache - if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count - if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count - if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes - if self.task_time_over_time_range: body['task_time_over_time_range'] = self.task_time_over_time_range.as_dict() - if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms - if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms - if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes + if self.compilation_time_ms is not None: + body["compilation_time_ms"] = self.compilation_time_ms + if self.execution_time_ms is not None: + body["execution_time_ms"] = self.execution_time_ms + if self.network_sent_bytes is not None: + body["network_sent_bytes"] = self.network_sent_bytes + if self.overloading_queue_start_timestamp is not None: + body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp + if self.photon_total_time_ms is not None: + body["photon_total_time_ms"] = self.photon_total_time_ms + if self.provisioning_queue_start_timestamp is not None: + body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp + if self.pruned_bytes is not None: + body["pruned_bytes"] = self.pruned_bytes + if self.pruned_files_count is not None: + body["pruned_files_count"] = self.pruned_files_count + if self.query_compilation_start_timestamp is not None: + body["query_compilation_start_timestamp"] = self.query_compilation_start_timestamp + if self.read_bytes is not None: + body["read_bytes"] = self.read_bytes + if self.read_cache_bytes is not None: + body["read_cache_bytes"] = self.read_cache_bytes + if self.read_files_count is not None: + body["read_files_count"] = self.read_files_count + if self.read_partitions_count is not None: + body["read_partitions_count"] = self.read_partitions_count + if self.read_remote_bytes is not None: + body["read_remote_bytes"] = self.read_remote_bytes + if self.result_fetch_time_ms is not None: + body["result_fetch_time_ms"] = self.result_fetch_time_ms + if self.result_from_cache is not None: + body["result_from_cache"] = self.result_from_cache + if self.rows_produced_count is not None: + body["rows_produced_count"] = self.rows_produced_count + if self.rows_read_count is not None: + body["rows_read_count"] = self.rows_read_count + if self.spill_to_disk_bytes is not None: + body["spill_to_disk_bytes"] = self.spill_to_disk_bytes + if self.task_time_over_time_range: + body["task_time_over_time_range"] = self.task_time_over_time_range.as_dict() + if self.task_total_time_ms is not None: + body["task_total_time_ms"] = self.task_total_time_ms + if self.total_time_ms is not None: + body["total_time_ms"] = self.total_time_ms + if self.write_remote_bytes is not None: + body["write_remote_bytes"] = self.write_remote_bytes return body def as_shallow_dict(self) -> dict: """Serializes the QueryMetrics into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms - if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms - if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes - if self.overloading_queue_start_timestamp is not None: body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp - if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms - if self.provisioning_queue_start_timestamp is not None: body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp - if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes - if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count - if self.query_compilation_start_timestamp is not None: body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp - if self.read_bytes is not None: body['read_bytes'] = self.read_bytes - if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes - if self.read_files_count is not None: body['read_files_count'] = self.read_files_count - if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count - if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes - if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms - if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache - if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count - if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count - if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes - if self.task_time_over_time_range: body['task_time_over_time_range'] = self.task_time_over_time_range - if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms - if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms - if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes + if self.compilation_time_ms is not None: + body["compilation_time_ms"] = self.compilation_time_ms + if self.execution_time_ms is not None: + body["execution_time_ms"] = self.execution_time_ms + if self.network_sent_bytes is not None: + body["network_sent_bytes"] = self.network_sent_bytes + if self.overloading_queue_start_timestamp is not None: + body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp + if self.photon_total_time_ms is not None: + body["photon_total_time_ms"] = self.photon_total_time_ms + if self.provisioning_queue_start_timestamp is not None: + body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp + if self.pruned_bytes is not None: + body["pruned_bytes"] = self.pruned_bytes + if self.pruned_files_count is not None: + body["pruned_files_count"] = self.pruned_files_count + if self.query_compilation_start_timestamp is not None: + body["query_compilation_start_timestamp"] = self.query_compilation_start_timestamp + if self.read_bytes is not None: + body["read_bytes"] = self.read_bytes + if self.read_cache_bytes is not None: + body["read_cache_bytes"] = self.read_cache_bytes + if self.read_files_count is not None: + body["read_files_count"] = self.read_files_count + if 
self.read_partitions_count is not None: + body["read_partitions_count"] = self.read_partitions_count + if self.read_remote_bytes is not None: + body["read_remote_bytes"] = self.read_remote_bytes + if self.result_fetch_time_ms is not None: + body["result_fetch_time_ms"] = self.result_fetch_time_ms + if self.result_from_cache is not None: + body["result_from_cache"] = self.result_from_cache + if self.rows_produced_count is not None: + body["rows_produced_count"] = self.rows_produced_count + if self.rows_read_count is not None: + body["rows_read_count"] = self.rows_read_count + if self.spill_to_disk_bytes is not None: + body["spill_to_disk_bytes"] = self.spill_to_disk_bytes + if self.task_time_over_time_range: + body["task_time_over_time_range"] = self.task_time_over_time_range + if self.task_total_time_ms is not None: + body["task_total_time_ms"] = self.task_total_time_ms + if self.total_time_ms is not None: + body["total_time_ms"] = self.total_time_ms + if self.write_remote_bytes is not None: + body["write_remote_bytes"] = self.write_remote_bytes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryMetrics: """Deserializes the QueryMetrics from a dictionary.""" - return cls(compilation_time_ms=d.get('compilation_time_ms', None), execution_time_ms=d.get('execution_time_ms', None), network_sent_bytes=d.get('network_sent_bytes', None), overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None), photon_total_time_ms=d.get('photon_total_time_ms', None), provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None), pruned_bytes=d.get('pruned_bytes', None), pruned_files_count=d.get('pruned_files_count', None), query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None), read_bytes=d.get('read_bytes', None), read_cache_bytes=d.get('read_cache_bytes', None), read_files_count=d.get('read_files_count', None), read_partitions_count=d.get('read_partitions_count', None), read_remote_bytes=d.get('read_remote_bytes', None), result_fetch_time_ms=d.get('result_fetch_time_ms', None), result_from_cache=d.get('result_from_cache', None), rows_produced_count=d.get('rows_produced_count', None), rows_read_count=d.get('rows_read_count', None), spill_to_disk_bytes=d.get('spill_to_disk_bytes', None), task_time_over_time_range=_from_dict(d, 'task_time_over_time_range', TaskTimeOverRange), task_total_time_ms=d.get('task_total_time_ms', None), total_time_ms=d.get('total_time_ms', None), write_remote_bytes=d.get('write_remote_bytes', None)) - - + return cls( + compilation_time_ms=d.get("compilation_time_ms", None), + execution_time_ms=d.get("execution_time_ms", None), + network_sent_bytes=d.get("network_sent_bytes", None), + overloading_queue_start_timestamp=d.get("overloading_queue_start_timestamp", None), + photon_total_time_ms=d.get("photon_total_time_ms", None), + provisioning_queue_start_timestamp=d.get("provisioning_queue_start_timestamp", None), + pruned_bytes=d.get("pruned_bytes", None), + pruned_files_count=d.get("pruned_files_count", None), + query_compilation_start_timestamp=d.get("query_compilation_start_timestamp", None), + read_bytes=d.get("read_bytes", None), + read_cache_bytes=d.get("read_cache_bytes", None), + read_files_count=d.get("read_files_count", None), + read_partitions_count=d.get("read_partitions_count", None), + read_remote_bytes=d.get("read_remote_bytes", None), + result_fetch_time_ms=d.get("result_fetch_time_ms", None), + result_from_cache=d.get("result_from_cache", None), + rows_produced_count=d.get("rows_produced_count", None), + rows_read_count=d.get("rows_read_count", None), + spill_to_disk_bytes=d.get("spill_to_disk_bytes", None), + task_time_over_time_range=_from_dict(d, "task_time_over_time_range", TaskTimeOverRange), + task_total_time_ms=d.get("task_total_time_ms", None), + total_time_ms=d.get("total_time_ms", None), + write_remote_bytes=d.get("write_remote_bytes", None), + )
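Reviewer note: a small sketch, not part of the patch, of consuming the optional queue timestamps documented above; each timestamp is absent when the query skipped that queue.

m = QueryMetrics.from_dict(
    {
        "overloading_queue_start_timestamp": 1700000000000,
        "query_compilation_start_timestamp": 1700000000500,
    }
)
# Guard for None before subtracting: either field may be omitted.
if m.overloading_queue_start_timestamp is not None and m.query_compilation_start_timestamp is not None:
    queued_ms = m.query_compilation_start_timestamp - m.overloading_queue_start_timestamp  # 500 ms at max load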
@dataclass class QueryOptions: catalog: Optional[str] = None """The name of the catalog to execute this query in.""" - + moved_to_trash_at: Optional[str] = None """The timestamp when this query was moved to trash. Only present when the `is_archived` property is `true`. Trashed items are deleted after thirty days.""" - + parameters: Optional[List[Parameter]] = None - + schema: Optional[str] = None """The name of the schema to execute this query in.""" - + def as_dict(self) -> dict: """Serializes the QueryOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog is not None: body['catalog'] = self.catalog - if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at - if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] - if self.schema is not None: body['schema'] = self.schema + if self.catalog is not None: + body["catalog"] = self.catalog + if self.moved_to_trash_at is not None: + body["moved_to_trash_at"] = self.moved_to_trash_at + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] + if self.schema is not None: + body["schema"] = self.schema return body def as_shallow_dict(self) -> dict: """Serializes the QueryOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog is not None: body['catalog'] = self.catalog - if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at - if self.parameters: body['parameters'] = self.parameters - if self.schema is not None: body['schema'] = self.schema + if self.catalog is not None: + body["catalog"] = self.catalog + if self.moved_to_trash_at is not None: + body["moved_to_trash_at"] = self.moved_to_trash_at + if self.parameters: + body["parameters"] = self.parameters + if self.schema is not None: + body["schema"] = self.schema return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryOptions: """Deserializes the QueryOptions from a dictionary.""" - return cls(catalog=d.get('catalog', None), moved_to_trash_at=d.get('moved_to_trash_at', None), parameters=_repeated_dict(d, 'parameters', Parameter), schema=d.get('schema', None)) - - + return cls( + catalog=d.get("catalog", None), + moved_to_trash_at=d.get("moved_to_trash_at", None), + parameters=_repeated_dict(d, "parameters", Parameter), + schema=d.get("schema", None), + ) @dataclass @@ -4959,60 +6310,83 @@ class QueryParameter: date_range_value: Optional[DateRangeValue] = None """Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or `date_range_value`.""" - + date_value: Optional[DateValue] = None """Date query parameter value.
Can only specify one of `dynamic_date_value` or `date_value`.""" - + enum_value: Optional[EnumValue] = None """Dropdown query parameter value.""" - + name: Optional[str] = None """Literal parameter marker that appears between double curly braces in the query text.""" - + numeric_value: Optional[NumericValue] = None """Numeric query parameter value.""" - + query_backed_value: Optional[QueryBackedValue] = None """Query-based dropdown query parameter value.""" - + text_value: Optional[TextValue] = None """Text query parameter value.""" - + title: Optional[str] = None """Text displayed in the user-facing parameter widget in the UI.""" - + def as_dict(self) -> dict: """Serializes the QueryParameter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() - if self.date_value: body['date_value'] = self.date_value.as_dict() - if self.enum_value: body['enum_value'] = self.enum_value.as_dict() - if self.name is not None: body['name'] = self.name - if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict() - if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict() - if self.text_value: body['text_value'] = self.text_value.as_dict() - if self.title is not None: body['title'] = self.title + if self.date_range_value: + body["date_range_value"] = self.date_range_value.as_dict() + if self.date_value: + body["date_value"] = self.date_value.as_dict() + if self.enum_value: + body["enum_value"] = self.enum_value.as_dict() + if self.name is not None: + body["name"] = self.name + if self.numeric_value: + body["numeric_value"] = self.numeric_value.as_dict() + if self.query_backed_value: + body["query_backed_value"] = self.query_backed_value.as_dict() + if self.text_value: + body["text_value"] = self.text_value.as_dict() + if self.title is not None: + body["title"] = self.title return body def as_shallow_dict(self) -> dict: """Serializes the QueryParameter into a shallow dictionary of its immediate attributes.""" body = {} - if self.date_range_value: body['date_range_value'] = self.date_range_value - if self.date_value: body['date_value'] = self.date_value - if self.enum_value: body['enum_value'] = self.enum_value - if self.name is not None: body['name'] = self.name - if self.numeric_value: body['numeric_value'] = self.numeric_value - if self.query_backed_value: body['query_backed_value'] = self.query_backed_value - if self.text_value: body['text_value'] = self.text_value - if self.title is not None: body['title'] = self.title + if self.date_range_value: + body["date_range_value"] = self.date_range_value + if self.date_value: + body["date_value"] = self.date_value + if self.enum_value: + body["enum_value"] = self.enum_value + if self.name is not None: + body["name"] = self.name + if self.numeric_value: + body["numeric_value"] = self.numeric_value + if self.query_backed_value: + body["query_backed_value"] = self.query_backed_value + if self.text_value: + body["text_value"] = self.text_value + if self.title is not None: + body["title"] = self.title return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryParameter: """Deserializes the QueryParameter from a dictionary.""" - return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue), date_value=_from_dict(d, 'date_value', DateValue), enum_value=_from_dict(d, 'enum_value', EnumValue), name=d.get('name', None), numeric_value=_from_dict(d, 'numeric_value', NumericValue), query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue), text_value=_from_dict(d, 'text_value', TextValue), title=d.get('title', None)) - - + return cls( + date_range_value=_from_dict(d, "date_range_value", DateRangeValue), + date_value=_from_dict(d, "date_value", DateValue), + enum_value=_from_dict(d, "enum_value", EnumValue), + name=d.get("name", None), + numeric_value=_from_dict(d, "numeric_value", NumericValue), + query_backed_value=_from_dict(d, "query_backed_value", QueryBackedValue), + text_value=_from_dict(d, "text_value", TextValue), + title=d.get("title", None), + )
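Reviewer note: an illustrative sketch, not part of the patch, of building a text parameter. It assumes TextValue is the companion dataclass in this module carrying a single value field.

param = QueryParameter(name="region", title="Region", text_value=TextValue(value="us-west-2"))
body = param.as_dict()  # {'name': 'region', 'text_value': {'value': 'us-west-2'}, 'title': 'Region'}
assert QueryParameter.from_dict(body).text_value.value == "us-west-2"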
@dataclass @@ -5022,135 +6396,158 @@ class QueryPostContent: warehouse ID. [Learn more] [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + name: Optional[str] = None """The title of this query that appears in list views, widget headings, and on the query page.""" - + options: Optional[Any] = None """Exclusively used for storing a list of parameter definitions. A parameter is an object with `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It can be overridden at runtime.""" - + parent: Optional[str] = None """The identifier of the workspace folder containing the object.""" - + query: Optional[str] = None """The text of the query to be run.""" - + run_as_role: Optional[RunAsRole] = None """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - + tags: Optional[List[str]] = None - + def as_dict(self) -> dict: """Serializes the QueryPostContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data_source_id is not None: body['data_source_id'] = self.data_source_id - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.parent is not None: body['parent'] = self.parent - if self.query is not None: body['query'] = self.query - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value - if self.tags: body['tags'] = [v for v in self.tags] + if self.data_source_id is not None: + body["data_source_id"] = self.data_source_id + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.parent is not None: + body["parent"] = self.parent + if self.query is not None: + body["query"] = self.query + if self.run_as_role is not None: + body["run_as_role"] = self.run_as_role.value + if self.tags: + body["tags"] = [v for v in self.tags] return body def as_shallow_dict(self) -> dict: """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.data_source_id is not
None: + body["data_source_id"] = self.data_source_id + if self.description is not None: + body["description"] = self.description + if self.name is not None: + body["name"] = self.name + if self.options: + body["options"] = self.options + if self.parent is not None: + body["parent"] = self.parent + if self.query is not None: + body["query"] = self.query + if self.run_as_role is not None: + body["run_as_role"] = self.run_as_role + if self.tags: + body["tags"] = self.tags return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryPostContent: """Deserializes the QueryPostContent from a dictionary.""" - return cls(data_source_id=d.get('data_source_id', None), description=d.get('description', None), name=d.get('name', None), options=d.get('options', None), parent=d.get('parent', None), query=d.get('query', None), run_as_role=_enum(d, 'run_as_role', RunAsRole), tags=d.get('tags', None)) - - + return cls( + data_source_id=d.get("data_source_id", None), + description=d.get("description", None), + name=d.get("name", None), + options=d.get("options", None), + parent=d.get("parent", None), + query=d.get("query", None), + run_as_role=_enum(d, "run_as_role", RunAsRole), + tags=d.get("tags", None), + ) class QueryStatementType(Enum): - - - ALTER = 'ALTER' - ANALYZE = 'ANALYZE' - COPY = 'COPY' - CREATE = 'CREATE' - DELETE = 'DELETE' - DESCRIBE = 'DESCRIBE' - DROP = 'DROP' - EXPLAIN = 'EXPLAIN' - GRANT = 'GRANT' - INSERT = 'INSERT' - MERGE = 'MERGE' - OPTIMIZE = 'OPTIMIZE' - OTHER = 'OTHER' - REFRESH = 'REFRESH' - REPLACE = 'REPLACE' - REVOKE = 'REVOKE' - SELECT = 'SELECT' - SET = 'SET' - SHOW = 'SHOW' - TRUNCATE = 'TRUNCATE' - UPDATE = 'UPDATE' - USE = 'USE' + + ALTER = "ALTER" + ANALYZE = "ANALYZE" + COPY = "COPY" + CREATE = "CREATE" + DELETE = "DELETE" + DESCRIBE = "DESCRIBE" + DROP = "DROP" + EXPLAIN = "EXPLAIN" + GRANT = "GRANT" + INSERT = "INSERT" + MERGE = "MERGE" + OPTIMIZE = "OPTIMIZE" + OTHER = "OTHER" + REFRESH = "REFRESH" + REPLACE = "REPLACE" + REVOKE = "REVOKE" + SELECT = "SELECT" + SET = "SET" + SHOW = "SHOW" + TRUNCATE = "TRUNCATE" + UPDATE = "UPDATE" + USE = "USE" + class QueryStatus(Enum): """Statuses which are also used by OperationStatus in runtime""" - - CANCELED = 'CANCELED' - COMPILED = 'COMPILED' - COMPILING = 'COMPILING' - FAILED = 'FAILED' - FINISHED = 'FINISHED' - QUEUED = 'QUEUED' - RUNNING = 'RUNNING' - STARTED = 'STARTED' + + CANCELED = "CANCELED" + COMPILED = "COMPILED" + COMPILING = "COMPILING" + FAILED = "FAILED" + FINISHED = "FINISHED" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + STARTED = "STARTED" + @dataclass class RepeatedEndpointConfPairs: config_pair: Optional[List[EndpointConfPair]] = None """Deprecated: Use configuration_pairs""" - + configuration_pairs: Optional[List[EndpointConfPair]] = None - + def as_dict(self) -> dict: """Serializes the RepeatedEndpointConfPairs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.config_pair: body['config_pair'] = [v.as_dict() for v in self.config_pair] - if self.configuration_pairs: body['configuration_pairs'] = [v.as_dict() for v in self.configuration_pairs] + if self.config_pair: + body["config_pair"] = [v.as_dict() for v in self.config_pair] + if self.configuration_pairs: + body["configuration_pairs"] = [v.as_dict() for v in self.configuration_pairs] return body def as_shallow_dict(self) -> dict: """Serializes the RepeatedEndpointConfPairs into a shallow dictionary of its immediate attributes.""" body = {} - if self.config_pair: body['config_pair'] = self.config_pair - if 
self.configuration_pairs: body['configuration_pairs'] = self.configuration_pairs + if self.config_pair: + body["config_pair"] = self.config_pair + if self.configuration_pairs: + body["configuration_pairs"] = self.configuration_pairs return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepeatedEndpointConfPairs: """Deserializes the RepeatedEndpointConfPairs from a dictionary.""" - return cls(config_pair=_repeated_dict(d, 'config_pair', EndpointConfPair), configuration_pairs=_repeated_dict(d, 'configuration_pairs', EndpointConfPair)) - - - - - - - - + return cls( + config_pair=_repeated_dict(d, "config_pair", EndpointConfPair), + configuration_pairs=_repeated_dict(d, "configuration_pairs", EndpointConfPair), + ) @dataclass @@ -5169,8 +6566,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> RestoreResponse: """Deserializes the RestoreResponse from a dictionary.""" return cls() - - @dataclass @@ -5178,365 +6573,456 @@ class ResultData: byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" - + chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" - + data_array: Optional[List[List[str]]] = None """The `JSON_ARRAY` format is an array of arrays of values, where each non-null value is formatted as a string. Null values are encoded as JSON `null`.""" - + external_links: Optional[List[ExternalLink]] = None - + next_chunk_index: Optional[int] = None """When fetching, provides the `chunk_index` for the _next_ chunk. If absent, indicates there are no more chunks. The next chunk can be fetched with a :method:statementexecution/getStatementResultChunkN request.""" - + next_chunk_internal_link: Optional[str] = None """When fetching, provides a link to fetch the _next_ chunk. If absent, indicates there are no more chunks. This link is an absolute `path` to be joined with your `$DATABRICKS_HOST`, and should be treated as an opaque link. 
This is an alternative to using `next_chunk_index`.""" - + row_count: Optional[int] = None """The number of rows within the result chunk.""" - + row_offset: Optional[int] = None """The starting row offset within the result set.""" - + def as_dict(self) -> dict: """Serializes the ResultData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.byte_count is not None: body['byte_count'] = self.byte_count - if self.chunk_index is not None: body['chunk_index'] = self.chunk_index - if self.data_array: body['data_array'] = [v for v in self.data_array] - if self.external_links: body['external_links'] = [v.as_dict() for v in self.external_links] - if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index - if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link - if self.row_count is not None: body['row_count'] = self.row_count - if self.row_offset is not None: body['row_offset'] = self.row_offset + if self.byte_count is not None: + body["byte_count"] = self.byte_count + if self.chunk_index is not None: + body["chunk_index"] = self.chunk_index + if self.data_array: + body["data_array"] = [v for v in self.data_array] + if self.external_links: + body["external_links"] = [v.as_dict() for v in self.external_links] + if self.next_chunk_index is not None: + body["next_chunk_index"] = self.next_chunk_index + if self.next_chunk_internal_link is not None: + body["next_chunk_internal_link"] = self.next_chunk_internal_link + if self.row_count is not None: + body["row_count"] = self.row_count + if self.row_offset is not None: + body["row_offset"] = self.row_offset return body def as_shallow_dict(self) -> dict: """Serializes the ResultData into a shallow dictionary of its immediate attributes.""" body = {} - if self.byte_count is not None: body['byte_count'] = self.byte_count - if self.chunk_index is not None: body['chunk_index'] = self.chunk_index - if self.data_array: body['data_array'] = self.data_array - if self.external_links: body['external_links'] = self.external_links - if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index - if self.next_chunk_internal_link is not None: body['next_chunk_internal_link'] = self.next_chunk_internal_link - if self.row_count is not None: body['row_count'] = self.row_count - if self.row_offset is not None: body['row_offset'] = self.row_offset + if self.byte_count is not None: + body["byte_count"] = self.byte_count + if self.chunk_index is not None: + body["chunk_index"] = self.chunk_index + if self.data_array: + body["data_array"] = self.data_array + if self.external_links: + body["external_links"] = self.external_links + if self.next_chunk_index is not None: + body["next_chunk_index"] = self.next_chunk_index + if self.next_chunk_internal_link is not None: + body["next_chunk_internal_link"] = self.next_chunk_internal_link + if self.row_count is not None: + body["row_count"] = self.row_count + if self.row_offset is not None: + body["row_offset"] = self.row_offset return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultData: """Deserializes the ResultData from a dictionary.""" - return cls(byte_count=d.get('byte_count', None), chunk_index=d.get('chunk_index', None), data_array=d.get('data_array', None), external_links=_repeated_dict(d, 'external_links', ExternalLink), next_chunk_index=d.get('next_chunk_index', None), next_chunk_internal_link=d.get('next_chunk_internal_link', None), row_count=d.get('row_count', None), row_offset=d.get('row_offset', None)) - - + return cls( + byte_count=d.get("byte_count", None), + chunk_index=d.get("chunk_index", None), + data_array=d.get("data_array", None), + external_links=_repeated_dict(d, "external_links", ExternalLink), + next_chunk_index=d.get("next_chunk_index", None), + next_chunk_internal_link=d.get("next_chunk_internal_link", None), + row_count=d.get("row_count", None), + row_offset=d.get("row_offset", None), + )
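Reviewer note: a sketch, not part of the patch, of the chunk-walking pattern the next_chunk_index docstring describes; fetch_chunk stands in for a hypothetical wrapper around :method:statementexecution/getStatementResultChunkN.

def iter_rows(first: ResultData, fetch_chunk):
    # Follow next_chunk_index until it is absent, which marks the final chunk.
    chunk = first
    while True:
        for row in chunk.data_array or []:
            yield row
        if chunk.next_chunk_index is None:
            return
        chunk = fetch_chunk(chunk.next_chunk_index)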
@dataclass class ResultManifest: """The result manifest provides schema and metadata for the result set.""" - + chunks: Optional[List[BaseChunkInfo]] = None """Array of result set chunk metadata.""" - + format: Optional[Format] = None - + schema: Optional[ResultSchema] = None """The schema is an ordered list of column descriptions.""" - + total_byte_count: Optional[int] = None """The total number of bytes in the result set. This field is not available when using `INLINE` disposition.""" - + total_chunk_count: Optional[int] = None """The total number of chunks that the result set has been divided into.""" - + total_row_count: Optional[int] = None """The total number of rows in the result set.""" - + truncated: Optional[bool] = None """Indicates whether the result is truncated due to `row_limit` or `byte_limit`.""" - + def as_dict(self) -> dict: """Serializes the ResultManifest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.chunks: body['chunks'] = [v.as_dict() for v in self.chunks] - if self.format is not None: body['format'] = self.format.value - if self.schema: body['schema'] = self.schema.as_dict() - if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count - if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count - if self.total_row_count is not None: body['total_row_count'] = self.total_row_count - if self.truncated is not None: body['truncated'] = self.truncated + if self.chunks: + body["chunks"] = [v.as_dict() for v in self.chunks] + if self.format is not None: + body["format"] = self.format.value + if self.schema: + body["schema"] = self.schema.as_dict() + if self.total_byte_count is not None: + body["total_byte_count"] = self.total_byte_count + if self.total_chunk_count is not None: + body["total_chunk_count"] = self.total_chunk_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count + if self.truncated is not None: + body["truncated"] = self.truncated return body def as_shallow_dict(self) -> dict: """Serializes the ResultManifest into a shallow dictionary of its immediate attributes.""" body = {} - if self.chunks: body['chunks'] = self.chunks - if self.format is not None: body['format'] = self.format - if self.schema: body['schema'] = self.schema - if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count - if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count - if self.total_row_count is not None: body['total_row_count'] = self.total_row_count - if self.truncated is not None: body['truncated'] = self.truncated + if self.chunks: + body["chunks"] = self.chunks + if self.format is not None: + body["format"] = self.format + if self.schema: + body["schema"] = self.schema + if self.total_byte_count is not None: + body["total_byte_count"] = self.total_byte_count + if self.total_chunk_count is not None: + body["total_chunk_count"] = self.total_chunk_count + if self.total_row_count is not None: + body["total_row_count"] = self.total_row_count + if
self.truncated is not None: + body["truncated"] = self.truncated return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultManifest: """Deserializes the ResultManifest from a dictionary.""" - return cls(chunks=_repeated_dict(d, 'chunks', BaseChunkInfo), format=_enum(d, 'format', Format), schema=_from_dict(d, 'schema', ResultSchema), total_byte_count=d.get('total_byte_count', None), total_chunk_count=d.get('total_chunk_count', None), total_row_count=d.get('total_row_count', None), truncated=d.get('truncated', None)) - - + return cls( + chunks=_repeated_dict(d, "chunks", BaseChunkInfo), + format=_enum(d, "format", Format), + schema=_from_dict(d, "schema", ResultSchema), + total_byte_count=d.get("total_byte_count", None), + total_chunk_count=d.get("total_chunk_count", None), + total_row_count=d.get("total_row_count", None), + truncated=d.get("truncated", None), + ) @dataclass class ResultSchema: """The schema is an ordered list of column descriptions.""" - + column_count: Optional[int] = None - + columns: Optional[List[ColumnInfo]] = None - + def as_dict(self) -> dict: """Serializes the ResultSchema into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column_count is not None: body['column_count'] = self.column_count - if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + if self.column_count is not None: + body["column_count"] = self.column_count + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the ResultSchema into a shallow dictionary of its immediate attributes.""" body = {} - if self.column_count is not None: body['column_count'] = self.column_count - if self.columns: body['columns'] = self.columns + if self.column_count is not None: + body["column_count"] = self.column_count + if self.columns: + body["columns"] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultSchema: """Deserializes the ResultSchema from a dictionary.""" - return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo)) - - + return cls(column_count=d.get("column_count", None), columns=_repeated_dict(d, "columns", ColumnInfo)) class RunAsMode(Enum): - - - OWNER = 'OWNER' - VIEWER = 'VIEWER' + + OWNER = "OWNER" + VIEWER = "VIEWER" + class RunAsRole(Enum): """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - OWNER = 'owner' - VIEWER = 'viewer' + + OWNER = "owner" + VIEWER = "viewer" + class SchedulePauseStatus(Enum): - - - PAUSED = 'PAUSED' - UNPAUSED = 'UNPAUSED' + + PAUSED = "PAUSED" + UNPAUSED = "UNPAUSED" + @dataclass class ServiceError: error_code: Optional[ServiceErrorCode] = None - + message: Optional[str] = None """A brief summary of the error condition.""" - + def as_dict(self) -> dict: """Serializes the ServiceError into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error_code is not None: body['error_code'] = self.error_code.value - if self.message is not None: body['message'] = self.message + if self.error_code is not None: + body["error_code"] = self.error_code.value + if self.message is not None: + body["message"] = self.message return body def as_shallow_dict(self) -> dict: """Serializes the ServiceError into a shallow dictionary of its immediate attributes.""" body = {} - if self.error_code is not None: body['error_code'] = self.error_code - if self.message is not None: body['message'] = self.message + if self.error_code is not None: + body["error_code"] = self.error_code + if self.message is not None: + body["message"] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ServiceError: """Deserializes the ServiceError from a dictionary.""" - return cls(error_code=_enum(d, 'error_code', ServiceErrorCode), message=d.get('message', None)) - - + return cls(error_code=_enum(d, "error_code", ServiceErrorCode), message=d.get("message", None)) class ServiceErrorCode(Enum): - - - ABORTED = 'ABORTED' - ALREADY_EXISTS = 'ALREADY_EXISTS' - BAD_REQUEST = 'BAD_REQUEST' - CANCELLED = 'CANCELLED' - DEADLINE_EXCEEDED = 'DEADLINE_EXCEEDED' - INTERNAL_ERROR = 'INTERNAL_ERROR' - IO_ERROR = 'IO_ERROR' - NOT_FOUND = 'NOT_FOUND' - RESOURCE_EXHAUSTED = 'RESOURCE_EXHAUSTED' - SERVICE_UNDER_MAINTENANCE = 'SERVICE_UNDER_MAINTENANCE' - TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE' - UNAUTHENTICATED = 'UNAUTHENTICATED' - UNKNOWN = 'UNKNOWN' - WORKSPACE_TEMPORARILY_UNAVAILABLE = 'WORKSPACE_TEMPORARILY_UNAVAILABLE' + + ABORTED = "ABORTED" + ALREADY_EXISTS = "ALREADY_EXISTS" + BAD_REQUEST = "BAD_REQUEST" + CANCELLED = "CANCELLED" + DEADLINE_EXCEEDED = "DEADLINE_EXCEEDED" + INTERNAL_ERROR = "INTERNAL_ERROR" + IO_ERROR = "IO_ERROR" + NOT_FOUND = "NOT_FOUND" + RESOURCE_EXHAUSTED = "RESOURCE_EXHAUSTED" + SERVICE_UNDER_MAINTENANCE = "SERVICE_UNDER_MAINTENANCE" + TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" + UNAUTHENTICATED = "UNAUTHENTICATED" + UNKNOWN = "UNKNOWN" + WORKSPACE_TEMPORARILY_UNAVAILABLE = "WORKSPACE_TEMPORARILY_UNAVAILABLE" + @dataclass class SetRequest: """Set object ACL""" - + access_control_list: Optional[List[AccessControl]] = None - + object_id: Optional[str] = None """Object ID. 
The ACL for the object with this UUID is overwritten by this request's POST content.""" - + object_type: Optional[ObjectTypePlural] = None """The type of object permission to set.""" - + def as_dict(self) -> dict: """Serializes the SetRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['objectId'] = self.object_id - if self.object_type is not None: body['objectType'] = self.object_type.value + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["objectId"] = self.object_id + if self.object_type is not None: + body["objectType"] = self.object_type.value return body def as_shallow_dict(self) -> dict: """Serializes the SetRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['objectId'] = self.object_id - if self.object_type is not None: body['objectType'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["objectId"] = self.object_id + if self.object_type is not None: + body["objectType"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetRequest: """Deserializes the SetRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl), object_id=d.get('objectId', None), object_type=_enum(d, 'objectType', ObjectTypePlural)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", AccessControl), + object_id=d.get("objectId", None), + object_type=_enum(d, "objectType", ObjectTypePlural), + ) @dataclass class SetResponse: access_control_list: Optional[List[AccessControl]] = None - + object_id: Optional[str] = None """An object's type and UUID, separated by a forward slash (/) character.""" - + object_type: Optional[ObjectType] = None """A singular noun object type.""" - + def as_dict(self) -> dict: """Serializes the SetResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type.value + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type.value return body def as_shallow_dict(self) -> dict: """Serializes the SetResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> 
SetResponse: """Deserializes the SetResponse from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', AccessControl), object_id=d.get('object_id', None), object_type=_enum(d, 'object_type', ObjectType)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", AccessControl), + object_id=d.get("object_id", None), + object_type=_enum(d, "object_type", ObjectType), + ) @dataclass class SetWorkspaceWarehouseConfigRequest: channel: Optional[Channel] = None """Optional: Channel selection details""" - + config_param: Optional[RepeatedEndpointConfPairs] = None """Deprecated: Use sql_configuration_parameters""" - + data_access_config: Optional[List[EndpointConfPair]] = None """Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K""" - + enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to be converted to another type. Used by frontend to save specific type availability in the warehouse create and edit form UI.""" - + global_param: Optional[RepeatedEndpointConfPairs] = None """Deprecated: Use sql_configuration_parameters""" - + google_service_account: Optional[str] = None """GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage""" - + instance_profile_arn: Optional[str] = None """AWS Only: Instance profile used to pass IAM role to the cluster""" - + security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None """Security policy for warehouses""" - + sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None """SQL configuration parameters""" - + def as_dict(self) -> dict: """Serializes the SetWorkspaceWarehouseConfigRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.channel: body['channel'] = self.channel.as_dict() - if self.config_param: body['config_param'] = self.config_param.as_dict() - if self.data_access_config: body['data_access_config'] = [v.as_dict() for v in self.data_access_config] - if self.enabled_warehouse_types: body['enabled_warehouse_types'] = [v.as_dict() for v in self.enabled_warehouse_types] - if self.global_param: body['global_param'] = self.global_param.as_dict() - if self.google_service_account is not None: body['google_service_account'] = self.google_service_account - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.security_policy is not None: body['security_policy'] = self.security_policy.value - if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict() + if self.channel: + body["channel"] = self.channel.as_dict() + if self.config_param: + body["config_param"] = self.config_param.as_dict() + if self.data_access_config: + body["data_access_config"] = [v.as_dict() for v in self.data_access_config] + if self.enabled_warehouse_types: + body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types] + if self.global_param: + body["global_param"] = self.global_param.as_dict() + if self.google_service_account is not None: + body["google_service_account"] = self.google_service_account + if self.instance_profile_arn is not None: 
+ body["instance_profile_arn"] = self.instance_profile_arn + if self.security_policy is not None: + body["security_policy"] = self.security_policy.value + if self.sql_configuration_parameters: + body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.channel: body['channel'] = self.channel - if self.config_param: body['config_param'] = self.config_param - if self.data_access_config: body['data_access_config'] = self.data_access_config - if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types - if self.global_param: body['global_param'] = self.global_param - if self.google_service_account is not None: body['google_service_account'] = self.google_service_account - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.security_policy is not None: body['security_policy'] = self.security_policy - if self.sql_configuration_parameters: body['sql_configuration_parameters'] = self.sql_configuration_parameters + if self.channel: + body["channel"] = self.channel + if self.config_param: + body["config_param"] = self.config_param + if self.data_access_config: + body["data_access_config"] = self.data_access_config + if self.enabled_warehouse_types: + body["enabled_warehouse_types"] = self.enabled_warehouse_types + if self.global_param: + body["global_param"] = self.global_param + if self.google_service_account is not None: + body["google_service_account"] = self.google_service_account + if self.instance_profile_arn is not None: + body["instance_profile_arn"] = self.instance_profile_arn + if self.security_policy is not None: + body["security_policy"] = self.security_policy + if self.sql_configuration_parameters: + body["sql_configuration_parameters"] = self.sql_configuration_parameters return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigRequest: """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary.""" - return cls(channel=_from_dict(d, 'channel', Channel), config_param=_from_dict(d, 'config_param', RepeatedEndpointConfPairs), data_access_config=_repeated_dict(d, 'data_access_config', EndpointConfPair), enabled_warehouse_types=_repeated_dict(d, 'enabled_warehouse_types', WarehouseTypePair), global_param=_from_dict(d, 'global_param', RepeatedEndpointConfPairs), google_service_account=d.get('google_service_account', None), instance_profile_arn=d.get('instance_profile_arn', None), security_policy=_enum(d, 'security_policy', SetWorkspaceWarehouseConfigRequestSecurityPolicy), sql_configuration_parameters=_from_dict(d, 'sql_configuration_parameters', RepeatedEndpointConfPairs)) - - + return cls( + channel=_from_dict(d, "channel", Channel), + config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs), + data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair), + enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", WarehouseTypePair), + global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs), + google_service_account=d.get("google_service_account", None), + instance_profile_arn=d.get("instance_profile_arn", None), + security_policy=_enum(d, "security_policy", SetWorkspaceWarehouseConfigRequestSecurityPolicy), + sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", 
RepeatedEndpointConfPairs), + ) class SetWorkspaceWarehouseConfigRequestSecurityPolicy(Enum): """Security policy for warehouses""" - - DATA_ACCESS_CONTROL = 'DATA_ACCESS_CONTROL' - NONE = 'NONE' - PASSTHROUGH = 'PASSTHROUGH' + + DATA_ACCESS_CONTROL = "DATA_ACCESS_CONTROL" + NONE = "NONE" + PASSTHROUGH = "PASSTHROUGH" + @dataclass class SetWorkspaceWarehouseConfigResponse: @@ -5554,18 +7040,14 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigResponse: """Deserializes the SetWorkspaceWarehouseConfigResponse from a dictionary.""" return cls() - - class SpotInstancePolicy(Enum): """Configuration for whether the warehouse should use spot instances.""" - - COST_OPTIMIZED = 'COST_OPTIMIZED' - POLICY_UNSPECIFIED = 'POLICY_UNSPECIFIED' - RELIABILITY_OPTIMIZED = 'RELIABILITY_OPTIMIZED' - + COST_OPTIMIZED = "COST_OPTIMIZED" + POLICY_UNSPECIFIED = "POLICY_UNSPECIFIED" + RELIABILITY_OPTIMIZED = "RELIABILITY_OPTIMIZED" @dataclass @@ -5584,25 +7066,24 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> StartWarehouseResponse: """Deserializes the StartWarehouseResponse from a dictionary.""" return cls() - - class State(Enum): """State of the warehouse""" - - DELETED = 'DELETED' - DELETING = 'DELETING' - RUNNING = 'RUNNING' - STARTING = 'STARTING' - STOPPED = 'STOPPED' - STOPPING = 'STOPPING' + + DELETED = "DELETED" + DELETING = "DELETING" + RUNNING = "RUNNING" + STARTING = "STARTING" + STOPPED = "STOPPED" + STOPPING = "STOPPING" + @dataclass class StatementParameterListItem: name: str """The name of a parameter marker to be substituted in the statement.""" - + type: Optional[str] = None """The data type, given as a string. For example: `INT`, `STRING`, `DECIMAL(10,2)`. If no type is given, the type is assumed to be `STRING`. Complex types, such as `ARRAY`, `MAP`, and `STRUCT` @@ -5610,72 +7091,87 @@ class StatementParameterListItem: reference. [Data types]: https://docs.databricks.com/sql/language-manual/functions/cast.html""" - + value: Optional[str] = None """The value to substitute, represented as a string.
If omitted, the value is interpreted as NULL.""" - + def as_dict(self) -> dict: """Serializes the StatementParameterListItem into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name - if self.type is not None: body['type'] = self.type - if self.value is not None: body['value'] = self.value + if self.name is not None: + body["name"] = self.name + if self.type is not None: + body["type"] = self.type + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the StatementParameterListItem into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name - if self.type is not None: body['type'] = self.type - if self.value is not None: body['value'] = self.value + if self.name is not None: + body["name"] = self.name + if self.type is not None: + body["type"] = self.type + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StatementParameterListItem: """Deserializes the StatementParameterListItem from a dictionary.""" - return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None)) - - + return cls(name=d.get("name", None), type=d.get("type", None), value=d.get("value", None)) @dataclass class StatementResponse: manifest: Optional[ResultManifest] = None """The result manifest provides schema and metadata for the result set.""" - + result: Optional[ResultData] = None - + statement_id: Optional[str] = None """The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls.""" - + status: Optional[StatementStatus] = None """The status response includes execution state and if relevant, error information.""" - + def as_dict(self) -> dict: """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.manifest: body['manifest'] = self.manifest.as_dict() - if self.result: body['result'] = self.result.as_dict() - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.status: body['status'] = self.status.as_dict() + if self.manifest: + body["manifest"] = self.manifest.as_dict() + if self.result: + body["result"] = self.result.as_dict() + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.status: + body["status"] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the StatementResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.manifest: body['manifest'] = self.manifest - if self.result: body['result'] = self.result - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.status: body['status'] = self.status + if self.manifest: + body["manifest"] = self.manifest + if self.result: + body["result"] = self.result + if self.statement_id is not None: + body["statement_id"] = self.statement_id + if self.status: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StatementResponse: """Deserializes the StatementResponse from a dictionary.""" - return cls(manifest=_from_dict(d, 'manifest', ResultManifest), result=_from_dict(d, 'result', ResultData), statement_id=d.get('statement_id', None), status=_from_dict(d, 'status', StatementStatus)) - - + return cls( + manifest=_from_dict(d, "manifest", ResultManifest), + 
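+            # statement_id is the required reference for all subsequent calls;
+            # status carries execution state and, if relevant, error information.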
result=_from_dict(d, "result", ResultData), + statement_id=d.get("statement_id", None), + status=_from_dict(d, "status", StatementStatus), + ) class StatementState(Enum): @@ -5684,58 +7180,59 @@ class StatementState(Enum): failed; reason for failure described in accompanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch""" - - CANCELED = 'CANCELED' - CLOSED = 'CLOSED' - FAILED = 'FAILED' - PENDING = 'PENDING' - RUNNING = 'RUNNING' - SUCCEEDED = 'SUCCEEDED' + + CANCELED = "CANCELED" + CLOSED = "CLOSED" + FAILED = "FAILED" + PENDING = "PENDING" + RUNNING = "RUNNING" + SUCCEEDED = "SUCCEEDED" + @dataclass class StatementStatus: """The status response includes execution state and if relevant, error information.""" - + error: Optional[ServiceError] = None - + state: Optional[StatementState] = None """Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution failed; reason for failure described in accompanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch""" - + def as_dict(self) -> dict: """Serializes the StatementStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.error: body['error'] = self.error.as_dict() - if self.state is not None: body['state'] = self.state.value + if self.error: + body["error"] = self.error.as_dict() + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the StatementStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.error: body['error'] = self.error - if self.state is not None: body['state'] = self.state + if self.error: + body["error"] = self.error + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> StatementStatus: """Deserializes the StatementStatus from a dictionary.""" - return cls(error=_from_dict(d, 'error', ServiceError), state=_enum(d, 'state', StatementState)) - - + return cls(error=_from_dict(d, "error", ServiceError), state=_enum(d, "state", StatementState)) class Status(Enum): """Health status of the warehouse.""" - - DEGRADED = 'DEGRADED' - FAILED = 'FAILED' - HEALTHY = 'HEALTHY' - STATUS_UNSPECIFIED = 'STATUS_UNSPECIFIED' - + DEGRADED = "DEGRADED" + FAILED = "FAILED" + HEALTHY = "HEALTHY" + STATUS_UNSPECIFIED = "STATUS_UNSPECIFIED" @dataclass @@ -5754,343 +7251,354 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> StopWarehouseResponse: """Deserializes the StopWarehouseResponse from a dictionary.""" return cls() - - @dataclass class Success: message: Optional[SuccessMessage] = None - + def as_dict(self) -> dict: """Serializes the Success into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message.value + if self.message is not None: + body["message"] = self.message.value return body def as_shallow_dict(self) -> dict: """Serializes the Success into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message + if self.message is not None: +
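+        # Unlike as_dict, which stores the enum's string value, the shallow
+        # variant keeps the SuccessMessage enum member itself.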
body["message"] = self.message return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Success: """Deserializes the Success from a dictionary.""" - return cls(message=_enum(d, 'message', SuccessMessage)) - - + return cls(message=_enum(d, "message", SuccessMessage)) class SuccessMessage(Enum): - - - SUCCESS = 'Success' + + SUCCESS = "Success" + @dataclass class TaskTimeOverRange: entries: Optional[List[TaskTimeOverRangeEntry]] = None - + interval: Optional[int] = None """interval length for all entries (difference in start time and end time of an entry range) the same for all entries start time of first interval is query_start_time_ms""" - + def as_dict(self) -> dict: """Serializes the TaskTimeOverRange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entries: body['entries'] = [v.as_dict() for v in self.entries] - if self.interval is not None: body['interval'] = self.interval + if self.entries: + body["entries"] = [v.as_dict() for v in self.entries] + if self.interval is not None: + body["interval"] = self.interval return body def as_shallow_dict(self) -> dict: """Serializes the TaskTimeOverRange into a shallow dictionary of its immediate attributes.""" body = {} - if self.entries: body['entries'] = self.entries - if self.interval is not None: body['interval'] = self.interval + if self.entries: + body["entries"] = self.entries + if self.interval is not None: + body["interval"] = self.interval return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TaskTimeOverRange: """Deserializes the TaskTimeOverRange from a dictionary.""" - return cls(entries=_repeated_dict(d, 'entries', TaskTimeOverRangeEntry), interval=d.get('interval', None)) - - + return cls(entries=_repeated_dict(d, "entries", TaskTimeOverRangeEntry), interval=d.get("interval", None)) @dataclass class TaskTimeOverRangeEntry: task_completed_time_ms: Optional[int] = None """total task completion time in this time range, aggregated over all stages and jobs in the query""" - + def as_dict(self) -> dict: """Serializes the TaskTimeOverRangeEntry into a dictionary suitable for use as a JSON request body.""" body = {} - if self.task_completed_time_ms is not None: body['task_completed_time_ms'] = self.task_completed_time_ms + if self.task_completed_time_ms is not None: + body["task_completed_time_ms"] = self.task_completed_time_ms return body def as_shallow_dict(self) -> dict: """Serializes the TaskTimeOverRangeEntry into a shallow dictionary of its immediate attributes.""" body = {} - if self.task_completed_time_ms is not None: body['task_completed_time_ms'] = self.task_completed_time_ms + if self.task_completed_time_ms is not None: + body["task_completed_time_ms"] = self.task_completed_time_ms return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TaskTimeOverRangeEntry: """Deserializes the TaskTimeOverRangeEntry from a dictionary.""" - return cls(task_completed_time_ms=d.get('task_completed_time_ms', None)) - - + return cls(task_completed_time_ms=d.get("task_completed_time_ms", None)) @dataclass class TerminationReason: code: Optional[TerminationReasonCode] = None """status code indicating why the cluster was terminated""" - - parameters: Optional[Dict[str,str]] = None + + parameters: Optional[Dict[str, str]] = None """list of parameters that provide additional information about why the cluster was terminated""" - + type: Optional[TerminationReasonType] = None """type of the termination""" - + def as_dict(self) -> dict: """Serializes the TerminationReason into a dictionary 
suitable for use as a JSON request body.""" body = {} - if self.code is not None: body['code'] = self.code.value - if self.parameters: body['parameters'] = self.parameters - if self.type is not None: body['type'] = self.type.value + if self.code is not None: + body["code"] = self.code.value + if self.parameters: + body["parameters"] = self.parameters + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: """Serializes the TerminationReason into a shallow dictionary of its immediate attributes.""" body = {} - if self.code is not None: body['code'] = self.code - if self.parameters: body['parameters'] = self.parameters - if self.type is not None: body['type'] = self.type + if self.code is not None: + body["code"] = self.code + if self.parameters: + body["parameters"] = self.parameters + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TerminationReason: """Deserializes the TerminationReason from a dictionary.""" - return cls(code=_enum(d, 'code', TerminationReasonCode), parameters=d.get('parameters', None), type=_enum(d, 'type', TerminationReasonType)) - - + return cls( + code=_enum(d, "code", TerminationReasonCode), + parameters=d.get("parameters", None), + type=_enum(d, "type", TerminationReasonType), + ) class TerminationReasonCode(Enum): """status code indicating why the cluster was terminated""" - - ABUSE_DETECTED = 'ABUSE_DETECTED' - ATTACH_PROJECT_FAILURE = 'ATTACH_PROJECT_FAILURE' - AWS_AUTHORIZATION_FAILURE = 'AWS_AUTHORIZATION_FAILURE' - AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = 'AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE' - AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = 'AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE' - AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = 'AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE' - AWS_REQUEST_LIMIT_EXCEEDED = 'AWS_REQUEST_LIMIT_EXCEEDED' - AWS_UNSUPPORTED_FAILURE = 'AWS_UNSUPPORTED_FAILURE' - AZURE_BYOK_KEY_PERMISSION_FAILURE = 'AZURE_BYOK_KEY_PERMISSION_FAILURE' - AZURE_EPHEMERAL_DISK_FAILURE = 'AZURE_EPHEMERAL_DISK_FAILURE' - AZURE_INVALID_DEPLOYMENT_TEMPLATE = 'AZURE_INVALID_DEPLOYMENT_TEMPLATE' - AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = 'AZURE_OPERATION_NOT_ALLOWED_EXCEPTION' - AZURE_QUOTA_EXCEEDED_EXCEPTION = 'AZURE_QUOTA_EXCEEDED_EXCEPTION' - AZURE_RESOURCE_MANAGER_THROTTLING = 'AZURE_RESOURCE_MANAGER_THROTTLING' - AZURE_RESOURCE_PROVIDER_THROTTLING = 'AZURE_RESOURCE_PROVIDER_THROTTLING' - AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = 'AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE' - AZURE_VM_EXTENSION_FAILURE = 'AZURE_VM_EXTENSION_FAILURE' - AZURE_VNET_CONFIGURATION_FAILURE = 'AZURE_VNET_CONFIGURATION_FAILURE' - BOOTSTRAP_TIMEOUT = 'BOOTSTRAP_TIMEOUT' - BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = 'BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION' - CLOUD_PROVIDER_DISK_SETUP_FAILURE = 'CLOUD_PROVIDER_DISK_SETUP_FAILURE' - CLOUD_PROVIDER_LAUNCH_FAILURE = 'CLOUD_PROVIDER_LAUNCH_FAILURE' - CLOUD_PROVIDER_RESOURCE_STOCKOUT = 'CLOUD_PROVIDER_RESOURCE_STOCKOUT' - CLOUD_PROVIDER_SHUTDOWN = 'CLOUD_PROVIDER_SHUTDOWN' - COMMUNICATION_LOST = 'COMMUNICATION_LOST' - CONTAINER_LAUNCH_FAILURE = 'CONTAINER_LAUNCH_FAILURE' - CONTROL_PLANE_REQUEST_FAILURE = 'CONTROL_PLANE_REQUEST_FAILURE' - DATABASE_CONNECTION_FAILURE = 'DATABASE_CONNECTION_FAILURE' - DBFS_COMPONENT_UNHEALTHY = 'DBFS_COMPONENT_UNHEALTHY' - DOCKER_IMAGE_PULL_FAILURE = 'DOCKER_IMAGE_PULL_FAILURE' - DRIVER_UNREACHABLE = 'DRIVER_UNREACHABLE' - DRIVER_UNRESPONSIVE = 
'DRIVER_UNRESPONSIVE' - EXECUTION_COMPONENT_UNHEALTHY = 'EXECUTION_COMPONENT_UNHEALTHY' - GCP_QUOTA_EXCEEDED = 'GCP_QUOTA_EXCEEDED' - GCP_SERVICE_ACCOUNT_DELETED = 'GCP_SERVICE_ACCOUNT_DELETED' - GLOBAL_INIT_SCRIPT_FAILURE = 'GLOBAL_INIT_SCRIPT_FAILURE' - HIVE_METASTORE_PROVISIONING_FAILURE = 'HIVE_METASTORE_PROVISIONING_FAILURE' - IMAGE_PULL_PERMISSION_DENIED = 'IMAGE_PULL_PERMISSION_DENIED' - INACTIVITY = 'INACTIVITY' - INIT_SCRIPT_FAILURE = 'INIT_SCRIPT_FAILURE' - INSTANCE_POOL_CLUSTER_FAILURE = 'INSTANCE_POOL_CLUSTER_FAILURE' - INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE' - INTERNAL_ERROR = 'INTERNAL_ERROR' - INVALID_ARGUMENT = 'INVALID_ARGUMENT' - INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE' - IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE' - JOB_FINISHED = 'JOB_FINISHED' - K8S_AUTOSCALING_FAILURE = 'K8S_AUTOSCALING_FAILURE' - K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = 'K8S_DBR_CLUSTER_LAUNCH_TIMEOUT' - METASTORE_COMPONENT_UNHEALTHY = 'METASTORE_COMPONENT_UNHEALTHY' - NEPHOS_RESOURCE_MANAGEMENT = 'NEPHOS_RESOURCE_MANAGEMENT' - NETWORK_CONFIGURATION_FAILURE = 'NETWORK_CONFIGURATION_FAILURE' - NFS_MOUNT_FAILURE = 'NFS_MOUNT_FAILURE' - NPIP_TUNNEL_SETUP_FAILURE = 'NPIP_TUNNEL_SETUP_FAILURE' - NPIP_TUNNEL_TOKEN_FAILURE = 'NPIP_TUNNEL_TOKEN_FAILURE' - REQUEST_REJECTED = 'REQUEST_REJECTED' - REQUEST_THROTTLED = 'REQUEST_THROTTLED' - SECRET_RESOLUTION_ERROR = 'SECRET_RESOLUTION_ERROR' - SECURITY_DAEMON_REGISTRATION_EXCEPTION = 'SECURITY_DAEMON_REGISTRATION_EXCEPTION' - SELF_BOOTSTRAP_FAILURE = 'SELF_BOOTSTRAP_FAILURE' - SKIPPED_SLOW_NODES = 'SKIPPED_SLOW_NODES' - SLOW_IMAGE_DOWNLOAD = 'SLOW_IMAGE_DOWNLOAD' - SPARK_ERROR = 'SPARK_ERROR' - SPARK_IMAGE_DOWNLOAD_FAILURE = 'SPARK_IMAGE_DOWNLOAD_FAILURE' - SPARK_STARTUP_FAILURE = 'SPARK_STARTUP_FAILURE' - SPOT_INSTANCE_TERMINATION = 'SPOT_INSTANCE_TERMINATION' - STORAGE_DOWNLOAD_FAILURE = 'STORAGE_DOWNLOAD_FAILURE' - STS_CLIENT_SETUP_FAILURE = 'STS_CLIENT_SETUP_FAILURE' - SUBNET_EXHAUSTED_FAILURE = 'SUBNET_EXHAUSTED_FAILURE' - TEMPORARILY_UNAVAILABLE = 'TEMPORARILY_UNAVAILABLE' - TRIAL_EXPIRED = 'TRIAL_EXPIRED' - UNEXPECTED_LAUNCH_FAILURE = 'UNEXPECTED_LAUNCH_FAILURE' - UNKNOWN = 'UNKNOWN' - UNSUPPORTED_INSTANCE_TYPE = 'UNSUPPORTED_INSTANCE_TYPE' - UPDATE_INSTANCE_PROFILE_FAILURE = 'UPDATE_INSTANCE_PROFILE_FAILURE' - USER_REQUEST = 'USER_REQUEST' - WORKER_SETUP_FAILURE = 'WORKER_SETUP_FAILURE' - WORKSPACE_CANCELLED_ERROR = 'WORKSPACE_CANCELLED_ERROR' - WORKSPACE_CONFIGURATION_ERROR = 'WORKSPACE_CONFIGURATION_ERROR' + + ABUSE_DETECTED = "ABUSE_DETECTED" + ATTACH_PROJECT_FAILURE = "ATTACH_PROJECT_FAILURE" + AWS_AUTHORIZATION_FAILURE = "AWS_AUTHORIZATION_FAILURE" + AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE = "AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE" + AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = "AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE" + AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = "AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE" + AWS_REQUEST_LIMIT_EXCEEDED = "AWS_REQUEST_LIMIT_EXCEEDED" + AWS_UNSUPPORTED_FAILURE = "AWS_UNSUPPORTED_FAILURE" + AZURE_BYOK_KEY_PERMISSION_FAILURE = "AZURE_BYOK_KEY_PERMISSION_FAILURE" + AZURE_EPHEMERAL_DISK_FAILURE = "AZURE_EPHEMERAL_DISK_FAILURE" + AZURE_INVALID_DEPLOYMENT_TEMPLATE = "AZURE_INVALID_DEPLOYMENT_TEMPLATE" + AZURE_OPERATION_NOT_ALLOWED_EXCEPTION = "AZURE_OPERATION_NOT_ALLOWED_EXCEPTION" + AZURE_QUOTA_EXCEEDED_EXCEPTION = "AZURE_QUOTA_EXCEEDED_EXCEPTION" + AZURE_RESOURCE_MANAGER_THROTTLING = "AZURE_RESOURCE_MANAGER_THROTTLING" + AZURE_RESOURCE_PROVIDER_THROTTLING = 
"AZURE_RESOURCE_PROVIDER_THROTTLING" + AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE = "AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE" + AZURE_VM_EXTENSION_FAILURE = "AZURE_VM_EXTENSION_FAILURE" + AZURE_VNET_CONFIGURATION_FAILURE = "AZURE_VNET_CONFIGURATION_FAILURE" + BOOTSTRAP_TIMEOUT = "BOOTSTRAP_TIMEOUT" + BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION = "BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION" + CLOUD_PROVIDER_DISK_SETUP_FAILURE = "CLOUD_PROVIDER_DISK_SETUP_FAILURE" + CLOUD_PROVIDER_LAUNCH_FAILURE = "CLOUD_PROVIDER_LAUNCH_FAILURE" + CLOUD_PROVIDER_RESOURCE_STOCKOUT = "CLOUD_PROVIDER_RESOURCE_STOCKOUT" + CLOUD_PROVIDER_SHUTDOWN = "CLOUD_PROVIDER_SHUTDOWN" + COMMUNICATION_LOST = "COMMUNICATION_LOST" + CONTAINER_LAUNCH_FAILURE = "CONTAINER_LAUNCH_FAILURE" + CONTROL_PLANE_REQUEST_FAILURE = "CONTROL_PLANE_REQUEST_FAILURE" + DATABASE_CONNECTION_FAILURE = "DATABASE_CONNECTION_FAILURE" + DBFS_COMPONENT_UNHEALTHY = "DBFS_COMPONENT_UNHEALTHY" + DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" + DRIVER_UNREACHABLE = "DRIVER_UNREACHABLE" + DRIVER_UNRESPONSIVE = "DRIVER_UNRESPONSIVE" + EXECUTION_COMPONENT_UNHEALTHY = "EXECUTION_COMPONENT_UNHEALTHY" + GCP_QUOTA_EXCEEDED = "GCP_QUOTA_EXCEEDED" + GCP_SERVICE_ACCOUNT_DELETED = "GCP_SERVICE_ACCOUNT_DELETED" + GLOBAL_INIT_SCRIPT_FAILURE = "GLOBAL_INIT_SCRIPT_FAILURE" + HIVE_METASTORE_PROVISIONING_FAILURE = "HIVE_METASTORE_PROVISIONING_FAILURE" + IMAGE_PULL_PERMISSION_DENIED = "IMAGE_PULL_PERMISSION_DENIED" + INACTIVITY = "INACTIVITY" + INIT_SCRIPT_FAILURE = "INIT_SCRIPT_FAILURE" + INSTANCE_POOL_CLUSTER_FAILURE = "INSTANCE_POOL_CLUSTER_FAILURE" + INSTANCE_UNREACHABLE = "INSTANCE_UNREACHABLE" + INTERNAL_ERROR = "INTERNAL_ERROR" + INVALID_ARGUMENT = "INVALID_ARGUMENT" + INVALID_SPARK_IMAGE = "INVALID_SPARK_IMAGE" + IP_EXHAUSTION_FAILURE = "IP_EXHAUSTION_FAILURE" + JOB_FINISHED = "JOB_FINISHED" + K8S_AUTOSCALING_FAILURE = "K8S_AUTOSCALING_FAILURE" + K8S_DBR_CLUSTER_LAUNCH_TIMEOUT = "K8S_DBR_CLUSTER_LAUNCH_TIMEOUT" + METASTORE_COMPONENT_UNHEALTHY = "METASTORE_COMPONENT_UNHEALTHY" + NEPHOS_RESOURCE_MANAGEMENT = "NEPHOS_RESOURCE_MANAGEMENT" + NETWORK_CONFIGURATION_FAILURE = "NETWORK_CONFIGURATION_FAILURE" + NFS_MOUNT_FAILURE = "NFS_MOUNT_FAILURE" + NPIP_TUNNEL_SETUP_FAILURE = "NPIP_TUNNEL_SETUP_FAILURE" + NPIP_TUNNEL_TOKEN_FAILURE = "NPIP_TUNNEL_TOKEN_FAILURE" + REQUEST_REJECTED = "REQUEST_REJECTED" + REQUEST_THROTTLED = "REQUEST_THROTTLED" + SECRET_RESOLUTION_ERROR = "SECRET_RESOLUTION_ERROR" + SECURITY_DAEMON_REGISTRATION_EXCEPTION = "SECURITY_DAEMON_REGISTRATION_EXCEPTION" + SELF_BOOTSTRAP_FAILURE = "SELF_BOOTSTRAP_FAILURE" + SKIPPED_SLOW_NODES = "SKIPPED_SLOW_NODES" + SLOW_IMAGE_DOWNLOAD = "SLOW_IMAGE_DOWNLOAD" + SPARK_ERROR = "SPARK_ERROR" + SPARK_IMAGE_DOWNLOAD_FAILURE = "SPARK_IMAGE_DOWNLOAD_FAILURE" + SPARK_STARTUP_FAILURE = "SPARK_STARTUP_FAILURE" + SPOT_INSTANCE_TERMINATION = "SPOT_INSTANCE_TERMINATION" + STORAGE_DOWNLOAD_FAILURE = "STORAGE_DOWNLOAD_FAILURE" + STS_CLIENT_SETUP_FAILURE = "STS_CLIENT_SETUP_FAILURE" + SUBNET_EXHAUSTED_FAILURE = "SUBNET_EXHAUSTED_FAILURE" + TEMPORARILY_UNAVAILABLE = "TEMPORARILY_UNAVAILABLE" + TRIAL_EXPIRED = "TRIAL_EXPIRED" + UNEXPECTED_LAUNCH_FAILURE = "UNEXPECTED_LAUNCH_FAILURE" + UNKNOWN = "UNKNOWN" + UNSUPPORTED_INSTANCE_TYPE = "UNSUPPORTED_INSTANCE_TYPE" + UPDATE_INSTANCE_PROFILE_FAILURE = "UPDATE_INSTANCE_PROFILE_FAILURE" + USER_REQUEST = "USER_REQUEST" + WORKER_SETUP_FAILURE = "WORKER_SETUP_FAILURE" + WORKSPACE_CANCELLED_ERROR = "WORKSPACE_CANCELLED_ERROR" + WORKSPACE_CONFIGURATION_ERROR = 
"WORKSPACE_CONFIGURATION_ERROR" + class TerminationReasonType(Enum): """type of the termination""" - - CLIENT_ERROR = 'CLIENT_ERROR' - CLOUD_FAILURE = 'CLOUD_FAILURE' - SERVICE_FAULT = 'SERVICE_FAULT' - SUCCESS = 'SUCCESS' + + CLIENT_ERROR = "CLIENT_ERROR" + CLOUD_FAILURE = "CLOUD_FAILURE" + SERVICE_FAULT = "SERVICE_FAULT" + SUCCESS = "SUCCESS" + @dataclass class TextValue: value: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the TextValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.value is not None: body['value'] = self.value + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the TextValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.value is not None: body['value'] = self.value + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TextValue: """Deserializes the TextValue from a dictionary.""" - return cls(value=d.get('value', None)) - - + return cls(value=d.get("value", None)) @dataclass class TimeRange: end_time_ms: Optional[int] = None """The end time in milliseconds.""" - + start_time_ms: Optional[int] = None """The start time in milliseconds.""" - + def as_dict(self) -> dict: """Serializes the TimeRange into a dictionary suitable for use as a JSON request body.""" body = {} - if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms - if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms return body def as_shallow_dict(self) -> dict: """Serializes the TimeRange into a shallow dictionary of its immediate attributes.""" body = {} - if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms - if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TimeRange: """Deserializes the TimeRange from a dictionary.""" - return cls(end_time_ms=d.get('end_time_ms', None), start_time_ms=d.get('start_time_ms', None)) - - + return cls(end_time_ms=d.get("end_time_ms", None), start_time_ms=d.get("start_time_ms", None)) @dataclass class TransferOwnershipObjectId: new_owner: Optional[str] = None """Email address for the new owner, who must exist in the workspace.""" - + def as_dict(self) -> dict: """Serializes the TransferOwnershipObjectId into a dictionary suitable for use as a JSON request body.""" body = {} - if self.new_owner is not None: body['new_owner'] = self.new_owner + if self.new_owner is not None: + body["new_owner"] = self.new_owner return body def as_shallow_dict(self) -> dict: """Serializes the TransferOwnershipObjectId into a shallow dictionary of its immediate attributes.""" body = {} - if self.new_owner is not None: body['new_owner'] = self.new_owner + if self.new_owner is not None: + body["new_owner"] = self.new_owner return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipObjectId: """Deserializes the TransferOwnershipObjectId from a dictionary.""" - return cls(new_owner=d.get('new_owner', None)) - - + return cls(new_owner=d.get("new_owner", None)) @dataclass class 
TransferOwnershipRequest: """Transfer object ownership""" - + new_owner: Optional[str] = None """Email address for the new owner, who must exist in the workspace.""" - + object_id: Optional[TransferOwnershipObjectId] = None """The ID of the object on which to change ownership.""" - + object_type: Optional[OwnableObjectType] = None """The type of object on which to change ownership.""" - + def as_dict(self) -> dict: """Serializes the TransferOwnershipRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.new_owner is not None: body['new_owner'] = self.new_owner - if self.object_id: body['objectId'] = self.object_id.as_dict() - if self.object_type is not None: body['objectType'] = self.object_type.value + if self.new_owner is not None: + body["new_owner"] = self.new_owner + if self.object_id: + body["objectId"] = self.object_id.as_dict() + if self.object_type is not None: + body["objectType"] = self.object_type.value return body def as_shallow_dict(self) -> dict: """Serializes the TransferOwnershipRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.new_owner is not None: body['new_owner'] = self.new_owner - if self.object_id: body['objectId'] = self.object_id - if self.object_type is not None: body['objectType'] = self.object_type + if self.new_owner is not None: + body["new_owner"] = self.new_owner + if self.object_id: + body["objectId"] = self.object_id + if self.object_type is not None: + body["objectType"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipRequest: """Deserializes the TransferOwnershipRequest from a dictionary.""" - return cls(new_owner=d.get('new_owner', None), object_id=_from_dict(d, 'objectId', TransferOwnershipObjectId), object_type=_enum(d, 'objectType', OwnableObjectType)) - - - - - - - - - - - + return cls( + new_owner=d.get("new_owner", None), + object_id=_from_dict(d, "objectId", TransferOwnershipObjectId), + object_type=_enum(d, "objectType", OwnableObjectType), + ) @dataclass @@ -6105,108 +7613,139 @@ class UpdateAlertRequest: A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + alert: Optional[UpdateAlertRequestAlert] = None - + auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve alert display name conflicts. 
Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name.""" - + id: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alert: body['alert'] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name - if self.id is not None: body['id'] = self.id - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.alert: + body["alert"] = self.alert.as_dict() + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.id is not None: + body["id"] = self.id + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.alert: body['alert'] = self.alert - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name - if self.id is not None: body['id'] = self.id - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.alert: + body["alert"] = self.alert + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.id is not None: + body["id"] = self.id + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequest: """Deserializes the UpdateAlertRequest from a dictionary.""" - return cls(alert=_from_dict(d, 'alert', UpdateAlertRequestAlert), auto_resolve_display_name=d.get('auto_resolve_display_name', None), id=d.get('id', None), update_mask=d.get('update_mask', None)) - - + return cls( + alert=_from_dict(d, "alert", UpdateAlertRequestAlert), + auto_resolve_display_name=d.get("auto_resolve_display_name", None), + id=d.get("id", None), + update_mask=d.get("update_mask", None), + ) @dataclass class UpdateAlertRequestAlert: condition: Optional[AlertCondition] = None """Trigger conditions of the alert.""" - + custom_body: Optional[str] = None """Custom body of alert notification, if it exists. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + custom_subject: Optional[str] = None """Custom subject of alert notification, if it exists. This can include email subject entries and Slack notification headers, for example. See [here] for custom templating instructions. [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - + display_name: Optional[str] = None """The display name of the alert.""" - + notify_on_ok: Optional[bool] = None """Whether to notify alert subscribers when the alert returns to normal.""" - + owner_user_name: Optional[str] = None """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" - + query_id: Optional[str] = None """UUID of the query attached to the alert.""" - + seconds_to_retrigger: Optional[int] = None """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again.
If 0 or not specified, the alert will not be triggered again.""" - + def as_dict(self) -> dict: """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.condition: body['condition'] = self.condition.as_dict() - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.display_name is not None: body['display_name'] = self.display_name - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.query_id is not None: body['query_id'] = self.query_id - if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.condition: + body["condition"] = self.condition.as_dict() + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.display_name is not None: + body["display_name"] = self.display_name + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.query_id is not None: + body["query_id"] = self.query_id + if self.seconds_to_retrigger is not None: + body["seconds_to_retrigger"] = self.seconds_to_retrigger return body def as_shallow_dict(self) -> dict: """Serializes the UpdateAlertRequestAlert into a shallow dictionary of its immediate attributes.""" body = {} - if self.condition: body['condition'] = self.condition - if self.custom_body is not None: body['custom_body'] = self.custom_body - if self.custom_subject is not None: body['custom_subject'] = self.custom_subject - if self.display_name is not None: body['display_name'] = self.display_name - if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.query_id is not None: body['query_id'] = self.query_id - if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.condition: + body["condition"] = self.condition + if self.custom_body is not None: + body["custom_body"] = self.custom_body + if self.custom_subject is not None: + body["custom_subject"] = self.custom_subject + if self.display_name is not None: + body["display_name"] = self.display_name + if self.notify_on_ok is not None: + body["notify_on_ok"] = self.notify_on_ok + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.query_id is not None: + body["query_id"] = self.query_id + if self.seconds_to_retrigger is not None: + body["seconds_to_retrigger"] = self.seconds_to_retrigger return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequestAlert: """Deserializes the UpdateAlertRequestAlert from a dictionary.""" - return cls(condition=_from_dict(d, 'condition', AlertCondition), custom_body=d.get('custom_body', None), custom_subject=d.get('custom_subject', None), display_name=d.get('display_name', None), notify_on_ok=d.get('notify_on_ok', None), owner_user_name=d.get('owner_user_name', None), query_id=d.get('query_id', None), seconds_to_retrigger=d.get('seconds_to_retrigger', None)) - - - - - + return cls( + condition=_from_dict(d, "condition", AlertCondition), + custom_body=d.get("custom_body", None), + 
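+            # custom_body and custom_subject support the alert templating syntax
+            # documented at https://docs.databricks.com/sql/user/alerts/index.html.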
custom_subject=d.get("custom_subject", None), + display_name=d.get("display_name", None), + notify_on_ok=d.get("notify_on_ok", None), + owner_user_name=d.get("owner_user_name", None), + query_id=d.get("query_id", None), + seconds_to_retrigger=d.get("seconds_to_retrigger", None), + ) @dataclass @@ -6221,113 +7760,156 @@ class UpdateQueryRequest: A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + auto_resolve_display_name: Optional[bool] = None """If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name.""" - + id: Optional[str] = None - + query: Optional[UpdateQueryRequestQuery] = None - + def as_dict(self) -> dict: """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name - if self.id is not None: body['id'] = self.id - if self.query: body['query'] = self.query.as_dict() - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.id is not None: + body["id"] = self.id + if self.query: + body["query"] = self.query.as_dict() + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body def as_shallow_dict(self) -> dict: """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_resolve_display_name is not None: body['auto_resolve_display_name'] = self.auto_resolve_display_name - if self.id is not None: body['id'] = self.id - if self.query: body['query'] = self.query - if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = self.auto_resolve_display_name + if self.id is not None: + body["id"] = self.id + if self.query: + body["query"] = self.query + if self.update_mask is not None: + body["update_mask"] = self.update_mask return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateQueryRequest: """Deserializes the UpdateQueryRequest from a dictionary.""" - return cls(auto_resolve_display_name=d.get('auto_resolve_display_name', None), id=d.get('id', None), query=_from_dict(d, 'query', UpdateQueryRequestQuery), update_mask=d.get('update_mask', None)) - - + return cls( + auto_resolve_display_name=d.get("auto_resolve_display_name", None), + id=d.get("id", None), + query=_from_dict(d, "query", UpdateQueryRequestQuery), + update_mask=d.get("update_mask", None), + ) @dataclass class UpdateQueryRequestQuery: apply_auto_limit: Optional[bool] = None """Whether to apply a 1000 row limit to the query result.""" - + catalog: Optional[str] = None """Name of the catalog where this query will be executed.""" - + description: Optional[str] = None """General description that conveys additional information about this query such as usage notes.""" - + display_name: Optional[str] = None """Display name of the query that appears in list views, widget headings, and on the query page.""" - + owner_user_name: Optional[str] = None """Username of the user that owns the query.""" - + parameters: Optional[List[QueryParameter]] = None """List 
of query parameter definitions.""" - + query_text: Optional[str] = None """Text of the query to be run.""" - + run_as_mode: Optional[RunAsMode] = None """Sets the "Run as" role for the object.""" - + schema: Optional[str] = None """Name of the schema where this query will be executed.""" - + tags: Optional[List[str]] = None - + warehouse_id: Optional[str] = None """ID of the SQL warehouse attached to the query.""" - + def as_dict(self) -> dict: """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit - if self.catalog is not None: body['catalog'] = self.catalog - if self.description is not None: body['description'] = self.description - if self.display_name is not None: body['display_name'] = self.display_name - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] - if self.query_text is not None: body['query_text'] = self.query_text - if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value - if self.schema is not None: body['schema'] = self.schema - if self.tags: body['tags'] = [v for v in self.tags] - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.apply_auto_limit is not None: + body["apply_auto_limit"] = self.apply_auto_limit + if self.catalog is not None: + body["catalog"] = self.catalog + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] + if self.query_text is not None: + body["query_text"] = self.query_text + if self.run_as_mode is not None: + body["run_as_mode"] = self.run_as_mode.value + if self.schema is not None: + body["schema"] = self.schema + if self.tags: + body["tags"] = [v for v in self.tags] + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the UpdateQueryRequestQuery into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit - if self.catalog is not None: body['catalog'] = self.catalog - if self.description is not None: body['description'] = self.description - if self.display_name is not None: body['display_name'] = self.display_name - if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name - if self.parameters: body['parameters'] = self.parameters - if self.query_text is not None: body['query_text'] = self.query_text - if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode - if self.schema is not None: body['schema'] = self.schema - if self.tags: body['tags'] = self.tags - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.apply_auto_limit is not None: + body["apply_auto_limit"] = self.apply_auto_limit + if self.catalog is not None: + body["catalog"] = self.catalog + if self.description is not None: + body["description"] = self.description + if self.display_name is not None: + body["display_name"] = self.display_name + if self.owner_user_name is not None: + body["owner_user_name"] = self.owner_user_name + if self.parameters: + 
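+            # The shallow variant keeps QueryParameter objects and the tags list
+            # as-is rather than serializing each element with as_dict().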
body["parameters"] = self.parameters + if self.query_text is not None: + body["query_text"] = self.query_text + if self.run_as_mode is not None: + body["run_as_mode"] = self.run_as_mode + if self.schema is not None: + body["schema"] = self.schema + if self.tags: + body["tags"] = self.tags + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateQueryRequestQuery: """Deserializes the UpdateQueryRequestQuery from a dictionary.""" - return cls(apply_auto_limit=d.get('apply_auto_limit', None), catalog=d.get('catalog', None), description=d.get('description', None), display_name=d.get('display_name', None), owner_user_name=d.get('owner_user_name', None), parameters=_repeated_dict(d, 'parameters', QueryParameter), query_text=d.get('query_text', None), run_as_mode=_enum(d, 'run_as_mode', RunAsMode), schema=d.get('schema', None), tags=d.get('tags', None), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + apply_auto_limit=d.get("apply_auto_limit", None), + catalog=d.get("catalog", None), + description=d.get("description", None), + display_name=d.get("display_name", None), + owner_user_name=d.get("owner_user_name", None), + parameters=_repeated_dict(d, "parameters", QueryParameter), + query_text=d.get("query_text", None), + run_as_mode=_enum(d, "run_as_mode", RunAsMode), + schema=d.get("schema", None), + tags=d.get("tags", None), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass @@ -6346,8 +7928,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: """Deserializes the UpdateResponse from a dictionary.""" return cls() - - @dataclass @@ -6362,386 +7942,484 @@ class UpdateVisualizationRequest: A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.""" - + id: Optional[str] = None - + visualization: Optional[UpdateVisualizationRequestVisualization] = None - + def as_dict(self) -> dict: """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.update_mask is not None: body['update_mask'] = self.update_mask - if self.visualization: body['visualization'] = self.visualization.as_dict() + if self.id is not None: + body["id"] = self.id + if self.update_mask is not None: + body["update_mask"] = self.update_mask + if self.visualization: + body["visualization"] = self.visualization.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.update_mask is not None: body['update_mask'] = self.update_mask - if self.visualization: body['visualization'] = self.visualization + if self.id is not None: + body["id"] = self.id + if self.update_mask is not None: + body["update_mask"] = self.update_mask + if self.visualization: + body["visualization"] = self.visualization return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequest: """Deserializes the UpdateVisualizationRequest from a dictionary.""" - return cls(id=d.get('id', None), update_mask=d.get('update_mask', None), visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization)) - - + return cls( + id=d.get("id", None), + update_mask=d.get("update_mask", None), + visualization=_from_dict(d, "visualization", UpdateVisualizationRequestVisualization), + ) @dataclass class UpdateVisualizationRequestVisualization: display_name: Optional[str] = None """The display name of the visualization.""" - + serialized_options: Optional[str] = None """The visualization options vary widely from one visualization type to the next and are unsupported. Databricks does not recommend modifying visualization options directly.""" - + serialized_query_plan: Optional[str] = None """The visualization query plan varies widely from one visualization type to the next and is unsupported.
Databricks does not recommend modifying the visualization query plan directly.""" - + type: Optional[str] = None """The type of visualization: counter, table, funnel, and so on.""" - + def as_dict(self) -> dict: """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.serialized_options is not None: body['serialized_options'] = self.serialized_options - if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan - if self.type is not None: body['type'] = self.type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.serialized_options is not None: + body["serialized_options"] = self.serialized_options + if self.serialized_query_plan is not None: + body["serialized_query_plan"] = self.serialized_query_plan + if self.type is not None: + body["type"] = self.type return body def as_shallow_dict(self) -> dict: """Serializes the UpdateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes.""" body = {} - if self.display_name is not None: body['display_name'] = self.display_name - if self.serialized_options is not None: body['serialized_options'] = self.serialized_options - if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan - if self.type is not None: body['type'] = self.type + if self.display_name is not None: + body["display_name"] = self.display_name + if self.serialized_options is not None: + body["serialized_options"] = self.serialized_options + if self.serialized_query_plan is not None: + body["serialized_query_plan"] = self.serialized_query_plan + if self.type is not None: + body["type"] = self.type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequestVisualization: """Deserializes the UpdateVisualizationRequestVisualization from a dictionary.""" - return cls(display_name=d.get('display_name', None), serialized_options=d.get('serialized_options', None), serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None)) - - + return cls( + display_name=d.get("display_name", None), + serialized_options=d.get("serialized_options", None), + serialized_query_plan=d.get("serialized_query_plan", None), + type=d.get("type", None), + ) @dataclass class User: email: Optional[str] = None - + id: Optional[int] = None - + name: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the User into a dictionary suitable for use as a JSON request body.""" body = {} - if self.email is not None: body['email'] = self.email - if self.id is not None: body['id'] = self.id - if self.name is not None: body['name'] = self.name + if self.email is not None: + body["email"] = self.email + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the User into a shallow dictionary of its immediate attributes.""" body = {} - if self.email is not None: body['email'] = self.email - if self.id is not None: body['id'] = self.id - if self.name is not None: body['name'] = self.name + if self.email is not None: + body["email"] = self.email + if self.id is not None: + body["id"] = self.id + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> User: """Deserializes the User 
from a dictionary.""" - return cls(email=d.get('email', None), id=d.get('id', None), name=d.get('name', None)) - - + return cls(email=d.get("email", None), id=d.get("id", None), name=d.get("name", None)) @dataclass class Visualization: create_time: Optional[str] = None """The timestamp indicating when the visualization was created.""" - + display_name: Optional[str] = None """The display name of the visualization.""" - + id: Optional[str] = None """UUID identifying the visualization.""" - + query_id: Optional[str] = None """UUID of the query that the visualization is attached to.""" - + serialized_options: Optional[str] = None """The visualization options varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization options directly.""" - + serialized_query_plan: Optional[str] = None """The visualization query plan varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying the visualization query plan directly.""" - + type: Optional[str] = None """The type of visualization: counter, table, funnel, and so on.""" - + update_time: Optional[str] = None """The timestamp indicating when the visualization was updated.""" - + def as_dict(self) -> dict: """Serializes the Visualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id - if self.query_id is not None: body['query_id'] = self.query_id - if self.serialized_options is not None: body['serialized_options'] = self.serialized_options - if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan - if self.type is not None: body['type'] = self.type - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id + if self.query_id is not None: + body["query_id"] = self.query_id + if self.serialized_options is not None: + body["serialized_options"] = self.serialized_options + if self.serialized_query_plan is not None: + body["serialized_query_plan"] = self.serialized_query_plan + if self.type is not None: + body["type"] = self.type + if self.update_time is not None: + body["update_time"] = self.update_time return body def as_shallow_dict(self) -> dict: """Serializes the Visualization into a shallow dictionary of its immediate attributes.""" body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.display_name is not None: body['display_name'] = self.display_name - if self.id is not None: body['id'] = self.id - if self.query_id is not None: body['query_id'] = self.query_id - if self.serialized_options is not None: body['serialized_options'] = self.serialized_options - if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan - if self.type is not None: body['type'] = self.type - if self.update_time is not None: body['update_time'] = self.update_time + if self.create_time is not None: + body["create_time"] = self.create_time + if self.display_name is not None: + body["display_name"] = self.display_name + if self.id is not None: + body["id"] = self.id + if self.query_id is not None: 
+ body["query_id"] = self.query_id + if self.serialized_options is not None: + body["serialized_options"] = self.serialized_options + if self.serialized_query_plan is not None: + body["serialized_query_plan"] = self.serialized_query_plan + if self.type is not None: + body["type"] = self.type + if self.update_time is not None: + body["update_time"] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Visualization: """Deserializes the Visualization from a dictionary.""" - return cls(create_time=d.get('create_time', None), display_name=d.get('display_name', None), id=d.get('id', None), query_id=d.get('query_id', None), serialized_options=d.get('serialized_options', None), serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None), update_time=d.get('update_time', None)) - - + return cls( + create_time=d.get("create_time", None), + display_name=d.get("display_name", None), + id=d.get("id", None), + query_id=d.get("query_id", None), + serialized_options=d.get("serialized_options", None), + serialized_query_plan=d.get("serialized_query_plan", None), + type=d.get("type", None), + update_time=d.get("update_time", None), + ) @dataclass class WarehouseAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[WarehousePermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the WarehouseAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the WarehouseAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehouseAccessControlRequest: """Deserializes the WarehouseAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', WarehousePermissionLevel), 
service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", WarehousePermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class WarehouseAccessControlResponse: all_permissions: Optional[List[WarehousePermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display name of the user or service principal.""" - + group_name: Optional[str] = None """name of the group""" - + service_principal_name: Optional[str] = None """Name of the service principal.""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the WarehouseAccessControlResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions] - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = [v.as_dict() for v in self.all_permissions] + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the WarehouseAccessControlResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.all_permissions: body['all_permissions'] = self.all_permissions - if self.display_name is not None: body['display_name'] = self.display_name - if self.group_name is not None: body['group_name'] = self.group_name - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.all_permissions: + body["all_permissions"] = self.all_permissions + if self.display_name is not None: + body["display_name"] = self.display_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehouseAccessControlResponse: """Deserializes the WarehouseAccessControlResponse from a dictionary.""" - return cls(all_permissions=_repeated_dict(d, 'all_permissions', WarehousePermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + all_permissions=_repeated_dict(d, "all_permissions", WarehousePermission), + display_name=d.get("display_name", None), + group_name=d.get("group_name", None), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class WarehousePermission: inherited: Optional[bool] = None - + 
inherited_from_object: Optional[List[str]] = None - + permission_level: Optional[WarehousePermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the WarehousePermission into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object] - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = [v for v in self.inherited_from_object] + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermission into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited is not None: body['inherited'] = self.inherited - if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.inherited is not None: + body["inherited"] = self.inherited + if self.inherited_from_object: + body["inherited_from_object"] = self.inherited_from_object + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermission: """Deserializes the WarehousePermission from a dictionary.""" - return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', WarehousePermissionLevel)) - - + return cls( + inherited=d.get("inherited", None), + inherited_from_object=d.get("inherited_from_object", None), + permission_level=_enum(d, "permission_level", WarehousePermissionLevel), + ) class WarehousePermissionLevel(Enum): """Permission level""" - - CAN_MANAGE = 'CAN_MANAGE' - CAN_MONITOR = 'CAN_MONITOR' - CAN_USE = 'CAN_USE' - CAN_VIEW = 'CAN_VIEW' - IS_OWNER = 'IS_OWNER' + + CAN_MANAGE = "CAN_MANAGE" + CAN_MONITOR = "CAN_MONITOR" + CAN_USE = "CAN_USE" + CAN_VIEW = "CAN_VIEW" + IS_OWNER = "IS_OWNER" + @dataclass class WarehousePermissions: access_control_list: Optional[List[WarehouseAccessControlResponse]] = None - + object_id: Optional[str] = None - + object_type: Optional[str] = None - + def as_dict(self) -> dict: """Serializes the WarehousePermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if 
self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissions: """Deserializes the WarehousePermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', WarehouseAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class WarehousePermissionsDescription: description: Optional[str] = None - + permission_level: Optional[WarehousePermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the WarehousePermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsDescription: """Deserializes the WarehousePermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', WarehousePermissionLevel)) - - + return cls( + description=d.get("description", None), + permission_level=_enum(d, "permission_level", WarehousePermissionLevel), + ) @dataclass class WarehousePermissionsRequest: access_control_list: Optional[List[WarehouseAccessControlRequest]] = None - + warehouse_id: Optional[str] = None """The SQL warehouse for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the WarehousePermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.warehouse_id is not None: + body["warehouse_id"] = self.warehouse_id return body def as_shallow_dict(self) -> dict: """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.warehouse_id is not 
None: + body["warehouse_id"] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsRequest: """Deserializes the WarehousePermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', WarehouseAccessControlRequest), warehouse_id=d.get('warehouse_id', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlRequest), + warehouse_id=d.get("warehouse_id", None), + ) @dataclass @@ -6749,186 +8427,234 @@ class WarehouseTypePair: enabled: Optional[bool] = None """If set to false, the specific warehouse type will not be allowed as a value for warehouse_type in CreateWarehouse and EditWarehouse""" - + warehouse_type: Optional[WarehouseTypePairWarehouseType] = None """Warehouse type: `PRO` or `CLASSIC`.""" - + def as_dict(self) -> dict: """Serializes the WarehouseTypePair into a dictionary suitable for use as a JSON request body.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + if self.enabled is not None: + body["enabled"] = self.enabled + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: """Serializes the WarehouseTypePair into a shallow dictionary of its immediate attributes.""" body = {} - if self.enabled is not None: body['enabled'] = self.enabled - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type + if self.enabled is not None: + body["enabled"] = self.enabled + if self.warehouse_type is not None: + body["warehouse_type"] = self.warehouse_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WarehouseTypePair: """Deserializes the WarehouseTypePair from a dictionary.""" - return cls(enabled=d.get('enabled', None), warehouse_type=_enum(d, 'warehouse_type', WarehouseTypePairWarehouseType)) - - + return cls( + enabled=d.get("enabled", None), warehouse_type=_enum(d, "warehouse_type", WarehouseTypePairWarehouseType) + ) class WarehouseTypePairWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`.""" - - CLASSIC = 'CLASSIC' - PRO = 'PRO' - TYPE_UNSPECIFIED = 'TYPE_UNSPECIFIED' + + CLASSIC = "CLASSIC" + PRO = "PRO" + TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" + @dataclass class Widget: id: Optional[str] = None """The unique ID for this widget.""" - + options: Optional[WidgetOptions] = None - + visualization: Optional[LegacyVisualization] = None """The visualization description API changes frequently and is unsupported. You can duplicate a visualization by copying description objects received _from the API_ and then using them to create a new one with a POST request to the same endpoint.
Databricks does not recommend constructing ad-hoc visualizations entirely in JSON.""" - + width: Optional[int] = None """Unused field.""" - + def as_dict(self) -> dict: """Serializes the Widget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.id is not None: body['id'] = self.id - if self.options: body['options'] = self.options.as_dict() - if self.visualization: body['visualization'] = self.visualization.as_dict() - if self.width is not None: body['width'] = self.width + if self.id is not None: + body["id"] = self.id + if self.options: + body["options"] = self.options.as_dict() + if self.visualization: + body["visualization"] = self.visualization.as_dict() + if self.width is not None: + body["width"] = self.width return body def as_shallow_dict(self) -> dict: """Serializes the Widget into a shallow dictionary of its immediate attributes.""" body = {} - if self.id is not None: body['id'] = self.id - if self.options: body['options'] = self.options - if self.visualization: body['visualization'] = self.visualization - if self.width is not None: body['width'] = self.width + if self.id is not None: + body["id"] = self.id + if self.options: + body["options"] = self.options + if self.visualization: + body["visualization"] = self.visualization + if self.width is not None: + body["width"] = self.width return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Widget: """Deserializes the Widget from a dictionary.""" - return cls(id=d.get('id', None), options=_from_dict(d, 'options', WidgetOptions), visualization=_from_dict(d, 'visualization', LegacyVisualization), width=d.get('width', None)) - - + return cls( + id=d.get("id", None), + options=_from_dict(d, "options", WidgetOptions), + visualization=_from_dict(d, "visualization", LegacyVisualization), + width=d.get("width", None), + ) @dataclass class WidgetOptions: created_at: Optional[str] = None """Timestamp when this object was created""" - + description: Optional[str] = None """Custom description of the widget""" - + is_hidden: Optional[bool] = None """Whether this widget is hidden on the dashboard.""" - + parameter_mappings: Optional[Any] = None """How parameters used by the visualization in this widget relate to other widgets on the dashboard. Databricks does not recommend modifying this definition in JSON.""" - + position: Optional[WidgetPosition] = None """Coordinates of this widget on a dashboard. 
This portion of the API changes frequently and is unsupported.""" - + title: Optional[str] = None """Custom title of the widget""" - + updated_at: Optional[str] = None """Timestamp of the last time this object was updated.""" - + def as_dict(self) -> dict: """Serializes the WidgetOptions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.description is not None: body['description'] = self.description - if self.is_hidden is not None: body['isHidden'] = self.is_hidden - if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings - if self.position: body['position'] = self.position.as_dict() - if self.title is not None: body['title'] = self.title - if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.created_at is not None: + body["created_at"] = self.created_at + if self.description is not None: + body["description"] = self.description + if self.is_hidden is not None: + body["isHidden"] = self.is_hidden + if self.parameter_mappings: + body["parameterMappings"] = self.parameter_mappings + if self.position: + body["position"] = self.position.as_dict() + if self.title is not None: + body["title"] = self.title + if self.updated_at is not None: + body["updated_at"] = self.updated_at return body def as_shallow_dict(self) -> dict: """Serializes the WidgetOptions into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.description is not None: body['description'] = self.description - if self.is_hidden is not None: body['isHidden'] = self.is_hidden - if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings - if self.position: body['position'] = self.position - if self.title is not None: body['title'] = self.title - if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.created_at is not None: + body["created_at"] = self.created_at + if self.description is not None: + body["description"] = self.description + if self.is_hidden is not None: + body["isHidden"] = self.is_hidden + if self.parameter_mappings: + body["parameterMappings"] = self.parameter_mappings + if self.position: + body["position"] = self.position + if self.title is not None: + body["title"] = self.title + if self.updated_at is not None: + body["updated_at"] = self.updated_at return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WidgetOptions: """Deserializes the WidgetOptions from a dictionary.""" - return cls(created_at=d.get('created_at', None), description=d.get('description', None), is_hidden=d.get('isHidden', None), parameter_mappings=d.get('parameterMappings', None), position=_from_dict(d, 'position', WidgetPosition), title=d.get('title', None), updated_at=d.get('updated_at', None)) - - + return cls( + created_at=d.get("created_at", None), + description=d.get("description", None), + is_hidden=d.get("isHidden", None), + parameter_mappings=d.get("parameterMappings", None), + position=_from_dict(d, "position", WidgetPosition), + title=d.get("title", None), + updated_at=d.get("updated_at", None), + ) @dataclass class WidgetPosition: """Coordinates of this widget on a dashboard. This portion of the API changes frequently and is unsupported.""" - + auto_height: Optional[bool] = None """reserved for internal use""" - + col: Optional[int] = None """column in the dashboard grid. 
Values start with 0""" - + row: Optional[int] = None """row in the dashboard grid. Values start with 0""" - + size_x: Optional[int] = None """width of the widget measured in dashboard grid cells""" - + size_y: Optional[int] = None """height of the widget measured in dashboard grid cells""" - + def as_dict(self) -> dict: """Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_height is not None: body['autoHeight'] = self.auto_height - if self.col is not None: body['col'] = self.col - if self.row is not None: body['row'] = self.row - if self.size_x is not None: body['sizeX'] = self.size_x - if self.size_y is not None: body['sizeY'] = self.size_y + if self.auto_height is not None: + body["autoHeight"] = self.auto_height + if self.col is not None: + body["col"] = self.col + if self.row is not None: + body["row"] = self.row + if self.size_x is not None: + body["sizeX"] = self.size_x + if self.size_y is not None: + body["sizeY"] = self.size_y return body def as_shallow_dict(self) -> dict: """Serializes the WidgetPosition into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_height is not None: body['autoHeight'] = self.auto_height - if self.col is not None: body['col'] = self.col - if self.row is not None: body['row'] = self.row - if self.size_x is not None: body['sizeX'] = self.size_x - if self.size_y is not None: body['sizeY'] = self.size_y + if self.auto_height is not None: + body["autoHeight"] = self.auto_height + if self.col is not None: + body["col"] = self.col + if self.row is not None: + body["row"] = self.row + if self.size_x is not None: + body["sizeX"] = self.size_x + if self.size_y is not None: + body["sizeY"] = self.size_y return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WidgetPosition: """Deserializes the WidgetPosition from a dictionary.""" - return cls(auto_height=d.get('autoHeight', None), col=d.get('col', None), row=d.get('row', None), size_x=d.get('sizeX', None), size_y=d.get('sizeY', None)) - - - - + return cls( + auto_height=d.get("autoHeight", None), + col=d.get("col", None), + row=d.get("row", None), + size_x=d.get("sizeX", None), + size_y=d.get("sizeY", None), + ) class AlertsAPI: @@ -6936,145 +8662,116 @@ class AlertsAPI: periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , alert: Optional[CreateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None) -> Alert: + def create( + self, *, alert: Optional[CreateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None + ) -> Alert: """Create an alert. - + Creates an alert. - + :param alert: :class:`CreateAlertRequestAlert` (optional) :param auto_resolve_display_name: bool (optional) If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. 
- + :returns: :class:`Alert` """ body = {} - if alert is not None: body['alert'] = alert.as_dict() - if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/sql/alerts', body=body - - , headers=headers - ) - return Alert.from_dict(res) + if alert is not None: + body["alert"] = alert.as_dict() + if auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = auto_resolve_display_name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/sql/alerts", body=body, headers=headers) + return Alert.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Delete an alert. - + Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/sql/alerts/{id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/sql/alerts/{id}", headers=headers) - def get(self - , id: str - ) -> Alert: + def get(self, id: str) -> Alert: """Get an alert. - + Gets an alert. - + :param id: str - + :returns: :class:`Alert` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/sql/alerts/{id}' - - , headers=headers - ) - return Alert.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]: + res = self._api.do("GET", f"/api/2.0/sql/alerts/{id}", headers=headers) + return Alert.from_dict(res) + + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ListAlertsResponseAlert]: """List alerts. - + Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. 
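A pagination sketch, assuming the same `w` client; the returned iterator follows `next_page_token` internally (see the loop in the implementation below), so callers only set `page_size`:

    for alert in w.alerts.list(page_size=100):
        print(alert.id, alert.display_name)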
- + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListAlertsResponseAlert` """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/sql/alerts', query=query - - , headers=headers - ) - if 'results' in json: - for v in json['results']: - yield ListAlertsResponseAlert.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , id: str, update_mask: str - , * - , alert: Optional[UpdateAlertRequestAlert] = None, auto_resolve_display_name: Optional[bool] = None) -> Alert: + while True: + json = self._api.do("GET", "/api/2.0/sql/alerts", query=query, headers=headers) + if "results" in json: + for v in json["results"]: + yield ListAlertsResponseAlert.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + id: str, + update_mask: str, + *, + alert: Optional[UpdateAlertRequestAlert] = None, + auto_resolve_display_name: Optional[bool] = None, + ) -> Alert: """Update an alert. - + Updates an alert. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -7082,7 +8779,7 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. @@ -7090,59 +8787,58 @@ def update(self :param auto_resolve_display_name: bool (optional) If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. - + :returns: :class:`Alert` """ body = {} - if alert is not None: body['alert'] = alert.as_dict() - if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name - if update_mask is not None: body['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/sql/alerts/{id}', body=body - - , headers=headers - ) + if alert is not None: + body["alert"] = alert.as_dict() + if auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = auto_resolve_display_name + if update_mask is not None: + body["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/sql/alerts/{id}", body=body, headers=headers) return Alert.from_dict(res) - - + class AlertsLegacyAPI: """The alerts API can be used to perform CRUD operations on alerts. 
An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str, options: AlertOptions, query_id: str - , * - , parent: Optional[str] = None, rearm: Optional[int] = None) -> LegacyAlert: + def create( + self, + name: str, + options: AlertOptions, + query_id: str, + *, + parent: Optional[str] = None, + rearm: Optional[int] = None, + ) -> LegacyAlert: """Create an alert. - + Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification destinations if the condition was met. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param name: str Name of the alert. :param options: :class:`AlertOptions` @@ -7154,123 +8850,102 @@ def create(self :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - + :returns: :class:`LegacyAlert` """ body = {} - if name is not None: body['name'] = name - if options is not None: body['options'] = options.as_dict() - if parent is not None: body['parent'] = parent - if query_id is not None: body['query_id'] = query_id - if rearm is not None: body['rearm'] = rearm - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/sql/alerts', body=body - - , headers=headers - ) + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options.as_dict() + if parent is not None: + body["parent"] = parent + if query_id is not None: + body["query_id"] = query_id + if rearm is not None: + body["rearm"] = rearm + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/sql/alerts", body=body, headers=headers) return LegacyAlert.from_dict(res) - - - - - def delete(self - , alert_id: str - ): + def delete(self, alert_id: str): """Delete an alert. - + Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to the trash. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/preview/sql/alerts/{alert_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , alert_id: str - ) -> LegacyAlert: + self._api.do("DELETE", f"/api/2.0/preview/sql/alerts/{alert_id}", headers=headers) + + def get(self, alert_id: str) -> LegacyAlert: """Get an alert. - + Gets an alert. - + **Note**: A new version of the Databricks SQL API is now available. 
Please use :method:alerts/get instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str - + :returns: :class:`LegacyAlert` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/sql/alerts/{alert_id}' - - , headers=headers - ) - return LegacyAlert.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/sql/alerts/{alert_id}", headers=headers) + return LegacyAlert.from_dict(res) def list(self) -> Iterator[LegacyAlert]: """Get alerts. - + Gets a list of alerts. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`LegacyAlert` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/preview/sql/alerts' - , headers=headers - ) - return [LegacyAlert.from_dict(v) for v in res] - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/preview/sql/alerts", headers=headers) + return [LegacyAlert.from_dict(v) for v in res] - def update(self - , alert_id: str, name: str, options: AlertOptions, query_id: str - , * - , rearm: Optional[int] = None): + def update(self, alert_id: str, name: str, options: AlertOptions, query_id: str, *, rearm: Optional[int] = None): """Update an alert. - + Updates an alert. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str :param name: str Name of the alert. @@ -7281,158 +8956,119 @@ def update(self :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - - + + """ body = {} - if name is not None: body['name'] = name - if options is not None: body['options'] = options.as_dict() - if query_id is not None: body['query_id'] = query_id - if rearm is not None: body['rearm'] = rearm - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PUT',f'/api/2.0/preview/sql/alerts/{alert_id}', body=body - - , headers=headers - ) - + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options.as_dict() + if query_id is not None: + body["query_id"] = query_id + if rearm is not None: + body["rearm"] = rearm + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PUT", f"/api/2.0/preview/sql/alerts/{alert_id}", body=body, headers=headers) + - - class AlertsV2API: """New version of SQL Alerts""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_alert(self - , alert: AlertV2 - ) -> AlertV2: + def create_alert(self, alert: AlertV2) -> AlertV2: """Create an alert. 
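A minimal sketch, assuming the client exposes this service as `w.alerts_v2` and that `AlertV2` accepts at least a display name (field choice illustrative):

    created = w.alerts_v2.create_alert(alert=AlertV2(display_name="cpu-high"))
    print(created.id)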
- + Create Alert - + :param alert: :class:`AlertV2` - + :returns: :class:`AlertV2` """ body = alert.as_dict() - query = {} - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/alerts', body=body - - , headers=headers - ) - return AlertV2.from_dict(res) + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/alerts", body=body, headers=headers) + return AlertV2.from_dict(res) - def get_alert(self - , id: str - ) -> AlertV2: + def get_alert(self, id: str) -> AlertV2: """Get an alert. - + Gets an alert. - + :param id: str - + :returns: :class:`AlertV2` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/alerts/{id}' - - , headers=headers - ) - return AlertV2.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/alerts/{id}", headers=headers) + return AlertV2.from_dict(res) + + def list_alerts(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AlertV2]: + """List alerts. - def list_alerts(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[AlertV2]: - """List alerts. - Gets a list of alerts accessible to the user, ordered by creation time. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`AlertV2` """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/alerts', query=query - - , headers=headers - ) - if 'results' in json: - for v in json['results']: - yield AlertV2.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def trash_alert(self - , id: str - ): + while True: + json = self._api.do("GET", "/api/2.0/alerts", query=query, headers=headers) + if "results" in json: + for v in json["results"]: + yield AlertV2.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def trash_alert(self, id: str): """Delete an alert. - + Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/alerts/{id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/alerts/{id}", headers=headers) - def update_alert(self - , id: str, alert: AlertV2, update_mask: str - ) -> AlertV2: + def update_alert(self, id: str, alert: AlertV2, update_mask: str) -> AlertV2: """Update an alert. - + Update alert - + :param id: str UUID identifying the alert. :param alert: :class:`AlertV2` @@ -7442,47 +9078,44 @@ def update_alert(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. 
Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AlertV2` """ body = alert.as_dict() query = {} - if update_mask is not None: query['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/alerts/{id}', query=query, body=body - - , headers=headers - ) + if update_mask is not None: + query["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/alerts/{id}", query=query, body=body, headers=headers) return AlertV2.from_dict(res) - - + class DashboardWidgetsAPI: """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace. Data structures may change over time.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , dashboard_id: str, options: WidgetOptions, width: int - , * - , text: Optional[str] = None, visualization_id: Optional[str] = None) -> Widget: + def create( + self, + dashboard_id: str, + options: WidgetOptions, + width: int, + *, + text: Optional[str] = None, + visualization_id: Optional[str] = None, + ) -> Widget: """Add widget to a dashboard. - + :param dashboard_id: str Dashboard ID returned by :method:dashboards/create. :param options: :class:`WidgetOptions` @@ -7493,56 +9126,55 @@ def create(self contains a visualization in the `visualization` field. :param visualization_id: str (optional) Query Visualization ID returned by :method:queryvisualizations/create. - + :returns: :class:`Widget` """ body = {} - if dashboard_id is not None: body['dashboard_id'] = dashboard_id - if options is not None: body['options'] = options.as_dict() - if text is not None: body['text'] = text - if visualization_id is not None: body['visualization_id'] = visualization_id - if width is not None: body['width'] = width - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/sql/widgets', body=body - - , headers=headers - ) + if dashboard_id is not None: + body["dashboard_id"] = dashboard_id + if options is not None: + body["options"] = options.as_dict() + if text is not None: + body["text"] = text + if visualization_id is not None: + body["visualization_id"] = visualization_id + if width is not None: + body["width"] = width + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/sql/widgets", body=body, headers=headers) return Widget.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Remove widget.
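A minimal sketch, assuming a configured `WorkspaceClient` bound to `w` exposing this service as `w.dashboard_widgets` (the widget ID is a placeholder):

    w.dashboard_widgets.delete(id="<widget-id>")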
- + :param id: str Widget ID returned by :method:dashboardwidgets/create - - - """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/preview/sql/widgets/{id}' - - , headers=headers - ) - - - - - def update(self - , id: str, dashboard_id: str, options: WidgetOptions, width: int - , * - , text: Optional[str] = None, visualization_id: Optional[str] = None) -> Widget: + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/preview/sql/widgets/{id}", headers=headers) + + def update( + self, + id: str, + dashboard_id: str, + options: WidgetOptions, + width: int, + *, + text: Optional[str] = None, + visualization_id: Optional[str] = None, + ) -> Widget: """Update existing widget. - + :param id: str Widget ID returned by :method:dashboardwidgets/create :param dashboard_id: str @@ -7555,49 +9187,51 @@ def update(self contains a visualization in the `visualization` field. :param visualization_id: str (optional) Query Visualization ID returned by :method:queryvisualizations/create. - + :returns: :class:`Widget` """ body = {} - if dashboard_id is not None: body['dashboard_id'] = dashboard_id - if options is not None: body['options'] = options.as_dict() - if text is not None: body['text'] = text - if visualization_id is not None: body['visualization_id'] = visualization_id - if width is not None: body['width'] = width - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/preview/sql/widgets/{id}', body=body - - , headers=headers - ) + if dashboard_id is not None: + body["dashboard_id"] = dashboard_id + if options is not None: + body["options"] = options.as_dict() + if text is not None: + body["text"] = text + if visualization_id is not None: + body["visualization_id"] = visualization_id + if width is not None: + body["width"] = width + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/preview/sql/widgets/{id}", body=body, headers=headers) return Widget.from_dict(res) - - + class DashboardsAPI: """In general, there is little need to modify dashboards using the API. However, it can be useful to use dashboard objects to look up a collection of related query IDs. The API can also be used to duplicate multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it to create a new one. Dashboards can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , name: str - , * - , dashboard_filters_enabled: Optional[bool] = None, is_favorite: Optional[bool] = None, parent: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> Dashboard: + def create( + self, + name: str, + *, + dashboard_filters_enabled: Optional[bool] = None, + is_favorite: Optional[bool] = None, + parent: Optional[str] = None, + run_as_role: Optional[RunAsRole] = None, + tags: Optional[List[str]] = None, + ) -> Dashboard: """Create a dashboard object. - + :param name: str The title of this dashboard that appears in list views and at the top of the dashboard page. :param dashboard_filters_enabled: bool (optional)
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`Dashboard` """ body = {} - if dashboard_filters_enabled is not None: body['dashboard_filters_enabled'] = dashboard_filters_enabled - if is_favorite is not None: body['is_favorite'] = is_favorite - if name is not None: body['name'] = name - if parent is not None: body['parent'] = parent - if run_as_role is not None: body['run_as_role'] = run_as_role.value - if tags is not None: body['tags'] = [v for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/sql/dashboards', body=body - - , headers=headers - ) + if dashboard_filters_enabled is not None: + body["dashboard_filters_enabled"] = dashboard_filters_enabled + if is_favorite is not None: + body["is_favorite"] = is_favorite + if name is not None: + body["name"] = name + if parent is not None: + body["parent"] = parent + if run_as_role is not None: + body["run_as_role"] = run_as_role.value + if tags is not None: + body["tags"] = [v for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/sql/dashboards", body=body, headers=headers) return Dashboard.from_dict(res) - - - - - def delete(self - , dashboard_id: str - ): + def delete(self, dashboard_id: str): """Remove a dashboard. - + Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared. - + :param dashboard_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/preview/sql/dashboards/{dashboard_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/preview/sql/dashboards/{dashboard_id}", headers=headers) - def get(self - , dashboard_id: str - ) -> Dashboard: + def get(self, dashboard_id: str) -> Dashboard: """Retrieve a definition. - + Returns a JSON representation of a dashboard object, including its visualization and query objects. - + :param dashboard_id: str - + :returns: :class:`Dashboard` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/sql/dashboards/{dashboard_id}' - - , headers=headers - ) - return Dashboard.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/sql/dashboards/{dashboard_id}", headers=headers) + return Dashboard.from_dict(res) - def list(self - - , * - , order: Optional[ListOrder] = None, page: Optional[int] = None, page_size: Optional[int] = None, q: Optional[str] = None) -> Iterator[Dashboard]: + def list( + self, + *, + order: Optional[ListOrder] = None, + page: Optional[int] = None, + page_size: Optional[int] = None, + q: Optional[str] = None, + ) -> Iterator[Dashboard]: """Get dashboard objects. - + Fetch a paginated list of dashboard objects. - + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + :param order: :class:`ListOrder` (optional) Name of dashboard attribute to order by. :param page: int (optional) @@ -7700,77 +9325,70 @@ def list(self Number of dashboards to return per page. :param q: str (optional) Full text search term. 
- + :returns: Iterator over :class:`Dashboard` """ - + query = {} - if order is not None: query['order'] = order.value - if page is not None: query['page'] = page - if page_size is not None: query['page_size'] = page_size - if q is not None: query['q'] = q - headers = {'Accept': 'application/json',} - - + if order is not None: + query["order"] = order.value + if page is not None: + query["page"] = page + if page_size is not None: + query["page_size"] = page_size + if q is not None: + query["q"] = q + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['page'] =1 + query["page"] = 1 while True: - json = self._api.do('GET','/api/2.0/preview/sql/dashboards', query=query - - , headers=headers - ) - if 'results' in json: - for v in json['results']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield Dashboard.from_dict(v) - if 'results' not in json or not json['results']: - return - query['page'] += 1 - - - - - - - def restore(self - , dashboard_id: str - ): + json = self._api.do("GET", "/api/2.0/preview/sql/dashboards", query=query, headers=headers) + if "results" in json: + for v in json["results"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield Dashboard.from_dict(v) + if "results" not in json or not json["results"]: + return + query["page"] += 1 + + def restore(self, dashboard_id: str): """Restore a dashboard. - + A restored dashboard appears in list views and searches and can be shared. - + :param dashboard_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('POST',f'/api/2.0/preview/sql/dashboards/trash/{dashboard_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("POST", f"/api/2.0/preview/sql/dashboards/trash/{dashboard_id}", headers=headers) - def update(self - , dashboard_id: str - , * - , name: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> Dashboard: + def update( + self, + dashboard_id: str, + *, + name: Optional[str] = None, + run_as_role: Optional[RunAsRole] = None, + tags: Optional[List[str]] = None, + ) -> Dashboard: """Change a dashboard definition. - + Modify this dashboard definition. This operation only affects attributes of the dashboard object. It does not add, modify, or remove widgets. - + **Note**: You cannot undo this operation. - + :param dashboard_id: str :param name: str (optional) The title of this dashboard that appears in list views and at the top of the dashboard page. @@ -7778,383 +9396,331 @@ def update(self Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`Dashboard` """ body = {} - if name is not None: body['name'] = name - if run_as_role is not None: body['run_as_role'] = run_as_role.value - if tags is not None: body['tags'] = [v for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/preview/sql/dashboards/{dashboard_id}', body=body - - , headers=headers - ) + if name is not None: + body["name"] = name + if run_as_role is not None: + body["run_as_role"] = run_as_role.value + if tags is not None: + body["tags"] = [v for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/preview/sql/dashboards/{dashboard_id}", body=body, headers=headers) return Dashboard.from_dict(res) - - + class DataSourcesAPI: """This API is provided to assist you in making new query objects. When creating a query object, you may optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't already know the `data_source_id` for your desired SQL warehouse, this API will help you find it. - + This API does not support searches. It returns the full list of SQL warehouses in your workspace. We advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def list(self) -> Iterator[DataSource]: """Get a list of SQL warehouses. - + Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`DataSource` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/preview/sql/data_sources' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/preview/sql/data_sources", headers=headers) return [DataSource.from_dict(v) for v in res] - - + class DbsqlPermissionsAPI: """The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this exposes only one endpoint, which gets the Access Control List for a given object. You cannot modify any permissions using this API. - + There are three levels of permission: - + - `CAN_VIEW`: Allows read-only access - + - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`) - + - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - + **Note**: A new version of the Databricks SQL API is now available. 
[Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def get(self - , object_type: ObjectTypePlural, object_id: str - ) -> GetResponse: + def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse: """Get object ACL. - + Gets a JSON representation of the access control list (ACL) for a specified object. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:workspace/getpermissions instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. :param object_id: str Object ID. An ACL is returned for the object with this UUID. - + :returns: :class:`GetResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}' - - , headers=headers - ) - return GetResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}", headers=headers) + return GetResponse.from_dict(res) - def set(self - , object_type: ObjectTypePlural, object_id: str - , * - , access_control_list: Optional[List[AccessControl]] = None) -> SetResponse: + def set( + self, + object_type: ObjectTypePlural, + object_id: str, + *, + access_control_list: Optional[List[AccessControl]] = None, + ) -> SetResponse: """Set object ACL. - + Sets the access control list (ACL) for a specified object. This operation will completely rewrite the ACL. - + **Note**: A new version of the Databricks SQL API is now available.
For queries and alerts, please use :method:queries/update and :method:alerts/update respectively instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. :param object_id: :class:`TransferOwnershipObjectId` The ID of the object on which to change ownership. :param new_owner: str (optional) Email address for the new owner, who must exist in the workspace. - + :returns: :class:`Success` """ body = {} - if new_owner is not None: body['new_owner'] = new_owner - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}/transfer', body=body - - , headers=headers - ) + if new_owner is not None: + body["new_owner"] = new_owner + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/preview/sql/permissions/{object_type.value}/{object_id}/transfer", + body=body, + headers=headers, + ) return Success.from_dict(res) - - + class QueriesAPI: """The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , auto_resolve_display_name: Optional[bool] = None, query: Optional[CreateQueryRequestQuery] = None) -> Query: + def create( + self, *, auto_resolve_display_name: Optional[bool] = None, query: Optional[CreateQueryRequestQuery] = None + ) -> Query: """Create a query. - + Creates a query. - + :param auto_resolve_display_name: bool (optional) If true, automatically resolve query display name conflicts. Otherwise, fail the request if the query's display name conflicts with an existing query's display name. :param query: :class:`CreateQueryRequestQuery` (optional) - + :returns: :class:`Query` """ body = {} - if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name - if query is not None: body['query'] = query.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/sql/queries', body=body - - , headers=headers - ) - return Query.from_dict(res) + if auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = auto_resolve_display_name + if query is not None: + body["query"] = query.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/sql/queries", body=body, headers=headers) + return Query.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Delete a query. - + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is permanently deleted after 30 days. 
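The new-style `create` above nests the whole payload under a single `query` object, unlike the legacy flat endpoint later in this module. A minimal sketch of a call, assuming a configured `WorkspaceClient`; the warehouse ID is a placeholder and the `CreateQueryRequestQuery` field names are assumed from this module:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# The request body is one nested object rather than flat fields.
created = w.queries.create(
    query=sql.CreateQueryRequestQuery(
        display_name="Weekly revenue",
        warehouse_id="<warehouse-id>",  # placeholder
        query_text="SELECT 42 AS answer",
    ),
    # Rename automatically instead of failing on a display-name conflict.
    auto_resolve_display_name=True,
)
print(created.id)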
- + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/sql/queries/{id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/sql/queries/{id}", headers=headers) - def get(self - , id: str - ) -> Query: + def get(self, id: str) -> Query: """Get a query. - + Gets a query. - + :param id: str - + :returns: :class:`Query` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/sql/queries/{id}' - - , headers=headers - ) - return Query.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/sql/queries/{id}", headers=headers) + return Query.from_dict(res) - def list(self - - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]: + def list( + self, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[ListQueryObjectsResponseQuery]: """List queries. - + Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListQueryObjectsResponseQuery` """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/sql/queries', query=query - - , headers=headers - ) - if 'results' in json: - for v in json['results']: - yield ListQueryObjectsResponseQuery.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def list_visualizations(self - , id: str - , * - , page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[Visualization]: + while True: + json = self._api.do("GET", "/api/2.0/sql/queries", query=query, headers=headers) + if "results" in json: + for v in json["results"]: + yield ListQueryObjectsResponseQuery.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def list_visualizations( + self, id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[Visualization]: """List visualizations on a query. - + Gets a list of visualizations on a query. 
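The token-based loop generated above stays hidden behind the returned iterator, so consuming the list is plain iteration. A sketch, assuming the usual `WorkspaceClient` accessor name and that `display_name` is a field of the yielded objects:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The iterator follows next_page_token across pages transparently.
# Keep calls sparse: the docstring above warns that 10+ concurrent
# requests can lead to throttling or a temporary ban.
for q in w.queries.list(page_size=50):
    print(q.id, q.display_name)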
- + :param id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Visualization` """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET',f'/api/2.0/sql/queries/{id}/visualizations', query=query - - , headers=headers - ) - if 'results' in json: - for v in json['results']: - yield Visualization.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update(self - , id: str, update_mask: str - , * - , auto_resolve_display_name: Optional[bool] = None, query: Optional[UpdateQueryRequestQuery] = None) -> Query: + while True: + json = self._api.do("GET", f"/api/2.0/sql/queries/{id}/visualizations", query=query, headers=headers) + if "results" in json: + for v in json["results"]: + yield Visualization.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update( + self, + id: str, + update_mask: str, + *, + auto_resolve_display_name: Optional[bool] = None, + query: Optional[UpdateQueryRequestQuery] = None, + ) -> Query: """Update a query. - + Updates a query. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -8162,7 +9728,7 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. @@ -8170,68 +9736,70 @@ def update(self If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. :param query: :class:`UpdateQueryRequestQuery` (optional) - + :returns: :class:`Query` """ body = {} - if auto_resolve_display_name is not None: body['auto_resolve_display_name'] = auto_resolve_display_name - if query is not None: body['query'] = query.as_dict() - if update_mask is not None: body['update_mask'] = update_mask - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/sql/queries/{id}', body=body - - , headers=headers - ) + if auto_resolve_display_name is not None: + body["auto_resolve_display_name"] = auto_resolve_display_name + if query is not None: + body["query"] = query.as_dict() + if update_mask is not None: + body["update_mask"] = update_mask + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/sql/queries/{id}", body=body, headers=headers) return Query.from_dict(res) - - + class QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. 
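The `update_mask` contract documented above (comma-separated field paths, `*` for full replacement) is easiest to see in a call. A sketch with illustrative mask entries; the `UpdateQueryRequestQuery` field names are assumed from this module:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# Only the fields named in update_mask are modified; all others keep
# their current values, which is why an explicit list beats "*".
w.queries.update(
    id="<query-id>",  # placeholder
    update_mask="display_name,query_text",
    query=sql.UpdateQueryRequestQuery(
        display_name="Weekly revenue (v2)",
        query_text="SELECT 43 AS answer",
    ),
)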
Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , data_source_id: Optional[str] = None, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, parent: Optional[str] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> LegacyQuery: + def create( + self, + *, + data_source_id: Optional[str] = None, + description: Optional[str] = None, + name: Optional[str] = None, + options: Optional[Any] = None, + parent: Optional[str] = None, + query: Optional[str] = None, + run_as_role: Optional[RunAsRole] = None, + tags: Optional[List[str]] = None, + ) -> LegacyQuery: """Create a new query definition. - + Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. - + The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the `data_source_id` from an existing query. - + **Note**: You cannot add a visualization until you create the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] - + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list :param description: str (optional) General description that conveys additional information about this query such as usage notes. @@ -8249,119 +9817,112 @@ def create(self Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`LegacyQuery` """ body = {} - if data_source_id is not None: body['data_source_id'] = data_source_id - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if options is not None: body['options'] = options - if parent is not None: body['parent'] = parent - if query is not None: body['query'] = query - if run_as_role is not None: body['run_as_role'] = run_as_role.value - if tags is not None: body['tags'] = [v for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/sql/queries', body=body - - , headers=headers - ) + if data_source_id is not None: + body["data_source_id"] = data_source_id + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options + if parent is not None: + body["parent"] = parent + if query is not None: + body["query"] = query + if run_as_role is not None: + body["run_as_role"] = run_as_role.value + if tags is not None: + body["tags"] = [v for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/sql/queries", body=body, headers=headers) return LegacyQuery.from_dict(res) - - - - - def delete(self - , query_id: str - ): + def delete(self, query_id: str): """Delete a query. - + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/preview/sql/queries/{query_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , query_id: str - ) -> LegacyQuery: + self._api.do("DELETE", f"/api/2.0/preview/sql/queries/{query_id}", headers=headers) + + def get(self, query_id: str) -> LegacyQuery: """Get a query definition. - + Retrieve a query object definition along with contextual permissions information about the currently authenticated user. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get instead. 
[Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - + :returns: :class:`LegacyQuery` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/preview/sql/queries/{query_id}' - - , headers=headers - ) - return LegacyQuery.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/preview/sql/queries/{query_id}", headers=headers) + return LegacyQuery.from_dict(res) - def list(self - - , * - , order: Optional[str] = None, page: Optional[int] = None, page_size: Optional[int] = None, q: Optional[str] = None) -> Iterator[LegacyQuery]: + def list( + self, + *, + order: Optional[str] = None, + page: Optional[int] = None, + page_size: Optional[int] = None, + q: Optional[str] = None, + ) -> Iterator[LegacyQuery]: """Get a list of queries. - + Gets a list of queries. Optionally, this list can be filtered by a search term. - + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order descending instead. - + - `name`: The name of the query. - + - `created_at`: The timestamp the query was created. - + - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank value is treated as the highest value for sorting. - + - `executed_at`: The timestamp when the query was last run. - + - `created_by`: The user name of the user that created the query. :param page: int (optional) Page number to retrieve. @@ -8369,92 +9930,89 @@ def list(self Number of queries to return per page. :param q: str (optional) Full text search term - + :returns: Iterator over :class:`LegacyQuery` """ - + query = {} - if order is not None: query['order'] = order - if page is not None: query['page'] = page - if page_size is not None: query['page_size'] = page_size - if q is not None: query['q'] = q - headers = {'Accept': 'application/json',} - - + if order is not None: + query["order"] = order + if page is not None: + query["page"] = page + if page_size is not None: + query["page_size"] = page_size + if q is not None: + query["q"] = q + headers = { + "Accept": "application/json", + } + # deduplicate items that may have been added during iteration seen = set() - query['page'] =1 + query["page"] = 1 while True: - json = self._api.do('GET','/api/2.0/preview/sql/queries', query=query - - , headers=headers - ) - if 'results' in json: - for v in json['results']: - i = v['id'] - if i in seen: - continue - seen.add(i) - yield LegacyQuery.from_dict(v) - if 'results' not in json or not json['results']: - return - query['page'] += 1 - - - - - - - def restore(self - , query_id: str - ): + json = self._api.do("GET", "/api/2.0/preview/sql/queries", query=query, headers=headers) + if "results" in json: + for v in json["results"]: + i = v["id"] + if i in seen: + continue + seen.add(i) + yield LegacyQuery.from_dict(v) + if "results" not in json or not json["results"]: + return + query["page"] += 1 + + def restore(self, query_id: str): """Restore a query. - + Restore a query that has been moved to the trash. A restored query appears in list views and searches. 
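The legacy endpoint pages by number rather than by token, and items can shift between pages while you iterate, which is why the generated loop above deduplicates on `id`. Ordering follows the dash-prefix convention from the docstring; a sketch, assuming the `queries_legacy` accessor name:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# "-created_at" sorts newest first; drop the dash for ascending order.
for q in w.queries_legacy.list(order="-created_at", page_size=25, q="revenue"):
    print(q.id, q.name)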
You can use restored queries for alerts. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - - - """ - - headers = {'Accept': 'application/json',} - - self._api.do('POST',f'/api/2.0/preview/sql/queries/trash/{query_id}' - - , headers=headers - ) - - - - - def update(self - , query_id: str - , * - , data_source_id: Optional[str] = None, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, tags: Optional[List[str]] = None) -> LegacyQuery: + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("POST", f"/api/2.0/preview/sql/queries/trash/{query_id}", headers=headers) + + def update( + self, + query_id: str, + *, + data_source_id: Optional[str] = None, + description: Optional[str] = None, + name: Optional[str] = None, + options: Optional[Any] = None, + query: Optional[str] = None, + run_as_role: Optional[RunAsRole] = None, + tags: Optional[List[str]] = None, + ) -> LegacyQuery: """Change a query definition. - + Modify this query definition. - + **Note**: You cannot undo this operation. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] - + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list :param description: str (optional) General description that conveys additional information about this query such as usage notes. @@ -8470,54 +10028,56 @@ def update(self Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`LegacyQuery` """ body = {} - if data_source_id is not None: body['data_source_id'] = data_source_id - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if options is not None: body['options'] = options - if query is not None: body['query'] = query - if run_as_role is not None: body['run_as_role'] = run_as_role.value - if tags is not None: body['tags'] = [v for v in tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/preview/sql/queries/{query_id}', body=body - - , headers=headers - ) + if data_source_id is not None: + body["data_source_id"] = data_source_id + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options + if query is not None: + body["query"] = query + if run_as_role is not None: + body["run_as_role"] = run_as_role.value + if tags is not None: + body["tags"] = [v for v in tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/preview/sql/queries/{query_id}", body=body, headers=headers) return LegacyQuery.from_dict(res) - - + class QueryHistoryAPI: """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def list(self - - , * - , filter_by: Optional[QueryFilter] = None, include_metrics: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> ListQueriesResponse: + def list( + self, + *, + filter_by: Optional[QueryFilter] = None, + include_metrics: Optional[bool] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None, + ) -> ListQueriesResponse: """List Queries. - + List the history of queries through SQL warehouses, and serverless compute. - + You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are returned first (up to max_results in request). The pagination token returned in response can be used to list subsequent query statuses. - + :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. :param include_metrics: bool (optional) @@ -8529,98 +10089,77 @@ def list(self A token that can be used to get the next page of results. The token can contains characters that need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by %2B. This field is optional. 
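Unlike the iterators above, this `list` returns a single page as a `ListQueriesResponse`, so the caller feeds the token back in explicitly. A sketch; the warehouse ID is a placeholder, and the `QueryFilter` and response field names (`res`, `next_page_token`) are assumed from this module:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

page_token = None
while True:
    resp = w.query_history.list(
        filter_by=sql.QueryFilter(warehouse_ids=["<warehouse-id>"]),
        max_results=100,
        page_token=page_token,
    )
    for info in resp.res or []:
        print(info.query_id, info.status)
    if not resp.next_page_token:
        break
    page_token = resp.next_page_token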
- + :returns: :class:`ListQueriesResponse` """ - + query = {} - if filter_by is not None: query['filter_by'] = filter_by.as_dict() - if include_metrics is not None: query['include_metrics'] = include_metrics - if max_results is not None: query['max_results'] = max_results - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/sql/history/queries', query=query - - , headers=headers - ) + if filter_by is not None: + query["filter_by"] = filter_by.as_dict() + if include_metrics is not None: + query["include_metrics"] = include_metrics + if max_results is not None: + query["max_results"] = max_results + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/sql/history/queries", query=query, headers=headers) return ListQueriesResponse.from_dict(res) - - + class QueryVisualizationsAPI: """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace. Data structures can change over time.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - - , * - , visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization: + def create(self, *, visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization: """Add a visualization to a query. - + Adds a visualization to a query. - + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) - + :returns: :class:`Visualization` """ body = {} - if visualization is not None: body['visualization'] = visualization.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/sql/visualizations', body=body - - , headers=headers - ) - return Visualization.from_dict(res) + if visualization is not None: + body["visualization"] = visualization.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", "/api/2.0/sql/visualizations", body=body, headers=headers) + return Visualization.from_dict(res) - def delete(self - , id: str - ): + def delete(self, id: str): """Remove a visualization. - + Removes a visualization. - + :param id: str - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/sql/visualizations/{id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def update(self - , id: str, update_mask: str - , * - , visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization: + self._api.do("DELETE", f"/api/2.0/sql/visualizations/{id}", headers=headers) + + def update( + self, id: str, update_mask: str, *, visualization: Optional[UpdateVisualizationRequestVisualization] = None + ) -> Visualization: """Update a visualization. - + Updates a visualization. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -8628,60 +10167,52 @@ def update(self `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) - + :returns: :class:`Visualization` """ body = {} - if update_mask is not None: body['update_mask'] = update_mask - if visualization is not None: body['visualization'] = visualization.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/sql/visualizations/{id}', body=body - - , headers=headers - ) + if update_mask is not None: + body["update_mask"] = update_mask + if visualization is not None: + body["visualization"] = visualization.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/sql/visualizations/{id}", body=body, headers=headers) return Visualization.from_dict(res) - - + class QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace. Data structures may change over time. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , query_id: str, type: str, options: Any - , * - , description: Optional[str] = None, name: Optional[str] = None) -> LegacyVisualization: + def create( + self, query_id: str, type: str, options: Any, *, description: Optional[str] = None, name: Optional[str] = None + ) -> LegacyVisualization: """Add visualization to a query. - + Creates visualization in the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str The identifier returned by :method:queries/create :param type: str @@ -8693,70 +10224,71 @@ def create(self A short description of this visualization. This is not displayed in the UI. :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. - + :returns: :class:`LegacyVisualization` """ body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if options is not None: body['options'] = options - if query_id is not None: body['query_id'] = query_id - if type is not None: body['type'] = type - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/preview/sql/visualizations', body=body - - , headers=headers - ) + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options + if query_id is not None: + body["query_id"] = query_id + if type is not None: + body["type"] = type + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/preview/sql/visualizations", body=body, headers=headers) return LegacyVisualization.from_dict(res) - - - - - def delete(self - , id: str - ): + def delete(self, id: str): """Remove visualization. - + Removes a visualization from the query. 
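The same field-mask rules shown for queries apply to the non-legacy visualization `update` a little earlier in this module. A sketch; treating `display_name` as a field of `UpdateVisualizationRequestVisualization` is an assumption:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

w.query_visualizations.update(
    id="<visualization-id>",  # placeholder
    update_mask="display_name",  # touch nothing but the name
    visualization=sql.UpdateVisualizationRequestVisualization(
        display_name="Revenue by week",
    ),
)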
- + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/delete instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param id: str Widget ID returned by :method:queryvizualisations/create - - - """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/preview/sql/visualizations/{id}' - - , headers=headers - ) - - - - - def update(self - , id: str - , * - , created_at: Optional[str] = None, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, query: Optional[LegacyQuery] = None, type: Optional[str] = None, updated_at: Optional[str] = None) -> LegacyVisualization: + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/preview/sql/visualizations/{id}", headers=headers) + + def update( + self, + id: str, + *, + created_at: Optional[str] = None, + description: Optional[str] = None, + name: Optional[str] = None, + options: Optional[Any] = None, + query: Optional[LegacyQuery] = None, + type: Optional[str] = None, + updated_at: Optional[str] = None, + ) -> LegacyVisualization: """Edit existing visualization. - + Updates visualization in the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param id: str The UUID for this visualization. :param created_at: str (optional) @@ -8771,66 +10303,63 @@ def update(self :param type: str (optional) The type of visualization: chart, table, pivot table, and so on. :param updated_at: str (optional) - + :returns: :class:`LegacyVisualization` """ body = {} - if created_at is not None: body['created_at'] = created_at - if description is not None: body['description'] = description - if name is not None: body['name'] = name - if options is not None: body['options'] = options - if query is not None: body['query'] = query.as_dict() - if type is not None: body['type'] = type - if updated_at is not None: body['updated_at'] = updated_at - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/preview/sql/visualizations/{id}', body=body - - , headers=headers - ) + if created_at is not None: + body["created_at"] = created_at + if description is not None: + body["description"] = description + if name is not None: + body["name"] = name + if options is not None: + body["options"] = options + if query is not None: + body["query"] = query.as_dict() + if type is not None: + body["type"] = type + if updated_at is not None: + body["updated_at"] = updated_at + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/preview/sql/visualizations/{id}", body=body, headers=headers) return LegacyVisualization.from_dict(res) - - + class RedashConfigAPI: """Redash V2 service for workspace configurations (internal)""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - def get_config(self) -> ClientConfig: """Read workspace configuration for Redash-v2. 
- + :returns: :class:`ClientConfig` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/redash-v2/config' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/redash-v2/config", headers=headers) return ClientConfig.from_dict(res) - - + class StatementExecutionAPI: """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result. - + **Getting started** - + We suggest beginning with the [Databricks SQL Statement Execution API tutorial]. - + **Overview of statement execution and result fetching** - + Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format. If no other parameters are specified, the server will wait for up to 10s before returning a response. If @@ -8838,7 +10367,7 @@ class StatementExecutionAPI: array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response will provide the statement ID that can be used to poll for results by using a :method:statementexecution/getStatement request. - + You can specify whether the call should behave synchronously, asynchronously or start synchronously with a fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout` settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to @@ -8846,7 +10375,7 @@ class StatementExecutionAPI: statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, or it can be set to `CANCEL`, which cancels the statement. - + In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30 seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns @@ -8858,38 +10387,38 @@ class StatementExecutionAPI: seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can be used to fetch status and results in the same way as in the asynchronous mode. - + Depending on the size, the result can be split into multiple chunks. If the statement execution is successful, the statement response contains a manifest and the first chunk of the result. The manifest contains schema information and provides metadata for each chunk in the result. Result chunks can be retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a `next_chunk_index` and `next_chunk_internal_link` that point to the next chunk. - + A statement can be canceled with :method:statementexecution/cancelExecution. - + **Fetching result data: format and disposition** - + To specify the format of the result data, use the `format` field, which can be set to one of the following options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`. 
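Sequential chunk fetching with `next_chunk_index`, as described in the overview above, looks roughly like the following once a statement has succeeded. The warehouse ID is a placeholder, the `ResultData`/`ExternalLink` field names are assumed from this module, and the format/disposition pairing follows the compatibility rules spelled out just below; if the call falls back to asynchronous mode, poll first (see the polling sketch further down):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

resp = w.statement_execution.execute_statement(
    statement="SELECT * FROM range(1000000)",
    warehouse_id="<warehouse-id>",  # placeholder
    format=sql.Format.ARROW_STREAM,
    disposition=sql.Disposition.EXTERNAL_LINKS,
    wait_timeout="50s",
)

# Once SUCCEEDED, walk the chunks sequentially via next_chunk_index.
if resp.status and resp.status.state == sql.StatementState.SUCCEEDED:
    chunk = resp.result
    while chunk is not None:
        for link in chunk.external_links or []:
            # Each external link is a pre-signed URL for one result chunk.
            print(link.chunk_index, link.external_link)
        if chunk.next_chunk_index is None:
            break
        chunk = w.statement_execution.get_statement_result_chunk_n(
            resp.statement_id, chunk.next_chunk_index
        )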
- + There are two ways to receive statement results, controlled by the `disposition` setting, which can be either `INLINE` or `EXTERNAL_LINKS`: - + - `INLINE`: In this mode, the result data is directly included in the response. It's best suited for smaller results. This mode can only be used with the `JSON_ARRAY` format. - + - `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`. - + By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`. - + **Limits and limitations** - + Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly match the byte count of the actual payload. - + - Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. - Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum @@ -8902,59 +10431,53 @@ class StatementExecutionAPI: once every 15 minutes. - The results are only available for one hour after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it. - + [Apache Arrow Columnar]: https://arrow.apache.org/overview/ - [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html""" - + [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html + """ + def __init__(self, api_client): self._api = api_client - - - - - - - - - def cancel_execution(self - , statement_id: str - ): + def cancel_execution(self, statement_id: str): """Cancel statement execution. - + Requests that an executing statement be canceled. Callers must poll for status to see the terminal state. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. 
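The hybrid mode from the overview (wait synchronously up to the timeout, then fall back to polling) is the most common pattern. A minimal sketch against an existing warehouse; the enum and field names are taken from this module, and the warehouse ID is a placeholder:

import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

resp = w.statement_execution.execute_statement(
    statement="SELECT concat('id-', id) AS strCol, id AS intCol FROM range(3)",
    warehouse_id="<warehouse-id>",  # placeholder
    wait_timeout="30s",
    # CONTINUE falls back to asynchronous mode instead of cancelling.
    on_wait_timeout=sql.ExecuteStatementRequestOnWaitTimeout.CONTINUE,
)

# If execution outlived the timeout, poll with the returned statement ID.
while resp.status and resp.status.state in (
    sql.StatementState.PENDING,
    sql.StatementState.RUNNING,
):
    time.sleep(2)
    resp = w.statement_execution.get_statement(resp.statement_id)

if resp.status and resp.status.state == sql.StatementState.SUCCEEDED:
    print(resp.result.data_array)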
- - + + """ - - headers = {} - - self._api.do('POST',f'/api/2.0/sql/statements/{statement_id}/cancel' - - , headers=headers - ) - - - - + headers = {} - def execute_statement(self - , statement: str, warehouse_id: str - , * - , byte_limit: Optional[int] = None, catalog: Optional[str] = None, disposition: Optional[Disposition] = None, format: Optional[Format] = None, on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None, parameters: Optional[List[StatementParameterListItem]] = None, row_limit: Optional[int] = None, schema: Optional[str] = None, wait_timeout: Optional[str] = None) -> StatementResponse: + self._api.do("POST", f"/api/2.0/sql/statements/{statement_id}/cancel", headers=headers) + + def execute_statement( + self, + statement: str, + warehouse_id: str, + *, + byte_limit: Optional[int] = None, + catalog: Optional[str] = None, + disposition: Optional[Disposition] = None, + format: Optional[Format] = None, + on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None, + parameters: Optional[List[StatementParameterListItem]] = None, + row_limit: Optional[int] = None, + schema: Optional[str] = None, + wait_timeout: Optional[str] = None, + ) -> StatementResponse: """Execute a SQL statement. - + :param statement: str The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. :param warehouse_id: str Warehouse upon which to execute a statement. See also [What are SQL warehouses?] - + [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html :param byte_limit: int (optional) Applies the given byte limit to the statement's result size. Byte counts are based on internal data @@ -8964,37 +10487,37 @@ def execute_statement(self explcitly set. :param catalog: str (optional) Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL. - + [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. - + Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS` disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition. - + When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values, where each value is either the *string representation* of a value, or `null`. For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would look like this: - + ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ``` - + When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result contains compact JSON with no indentation or extra whitespace. - + When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format]. - + When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a CSV according to [RFC 4180] standard. All the columns values will have *string representation* similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first chunk in the result would contain a header row with column names. 
For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this: - + ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ``` - + [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180 :param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional) @@ -9009,27 +10532,27 @@ def execute_statement(self of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a string. - + If the type is given, parameters will be checked for type correctness according to the given type. A value is correct if the provided string can be converted to the requested type using the `cast` function. The exact semantics are described in the section [`cast` function] of the SQL language reference. - + For example, the following statement contains two parameters, `my_name` and `my_date`: - + SELECT * FROM my_table WHERE name = :my_name AND date = :my_date - + The parameters can be passed in the request body as follows: - + { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": "2020-01-01", "type": "DATE" } ] } - + Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL Statement Execution API. - + Also see the section [Parameter markers] of the SQL language reference. - + [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html :param row_limit: int (optional) @@ -9038,196 +10561,204 @@ def execute_statement(self the limit or not. :param schema: str (optional) Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. - + [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html :param wait_timeout: str (optional) The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set to 0 or to a value between 5 and 50. - + When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID which can be used for polling with :method:statementexecution/getStatement. - + When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait for the statement execution to finish. If the execution finishes within this time, the call returns immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached. 
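The JSON parameter payload shown above maps one-to-one onto `StatementParameterListItem`. A sketch of the same two parameters sent through the SDK; the warehouse ID is a placeholder:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

resp = w.statement_execution.execute_statement(
    statement="SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
    warehouse_id="<warehouse-id>",  # placeholder
    parameters=[
        # Without a type, the value is interpreted as a string.
        sql.StatementParameterListItem(name="my_name", value="the name"),
        # With a type, the value is checked via an implicit `cast`.
        sql.StatementParameterListItem(name="my_date", value="2020-01-01", type="DATE"),
    ],
)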
- + :returns: :class:`StatementResponse` """ body = {} - if byte_limit is not None: body['byte_limit'] = byte_limit - if catalog is not None: body['catalog'] = catalog - if disposition is not None: body['disposition'] = disposition.value - if format is not None: body['format'] = format.value - if on_wait_timeout is not None: body['on_wait_timeout'] = on_wait_timeout.value - if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters] - if row_limit is not None: body['row_limit'] = row_limit - if schema is not None: body['schema'] = schema - if statement is not None: body['statement'] = statement - if wait_timeout is not None: body['wait_timeout'] = wait_timeout - if warehouse_id is not None: body['warehouse_id'] = warehouse_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/sql/statements/', body=body - - , headers=headers - ) + if byte_limit is not None: + body["byte_limit"] = byte_limit + if catalog is not None: + body["catalog"] = catalog + if disposition is not None: + body["disposition"] = disposition.value + if format is not None: + body["format"] = format.value + if on_wait_timeout is not None: + body["on_wait_timeout"] = on_wait_timeout.value + if parameters is not None: + body["parameters"] = [v.as_dict() for v in parameters] + if row_limit is not None: + body["row_limit"] = row_limit + if schema is not None: + body["schema"] = schema + if statement is not None: + body["statement"] = statement + if wait_timeout is not None: + body["wait_timeout"] = wait_timeout + if warehouse_id is not None: + body["warehouse_id"] = warehouse_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/sql/statements/", body=body, headers=headers) return StatementResponse.from_dict(res) - - - - - def get_statement(self - , statement_id: str - ) -> StatementResponse: + def get_statement(self, statement_id: str) -> StatementResponse: """Get status, manifest, and result first chunk. - + This request can be used to poll for the statement's status. When the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and further calls will receive an HTTP 404 response. - + **NOTE** This call currently might take up to 5 seconds to get the latest status and result. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - + :returns: :class:`StatementResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/sql/statements/{statement_id}' - - , headers=headers - ) - return StatementResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/sql/statements/{statement_id}", headers=headers) + return StatementResponse.from_dict(res) - def get_statement_result_chunk_n(self - , statement_id: str, chunk_index: int - ) -> ResultData: + def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData: """Get result chunk by index. - + After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index. 
Whereas the first chunk with `chunk_index=0` is typically fetched with :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request can be used to fetch subsequent chunks. The response structure is identical to the nested `result` element described in the :method:statementexecution/getStatement request, and similarly includes the `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. :param chunk_index: int - + :returns: :class:`ResultData` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}' - - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/sql/statements/{statement_id}/result/chunks/{chunk_index}", headers=headers + ) return ResultData.from_dict(res) - - + class WarehousesAPI: """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.""" - + def __init__(self, api_client): self._api = api_client - - - - - - def wait_get_warehouse_running(self, id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None) -> GetWarehouseResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (State.RUNNING, ) - failure_states = (State.STOPPED, State.DELETED, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(id=id) - status = poll.state - status_message = f'current status: {status}' - if poll.health: - status_message = poll.health.summary - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach RUNNING, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"id={id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_get_warehouse_stopped(self, id: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None) -> GetWarehouseResponse: - deadline = time.time() + timeout.total_seconds() - target_states = (State.STOPPED, ) - status_message = 'polling...' 
- attempt = 1 - while time.time() < deadline: - poll = self.get(id=id) - status = poll.state - status_message = f'current status: {status}' - if poll.health: - status_message = poll.health.summary - if status in target_states: - return poll - if callback: - callback(poll) - prefix = f"id={id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - def create(self - - , * - , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None) -> Wait[GetWarehouseResponse]: + def wait_get_warehouse_running( + self, id: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None + ) -> GetWarehouseResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (State.RUNNING,) + failure_states = ( + State.STOPPED, + State.DELETED, + ) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get(id=id) + status = poll.state + status_message = f"current status: {status}" + if poll.health: + status_message = poll.health.summary + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach RUNNING, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"id={id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def wait_get_warehouse_stopped( + self, id: str, timeout=timedelta(minutes=20), callback: Optional[Callable[[GetWarehouseResponse], None]] = None + ) -> GetWarehouseResponse: + deadline = time.time() + timeout.total_seconds() + target_states = (State.STOPPED,) + status_message = "polling..." 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(id=id) + status = poll.state + status_message = f"current status: {status}" + if poll.health: + status_message = poll.health.summary + if status in target_states: + return poll + if callback: + callback(poll) + prefix = f"id={id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create( + self, + *, + auto_stop_mins: Optional[int] = None, + channel: Optional[Channel] = None, + cluster_size: Optional[str] = None, + creator_name: Optional[str] = None, + enable_photon: Optional[bool] = None, + enable_serverless_compute: Optional[bool] = None, + instance_profile_arn: Optional[str] = None, + max_num_clusters: Optional[int] = None, + min_num_clusters: Optional[int] = None, + name: Optional[str] = None, + spot_instance_policy: Optional[SpotInstancePolicy] = None, + tags: Optional[EndpointTags] = None, + warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None, + ) -> Wait[GetWarehouseResponse]: """Create a warehouse. - + Creates a new SQL warehouse. - + :param auto_stop_mins: int (optional) The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped. - + Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for non-serverless warehouses - 0 indicates no autostop. - + Defaults to 120 mins :param channel: :class:`Channel` (optional) Channel Details @@ -9235,14 +10766,14 @@ def create(self Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, please tune max_num_clusters. - + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large :param creator_name: str (optional) warehouse creator name :param enable_photon: bool (optional) Configures whether the warehouse should use Photon optimized clusters. - + Defaults to false. :param enable_serverless_compute: bool (optional) Configures whether the warehouse should use serverless compute @@ -9250,113 +10781,157 @@ def create(self Deprecated. Instance profile used to pass IAM role to the cluster :param max_num_clusters: int (optional) Maximum number of clusters that the autoscaler will create to handle concurrent queries. - + Supported values: - Must be >= min_num_clusters - Must be <= 30. - + Defaults to min_clusters if unset. :param min_num_clusters: int (optional) Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce the cold start time for new queries. This is similar to reserved vs. revocable cores in a resource manager. - + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - + Defaults to 1 :param name: str (optional) Logical name for the cluster. - + Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) Configures whether the warehouse should use spot instances.
:param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. - + Supported values: - Number of tags < 45. :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional) Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. """ body = {} - if auto_stop_mins is not None: body['auto_stop_mins'] = auto_stop_mins - if channel is not None: body['channel'] = channel.as_dict() - if cluster_size is not None: body['cluster_size'] = cluster_size - if creator_name is not None: body['creator_name'] = creator_name - if enable_photon is not None: body['enable_photon'] = enable_photon - if enable_serverless_compute is not None: body['enable_serverless_compute'] = enable_serverless_compute - if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn - if max_num_clusters is not None: body['max_num_clusters'] = max_num_clusters - if min_num_clusters is not None: body['min_num_clusters'] = min_num_clusters - if name is not None: body['name'] = name - if spot_instance_policy is not None: body['spot_instance_policy'] = spot_instance_policy.value - if tags is not None: body['tags'] = tags.as_dict() - if warehouse_type is not None: body['warehouse_type'] = warehouse_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.0/sql/warehouses', body=body - - , headers=headers - ) - return Wait(self.wait_get_warehouse_running - , response = CreateWarehouseResponse.from_dict(op_response) - , id=op_response['id']) - - - def create_and_wait(self - - , * - , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None, - timeout=timedelta(minutes=20)) -> GetWarehouseResponse: - return self.create(auto_stop_mins=auto_stop_mins, channel=channel, cluster_size=cluster_size, creator_name=creator_name, enable_photon=enable_photon, enable_serverless_compute=enable_serverless_compute, instance_profile_arn=instance_profile_arn, max_num_clusters=max_num_clusters, min_num_clusters=min_num_clusters, name=name, spot_instance_policy=spot_instance_policy, tags=tags, warehouse_type=warehouse_type).result(timeout=timeout) - - - - - def delete(self - , id: str - ): + if auto_stop_mins is not None: + body["auto_stop_mins"] = auto_stop_mins + if channel is not None: + body["channel"] = channel.as_dict() + if cluster_size is not None: + body["cluster_size"] = cluster_size + if creator_name is not None: + body["creator_name"] = creator_name + if enable_photon is not None: + body["enable_photon"] = enable_photon + if enable_serverless_compute is not None: + body["enable_serverless_compute"] = enable_serverless_compute + if instance_profile_arn is not None: + body["instance_profile_arn"] 
= instance_profile_arn + if max_num_clusters is not None: + body["max_num_clusters"] = max_num_clusters + if min_num_clusters is not None: + body["min_num_clusters"] = min_num_clusters + if name is not None: + body["name"] = name + if spot_instance_policy is not None: + body["spot_instance_policy"] = spot_instance_policy.value + if tags is not None: + body["tags"] = tags.as_dict() + if warehouse_type is not None: + body["warehouse_type"] = warehouse_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.0/sql/warehouses", body=body, headers=headers) + return Wait( + self.wait_get_warehouse_running, + response=CreateWarehouseResponse.from_dict(op_response), + id=op_response["id"], + ) + + def create_and_wait( + self, + *, + auto_stop_mins: Optional[int] = None, + channel: Optional[Channel] = None, + cluster_size: Optional[str] = None, + creator_name: Optional[str] = None, + enable_photon: Optional[bool] = None, + enable_serverless_compute: Optional[bool] = None, + instance_profile_arn: Optional[str] = None, + max_num_clusters: Optional[int] = None, + min_num_clusters: Optional[int] = None, + name: Optional[str] = None, + spot_instance_policy: Optional[SpotInstancePolicy] = None, + tags: Optional[EndpointTags] = None, + warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None, + timeout=timedelta(minutes=20), + ) -> GetWarehouseResponse: + return self.create( + auto_stop_mins=auto_stop_mins, + channel=channel, + cluster_size=cluster_size, + creator_name=creator_name, + enable_photon=enable_photon, + enable_serverless_compute=enable_serverless_compute, + instance_profile_arn=instance_profile_arn, + max_num_clusters=max_num_clusters, + min_num_clusters=min_num_clusters, + name=name, + spot_instance_policy=spot_instance_policy, + tags=tags, + warehouse_type=warehouse_type, + ).result(timeout=timeout) + + def delete(self, id: str): """Delete a warehouse. - + Deletes a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. 
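        A minimal usage sketch (the id value is a placeholder; assumes a
        configured :class:`WorkspaceClient`):

        .. code-block:: python

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            w.warehouses.delete(id="<warehouse-id>")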
- - - """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/sql/warehouses/{id}' - - , headers=headers - ) - - - - - def edit(self - , id: str - , * - , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None) -> Wait[GetWarehouseResponse]: + """ + + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/sql/warehouses/{id}", headers=headers) + + def edit( + self, + id: str, + *, + auto_stop_mins: Optional[int] = None, + channel: Optional[Channel] = None, + cluster_size: Optional[str] = None, + creator_name: Optional[str] = None, + enable_photon: Optional[bool] = None, + enable_serverless_compute: Optional[bool] = None, + instance_profile_arn: Optional[str] = None, + max_num_clusters: Optional[int] = None, + min_num_clusters: Optional[int] = None, + name: Optional[str] = None, + spot_instance_policy: Optional[SpotInstancePolicy] = None, + tags: Optional[EndpointTags] = None, + warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None, + ) -> Wait[GetWarehouseResponse]: """Update a warehouse. - + Updates the configuration for a SQL warehouse. - + :param id: str Required. Id of the warehouse to configure. :param auto_stop_mins: int (optional) The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped. - + Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. - + Defaults to 120 mins :param channel: :class:`Channel` (optional) Channel Details @@ -9364,14 +10939,14 @@ def edit(self Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, please tune max_num_clusters. - + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large :param creator_name: str (optional) warehouse creator name :param enable_photon: bool (optional) Configures whether the warehouse should use Photon optimized clusters. - + Defaults to false. :param enable_serverless_compute: bool (optional) Configures whether the warehouse should use serverless compute. @@ -9379,235 +10954,243 @@ def edit(self Deprecated. Instance profile used to pass IAM role to the cluster :param max_num_clusters: int (optional) Maximum number of clusters that the autoscaler will create to handle concurrent queries. - + Supported values: - Must be >= min_num_clusters - Must be <= 30. - + Defaults to min_clusters if unset. :param min_num_clusters: int (optional) Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce the cold start time for new queries. This is similar to reserved vs. revocable cores in a resource manager. - + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - + Defaults to 1 :param name: str (optional) Logical name for the cluster. 
- + Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) Configures whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. - + Supported values: - Number of tags < 45. :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional) Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. """ body = {} - if auto_stop_mins is not None: body['auto_stop_mins'] = auto_stop_mins - if channel is not None: body['channel'] = channel.as_dict() - if cluster_size is not None: body['cluster_size'] = cluster_size - if creator_name is not None: body['creator_name'] = creator_name - if enable_photon is not None: body['enable_photon'] = enable_photon - if enable_serverless_compute is not None: body['enable_serverless_compute'] = enable_serverless_compute - if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn - if max_num_clusters is not None: body['max_num_clusters'] = max_num_clusters - if min_num_clusters is not None: body['min_num_clusters'] = min_num_clusters - if name is not None: body['name'] = name - if spot_instance_policy is not None: body['spot_instance_policy'] = spot_instance_policy.value - if tags is not None: body['tags'] = tags.as_dict() - if warehouse_type is not None: body['warehouse_type'] = warehouse_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/sql/warehouses/{id}/edit', body=body - - , headers=headers - ) - return Wait(self.wait_get_warehouse_running - , response = EditWarehouseResponse.from_dict(op_response) - , id=id) - - - def edit_and_wait(self - , id: str - , * - , auto_stop_mins: Optional[int] = None, channel: Optional[Channel] = None, cluster_size: Optional[str] = None, creator_name: Optional[str] = None, enable_photon: Optional[bool] = None, enable_serverless_compute: Optional[bool] = None, instance_profile_arn: Optional[str] = None, max_num_clusters: Optional[int] = None, min_num_clusters: Optional[int] = None, name: Optional[str] = None, spot_instance_policy: Optional[SpotInstancePolicy] = None, tags: Optional[EndpointTags] = None, warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None, - timeout=timedelta(minutes=20)) -> GetWarehouseResponse: - return self.edit(auto_stop_mins=auto_stop_mins, channel=channel, cluster_size=cluster_size, creator_name=creator_name, enable_photon=enable_photon, enable_serverless_compute=enable_serverless_compute, id=id, instance_profile_arn=instance_profile_arn, max_num_clusters=max_num_clusters, min_num_clusters=min_num_clusters, name=name, spot_instance_policy=spot_instance_policy, tags=tags, warehouse_type=warehouse_type).result(timeout=timeout) - - - - - def get(self - , id: str - ) -> GetWarehouseResponse: + if auto_stop_mins is not None: + body["auto_stop_mins"] = auto_stop_mins + if channel is not None: + body["channel"] = channel.as_dict() + if cluster_size is not None: + body["cluster_size"] = cluster_size + if creator_name is not None: + body["creator_name"] = creator_name +
if enable_photon is not None: + body["enable_photon"] = enable_photon + if enable_serverless_compute is not None: + body["enable_serverless_compute"] = enable_serverless_compute + if instance_profile_arn is not None: + body["instance_profile_arn"] = instance_profile_arn + if max_num_clusters is not None: + body["max_num_clusters"] = max_num_clusters + if min_num_clusters is not None: + body["min_num_clusters"] = min_num_clusters + if name is not None: + body["name"] = name + if spot_instance_policy is not None: + body["spot_instance_policy"] = spot_instance_policy.value + if tags is not None: + body["tags"] = tags.as_dict() + if warehouse_type is not None: + body["warehouse_type"] = warehouse_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/edit", body=body, headers=headers) + return Wait(self.wait_get_warehouse_running, response=EditWarehouseResponse.from_dict(op_response), id=id) + + def edit_and_wait( + self, + id: str, + *, + auto_stop_mins: Optional[int] = None, + channel: Optional[Channel] = None, + cluster_size: Optional[str] = None, + creator_name: Optional[str] = None, + enable_photon: Optional[bool] = None, + enable_serverless_compute: Optional[bool] = None, + instance_profile_arn: Optional[str] = None, + max_num_clusters: Optional[int] = None, + min_num_clusters: Optional[int] = None, + name: Optional[str] = None, + spot_instance_policy: Optional[SpotInstancePolicy] = None, + tags: Optional[EndpointTags] = None, + warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None, + timeout=timedelta(minutes=20), + ) -> GetWarehouseResponse: + return self.edit( + auto_stop_mins=auto_stop_mins, + channel=channel, + cluster_size=cluster_size, + creator_name=creator_name, + enable_photon=enable_photon, + enable_serverless_compute=enable_serverless_compute, + id=id, + instance_profile_arn=instance_profile_arn, + max_num_clusters=max_num_clusters, + min_num_clusters=min_num_clusters, + name=name, + spot_instance_policy=spot_instance_policy, + tags=tags, + warehouse_type=warehouse_type, + ).result(timeout=timeout) + + def get(self, id: str) -> GetWarehouseResponse: """Get warehouse info. - + Gets the information for a single SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: :class:`GetWarehouseResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/sql/warehouses/{id}' - - , headers=headers - ) - return GetWarehouseResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/sql/warehouses/{id}", headers=headers) + return GetWarehouseResponse.from_dict(res) - def get_permission_levels(self - , warehouse_id: str - ) -> GetWarehousePermissionLevelsResponse: + def get_permission_levels(self, warehouse_id: str) -> GetWarehousePermissionLevelsResponse: """Get SQL warehouse permission levels. - + Gets the permission levels that a user can have on an object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. 
- + :returns: :class:`GetWarehousePermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/warehouses/{warehouse_id}/permissionLevels' - - , headers=headers - ) - return GetWarehousePermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_permissions(self - , warehouse_id: str - ) -> WarehousePermissions: + res = self._api.do("GET", f"/api/2.0/permissions/warehouses/{warehouse_id}/permissionLevels", headers=headers) + return GetWarehousePermissionLevelsResponse.from_dict(res) + + def get_permissions(self, warehouse_id: str) -> WarehousePermissions: """Get SQL warehouse permissions. - + Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. - + :returns: :class:`WarehousePermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/warehouses/{warehouse_id}' - - , headers=headers - ) - return WarehousePermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/warehouses/{warehouse_id}", headers=headers) + return WarehousePermissions.from_dict(res) def get_workspace_warehouse_config(self) -> GetWorkspaceWarehouseConfigResponse: """Get the workspace configuration. - + Gets the workspace level configuration that is shared by all SQL warehouses in a workspace. - + :returns: :class:`GetWorkspaceWarehouseConfigResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/sql/config/warehouses' - , headers=headers - ) - return GetWorkspaceWarehouseConfigResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/sql/config/warehouses", headers=headers) + return GetWorkspaceWarehouseConfigResponse.from_dict(res) - def list(self - - , * - , run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]: + def list(self, *, run_as_user_id: Optional[int] = None) -> Iterator[EndpointInfo]: """List warehouses. - + Lists all SQL warehouses that a user has manager permissions on. - + :param run_as_user_id: int (optional) Service Principal which will be used to fetch the list of warehouses. If not specified, the user from the session header is used. - + :returns: Iterator over :class:`EndpointInfo` """ - + query = {} - if run_as_user_id is not None: query['run_as_user_id'] = run_as_user_id - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/sql/warehouses', query=query - - , headers=headers - ) + if run_as_user_id is not None: + query["run_as_user_id"] = run_as_user_id + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/sql/warehouses", query=query, headers=headers) parsed = ListWarehousesResponse.from_dict(json).warehouses return parsed if parsed is not None else [] - - - - - - def set_permissions(self - , warehouse_id: str - , * - , access_control_list: Optional[List[WarehouseAccessControlRequest]] = None) -> WarehousePermissions: + def set_permissions( + self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None + ) -> WarehousePermissions: """Set SQL warehouse permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. 
Objects can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/warehouses/{warehouse_id}', body=body - - , headers=headers - ) - return WarehousePermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", f"/api/2.0/permissions/warehouses/{warehouse_id}", body=body, headers=headers) + return WarehousePermissions.from_dict(res) - def set_workspace_warehouse_config(self - - , * - , channel: Optional[Channel] = None, config_param: Optional[RepeatedEndpointConfPairs] = None, data_access_config: Optional[List[EndpointConfPair]] = None, enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None, global_param: Optional[RepeatedEndpointConfPairs] = None, google_service_account: Optional[str] = None, instance_profile_arn: Optional[str] = None, security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None, sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None): + def set_workspace_warehouse_config( + self, + *, + channel: Optional[Channel] = None, + config_param: Optional[RepeatedEndpointConfPairs] = None, + data_access_config: Optional[List[EndpointConfPair]] = None, + enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None, + global_param: Optional[RepeatedEndpointConfPairs] = None, + google_service_account: Optional[str] = None, + instance_profile_arn: Optional[str] = None, + security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None, + sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None, + ): """Set the workspace configuration. - + Sets the workspace level configuration that is shared by all SQL warehouses in a workspace. 
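        A minimal usage sketch (the security policy shown is illustrative;
        assumes a configured :class:`WorkspaceClient`):

        .. code-block:: python

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import sql

            w = WorkspaceClient()
            w.warehouses.set_workspace_warehouse_config(
                security_policy=sql.SetWorkspaceWarehouseConfigRequestSecurityPolicy.DATA_ACCESS_CONTROL
            )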
- + :param channel: :class:`Channel` (optional) Optional: Channel selection details :param config_param: :class:`RepeatedEndpointConfPairs` (optional) @@ -9630,125 +11213,102 @@ def set_workspace_warehouse_config(self Security policy for warehouses :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional) SQL configuration parameters - - - """ - body = {} - if channel is not None: body['channel'] = channel.as_dict() - if config_param is not None: body['config_param'] = config_param.as_dict() - if data_access_config is not None: body['data_access_config'] = [v.as_dict() for v in data_access_config] - if enabled_warehouse_types is not None: body['enabled_warehouse_types'] = [v.as_dict() for v in enabled_warehouse_types] - if global_param is not None: body['global_param'] = global_param.as_dict() - if google_service_account is not None: body['google_service_account'] = google_service_account - if instance_profile_arn is not None: body['instance_profile_arn'] = instance_profile_arn - if security_policy is not None: body['security_policy'] = security_policy.value - if sql_configuration_parameters is not None: body['sql_configuration_parameters'] = sql_configuration_parameters.as_dict() - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PUT','/api/2.0/sql/config/warehouses', body=body - - , headers=headers - ) - - - - - def start(self - , id: str - ) -> Wait[GetWarehouseResponse]: + """ + body = {} + if channel is not None: + body["channel"] = channel.as_dict() + if config_param is not None: + body["config_param"] = config_param.as_dict() + if data_access_config is not None: + body["data_access_config"] = [v.as_dict() for v in data_access_config] + if enabled_warehouse_types is not None: + body["enabled_warehouse_types"] = [v.as_dict() for v in enabled_warehouse_types] + if global_param is not None: + body["global_param"] = global_param.as_dict() + if google_service_account is not None: + body["google_service_account"] = google_service_account + if instance_profile_arn is not None: + body["instance_profile_arn"] = instance_profile_arn + if security_policy is not None: + body["security_policy"] = security_policy.value + if sql_configuration_parameters is not None: + body["sql_configuration_parameters"] = sql_configuration_parameters.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PUT", "/api/2.0/sql/config/warehouses", body=body, headers=headers) + + def start(self, id: str) -> Wait[GetWarehouseResponse]: """Start a warehouse. - + Starts a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. 
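        A minimal usage sketch (the id value is a placeholder; assumes a
        configured :class:`WorkspaceClient`):

        .. code-block:: python

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            # start() returns a Wait; .result() blocks until the warehouse is RUNNING.
            warehouse = w.warehouses.start(id="<warehouse-id>").result()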
""" - - headers = {'Accept': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/sql/warehouses/{id}/start' - - , headers=headers - ) - return Wait(self.wait_get_warehouse_running - , response = StartWarehouseResponse.from_dict(op_response) - , id=id) - - def start_and_wait(self - , id: str - , - timeout=timedelta(minutes=20)) -> GetWarehouseResponse: + headers = { + "Accept": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/start", headers=headers) + return Wait(self.wait_get_warehouse_running, response=StartWarehouseResponse.from_dict(op_response), id=id) + + def start_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.start(id=id).result(timeout=timeout) - - - - def stop(self - , id: str - ) -> Wait[GetWarehouseResponse]: + def stop(self, id: str) -> Wait[GetWarehouseResponse]: """Stop a warehouse. - + Stops a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_stopped for more details. """ - - headers = {'Accept': 'application/json',} - - op_response = self._api.do('POST',f'/api/2.0/sql/warehouses/{id}/stop' - - , headers=headers - ) - return Wait(self.wait_get_warehouse_stopped - , response = StopWarehouseResponse.from_dict(op_response) - , id=id) - - def stop_and_wait(self - , id: str - , - timeout=timedelta(minutes=20)) -> GetWarehouseResponse: + headers = { + "Accept": "application/json", + } + + op_response = self._api.do("POST", f"/api/2.0/sql/warehouses/{id}/stop", headers=headers) + return Wait(self.wait_get_warehouse_stopped, response=StopWarehouseResponse.from_dict(op_response), id=id) + + def stop_and_wait(self, id: str, timeout=timedelta(minutes=20)) -> GetWarehouseResponse: return self.stop(id=id).result(timeout=timeout) - - - - def update_permissions(self - , warehouse_id: str - , * - , access_control_list: Optional[List[WarehouseAccessControlRequest]] = None) -> WarehousePermissions: + def update_permissions( + self, warehouse_id: str, *, access_control_list: Optional[List[WarehouseAccessControlRequest]] = None + ) -> WarehousePermissions: """Update SQL warehouse permissions. - + Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. 
:param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/warehouses/{warehouse_id}', body=body - - , headers=headers - ) - return WarehousePermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - \ No newline at end of file + res = self._api.do("PATCH", f"/api/2.0/permissions/warehouses/{warehouse_id}", body=body, headers=headers) + return WarehousePermissions.from_dict(res) diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index 5b6f77864..4a2a7100a 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -1,241 +1,266 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. from __future__ import annotations + +import logging +import random +import time from dataclasses import dataclass from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Callable, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class ColumnInfo: name: Optional[str] = None """Name of the column.""" - + def as_dict(self) -> dict: """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: body['name'] = self.name + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo: """Deserializes the ColumnInfo from a dictionary.""" - return cls(name=d.get('name', None)) - - + return cls(name=d.get("name", None)) @dataclass class CreateEndpoint: name: str """Name of the vector search endpoint""" - + endpoint_type: EndpointType """Type of endpoint""" - + budget_policy_id: Optional[str] = None """The budget policy id to be applied""" - + def as_dict(self) -> dict: """Serializes the CreateEndpoint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type.value - if self.name is not None: body['name'] = self.name + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.endpoint_type is not None: + 
body["endpoint_type"] = self.endpoint_type.value + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type - if self.name is not None: body['name'] = self.name + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.endpoint_type is not None: + body["endpoint_type"] = self.endpoint_type + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateEndpoint: """Deserializes the CreateEndpoint from a dictionary.""" - return cls(budget_policy_id=d.get('budget_policy_id', None), endpoint_type=_enum(d, 'endpoint_type', EndpointType), name=d.get('name', None)) - - + return cls( + budget_policy_id=d.get("budget_policy_id", None), + endpoint_type=_enum(d, "endpoint_type", EndpointType), + name=d.get("name", None), + ) @dataclass class CreateVectorIndexRequest: name: str """Name of the index""" - + endpoint_name: str """Name of the endpoint to be used for serving the index""" - + primary_key: str """Primary key of the index""" - + index_type: VectorIndexType """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates.""" - + delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None """Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.""" - + direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None """Specification for Direct Vector Access Index. 
Required if `index_type` is `DIRECT_ACCESS`.""" - + def as_dict(self) -> dict: """Serializes the CreateVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec.as_dict() - if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec.as_dict() - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_type is not None: body['index_type'] = self.index_type.value - if self.name is not None: body['name'] = self.name - if self.primary_key is not None: body['primary_key'] = self.primary_key + if self.delta_sync_index_spec: + body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict() + if self.direct_access_index_spec: + body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict() + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_type is not None: + body["index_type"] = self.index_type.value + if self.name is not None: + body["name"] = self.name + if self.primary_key is not None: + body["primary_key"] = self.primary_key return body def as_shallow_dict(self) -> dict: """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec - if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_type is not None: body['index_type'] = self.index_type - if self.name is not None: body['name'] = self.name - if self.primary_key is not None: body['primary_key'] = self.primary_key + if self.delta_sync_index_spec: + body["delta_sync_index_spec"] = self.delta_sync_index_spec + if self.direct_access_index_spec: + body["direct_access_index_spec"] = self.direct_access_index_spec + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_type is not None: + body["index_type"] = self.index_type + if self.name is not None: + body["name"] = self.name + if self.primary_key is not None: + body["primary_key"] = self.primary_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateVectorIndexRequest: """Deserializes the CreateVectorIndexRequest from a dictionary.""" - return cls(delta_sync_index_spec=_from_dict(d, 'delta_sync_index_spec', DeltaSyncVectorIndexSpecRequest), direct_access_index_spec=_from_dict(d, 'direct_access_index_spec', DirectAccessVectorIndexSpec), endpoint_name=d.get('endpoint_name', None), index_type=_enum(d, 'index_type', VectorIndexType), name=d.get('name', None), primary_key=d.get('primary_key', None)) - - + return cls( + delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecRequest), + direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec), + endpoint_name=d.get("endpoint_name", None), + index_type=_enum(d, "index_type", VectorIndexType), + name=d.get("name", None), + primary_key=d.get("primary_key", None), + ) @dataclass class CustomTag: key: str """Key field for a vector search endpoint tag.""" - + value: Optional[str] = None """[Optional] Value field for a vector search endpoint tag.""" - + def as_dict(self) -> dict: """Serializes the CustomTag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key 
is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the CustomTag into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CustomTag: """Deserializes the CustomTag from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class DeleteDataResult: failed_primary_keys: Optional[List[str]] = None """List of primary keys for rows that failed to process.""" - + success_row_count: Optional[int] = None """Count of successfully processed rows.""" - + def as_dict(self) -> dict: """Serializes the DeleteDataResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.failed_primary_keys: body['failed_primary_keys'] = [v for v in self.failed_primary_keys] - if self.success_row_count is not None: body['success_row_count'] = self.success_row_count + if self.failed_primary_keys: + body["failed_primary_keys"] = [v for v in self.failed_primary_keys] + if self.success_row_count is not None: + body["success_row_count"] = self.success_row_count return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDataResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys - if self.success_row_count is not None: body['success_row_count'] = self.success_row_count + if self.failed_primary_keys: + body["failed_primary_keys"] = self.failed_primary_keys + if self.success_row_count is not None: + body["success_row_count"] = self.success_row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDataResult: """Deserializes the DeleteDataResult from a dictionary.""" - return cls(failed_primary_keys=d.get('failed_primary_keys', None), success_row_count=d.get('success_row_count', None)) - - + return cls( + failed_primary_keys=d.get("failed_primary_keys", None), success_row_count=d.get("success_row_count", None) + ) class DeleteDataStatus(Enum): - - - FAILURE = 'FAILURE' - PARTIAL_SUCCESS = 'PARTIAL_SUCCESS' - SUCCESS = 'SUCCESS' - + FAILURE = "FAILURE" + PARTIAL_SUCCESS = "PARTIAL_SUCCESS" + SUCCESS = "SUCCESS" @dataclass class DeleteDataVectorIndexResponse: result: Optional[DeleteDataResult] = None """Result of the upsert or delete operation.""" - + status: Optional[DeleteDataStatus] = None """Status of the delete operation.""" - + def as_dict(self) -> dict: """Serializes the DeleteDataVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.result: body['result'] = self.result.as_dict() - if self.status is not None: body['status'] = self.status.value + if self.result: + body["result"] = self.result.as_dict() + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the DeleteDataVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.result: body['result'] = self.result - if self.status is not None: 
body['status'] = self.status + if self.result: + body["result"] = self.result + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteDataVectorIndexResponse: """Deserializes the DeleteDataVectorIndexResponse from a dictionary.""" - return cls(result=_from_dict(d, 'result', DeleteDataResult), status=_enum(d, 'status', DeleteDataStatus)) - - - - - + return cls(result=_from_dict(d, "result", DeleteDataResult), status=_enum(d, "status", DeleteDataStatus)) @dataclass @@ -254,11 +279,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteEndpointResponse: """Deserializes the DeleteEndpointResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -277,8 +297,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteIndexResponse: """Deserializes the DeleteIndexResponse from a dictionary.""" return cls() - - @dataclass @@ -287,392 +305,466 @@ class DeltaSyncVectorIndexSpecRequest: """[Optional] Select the columns to sync with the vector index. If you leave this field blank, all columns from the source table are synced with the index. The primary key column and embedding source column or embedding vector column are always synced.""" - + embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" - + embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None """The columns that contain the embedding vectors.""" - + embedding_writeback_table: Optional[str] = None """[Optional] Name of the Delta table to sync the vector index contents and computed embeddings to.""" - + pipeline_type: Optional[PipelineType] = None """Pipeline execution mode. - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing after successfully refreshing the source table in the pipeline once, ensuring the table is updated based on the data available when the update started. 
- `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep the vector index fresh.""" - + source_table: Optional[str] = None """The name of the source table.""" - + def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns_to_sync: body['columns_to_sync'] = [v for v in self.columns_to_sync] - if self.embedding_source_columns: body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns] - if self.embedding_vector_columns: body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns] - if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table - if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value - if self.source_table is not None: body['source_table'] = self.source_table + if self.columns_to_sync: + body["columns_to_sync"] = [v for v in self.columns_to_sync] + if self.embedding_source_columns: + body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] + if self.embedding_vector_columns: + body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns] + if self.embedding_writeback_table is not None: + body["embedding_writeback_table"] = self.embedding_writeback_table + if self.pipeline_type is not None: + body["pipeline_type"] = self.pipeline_type.value + if self.source_table is not None: + body["source_table"] = self.source_table return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns_to_sync: body['columns_to_sync'] = self.columns_to_sync - if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns - if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns - if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table - if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type - if self.source_table is not None: body['source_table'] = self.source_table + if self.columns_to_sync: + body["columns_to_sync"] = self.columns_to_sync + if self.embedding_source_columns: + body["embedding_source_columns"] = self.embedding_source_columns + if self.embedding_vector_columns: + body["embedding_vector_columns"] = self.embedding_vector_columns + if self.embedding_writeback_table is not None: + body["embedding_writeback_table"] = self.embedding_writeback_table + if self.pipeline_type is not None: + body["pipeline_type"] = self.pipeline_type + if self.source_table is not None: + body["source_table"] = self.source_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecRequest: """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary.""" - return cls(columns_to_sync=d.get('columns_to_sync', None), embedding_source_columns=_repeated_dict(d, 'embedding_source_columns', EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns', EmbeddingVectorColumn), embedding_writeback_table=d.get('embedding_writeback_table', None), pipeline_type=_enum(d, 'pipeline_type', PipelineType), source_table=d.get('source_table', None)) - - + return cls( + columns_to_sync=d.get("columns_to_sync",
None), + embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), + embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), + embedding_writeback_table=d.get("embedding_writeback_table", None), + pipeline_type=_enum(d, "pipeline_type", PipelineType), + source_table=d.get("source_table", None), + ) @dataclass class DeltaSyncVectorIndexSpecResponse: embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source.""" - + embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None """The columns that contain the embedding vectors.""" - + embedding_writeback_table: Optional[str] = None """[Optional] Name of the Delta table to sync the vector index contents and computed embeddings to.""" - + pipeline_id: Optional[str] = None """The ID of the pipeline that is used to sync the index.""" - + pipeline_type: Optional[PipelineType] = None """Pipeline execution mode. - `TRIGGERED`: If the pipeline uses the triggered execution mode, the system stops processing after successfully refreshing the source table in the pipeline once, ensuring the table is updated based on the data available when the update started. - `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep the vector index fresh.""" - + source_table: Optional[str] = None """The name of the source table.""" - + def as_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_source_columns: body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns] - if self.embedding_vector_columns: body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns] - if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type.value - if self.source_table is not None: body['source_table'] = self.source_table + if self.embedding_source_columns: + body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] + if self.embedding_vector_columns: + body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns] + if self.embedding_writeback_table is not None: + body["embedding_writeback_table"] = self.embedding_writeback_table + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.pipeline_type is not None: + body["pipeline_type"] = self.pipeline_type.value + if self.source_table is not None: + body["source_table"] = self.source_table return body def as_shallow_dict(self) -> dict: """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns - if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns - if self.embedding_writeback_table is not None: body['embedding_writeback_table'] = self.embedding_writeback_table - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type - if self.source_table is not None: body['source_table'] =
self.source_table + if self.embedding_source_columns: + body["embedding_source_columns"] = self.embedding_source_columns + if self.embedding_vector_columns: + body["embedding_vector_columns"] = self.embedding_vector_columns + if self.embedding_writeback_table is not None: + body["embedding_writeback_table"] = self.embedding_writeback_table + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.pipeline_type is not None: + body["pipeline_type"] = self.pipeline_type + if self.source_table is not None: + body["source_table"] = self.source_table return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeltaSyncVectorIndexSpecResponse: """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary.""" - return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns', EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns', EmbeddingVectorColumn), embedding_writeback_table=d.get('embedding_writeback_table', None), pipeline_id=d.get('pipeline_id', None), pipeline_type=_enum(d, 'pipeline_type', PipelineType), source_table=d.get('source_table', None)) - - + return cls( + embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), + embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), + embedding_writeback_table=d.get("embedding_writeback_table", None), + pipeline_id=d.get("pipeline_id", None), + pipeline_type=_enum(d, "pipeline_type", PipelineType), + source_table=d.get("source_table", None), + ) @dataclass class DirectAccessVectorIndexSpec: embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None """The columns that contain the embedding source. The format should be array[double].""" - + embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None """The columns that contain the embedding vectors. The format should be array[double].""" - + schema_json: Optional[str] = None """The schema of the index in JSON format. Supported types are `integer`, `long`, `float`, `double`, `boolean`, `string`, `date`, `timestamp`. 
Supported types for vector column: `array<float>`, `array<double>`.""" - + def as_dict(self) -> dict: """Serializes the DirectAccessVectorIndexSpec into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_source_columns: body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns] - if self.embedding_vector_columns: body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns] - if self.schema_json is not None: body['schema_json'] = self.schema_json + if self.embedding_source_columns: + body["embedding_source_columns"] = [v.as_dict() for v in self.embedding_source_columns] + if self.embedding_vector_columns: + body["embedding_vector_columns"] = [v.as_dict() for v in self.embedding_vector_columns] + if self.schema_json is not None: + body["schema_json"] = self.schema_json return body def as_shallow_dict(self) -> dict: """Serializes the DirectAccessVectorIndexSpec into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns - if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns - if self.schema_json is not None: body['schema_json'] = self.schema_json + if self.embedding_source_columns: + body["embedding_source_columns"] = self.embedding_source_columns + if self.embedding_vector_columns: + body["embedding_vector_columns"] = self.embedding_vector_columns + if self.schema_json is not None: + body["schema_json"] = self.schema_json return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DirectAccessVectorIndexSpec: """Deserializes the DirectAccessVectorIndexSpec from a dictionary.""" - return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns', EmbeddingSourceColumn), embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns', EmbeddingVectorColumn), schema_json=d.get('schema_json', None)) - - + return cls( + embedding_source_columns=_repeated_dict(d, "embedding_source_columns", EmbeddingSourceColumn), + embedding_vector_columns=_repeated_dict(d, "embedding_vector_columns", EmbeddingVectorColumn), + schema_json=d.get("schema_json", None), + ) @dataclass class EmbeddingSourceColumn: embedding_model_endpoint_name: Optional[str] = None """Name of the embedding model endpoint""" - + name: Optional[str] = None """Name of the column""" - + def as_dict(self) -> dict: """Serializes the EmbeddingSourceColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_model_endpoint_name is not None: body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name - if self.name is not None: body['name'] = self.name + if self.embedding_model_endpoint_name is not None: + body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the EmbeddingSourceColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_model_endpoint_name is not None: body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name - if self.name is not None: body['name'] = self.name + if self.embedding_model_endpoint_name is not None: + body["embedding_model_endpoint_name"] = self.embedding_model_endpoint_name + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingSourceColumn:
"""Deserializes the EmbeddingSourceColumn from a dictionary.""" - return cls(embedding_model_endpoint_name=d.get('embedding_model_endpoint_name', None), name=d.get('name', None)) - - + return cls(embedding_model_endpoint_name=d.get("embedding_model_endpoint_name", None), name=d.get("name", None)) @dataclass class EmbeddingVectorColumn: embedding_dimension: Optional[int] = None """Dimension of the embedding vector""" - + name: Optional[str] = None """Name of the column""" - + def as_dict(self) -> dict: """Serializes the EmbeddingVectorColumn into a dictionary suitable for use as a JSON request body.""" body = {} - if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension - if self.name is not None: body['name'] = self.name + if self.embedding_dimension is not None: + body["embedding_dimension"] = self.embedding_dimension + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the EmbeddingVectorColumn into a shallow dictionary of its immediate attributes.""" body = {} - if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension - if self.name is not None: body['name'] = self.name + if self.embedding_dimension is not None: + body["embedding_dimension"] = self.embedding_dimension + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EmbeddingVectorColumn: """Deserializes the EmbeddingVectorColumn from a dictionary.""" - return cls(embedding_dimension=d.get('embedding_dimension', None), name=d.get('name', None)) - - + return cls(embedding_dimension=d.get("embedding_dimension", None), name=d.get("name", None)) @dataclass class EndpointInfo: creation_timestamp: Optional[int] = None """Timestamp of endpoint creation""" - + creator: Optional[str] = None """Creator of the endpoint""" - + custom_tags: Optional[List[CustomTag]] = None """The custom tags assigned to the endpoint""" - + effective_budget_policy_id: Optional[str] = None """The budget policy id applied to the endpoint""" - + endpoint_status: Optional[EndpointStatus] = None """Current status of the endpoint""" - + endpoint_type: Optional[EndpointType] = None """Type of endpoint""" - + id: Optional[str] = None """Unique identifier of the endpoint""" - + last_updated_timestamp: Optional[int] = None """Timestamp of last update to the endpoint""" - + last_updated_user: Optional[str] = None """User who last updated the endpoint""" - + name: Optional[str] = None """Name of the vector search endpoint""" - + num_indexes: Optional[int] = None """Number of indexes on the endpoint""" - + def as_dict(self) -> dict: """Serializes the EndpointInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.endpoint_status: body['endpoint_status'] = self.endpoint_status.as_dict() - if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type.value - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.last_updated_user is not None: body['last_updated_user'] 
= self.last_updated_user - if self.name is not None: body['name'] = self.name - if self.num_indexes is not None: body['num_indexes'] = self.num_indexes + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.endpoint_status: + body["endpoint_status"] = self.endpoint_status.as_dict() + if self.endpoint_type is not None: + body["endpoint_type"] = self.endpoint_type.value + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user is not None: + body["last_updated_user"] = self.last_updated_user + if self.name is not None: + body["name"] = self.name + if self.num_indexes is not None: + body["num_indexes"] = self.num_indexes return body def as_shallow_dict(self) -> dict: """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp - if self.creator is not None: body['creator'] = self.creator - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id - if self.endpoint_status: body['endpoint_status'] = self.endpoint_status - if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type - if self.id is not None: body['id'] = self.id - if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp - if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user - if self.name is not None: body['name'] = self.name - if self.num_indexes is not None: body['num_indexes'] = self.num_indexes + if self.creation_timestamp is not None: + body["creation_timestamp"] = self.creation_timestamp + if self.creator is not None: + body["creator"] = self.creator + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id + if self.endpoint_status: + body["endpoint_status"] = self.endpoint_status + if self.endpoint_type is not None: + body["endpoint_type"] = self.endpoint_type + if self.id is not None: + body["id"] = self.id + if self.last_updated_timestamp is not None: + body["last_updated_timestamp"] = self.last_updated_timestamp + if self.last_updated_user is not None: + body["last_updated_user"] = self.last_updated_user + if self.name is not None: + body["name"] = self.name + if self.num_indexes is not None: + body["num_indexes"] = self.num_indexes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointInfo: """Deserializes the EndpointInfo from a dictionary.""" - return cls(creation_timestamp=d.get('creation_timestamp', None), creator=d.get('creator', None), custom_tags=_repeated_dict(d, 'custom_tags', CustomTag), effective_budget_policy_id=d.get('effective_budget_policy_id', None), endpoint_status=_from_dict(d, 'endpoint_status', EndpointStatus), endpoint_type=_enum(d, 'endpoint_type', EndpointType), id=d.get('id', None), last_updated_timestamp=d.get('last_updated_timestamp', None), 
last_updated_user=d.get('last_updated_user', None), name=d.get('name', None), num_indexes=d.get('num_indexes', None)) - - + return cls( + creation_timestamp=d.get("creation_timestamp", None), + creator=d.get("creator", None), + custom_tags=_repeated_dict(d, "custom_tags", CustomTag), + effective_budget_policy_id=d.get("effective_budget_policy_id", None), + endpoint_status=_from_dict(d, "endpoint_status", EndpointStatus), + endpoint_type=_enum(d, "endpoint_type", EndpointType), + id=d.get("id", None), + last_updated_timestamp=d.get("last_updated_timestamp", None), + last_updated_user=d.get("last_updated_user", None), + name=d.get("name", None), + num_indexes=d.get("num_indexes", None), + ) @dataclass class EndpointStatus: """Status information of an endpoint""" - + message: Optional[str] = None """Additional status message""" - + state: Optional[EndpointStatusState] = None """Current state of the endpoint""" - + def as_dict(self) -> dict: """Serializes the EndpointStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state.value + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state.value return body def as_shallow_dict(self) -> dict: """Serializes the EndpointStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state + if self.message is not None: + body["message"] = self.message + if self.state is not None: + body["state"] = self.state return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> EndpointStatus: """Deserializes the EndpointStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', EndpointStatusState)) - - + return cls(message=d.get("message", None), state=_enum(d, "state", EndpointStatusState)) class EndpointStatusState(Enum): """Current state of the endpoint""" - - OFFLINE = 'OFFLINE' - ONLINE = 'ONLINE' - PROVISIONING = 'PROVISIONING' - -class EndpointType(Enum): - """Type of endpoint.""" - - STANDARD = 'STANDARD' - + OFFLINE = "OFFLINE" + ONLINE = "ONLINE" + PROVISIONING = "PROVISIONING" +class EndpointType(Enum): + """Type of endpoint.""" + STANDARD = "STANDARD" @dataclass class ListEndpointResponse: endpoints: Optional[List[EndpointInfo]] = None """An array of Endpoint objects""" - + next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
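Looking back at the status types just above: the generated `as_dict`/`from_dict` pairs are symmetric. A quick round-trip sketch using `EndpointStatus` (values are illustrative):

    from databricks.sdk.service.vectorsearch import EndpointStatus, EndpointStatusState

    # Serialize to a JSON-ready dict (the enum becomes its string value), then parse back.
    status = EndpointStatus(message="starting up", state=EndpointStatusState.PROVISIONING)
    body = status.as_dict()      # {"message": "starting up", "state": "PROVISIONING"}
    restored = EndpointStatus.from_dict(body)
    assert restored.state is EndpointStatusState.PROVISIONING
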
If not present, there are no more results to show.""" - + def as_dict(self) -> dict: """Serializes the ListEndpointResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.endpoints: + body["endpoints"] = [v.as_dict() for v in self.endpoints] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: """Serializes the ListEndpointResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoints: body['endpoints'] = self.endpoints - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.endpoints: + body["endpoints"] = self.endpoints + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListEndpointResponse: """Deserializes the ListEndpointResponse from a dictionary.""" - return cls(endpoints=_repeated_dict(d, 'endpoints', EndpointInfo), next_page_token=d.get('next_page_token', None)) - - - - - - - - + return cls( + endpoints=_repeated_dict(d, "endpoints", EndpointInfo), next_page_token=d.get("next_page_token", None) + ) @dataclass class ListValue: values: Optional[List[Value]] = None """Repeated field of dynamically typed values.""" - + def as_dict(self) -> dict: """Serializes the ListValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.values: body['values'] = [v.as_dict() for v in self.values] + if self.values: + body["values"] = [v.as_dict() for v in self.values] return body def as_shallow_dict(self) -> dict: """Serializes the ListValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.values: body['values'] = self.values + if self.values: + body["values"] = self.values return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListValue: """Deserializes the ListValue from a dictionary.""" - return cls(values=_repeated_dict(d, 'values', Value)) - - + return cls(values=_repeated_dict(d, "values", Value)) @dataclass @@ -680,165 +772,188 @@ class ListVectorIndexesResponse: next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If not present, there are no more results to show.""" - + vector_indexes: Optional[List[MiniVectorIndex]] = None - + def as_dict(self) -> dict: """Serializes the ListVectorIndexesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.vector_indexes: body['vector_indexes'] = [v.as_dict() for v in self.vector_indexes] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.vector_indexes: + body["vector_indexes"] = [v.as_dict() for v in self.vector_indexes] return body def as_shallow_dict(self) -> dict: """Serializes the ListVectorIndexesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.vector_indexes: body['vector_indexes'] = self.vector_indexes + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.vector_indexes: + body["vector_indexes"] = self.vector_indexes return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ListVectorIndexesResponse: """Deserializes the ListVectorIndexesResponse from a dictionary.""" - return cls(next_page_token=d.get('next_page_token', None), vector_indexes=_repeated_dict(d, 'vector_indexes', MiniVectorIndex)) - - + return cls( + next_page_token=d.get("next_page_token", None), + vector_indexes=_repeated_dict(d, "vector_indexes", MiniVectorIndex), + ) @dataclass class MapStringValueEntry: """Key-value pair.""" - + key: Optional[str] = None """Column name.""" - + value: Optional[Value] = None """Column value, nullable.""" - + def as_dict(self) -> dict: """Serializes the MapStringValueEntry into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value: body['value'] = self.value.as_dict() + if self.key is not None: + body["key"] = self.key + if self.value: + body["value"] = self.value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the MapStringValueEntry into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MapStringValueEntry: """Deserializes the MapStringValueEntry from a dictionary.""" - return cls(key=d.get('key', None), value=_from_dict(d, 'value', Value)) - - + return cls(key=d.get("key", None), value=_from_dict(d, "value", Value)) @dataclass class MiniVectorIndex: creator: Optional[str] = None """The user who created the index.""" - + endpoint_name: Optional[str] = None """Name of the endpoint associated with the index""" - + index_type: Optional[VectorIndexType] = None """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. 
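As an aside, a hedged sketch of branching on `index_type` when handling the `MiniVectorIndex` values defined above (the helper name is hypothetical):

    from databricks.sdk.service.vectorsearch import MiniVectorIndex, VectorIndexType

    def describe(idx: MiniVectorIndex) -> str:
        # DELTA_SYNC indexes track a source Delta table; DIRECT_ACCESS indexes
        # hold vectors written directly through the REST/SDK APIs.
        if idx.index_type is VectorIndexType.DELTA_SYNC:
            return f"{idx.name}: kept in sync with its source Delta table"
        if idx.index_type is VectorIndexType.DIRECT_ACCESS:
            return f"{idx.name}: vectors managed by the caller"
        return f"{idx.name}: unrecognized index type"
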
With this model, the user manages index updates.""" - + name: Optional[str] = None """Name of the index""" - + primary_key: Optional[str] = None """Primary key of the index""" - + def as_dict(self) -> dict: """Serializes the MiniVectorIndex into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creator is not None: body['creator'] = self.creator - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_type is not None: body['index_type'] = self.index_type.value - if self.name is not None: body['name'] = self.name - if self.primary_key is not None: body['primary_key'] = self.primary_key + if self.creator is not None: + body["creator"] = self.creator + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_type is not None: + body["index_type"] = self.index_type.value + if self.name is not None: + body["name"] = self.name + if self.primary_key is not None: + body["primary_key"] = self.primary_key return body def as_shallow_dict(self) -> dict: """Serializes the MiniVectorIndex into a shallow dictionary of its immediate attributes.""" body = {} - if self.creator is not None: body['creator'] = self.creator - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_type is not None: body['index_type'] = self.index_type - if self.name is not None: body['name'] = self.name - if self.primary_key is not None: body['primary_key'] = self.primary_key + if self.creator is not None: + body["creator"] = self.creator + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_type is not None: + body["index_type"] = self.index_type + if self.name is not None: + body["name"] = self.name + if self.primary_key is not None: + body["primary_key"] = self.primary_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> MiniVectorIndex: """Deserializes the MiniVectorIndex from a dictionary.""" - return cls(creator=d.get('creator', None), endpoint_name=d.get('endpoint_name', None), index_type=_enum(d, 'index_type', VectorIndexType), name=d.get('name', None), primary_key=d.get('primary_key', None)) - - + return cls( + creator=d.get("creator", None), + endpoint_name=d.get("endpoint_name", None), + index_type=_enum(d, "index_type", VectorIndexType), + name=d.get("name", None), + primary_key=d.get("primary_key", None), + ) @dataclass class PatchEndpointBudgetPolicyRequest: budget_policy_id: str """The budget policy id to be applied""" - + endpoint_name: Optional[str] = None """Name of the vector search endpoint""" - + def as_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name return body def as_shallow_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.budget_policy_id is not None: + body["budget_policy_id"] = self.budget_policy_id + if 
self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointBudgetPolicyRequest: """Deserializes the PatchEndpointBudgetPolicyRequest from a dictionary.""" - return cls(budget_policy_id=d.get('budget_policy_id', None), endpoint_name=d.get('endpoint_name', None)) - - + return cls(budget_policy_id=d.get("budget_policy_id", None), endpoint_name=d.get("endpoint_name", None)) @dataclass class PatchEndpointBudgetPolicyResponse: effective_budget_policy_id: Optional[str] = None """The budget policy applied to the vector search endpoint.""" - + def as_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id return body def as_shallow_dict(self) -> dict: """Serializes the PatchEndpointBudgetPolicyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.effective_budget_policy_id is not None: body['effective_budget_policy_id'] = self.effective_budget_policy_id + if self.effective_budget_policy_id is not None: + body["effective_budget_policy_id"] = self.effective_budget_policy_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointBudgetPolicyResponse: """Deserializes the PatchEndpointBudgetPolicyResponse from a dictionary.""" - return cls(effective_budget_policy_id=d.get('effective_budget_policy_id', None)) - - + return cls(effective_budget_policy_id=d.get("effective_budget_policy_id", None)) class PipelineType(Enum): @@ -847,55 +962,64 @@ class PipelineType(Enum): ensuring the table is updated based on the data available when the update started. 
- `CONTINUOUS`: If the pipeline uses continuous execution, the pipeline processes new data as it arrives in the source table to keep vector index fresh.""" - - CONTINUOUS = 'CONTINUOUS' - TRIGGERED = 'TRIGGERED' + + CONTINUOUS = "CONTINUOUS" + TRIGGERED = "TRIGGERED" + @dataclass class QueryVectorIndexNextPageRequest: """Request payload for getting next page of results.""" - + endpoint_name: Optional[str] = None """Name of the endpoint.""" - + index_name: Optional[str] = None """Name of the vector index to query.""" - + page_token: Optional[str] = None """Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.""" - + def as_dict(self) -> dict: """Serializes the QueryVectorIndexNextPageRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_name is not None: body['index_name'] = self.index_name - if self.page_token is not None: body['page_token'] = self.page_token + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_name is not None: + body["index_name"] = self.index_name + if self.page_token is not None: + body["page_token"] = self.page_token return body def as_shallow_dict(self) -> dict: """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_name is not None: body['index_name'] = self.index_name - if self.page_token is not None: body['page_token'] = self.page_token + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_name is not None: + body["index_name"] = self.index_name + if self.page_token is not None: + body["page_token"] = self.page_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexNextPageRequest: """Deserializes the QueryVectorIndexNextPageRequest from a dictionary.""" - return cls(endpoint_name=d.get('endpoint_name', None), index_name=d.get('index_name', None), page_token=d.get('page_token', None)) - - + return cls( + endpoint_name=d.get("endpoint_name", None), + index_name=d.get("index_name", None), + page_token=d.get("page_token", None), + ) @dataclass class QueryVectorIndexRequest: columns: List[str] """List of column names to include in the response.""" - + columns_to_rerank: Optional[List[str]] = None """Column names used to retrieve data to send to the reranker.""" - + filters_json: Optional[str] = None """JSON string representing query filters. @@ -904,256 +1028,301 @@ class QueryVectorIndexRequest: - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5.""" - + index_name: Optional[str] = None """Name of the vector index to query.""" - + num_results: Optional[int] = None """Number of results to return. Defaults to 10.""" - + query_text: Optional[str] = None """Query text. Required for Delta Sync Index using model endpoint.""" - + query_type: Optional[str] = None """The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.""" - + query_vector: Optional[List[float]] = None """Query vector. 
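For concreteness, a hedged sketch of assembling one of the `filters_json` forms documented above and building a request from it (the index name is hypothetical):

    import json

    from databricks.sdk.service.vectorsearch import QueryVectorIndexRequest

    # One filter from the docstring's list: rows with id greater than 5.
    req = QueryVectorIndexRequest(
        columns=["id", "document"],
        index_name="main.default.my_index",  # hypothetical fully qualified name
        filters_json=json.dumps({"id >": 5}),
        num_results=10,
        query_text="how do endpoints work?",
    )
    body = req.as_dict()  # JSON-ready request body
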
Required for Direct Vector Access Index and Delta Sync Index using self-managed vectors.""" - + score_threshold: Optional[float] = None """Threshold for the approximate nearest neighbor search. Defaults to 0.0.""" - + def as_dict(self) -> dict: """Serializes the QueryVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: body['columns'] = [v for v in self.columns] - if self.columns_to_rerank: body['columns_to_rerank'] = [v for v in self.columns_to_rerank] - if self.filters_json is not None: body['filters_json'] = self.filters_json - if self.index_name is not None: body['index_name'] = self.index_name - if self.num_results is not None: body['num_results'] = self.num_results - if self.query_text is not None: body['query_text'] = self.query_text - if self.query_type is not None: body['query_type'] = self.query_type - if self.query_vector: body['query_vector'] = [v for v in self.query_vector] - if self.score_threshold is not None: body['score_threshold'] = self.score_threshold + if self.columns: + body["columns"] = [v for v in self.columns] + if self.columns_to_rerank: + body["columns_to_rerank"] = [v for v in self.columns_to_rerank] + if self.filters_json is not None: + body["filters_json"] = self.filters_json + if self.index_name is not None: + body["index_name"] = self.index_name + if self.num_results is not None: + body["num_results"] = self.num_results + if self.query_text is not None: + body["query_text"] = self.query_text + if self.query_type is not None: + body["query_type"] = self.query_type + if self.query_vector: + body["query_vector"] = [v for v in self.query_vector] + if self.score_threshold is not None: + body["score_threshold"] = self.score_threshold return body def as_shallow_dict(self) -> dict: """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: body['columns'] = self.columns - if self.columns_to_rerank: body['columns_to_rerank'] = self.columns_to_rerank - if self.filters_json is not None: body['filters_json'] = self.filters_json - if self.index_name is not None: body['index_name'] = self.index_name - if self.num_results is not None: body['num_results'] = self.num_results - if self.query_text is not None: body['query_text'] = self.query_text - if self.query_type is not None: body['query_type'] = self.query_type - if self.query_vector: body['query_vector'] = self.query_vector - if self.score_threshold is not None: body['score_threshold'] = self.score_threshold + if self.columns: + body["columns"] = self.columns + if self.columns_to_rerank: + body["columns_to_rerank"] = self.columns_to_rerank + if self.filters_json is not None: + body["filters_json"] = self.filters_json + if self.index_name is not None: + body["index_name"] = self.index_name + if self.num_results is not None: + body["num_results"] = self.num_results + if self.query_text is not None: + body["query_text"] = self.query_text + if self.query_type is not None: + body["query_type"] = self.query_type + if self.query_vector: + body["query_vector"] = self.query_vector + if self.score_threshold is not None: + body["score_threshold"] = self.score_threshold return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexRequest: """Deserializes the QueryVectorIndexRequest from a dictionary.""" - return cls(columns=d.get('columns', None), columns_to_rerank=d.get('columns_to_rerank', None), filters_json=d.get('filters_json', None), index_name=d.get('index_name', None), 
num_results=d.get('num_results', None), query_text=d.get('query_text', None), query_type=d.get('query_type', None), query_vector=d.get('query_vector', None), score_threshold=d.get('score_threshold', None)) - - + return cls( + columns=d.get("columns", None), + columns_to_rerank=d.get("columns_to_rerank", None), + filters_json=d.get("filters_json", None), + index_name=d.get("index_name", None), + num_results=d.get("num_results", None), + query_text=d.get("query_text", None), + query_type=d.get("query_type", None), + query_vector=d.get("query_vector", None), + score_threshold=d.get("score_threshold", None), + ) @dataclass class QueryVectorIndexResponse: manifest: Optional[ResultManifest] = None """Metadata about the result set.""" - + next_page_token: Optional[str] = None """[Optional] Token that can be used in `QueryVectorIndexNextPage` API to get next page of results. If more than 1000 results satisfy the query, they are returned in groups of 1000. Empty value means no more results. The maximum number of results that can be returned is 10,000.""" - + result: Optional[ResultData] = None """Data returned in the query result.""" - + def as_dict(self) -> dict: """Serializes the QueryVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.manifest: body['manifest'] = self.manifest.as_dict() - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.result: body['result'] = self.result.as_dict() + if self.manifest: + body["manifest"] = self.manifest.as_dict() + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.result: + body["result"] = self.result.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the QueryVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.manifest: body['manifest'] = self.manifest - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - if self.result: body['result'] = self.result + if self.manifest: + body["manifest"] = self.manifest + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.result: + body["result"] = self.result return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexResponse: """Deserializes the QueryVectorIndexResponse from a dictionary.""" - return cls(manifest=_from_dict(d, 'manifest', ResultManifest), next_page_token=d.get('next_page_token', None), result=_from_dict(d, 'result', ResultData)) - - + return cls( + manifest=_from_dict(d, "manifest", ResultManifest), + next_page_token=d.get("next_page_token", None), + result=_from_dict(d, "result", ResultData), + ) @dataclass class ResultData: """Data returned in the query result.""" - + data_array: Optional[List[List[str]]] = None """Data rows returned in the query.""" - + row_count: Optional[int] = None """Number of rows in the result set.""" - + def as_dict(self) -> dict: """Serializes the ResultData into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data_array: body['data_array'] = [v for v in self.data_array] - if self.row_count is not None: body['row_count'] = self.row_count + if self.data_array: + body["data_array"] = [v for v in self.data_array] + if self.row_count is not None: + body["row_count"] = self.row_count return body def as_shallow_dict(self) -> dict: """Serializes the ResultData into a shallow dictionary of its immediate attributes.""" body = {} - if self.data_array: 
body['data_array'] = self.data_array - if self.row_count is not None: body['row_count'] = self.row_count + if self.data_array: + body["data_array"] = self.data_array + if self.row_count is not None: + body["row_count"] = self.row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultData: """Deserializes the ResultData from a dictionary.""" - return cls(data_array=d.get('data_array', None), row_count=d.get('row_count', None)) - - + return cls(data_array=d.get("data_array", None), row_count=d.get("row_count", None)) @dataclass class ResultManifest: """Metadata about the result set.""" - + column_count: Optional[int] = None """Number of columns in the result set.""" - + columns: Optional[List[ColumnInfo]] = None """Information about each column in the result set.""" - + def as_dict(self) -> dict: """Serializes the ResultManifest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.column_count is not None: body['column_count'] = self.column_count - if self.columns: body['columns'] = [v.as_dict() for v in self.columns] + if self.column_count is not None: + body["column_count"] = self.column_count + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] return body def as_shallow_dict(self) -> dict: """Serializes the ResultManifest into a shallow dictionary of its immediate attributes.""" body = {} - if self.column_count is not None: body['column_count'] = self.column_count - if self.columns: body['columns'] = self.columns + if self.column_count is not None: + body["column_count"] = self.column_count + if self.columns: + body["columns"] = self.columns return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ResultManifest: """Deserializes the ResultManifest from a dictionary.""" - return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo)) - - + return cls(column_count=d.get("column_count", None), columns=_repeated_dict(d, "columns", ColumnInfo)) @dataclass class ScanVectorIndexRequest: index_name: Optional[str] = None """Name of the vector index to scan.""" - + last_primary_key: Optional[str] = None """Primary key of the last entry returned in the previous scan.""" - + num_results: Optional[int] = None """Number of results to return. 
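A hedged sketch of the scan-pagination pattern these fields imply; `w.vector_search_indexes.scan_index` is taken from the wider SDK and this patch's indexes API, so treat the attribute and method names as assumptions here:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Each page's last_primary_key seeds the next request; an empty value
    # means the scan is complete.
    last_key = None
    while True:
        resp = w.vector_search_indexes.scan_index(
            index_name="main.default.my_index",  # hypothetical
            last_primary_key=last_key,
            num_results=100,
        )
        for row in resp.data or []:
            pass  # each row is a Struct of MapStringValueEntry fields
        if not resp.last_primary_key:
            break
        last_key = resp.last_primary_key
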
Defaults to 10.""" - + def as_dict(self) -> dict: """Serializes the ScanVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.index_name is not None: body['index_name'] = self.index_name - if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key - if self.num_results is not None: body['num_results'] = self.num_results + if self.index_name is not None: + body["index_name"] = self.index_name + if self.last_primary_key is not None: + body["last_primary_key"] = self.last_primary_key + if self.num_results is not None: + body["num_results"] = self.num_results return body def as_shallow_dict(self) -> dict: """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.index_name is not None: body['index_name'] = self.index_name - if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key - if self.num_results is not None: body['num_results'] = self.num_results + if self.index_name is not None: + body["index_name"] = self.index_name + if self.last_primary_key is not None: + body["last_primary_key"] = self.last_primary_key + if self.num_results is not None: + body["num_results"] = self.num_results return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ScanVectorIndexRequest: """Deserializes the ScanVectorIndexRequest from a dictionary.""" - return cls(index_name=d.get('index_name', None), last_primary_key=d.get('last_primary_key', None), num_results=d.get('num_results', None)) - - + return cls( + index_name=d.get("index_name", None), + last_primary_key=d.get("last_primary_key", None), + num_results=d.get("num_results", None), + ) @dataclass class ScanVectorIndexResponse: """Response to a scan vector index request.""" - + data: Optional[List[Struct]] = None """List of data entries""" - + last_primary_key: Optional[str] = None """Primary key of the last entry.""" - + def as_dict(self) -> dict: """Serializes the ScanVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data: body['data'] = [v.as_dict() for v in self.data] - if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key + if self.data: + body["data"] = [v.as_dict() for v in self.data] + if self.last_primary_key is not None: + body["last_primary_key"] = self.last_primary_key return body def as_shallow_dict(self) -> dict: """Serializes the ScanVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.data: body['data'] = self.data - if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key + if self.data: + body["data"] = self.data + if self.last_primary_key is not None: + body["last_primary_key"] = self.last_primary_key return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ScanVectorIndexResponse: """Deserializes the ScanVectorIndexResponse from a dictionary.""" - return cls(data=_repeated_dict(d, 'data', Struct), last_primary_key=d.get('last_primary_key', None)) - - + return cls(data=_repeated_dict(d, "data", Struct), last_primary_key=d.get("last_primary_key", None)) @dataclass class Struct: fields: Optional[List[MapStringValueEntry]] = None """Data entry, corresponding to a row in a vector index.""" - + def as_dict(self) -> dict: """Serializes the Struct into a dictionary suitable for use as a JSON request body.""" body = {} - if self.fields: body['fields'] = [v.as_dict() for v in self.fields] + if self.fields: + 
body["fields"] = [v.as_dict() for v in self.fields] return body def as_shallow_dict(self) -> dict: """Serializes the Struct into a shallow dictionary of its immediate attributes.""" body = {} - if self.fields: body['fields'] = self.fields + if self.fields: + body["fields"] = self.fields return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Struct: """Deserializes the Struct from a dictionary.""" - return cls(fields=_repeated_dict(d, 'fields', MapStringValueEntry)) - - - - - + return cls(fields=_repeated_dict(d, "fields", MapStringValueEntry)) @dataclass @@ -1172,306 +1341,364 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> SyncIndexResponse: """Deserializes the SyncIndexResponse from a dictionary.""" return cls() - - @dataclass class UpdateEndpointCustomTagsRequest: custom_tags: List[CustomTag] """The new custom tags for the vector search endpoint""" - + endpoint_name: Optional[str] = None """Name of the vector search endpoint""" - + def as_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsRequest: """Deserializes the UpdateEndpointCustomTagsRequest from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, 'custom_tags', CustomTag), endpoint_name=d.get('endpoint_name', None)) - - + return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), endpoint_name=d.get("endpoint_name", None)) @dataclass class UpdateEndpointCustomTagsResponse: custom_tags: Optional[List[CustomTag]] = None """All the custom tags that are applied to the vector search endpoint.""" - + name: Optional[str] = None """The name of the vector search endpoint whose custom tags were updated.""" - + def as_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags] - if self.name is not None: body['name'] = self.name + if self.custom_tags: + body["custom_tags"] = [v.as_dict() for v in self.custom_tags] + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: """Serializes the UpdateEndpointCustomTagsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: body['custom_tags'] = self.custom_tags - if self.name is not None: body['name'] = self.name + if self.custom_tags: + body["custom_tags"] = self.custom_tags + if self.name is not None: + body["name"] = self.name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsResponse: 
"""Deserializes the UpdateEndpointCustomTagsResponse from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, 'custom_tags', CustomTag), name=d.get('name', None)) - - + return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), name=d.get("name", None)) @dataclass class UpsertDataResult: failed_primary_keys: Optional[List[str]] = None """List of primary keys for rows that failed to process.""" - + success_row_count: Optional[int] = None """Count of successfully processed rows.""" - + def as_dict(self) -> dict: """Serializes the UpsertDataResult into a dictionary suitable for use as a JSON request body.""" body = {} - if self.failed_primary_keys: body['failed_primary_keys'] = [v for v in self.failed_primary_keys] - if self.success_row_count is not None: body['success_row_count'] = self.success_row_count + if self.failed_primary_keys: + body["failed_primary_keys"] = [v for v in self.failed_primary_keys] + if self.success_row_count is not None: + body["success_row_count"] = self.success_row_count return body def as_shallow_dict(self) -> dict: """Serializes the UpsertDataResult into a shallow dictionary of its immediate attributes.""" body = {} - if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys - if self.success_row_count is not None: body['success_row_count'] = self.success_row_count + if self.failed_primary_keys: + body["failed_primary_keys"] = self.failed_primary_keys + if self.success_row_count is not None: + body["success_row_count"] = self.success_row_count return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpsertDataResult: """Deserializes the UpsertDataResult from a dictionary.""" - return cls(failed_primary_keys=d.get('failed_primary_keys', None), success_row_count=d.get('success_row_count', None)) - - + return cls( + failed_primary_keys=d.get("failed_primary_keys", None), success_row_count=d.get("success_row_count", None) + ) class UpsertDataStatus(Enum): - - - FAILURE = 'FAILURE' - PARTIAL_SUCCESS = 'PARTIAL_SUCCESS' - SUCCESS = 'SUCCESS' + + FAILURE = "FAILURE" + PARTIAL_SUCCESS = "PARTIAL_SUCCESS" + SUCCESS = "SUCCESS" + @dataclass class UpsertDataVectorIndexRequest: inputs_json: str """JSON string representing the data to be upserted.""" - + index_name: Optional[str] = None """Name of the vector index where data is to be upserted. 
Must be a Direct Vector Access Index.""" - + def as_dict(self) -> dict: """Serializes the UpsertDataVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.index_name is not None: body['index_name'] = self.index_name - if self.inputs_json is not None: body['inputs_json'] = self.inputs_json + if self.index_name is not None: + body["index_name"] = self.index_name + if self.inputs_json is not None: + body["inputs_json"] = self.inputs_json return body def as_shallow_dict(self) -> dict: """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.index_name is not None: body['index_name'] = self.index_name - if self.inputs_json is not None: body['inputs_json'] = self.inputs_json + if self.index_name is not None: + body["index_name"] = self.index_name + if self.inputs_json is not None: + body["inputs_json"] = self.inputs_json return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpsertDataVectorIndexRequest: """Deserializes the UpsertDataVectorIndexRequest from a dictionary.""" - return cls(index_name=d.get('index_name', None), inputs_json=d.get('inputs_json', None)) - - + return cls(index_name=d.get("index_name", None), inputs_json=d.get("inputs_json", None)) @dataclass class UpsertDataVectorIndexResponse: result: Optional[UpsertDataResult] = None """Result of the upsert or delete operation.""" - + status: Optional[UpsertDataStatus] = None """Status of the upsert operation.""" - + def as_dict(self) -> dict: """Serializes the UpsertDataVectorIndexResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.result: body['result'] = self.result.as_dict() - if self.status is not None: body['status'] = self.status.value + if self.result: + body["result"] = self.result.as_dict() + if self.status is not None: + body["status"] = self.status.value return body def as_shallow_dict(self) -> dict: """Serializes the UpsertDataVectorIndexResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.result: body['result'] = self.result - if self.status is not None: body['status'] = self.status + if self.result: + body["result"] = self.result + if self.status is not None: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> UpsertDataVectorIndexResponse: """Deserializes the UpsertDataVectorIndexResponse from a dictionary.""" - return cls(result=_from_dict(d, 'result', UpsertDataResult), status=_enum(d, 'status', UpsertDataStatus)) - - + return cls(result=_from_dict(d, "result", UpsertDataResult), status=_enum(d, "status", UpsertDataStatus)) @dataclass class Value: bool_value: Optional[bool] = None - + list_value: Optional[ListValue] = None - + number_value: Optional[float] = None - + string_value: Optional[str] = None - + struct_value: Optional[Struct] = None - + def as_dict(self) -> dict: """Serializes the Value into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bool_value is not None: body['bool_value'] = self.bool_value - if self.list_value: body['list_value'] = self.list_value.as_dict() - if self.number_value is not None: body['number_value'] = self.number_value - if self.string_value is not None: body['string_value'] = self.string_value - if self.struct_value: body['struct_value'] = self.struct_value.as_dict() + if self.bool_value is not None: + body["bool_value"] = self.bool_value + if self.list_value: + body["list_value"] = self.list_value.as_dict() + if 
self.number_value is not None: + body["number_value"] = self.number_value + if self.string_value is not None: + body["string_value"] = self.string_value + if self.struct_value: + body["struct_value"] = self.struct_value.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the Value into a shallow dictionary of its immediate attributes.""" body = {} - if self.bool_value is not None: body['bool_value'] = self.bool_value - if self.list_value: body['list_value'] = self.list_value - if self.number_value is not None: body['number_value'] = self.number_value - if self.string_value is not None: body['string_value'] = self.string_value - if self.struct_value: body['struct_value'] = self.struct_value + if self.bool_value is not None: + body["bool_value"] = self.bool_value + if self.list_value: + body["list_value"] = self.list_value + if self.number_value is not None: + body["number_value"] = self.number_value + if self.string_value is not None: + body["string_value"] = self.string_value + if self.struct_value: + body["struct_value"] = self.struct_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Value: """Deserializes the Value from a dictionary.""" - return cls(bool_value=d.get('bool_value', None), list_value=_from_dict(d, 'list_value', ListValue), number_value=d.get('number_value', None), string_value=d.get('string_value', None), struct_value=_from_dict(d, 'struct_value', Struct)) - - + return cls( + bool_value=d.get("bool_value", None), + list_value=_from_dict(d, "list_value", ListValue), + number_value=d.get("number_value", None), + string_value=d.get("string_value", None), + struct_value=_from_dict(d, "struct_value", Struct), + ) @dataclass class VectorIndex: creator: Optional[str] = None """The user who created the index.""" - + delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecResponse] = None - + direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None - + endpoint_name: Optional[str] = None """Name of the endpoint associated with the index""" - + index_type: Optional[VectorIndexType] = None """There are 2 types of Vector Search indexes: - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. 
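Since `Struct` and `Value` mirror protobuf's well-known types, a hedged helper (names are hypothetical) for flattening a scanned row into a plain dict:

    from databricks.sdk.service.vectorsearch import Struct, Value

    def value_to_python(v: Value):
        # A Value carries exactly one of these variants, mirroring
        # protobuf's well-known Value type.
        if v.string_value is not None:
            return v.string_value
        if v.number_value is not None:
            return v.number_value
        if v.bool_value is not None:
            return v.bool_value
        if v.list_value is not None:
            return [value_to_python(x) for x in (v.list_value.values or [])]
        if v.struct_value is not None:
            return struct_to_dict(v.struct_value)
        return None

    def struct_to_dict(s: Struct) -> dict:
        # Flatten a row into an ordinary dict keyed by column name.
        return {f.key: value_to_python(f.value) for f in (s.fields or [])}
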
With this model, the user manages index updates.""" - + name: Optional[str] = None """Name of the index""" - + primary_key: Optional[str] = None """Primary key of the index""" - + status: Optional[VectorIndexStatus] = None - + def as_dict(self) -> dict: """Serializes the VectorIndex into a dictionary suitable for use as a JSON request body.""" body = {} - if self.creator is not None: body['creator'] = self.creator - if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec.as_dict() - if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec.as_dict() - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_type is not None: body['index_type'] = self.index_type.value - if self.name is not None: body['name'] = self.name - if self.primary_key is not None: body['primary_key'] = self.primary_key - if self.status: body['status'] = self.status.as_dict() + if self.creator is not None: + body["creator"] = self.creator + if self.delta_sync_index_spec: + body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict() + if self.direct_access_index_spec: + body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict() + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_type is not None: + body["index_type"] = self.index_type.value + if self.name is not None: + body["name"] = self.name + if self.primary_key is not None: + body["primary_key"] = self.primary_key + if self.status: + body["status"] = self.status.as_dict() return body def as_shallow_dict(self) -> dict: """Serializes the VectorIndex into a shallow dictionary of its immediate attributes.""" body = {} - if self.creator is not None: body['creator'] = self.creator - if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec - if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec - if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name - if self.index_type is not None: body['index_type'] = self.index_type - if self.name is not None: body['name'] = self.name - if self.primary_key is not None: body['primary_key'] = self.primary_key - if self.status: body['status'] = self.status + if self.creator is not None: + body["creator"] = self.creator + if self.delta_sync_index_spec: + body["delta_sync_index_spec"] = self.delta_sync_index_spec + if self.direct_access_index_spec: + body["direct_access_index_spec"] = self.direct_access_index_spec + if self.endpoint_name is not None: + body["endpoint_name"] = self.endpoint_name + if self.index_type is not None: + body["index_type"] = self.index_type + if self.name is not None: + body["name"] = self.name + if self.primary_key is not None: + body["primary_key"] = self.primary_key + if self.status: + body["status"] = self.status return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VectorIndex: """Deserializes the VectorIndex from a dictionary.""" - return cls(creator=d.get('creator', None), delta_sync_index_spec=_from_dict(d, 'delta_sync_index_spec', DeltaSyncVectorIndexSpecResponse), direct_access_index_spec=_from_dict(d, 'direct_access_index_spec', DirectAccessVectorIndexSpec), endpoint_name=d.get('endpoint_name', None), index_type=_enum(d, 'index_type', VectorIndexType), name=d.get('name', None), primary_key=d.get('primary_key', None), status=_from_dict(d, 'status', VectorIndexStatus)) - - + return cls( + creator=d.get("creator", 
None), + delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecResponse), + direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec), + endpoint_name=d.get("endpoint_name", None), + index_type=_enum(d, "index_type", VectorIndexType), + name=d.get("name", None), + primary_key=d.get("primary_key", None), + status=_from_dict(d, "status", VectorIndexStatus), + ) @dataclass class VectorIndexStatus: index_url: Optional[str] = None """Index API Url to be used to perform operations on the index""" - + indexed_row_count: Optional[int] = None """Number of rows indexed""" - + message: Optional[str] = None """Message associated with the index status""" - + ready: Optional[bool] = None """Whether the index is ready for search""" - + def as_dict(self) -> dict: """Serializes the VectorIndexStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.index_url is not None: body['index_url'] = self.index_url - if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count - if self.message is not None: body['message'] = self.message - if self.ready is not None: body['ready'] = self.ready + if self.index_url is not None: + body["index_url"] = self.index_url + if self.indexed_row_count is not None: + body["indexed_row_count"] = self.indexed_row_count + if self.message is not None: + body["message"] = self.message + if self.ready is not None: + body["ready"] = self.ready return body def as_shallow_dict(self) -> dict: """Serializes the VectorIndexStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.index_url is not None: body['index_url'] = self.index_url - if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count - if self.message is not None: body['message'] = self.message - if self.ready is not None: body['ready'] = self.ready + if self.index_url is not None: + body["index_url"] = self.index_url + if self.indexed_row_count is not None: + body["indexed_row_count"] = self.indexed_row_count + if self.message is not None: + body["message"] = self.message + if self.ready is not None: + body["ready"] = self.ready return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> VectorIndexStatus: """Deserializes the VectorIndexStatus from a dictionary.""" - return cls(index_url=d.get('index_url', None), indexed_row_count=d.get('indexed_row_count', None), message=d.get('message', None), ready=d.get('ready', None)) - - + return cls( + index_url=d.get("index_url", None), + indexed_row_count=d.get("indexed_row_count", None), + message=d.get("message", None), + ready=d.get("ready", None), + ) class VectorIndexType(Enum): @@ -1480,267 +1707,241 @@ class VectorIndexType(Enum): data in the Delta Table changes. - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. 
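A small hedged sketch of gating on the `VectorIndexStatus` fields defined above before querying (the helper name is hypothetical):

    from databricks.sdk.service.vectorsearch import VectorIndex

    def is_searchable(index: VectorIndex) -> bool:
        # status is optional, and ready is optional within it; only query
        # an index that affirmatively reports readiness.
        return bool(index.status and index.status.ready)
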
With this model, the user manages index updates.""" - - DELTA_SYNC = 'DELTA_SYNC' - DIRECT_ACCESS = 'DIRECT_ACCESS' + DELTA_SYNC = "DELTA_SYNC" + DIRECT_ACCESS = "DIRECT_ACCESS" class VectorSearchEndpointsAPI: """**Endpoint**: Represents the compute resources to host vector search indexes.""" - + def __init__(self, api_client): self._api = api_client - - - - - def wait_get_endpoint_vector_search_endpoint_online(self, endpoint_name: str, - timeout=timedelta(minutes=20), callback: Optional[Callable[[EndpointInfo], None]] = None) -> EndpointInfo: - deadline = time.time() + timeout.total_seconds() - target_states = (EndpointStatusState.ONLINE, ) - failure_states = (EndpointStatusState.OFFLINE, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get_endpoint(endpoint_name=endpoint_name) - status = poll.endpoint_status.state - status_message = f'current status: {status}' - if poll.endpoint_status: - status_message = poll.endpoint_status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach ONLINE, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"endpoint_name={endpoint_name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - - - - - def create_endpoint(self - , name: str, endpoint_type: EndpointType - , * - , budget_policy_id: Optional[str] = None) -> Wait[EndpointInfo]: + def wait_get_endpoint_vector_search_endpoint_online( + self, + endpoint_name: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[EndpointInfo], None]] = None, + ) -> EndpointInfo: + deadline = time.time() + timeout.total_seconds() + target_states = (EndpointStatusState.ONLINE,) + failure_states = (EndpointStatusState.OFFLINE,) + status_message = "polling..." + attempt = 1 + while time.time() < deadline: + poll = self.get_endpoint(endpoint_name=endpoint_name) + status = poll.endpoint_status.state + status_message = f"current status: {status}" + if poll.endpoint_status: + status_message = poll.endpoint_status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f"failed to reach ONLINE, got {status}: {status_message}" + raise OperationFailed(msg) + prefix = f"endpoint_name={endpoint_name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f"{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)") + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f"timed out after {timeout}: {status_message}") + + def create_endpoint( + self, name: str, endpoint_type: EndpointType, *, budget_policy_id: Optional[str] = None + ) -> Wait[EndpointInfo]: """Create an endpoint. - + Create a new endpoint. - + :param name: str Name of the vector search endpoint :param endpoint_type: :class:`EndpointType` Type of endpoint :param budget_policy_id: str (optional) The budget policy id to be applied - + :returns: Long-running operation waiter for :class:`EndpointInfo`. See :method:wait_get_endpoint_vector_search_endpoint_online for more details. 
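For readers new to the `Wait` pattern, a hedged usage sketch; the endpoint name is hypothetical, and `w.vector_search_endpoints` is the workspace-client attribute assumed from the wider SDK:

    from datetime import timedelta

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.vectorsearch import EndpointType

    w = WorkspaceClient()
    # create_endpoint returns immediately with a Wait; result() polls via
    # wait_get_endpoint_vector_search_endpoint_online until ONLINE.
    waiter = w.vector_search_endpoints.create_endpoint(
        name="my-endpoint",  # hypothetical
        endpoint_type=EndpointType.STANDARD,
    )
    endpoint = waiter.result(timeout=timedelta(minutes=20))
    # Or, in one call: create_endpoint_and_wait(name=..., endpoint_type=...)
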
""" body = {} - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - if endpoint_type is not None: body['endpoint_type'] = endpoint_type.value - if name is not None: body['name'] = name - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - op_response = self._api.do('POST','/api/2.0/vector-search/endpoints', body=body - - , headers=headers - ) - return Wait(self.wait_get_endpoint_vector_search_endpoint_online - , response = EndpointInfo.from_dict(op_response) - , endpoint_name=op_response['name']) - - - def create_endpoint_and_wait(self - , name: str, endpoint_type: EndpointType - , * - , budget_policy_id: Optional[str] = None, - timeout=timedelta(minutes=20)) -> EndpointInfo: - return self.create_endpoint(budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name).result(timeout=timeout) - - - - - def delete_endpoint(self - , endpoint_name: str - ): + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + if endpoint_type is not None: + body["endpoint_type"] = endpoint_type.value + if name is not None: + body["name"] = name + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + op_response = self._api.do("POST", "/api/2.0/vector-search/endpoints", body=body, headers=headers) + return Wait( + self.wait_get_endpoint_vector_search_endpoint_online, + response=EndpointInfo.from_dict(op_response), + endpoint_name=op_response["name"], + ) + + def create_endpoint_and_wait( + self, + name: str, + endpoint_type: EndpointType, + *, + budget_policy_id: Optional[str] = None, + timeout=timedelta(minutes=20), + ) -> EndpointInfo: + return self.create_endpoint(budget_policy_id=budget_policy_id, endpoint_type=endpoint_type, name=name).result( + timeout=timeout + ) + + def delete_endpoint(self, endpoint_name: str): """Delete an endpoint. - + Delete a vector search endpoint. - + :param endpoint_name: str Name of the vector search endpoint - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/vector-search/endpoints/{endpoint_name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get_endpoint(self - , endpoint_name: str - ) -> EndpointInfo: + self._api.do("DELETE", f"/api/2.0/vector-search/endpoints/{endpoint_name}", headers=headers) + + def get_endpoint(self, endpoint_name: str) -> EndpointInfo: """Get an endpoint. - + Get details for a single vector search endpoint. - + :param endpoint_name: str Name of the endpoint - + :returns: :class:`EndpointInfo` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/vector-search/endpoints/{endpoint_name}' - - , headers=headers - ) - return EndpointInfo.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/vector-search/endpoints/{endpoint_name}", headers=headers) + return EndpointInfo.from_dict(res) - def list_endpoints(self - - , * - , page_token: Optional[str] = None) -> Iterator[EndpointInfo]: + def list_endpoints(self, *, page_token: Optional[str] = None) -> Iterator[EndpointInfo]: """List all endpoints. - + List all vector search endpoints in the workspace. 
- + :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`EndpointInfo` """ - - query = {} - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/vector-search/endpoints', query=query - - , headers=headers - ) - if 'endpoints' in json: - for v in json['endpoints']: - yield EndpointInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def update_endpoint_budget_policy(self - , endpoint_name: str, budget_policy_id: str - ) -> PatchEndpointBudgetPolicyResponse: + while True: + json = self._api.do("GET", "/api/2.0/vector-search/endpoints", query=query, headers=headers) + if "endpoints" in json: + for v in json["endpoints"]: + yield EndpointInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def update_endpoint_budget_policy( + self, endpoint_name: str, budget_policy_id: str + ) -> PatchEndpointBudgetPolicyResponse: """Update the budget policy of an endpoint. - + Update the budget policy of an endpoint - + :param endpoint_name: str Name of the vector search endpoint :param budget_policy_id: str The budget policy id to be applied - + :returns: :class:`PatchEndpointBudgetPolicyResponse` """ body = {} - if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/vector-search/endpoints/{endpoint_name}/budget-policy', body=body - - , headers=headers - ) - return PatchEndpointBudgetPolicyResponse.from_dict(res) + if budget_policy_id is not None: + body["budget_policy_id"] = budget_policy_id + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PATCH", f"/api/2.0/vector-search/endpoints/{endpoint_name}/budget-policy", body=body, headers=headers + ) + return PatchEndpointBudgetPolicyResponse.from_dict(res) - def update_endpoint_custom_tags(self - , endpoint_name: str, custom_tags: List[CustomTag] - ) -> UpdateEndpointCustomTagsResponse: + def update_endpoint_custom_tags( + self, endpoint_name: str, custom_tags: List[CustomTag] + ) -> UpdateEndpointCustomTagsResponse: """Update the custom tags of an endpoint. 
- + :param endpoint_name: str Name of the vector search endpoint :param custom_tags: List[:class:`CustomTag`] The new custom tags for the vector search endpoint - + :returns: :class:`UpdateEndpointCustomTagsResponse` """ body = {} - if custom_tags is not None: body['custom_tags'] = [v.as_dict() for v in custom_tags] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/vector-search/endpoints/{endpoint_name}/tags', body=body - - , headers=headers - ) + if custom_tags is not None: + body["custom_tags"] = [v.as_dict() for v in custom_tags] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", f"/api/2.0/vector-search/endpoints/{endpoint_name}/tags", body=body, headers=headers + ) return UpdateEndpointCustomTagsResponse.from_dict(res) - - + class VectorSearchIndexesAPI: """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries. - + There are 2 types of Vector Search indexes: - **Delta Sync Index**: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - **Direct Vector Access Index**: An index that supports direct read and write of vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_index(self - , name: str, endpoint_name: str, primary_key: str, index_type: VectorIndexType - , * - , delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None, direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None) -> VectorIndex: + def create_index( + self, + name: str, + endpoint_name: str, + primary_key: str, + index_type: VectorIndexType, + *, + delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None, + direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None, + ) -> VectorIndex: """Create an index. - + Create a new index. - + :param name: str Name of the index :param endpoint_name: str @@ -1756,155 +1957,138 @@ def create_index(self Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`. :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional) Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`. 
- + :returns: :class:`VectorIndex` """ body = {} - if delta_sync_index_spec is not None: body['delta_sync_index_spec'] = delta_sync_index_spec.as_dict() - if direct_access_index_spec is not None: body['direct_access_index_spec'] = direct_access_index_spec.as_dict() - if endpoint_name is not None: body['endpoint_name'] = endpoint_name - if index_type is not None: body['index_type'] = index_type.value - if name is not None: body['name'] = name - if primary_key is not None: body['primary_key'] = primary_key - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/vector-search/indexes', body=body - - , headers=headers - ) + if delta_sync_index_spec is not None: + body["delta_sync_index_spec"] = delta_sync_index_spec.as_dict() + if direct_access_index_spec is not None: + body["direct_access_index_spec"] = direct_access_index_spec.as_dict() + if endpoint_name is not None: + body["endpoint_name"] = endpoint_name + if index_type is not None: + body["index_type"] = index_type.value + if name is not None: + body["name"] = name + if primary_key is not None: + body["primary_key"] = primary_key + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/vector-search/indexes", body=body, headers=headers) return VectorIndex.from_dict(res) - - - - - def delete_data_vector_index(self - , index_name: str, primary_keys: List[str] - ) -> DeleteDataVectorIndexResponse: + def delete_data_vector_index(self, index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse: """Delete data from index. - + Handles the deletion of data from a specified vector index. - + :param index_name: str Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. :param primary_keys: List[str] List of primary keys for the data to be deleted. - + :returns: :class:`DeleteDataVectorIndexResponse` """ - + query = {} - if primary_keys is not None: query['primary_keys'] = [v for v in primary_keys] - headers = {'Accept': 'application/json',} - - res = self._api.do('DELETE',f'/api/2.0/vector-search/indexes/{index_name}/delete-data', query=query - - , headers=headers - ) + if primary_keys is not None: + query["primary_keys"] = [v for v in primary_keys] + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", f"/api/2.0/vector-search/indexes/{index_name}/delete-data", query=query, headers=headers + ) return DeleteDataVectorIndexResponse.from_dict(res) - - - - - def delete_index(self - , index_name: str - ): + def delete_index(self, index_name: str): """Delete an index. - + Delete an index. - + :param index_name: str Name of the index - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/vector-search/indexes/{index_name}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get_index(self - , index_name: str - ) -> VectorIndex: + self._api.do("DELETE", f"/api/2.0/vector-search/indexes/{index_name}", headers=headers) + + def get_index(self, index_name: str) -> VectorIndex: """Get an index. - + Get an index. 
- + :param index_name: str Name of the index - + :returns: :class:`VectorIndex` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/vector-search/indexes/{index_name}' - - , headers=headers - ) - return VectorIndex.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/vector-search/indexes/{index_name}", headers=headers) + return VectorIndex.from_dict(res) - def list_indexes(self - , endpoint_name: str - , * - , page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]: + def list_indexes(self, endpoint_name: str, *, page_token: Optional[str] = None) -> Iterator[MiniVectorIndex]: """List indexes. - + List all indexes in the given endpoint. - + :param endpoint_name: str Name of the endpoint :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`MiniVectorIndex` """ - - query = {} - if endpoint_name is not None: query['endpoint_name'] = endpoint_name - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/vector-search/indexes', query=query - - , headers=headers - ) - if 'vector_indexes' in json: - for v in json['vector_indexes']: - yield MiniVectorIndex.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - - - + query = {} + if endpoint_name is not None: + query["endpoint_name"] = endpoint_name + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } - def query_index(self - , index_name: str, columns: List[str] - , * - , columns_to_rerank: Optional[List[str]] = None, filters_json: Optional[str] = None, num_results: Optional[int] = None, query_text: Optional[str] = None, query_type: Optional[str] = None, query_vector: Optional[List[float]] = None, score_threshold: Optional[float] = None) -> QueryVectorIndexResponse: + while True: + json = self._api.do("GET", "/api/2.0/vector-search/indexes", query=query, headers=headers) + if "vector_indexes" in json: + for v in json["vector_indexes"]: + yield MiniVectorIndex.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] + + def query_index( + self, + index_name: str, + columns: List[str], + *, + columns_to_rerank: Optional[List[str]] = None, + filters_json: Optional[str] = None, + num_results: Optional[int] = None, + query_text: Optional[str] = None, + query_type: Optional[str] = None, + query_vector: Optional[List[float]] = None, + score_threshold: Optional[float] = None, + ) -> QueryVectorIndexResponse: """Query an index. - + Query the specified vector index. - + :param index_name: str Name of the vector index to query. :param columns: List[str] @@ -1913,9 +2097,9 @@ def query_index(self Column names used to retrieve data to send to the reranker. :param filters_json: str (optional) JSON string representing query filters. - + Example filters: - + - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5. @@ -1930,144 +2114,134 @@ def query_index(self vectors. :param score_threshold: float (optional) Threshold for the approximate nearest neighbor search. Defaults to 0.0. 
- + :returns: :class:`QueryVectorIndexResponse` """ body = {} - if columns is not None: body['columns'] = [v for v in columns] - if columns_to_rerank is not None: body['columns_to_rerank'] = [v for v in columns_to_rerank] - if filters_json is not None: body['filters_json'] = filters_json - if num_results is not None: body['num_results'] = num_results - if query_text is not None: body['query_text'] = query_text - if query_type is not None: body['query_type'] = query_type - if query_vector is not None: body['query_vector'] = [v for v in query_vector] - if score_threshold is not None: body['score_threshold'] = score_threshold - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/query', body=body - - , headers=headers - ) + if columns is not None: + body["columns"] = [v for v in columns] + if columns_to_rerank is not None: + body["columns_to_rerank"] = [v for v in columns_to_rerank] + if filters_json is not None: + body["filters_json"] = filters_json + if num_results is not None: + body["num_results"] = num_results + if query_text is not None: + body["query_text"] = query_text + if query_type is not None: + body["query_type"] = query_type + if query_vector is not None: + body["query_vector"] = [v for v in query_vector] + if score_threshold is not None: + body["score_threshold"] = score_threshold + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/query", body=body, headers=headers) return QueryVectorIndexResponse.from_dict(res) - - - - - def query_next_page(self - , index_name: str - , * - , endpoint_name: Optional[str] = None, page_token: Optional[str] = None) -> QueryVectorIndexResponse: + def query_next_page( + self, index_name: str, *, endpoint_name: Optional[str] = None, page_token: Optional[str] = None + ) -> QueryVectorIndexResponse: """Query next page. - + Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request to fetch next page of results. - + :param index_name: str Name of the vector index to query. :param endpoint_name: str (optional) Name of the endpoint. :param page_token: str (optional) Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API. - + :returns: :class:`QueryVectorIndexResponse` """ body = {} - if endpoint_name is not None: body['endpoint_name'] = endpoint_name - if page_token is not None: body['page_token'] = page_token - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/query-next-page', body=body - - , headers=headers - ) + if endpoint_name is not None: + body["endpoint_name"] = endpoint_name + if page_token is not None: + body["page_token"] = page_token + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", f"/api/2.0/vector-search/indexes/{index_name}/query-next-page", body=body, headers=headers + ) return QueryVectorIndexResponse.from_dict(res) - - - - - def scan_index(self - , index_name: str - , * - , last_primary_key: Optional[str] = None, num_results: Optional[int] = None) -> ScanVectorIndexResponse: + def scan_index( + self, index_name: str, *, last_primary_key: Optional[str] = None, num_results: Optional[int] = None + ) -> ScanVectorIndexResponse: """Scan an index. 
- + Scan the specified vector index and return the first `num_results` entries after the exclusive `primary_key`. - + :param index_name: str Name of the vector index to scan. :param last_primary_key: str (optional) Primary key of the last entry returned in the previous scan. :param num_results: int (optional) Number of results to return. Defaults to 10. - + :returns: :class:`ScanVectorIndexResponse` """ body = {} - if last_primary_key is not None: body['last_primary_key'] = last_primary_key - if num_results is not None: body['num_results'] = num_results - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/scan', body=body - - , headers=headers - ) - return ScanVectorIndexResponse.from_dict(res) + if last_primary_key is not None: + body["last_primary_key"] = last_primary_key + if num_results is not None: + body["num_results"] = num_results + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/scan", body=body, headers=headers) + return ScanVectorIndexResponse.from_dict(res) - def sync_index(self - , index_name: str - ): + def sync_index(self, index_name: str): """Synchronize an index. - + Triggers a synchronization process for a specified vector index. - + :param index_name: str Name of the vector index to synchronize. Must be a Delta Sync Index. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/sync' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def upsert_data_vector_index(self - , index_name: str, inputs_json: str - ) -> UpsertDataVectorIndexResponse: + self._api.do("POST", f"/api/2.0/vector-search/indexes/{index_name}/sync", headers=headers) + + def upsert_data_vector_index(self, index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse: """Upsert data into an index. - + Handles the upserting of data into a specified vector index. - + :param index_name: str Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index. :param inputs_json: str JSON string representing the data to be upserted. - + :returns: :class:`UpsertDataVectorIndexResponse` """ body = {} - if inputs_json is not None: body['inputs_json'] = inputs_json - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST',f'/api/2.0/vector-search/indexes/{index_name}/upsert-data', body=body - - , headers=headers - ) - return UpsertDataVectorIndexResponse.from_dict(res) + if inputs_json is not None: + body["inputs_json"] = inputs_json + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - \ No newline at end of file + res = self._api.do( + "POST", f"/api/2.0/vector-search/indexes/{index_name}/upsert-data", body=body, headers=headers + ) + return UpsertDataVectorIndexResponse.from_dict(res) diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py index db0902331..6753ad880 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -1,91 +1,89 @@ # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
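# --- Illustrative usage sketch (editor-added, not part of the generated patch) ---
# A minimal sketch of the vector search surface shown in the hunks above, assuming
# a WorkspaceClient with auth already configured; the endpoint and index names are
# hypothetical. create_endpoint_and_wait blocks on the ONLINE waiter defined above,
# and query_index uses only parameters from the create/query signatures in this diff.
#
# from databricks.sdk import WorkspaceClient
# from databricks.sdk.service.vectorsearch import EndpointType
#
# w = WorkspaceClient()
# endpoint = w.vector_search_endpoints.create_endpoint_and_wait(
#     name="demo-endpoint",  # hypothetical endpoint name
#     endpoint_type=EndpointType.STANDARD,
# )
# results = w.vector_search_indexes.query_index(
#     index_name="main.default.docs_index",  # hypothetical, already-populated index
#     columns=["id", "text"],
#     query_text="what is vector search?",
#     num_results=5,
# )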
from __future__ import annotations + +import logging from dataclasses import dataclass -from datetime import timedelta from enum import Enum -from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO -import time -import random -import logging -import requests -import threading - -from ..errors import OperationTimeout, OperationFailed -from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter -from ..oauth import Token +from typing import Any, Dict, Iterator, List, Optional -_LOG = logging.getLogger('databricks.sdk') +from ._internal import _enum, _from_dict, _repeated_dict +_LOG = logging.getLogger("databricks.sdk") # all definitions in this file are in alphabetical order + @dataclass class AclItem: principal: str """The principal in which the permission is applied.""" - + permission: AclPermission """The permission level applied to the principal.""" - + def as_dict(self) -> dict: """Serializes the AclItem into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission is not None: body['permission'] = self.permission.value - if self.principal is not None: body['principal'] = self.principal + if self.permission is not None: + body["permission"] = self.permission.value + if self.principal is not None: + body["principal"] = self.principal return body def as_shallow_dict(self) -> dict: """Serializes the AclItem into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission is not None: body['permission'] = self.permission - if self.principal is not None: body['principal'] = self.principal + if self.permission is not None: + body["permission"] = self.permission + if self.principal is not None: + body["principal"] = self.principal return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AclItem: """Deserializes the AclItem from a dictionary.""" - return cls(permission=_enum(d, 'permission', AclPermission), principal=d.get('principal', None)) - - + return cls(permission=_enum(d, "permission", AclPermission), principal=d.get("principal", None)) class AclPermission(Enum): - - - MANAGE = 'MANAGE' - READ = 'READ' - WRITE = 'WRITE' + + MANAGE = "MANAGE" + READ = "READ" + WRITE = "WRITE" + @dataclass class AzureKeyVaultSecretScopeMetadata: resource_id: str """The resource id of the azure KeyVault that user wants to associate the scope with.""" - + dns_name: str """The DNS of the KeyVault""" - + def as_dict(self) -> dict: """Serializes the AzureKeyVaultSecretScopeMetadata into a dictionary suitable for use as a JSON request body.""" body = {} - if self.dns_name is not None: body['dns_name'] = self.dns_name - if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.dns_name is not None: + body["dns_name"] = self.dns_name + if self.resource_id is not None: + body["resource_id"] = self.resource_id return body def as_shallow_dict(self) -> dict: """Serializes the AzureKeyVaultSecretScopeMetadata into a shallow dictionary of its immediate attributes.""" body = {} - if self.dns_name is not None: body['dns_name'] = self.dns_name - if self.resource_id is not None: body['resource_id'] = self.resource_id + if self.dns_name is not None: + body["dns_name"] = self.dns_name + if self.resource_id is not None: + body["resource_id"] = self.resource_id return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> AzureKeyVaultSecretScopeMetadata: """Deserializes the AzureKeyVaultSecretScopeMetadata from a dictionary.""" - return 
cls(dns_name=d.get('dns_name', None), resource_id=d.get('resource_id', None)) - - + return cls(dns_name=d.get("dns_name", None), resource_id=d.get("resource_id", None)) @dataclass @@ -94,217 +92,275 @@ class CreateCredentialsRequest: """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account, depending on which provider you are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported.""" - + personal_access_token: Optional[str] = None """The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - + def as_dict(self) -> dict: """Serializes the CreateCredentialsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username - if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username + if self.personal_access_token is not None: + body["personal_access_token"] = self.personal_access_token return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username - if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username + if self.personal_access_token is not None: + body["personal_access_token"] = self.personal_access_token return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsRequest: """Deserializes the CreateCredentialsRequest from a dictionary.""" - return cls(git_provider=d.get('git_provider', None), git_username=d.get('git_username', None), personal_access_token=d.get('personal_access_token', None)) - - + return cls( + git_provider=d.get("git_provider", None), + git_username=d.get("git_username", None), + personal_access_token=d.get("personal_access_token", None), + ) @dataclass class CreateCredentialsResponse: credential_id: int """ID of the credential object in the workspace.""" - + git_provider: str """The Git provider associated with the credential.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account and associated with the credential.""" - + def as_dict(self) -> dict: """Serializes the CreateCredentialsResponse into a 
dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username return body def as_shallow_dict(self) -> dict: """Serializes the CreateCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsResponse: """Deserializes the CreateCredentialsResponse from a dictionary.""" - return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None)) - - + return cls( + credential_id=d.get("credential_id", None), + git_provider=d.get("git_provider", None), + git_username=d.get("git_username", None), + ) @dataclass class CreateRepoRequest: url: str """URL of the Git repository to be linked.""" - + provider: str """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - + path: Optional[str] = None """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.""" - + sparse_checkout: Optional[SparseCheckout] = None """If specified, the repo will be created with sparse checkout enabled. 
You cannot enable/disable sparse checkout after the repo is created.""" - + def as_dict(self) -> dict: """Serializes the CreateRepoRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path - if self.provider is not None: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() - if self.url is not None: body['url'] = self.url + if self.path is not None: + body["path"] = self.path + if self.provider is not None: + body["provider"] = self.provider + if self.sparse_checkout: + body["sparse_checkout"] = self.sparse_checkout.as_dict() + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path - if self.provider is not None: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout - if self.url is not None: body['url'] = self.url + if self.path is not None: + body["path"] = self.path + if self.provider is not None: + body["provider"] = self.provider + if self.sparse_checkout: + body["sparse_checkout"] = self.sparse_checkout + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRepoRequest: """Deserializes the CreateRepoRequest from a dictionary.""" - return cls(path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None)) - - + return cls( + path=d.get("path", None), + provider=d.get("provider", None), + sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), + url=d.get("url", None), + ) @dataclass class CreateRepoResponse: branch: Optional[str] = None """Branch that the Git folder (repo) is checked out to.""" - + head_commit_id: Optional[str] = None """SHA-1 hash representing the commit ID of the current HEAD of the Git folder (repo).""" - + id: Optional[int] = None """ID of the Git folder (repo) object in the workspace.""" - + path: Optional[str] = None """Path of the Git folder (repo) in the workspace.""" - + provider: Optional[str] = None """Git provider of the linked Git repository.""" - + sparse_checkout: Optional[SparseCheckout] = None """Sparse checkout settings for the Git folder (repo).""" - + url: Optional[str] = None """URL of the linked Git repository.""" - + def as_dict(self) -> dict: """Serializes the CreateRepoResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.branch is not None: body['branch'] = self.branch - if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id - if self.id is not None: body['id'] = self.id - if self.path is not None: body['path'] = self.path - if self.provider is not None: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() - if self.url is not None: body['url'] = self.url + if self.branch is not None: + body["branch"] = self.branch + if self.head_commit_id is not None: + body["head_commit_id"] = self.head_commit_id + if self.id is not None: + body["id"] = self.id + if self.path is not None: + body["path"] = self.path + if self.provider is not None: + body["provider"] = self.provider + if self.sparse_checkout: + body["sparse_checkout"] = self.sparse_checkout.as_dict() + if self.url is not 
None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the CreateRepoResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.branch is not None: body['branch'] = self.branch - if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id - if self.id is not None: body['id'] = self.id - if self.path is not None: body['path'] = self.path - if self.provider is not None: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout - if self.url is not None: body['url'] = self.url + if self.branch is not None: + body["branch"] = self.branch + if self.head_commit_id is not None: + body["head_commit_id"] = self.head_commit_id + if self.id is not None: + body["id"] = self.id + if self.path is not None: + body["path"] = self.path + if self.provider is not None: + body["provider"] = self.provider + if self.sparse_checkout: + body["sparse_checkout"] = self.sparse_checkout + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateRepoResponse: """Deserializes the CreateRepoResponse from a dictionary.""" - return cls(branch=d.get('branch', None), head_commit_id=d.get('head_commit_id', None), id=d.get('id', None), path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None)) - - + return cls( + branch=d.get("branch", None), + head_commit_id=d.get("head_commit_id", None), + id=d.get("id", None), + path=d.get("path", None), + provider=d.get("provider", None), + sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), + url=d.get("url", None), + ) @dataclass class CreateScope: scope: str """Scope name requested by the user. Scope names are unique.""" - + backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None """The metadata for the secret scope if the type is `AZURE_KEYVAULT`""" - + initial_manage_principal: Optional[str] = None """The principal that is initially granted `MANAGE` permission to the created scope.""" - + scope_backend_type: Optional[ScopeBackendType] = None """The backend type the scope will be created with. 
If not specified, will default to `DATABRICKS`""" - + def as_dict(self) -> dict: """Serializes the CreateScope into a dictionary suitable for use as a JSON request body.""" body = {} - if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault.as_dict() - if self.initial_manage_principal is not None: body['initial_manage_principal'] = self.initial_manage_principal - if self.scope is not None: body['scope'] = self.scope - if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type.value + if self.backend_azure_keyvault: + body["backend_azure_keyvault"] = self.backend_azure_keyvault.as_dict() + if self.initial_manage_principal is not None: + body["initial_manage_principal"] = self.initial_manage_principal + if self.scope is not None: + body["scope"] = self.scope + if self.scope_backend_type is not None: + body["scope_backend_type"] = self.scope_backend_type.value return body def as_shallow_dict(self) -> dict: """Serializes the CreateScope into a shallow dictionary of its immediate attributes.""" body = {} - if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault - if self.initial_manage_principal is not None: body['initial_manage_principal'] = self.initial_manage_principal - if self.scope is not None: body['scope'] = self.scope - if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type + if self.backend_azure_keyvault: + body["backend_azure_keyvault"] = self.backend_azure_keyvault + if self.initial_manage_principal is not None: + body["initial_manage_principal"] = self.initial_manage_principal + if self.scope is not None: + body["scope"] = self.scope + if self.scope_backend_type is not None: + body["scope_backend_type"] = self.scope_backend_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CreateScope: """Deserializes the CreateScope from a dictionary.""" - return cls(backend_azure_keyvault=_from_dict(d, 'backend_azure_keyvault', AzureKeyVaultSecretScopeMetadata), initial_manage_principal=d.get('initial_manage_principal', None), scope=d.get('scope', None), scope_backend_type=_enum(d, 'scope_backend_type', ScopeBackendType)) - - + return cls( + backend_azure_keyvault=_from_dict(d, "backend_azure_keyvault", AzureKeyVaultSecretScopeMetadata), + initial_manage_principal=d.get("initial_manage_principal", None), + scope=d.get("scope", None), + scope_backend_type=_enum(d, "scope_backend_type", ScopeBackendType), + ) @dataclass @@ -323,106 +379,116 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> CreateScopeResponse: """Deserializes the CreateScopeResponse from a dictionary.""" return cls() - - @dataclass class CredentialInfo: credential_id: int """ID of the credential object in the workspace.""" - + git_provider: Optional[str] = None """The Git provider associated with the credential.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account and associated with the credential.""" - + def as_dict(self) -> dict: """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.git_provider is not None: + 
body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username return body def as_shallow_dict(self) -> dict: """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: """Deserializes the CredentialInfo from a dictionary.""" - return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None)) - - + return cls( + credential_id=d.get("credential_id", None), + git_provider=d.get("git_provider", None), + git_username=d.get("git_username", None), + ) @dataclass class Delete: path: str """The absolute path of the notebook or directory.""" - + recursive: Optional[bool] = None """The flag that specifies whether to delete the object recursively. It is `false` by default. Please note this deleting directory is not atomic. If it fails in the middle, some of objects under this directory may be deleted and cannot be undone.""" - + def as_dict(self) -> dict: """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path - if self.recursive is not None: body['recursive'] = self.recursive + if self.path is not None: + body["path"] = self.path + if self.recursive is not None: + body["recursive"] = self.recursive return body def as_shallow_dict(self) -> dict: """Serializes the Delete into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path - if self.recursive is not None: body['recursive'] = self.recursive + if self.path is not None: + body["path"] = self.path + if self.recursive is not None: + body["recursive"] = self.recursive return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Delete: """Deserializes the Delete from a dictionary.""" - return cls(path=d.get('path', None), recursive=d.get('recursive', None)) - - + return cls(path=d.get("path", None), recursive=d.get("recursive", None)) @dataclass class DeleteAcl: scope: str """The name of the scope to remove permissions from.""" - + principal: str """The principal to remove an existing ACL from.""" - + def as_dict(self) -> dict: """Serializes the DeleteAcl into a dictionary suitable for use as a JSON request body.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.scope is not None: body['scope'] = self.scope + if self.principal is not None: + body["principal"] = self.principal + if self.scope is not None: + body["scope"] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes.""" body = {} - if self.principal is not None: body['principal'] = self.principal - if self.scope is not None: body['scope'] = self.scope + if self.principal is not None: + body["principal"] = self.principal + if self.scope is not None: + body["scope"] = self.scope return body 
@classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteAcl: """Deserializes the DeleteAcl from a dictionary.""" - return cls(principal=d.get('principal', None), scope=d.get('scope', None)) - - + return cls(principal=d.get("principal", None), scope=d.get("scope", None)) @dataclass @@ -441,11 +507,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteAclResponse: """Deserializes the DeleteAclResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -464,11 +525,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialsResponse: """Deserializes the DeleteCredentialsResponse from a dictionary.""" return cls() - - - - - @dataclass @@ -487,8 +543,6 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteRepoResponse: """Deserializes the DeleteRepoResponse from a dictionary.""" return cls() - - @dataclass @@ -507,33 +561,31 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: """Deserializes the DeleteResponse from a dictionary.""" return cls() - - @dataclass class DeleteScope: scope: str """Name of the scope to delete.""" - + def as_dict(self) -> dict: """Serializes the DeleteScope into a dictionary suitable for use as a JSON request body.""" body = {} - if self.scope is not None: body['scope'] = self.scope + if self.scope is not None: + body["scope"] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the DeleteScope into a shallow dictionary of its immediate attributes.""" body = {} - if self.scope is not None: body['scope'] = self.scope + if self.scope is not None: + body["scope"] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteScope: """Deserializes the DeleteScope from a dictionary.""" - return cls(scope=d.get('scope', None)) - - + return cls(scope=d.get("scope", None)) @dataclass @@ -552,38 +604,38 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteScopeResponse: """Deserializes the DeleteScopeResponse from a dictionary.""" return cls() - - @dataclass class DeleteSecret: scope: str """The name of the scope that contains the secret to delete.""" - + key: str """Name of the secret to delete.""" - + def as_dict(self) -> dict: """Serializes the DeleteSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.scope is not None: body['scope'] = self.scope + if self.key is not None: + body["key"] = self.key + if self.scope is not None: + body["scope"] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.scope is not None: body['scope'] = self.scope + if self.key is not None: + body["key"] = self.key + if self.scope is not None: + body["scope"] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> DeleteSecret: """Deserializes the DeleteSecret from a dictionary.""" - return cls(key=d.get('key', None), scope=d.get('scope', None)) - - + return cls(key=d.get("key", None), scope=d.get("scope", None)) @dataclass @@ -602,254 +654,255 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> DeleteSecretResponse: """Deserializes the DeleteSecretResponse from a dictionary.""" return cls() - - class ExportFormat(Enum): """The format for workspace import and export.""" - - 
AUTO = 'AUTO' - DBC = 'DBC' - HTML = 'HTML' - JUPYTER = 'JUPYTER' - RAW = 'RAW' - R_MARKDOWN = 'R_MARKDOWN' - SOURCE = 'SOURCE' - + AUTO = "AUTO" + DBC = "DBC" + HTML = "HTML" + JUPYTER = "JUPYTER" + RAW = "RAW" + R_MARKDOWN = "R_MARKDOWN" + SOURCE = "SOURCE" @dataclass class ExportResponse: """The request field `direct_download` determines whether a JSON response or binary contents are returned by this endpoint.""" - + content: Optional[str] = None """The base64-encoded content. If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.""" - + file_type: Optional[str] = None """The file type of the exported file.""" - + def as_dict(self) -> dict: """Serializes the ExportResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.content is not None: body['content'] = self.content - if self.file_type is not None: body['file_type'] = self.file_type + if self.content is not None: + body["content"] = self.content + if self.file_type is not None: + body["file_type"] = self.file_type return body def as_shallow_dict(self) -> dict: """Serializes the ExportResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.content is not None: body['content'] = self.content - if self.file_type is not None: body['file_type'] = self.file_type + if self.content is not None: + body["content"] = self.content + if self.file_type is not None: + body["file_type"] = self.file_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ExportResponse: """Deserializes the ExportResponse from a dictionary.""" - return cls(content=d.get('content', None), file_type=d.get('file_type', None)) - - - - - - - - + return cls(content=d.get("content", None), file_type=d.get("file_type", None)) @dataclass class GetCredentialsResponse: credential_id: int """ID of the credential object in the workspace.""" - + git_provider: Optional[str] = None """The Git provider associated with the credential.""" - + git_username: Optional[str] = None """The username or email provided with your Git provider account and associated with the credential.""" - + def as_dict(self) -> dict: """Serializes the GetCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username return body def as_shallow_dict(self) -> dict: """Serializes the GetCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: body['credential_id'] = self.credential_id - if self.git_provider is not None: body['git_provider'] = self.git_provider - if self.git_username is not None: body['git_username'] = self.git_username + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.git_provider is not None: + body["git_provider"] = self.git_provider + if self.git_username is not None: + body["git_username"] = self.git_username return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetCredentialsResponse: """Deserializes the GetCredentialsResponse from a dictionary.""" - 
return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None)) - - - - - + return cls( + credential_id=d.get("credential_id", None), + git_provider=d.get("git_provider", None), + git_username=d.get("git_username", None), + ) @dataclass class GetRepoPermissionLevelsResponse: permission_levels: Optional[List[RepoPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetRepoPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetRepoPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRepoPermissionLevelsResponse: """Deserializes the GetRepoPermissionLevelsResponse from a dictionary.""" - return cls(permission_levels=_repeated_dict(d, 'permission_levels', RepoPermissionsDescription)) - - - - - - - - + return cls(permission_levels=_repeated_dict(d, "permission_levels", RepoPermissionsDescription)) @dataclass class GetRepoResponse: branch: Optional[str] = None """Branch that the local version of the repo is checked out to.""" - + head_commit_id: Optional[str] = None """SHA-1 hash representing the commit ID of the current HEAD of the repo.""" - + id: Optional[int] = None """ID of the Git folder (repo) object in the workspace.""" - + path: Optional[str] = None """Path of the Git folder (repo) in the workspace.""" - + provider: Optional[str] = None """Git provider of the linked Git repository.""" - + sparse_checkout: Optional[SparseCheckout] = None """Sparse checkout settings for the Git folder (repo).""" - + url: Optional[str] = None """URL of the linked Git repository.""" - + def as_dict(self) -> dict: """Serializes the GetRepoResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.branch is not None: body['branch'] = self.branch - if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id - if self.id is not None: body['id'] = self.id - if self.path is not None: body['path'] = self.path - if self.provider is not None: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict() - if self.url is not None: body['url'] = self.url + if self.branch is not None: + body["branch"] = self.branch + if self.head_commit_id is not None: + body["head_commit_id"] = self.head_commit_id + if self.id is not None: + body["id"] = self.id + if self.path is not None: + body["path"] = self.path + if self.provider is not None: + body["provider"] = self.provider + if self.sparse_checkout: + body["sparse_checkout"] = self.sparse_checkout.as_dict() + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: """Serializes the GetRepoResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.branch is not None: body['branch'] = self.branch - if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id - if self.id is 
not None: body['id'] = self.id - if self.path is not None: body['path'] = self.path - if self.provider is not None: body['provider'] = self.provider - if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout - if self.url is not None: body['url'] = self.url + if self.branch is not None: + body["branch"] = self.branch + if self.head_commit_id is not None: + body["head_commit_id"] = self.head_commit_id + if self.id is not None: + body["id"] = self.id + if self.path is not None: + body["path"] = self.path + if self.provider is not None: + body["provider"] = self.provider + if self.sparse_checkout: + body["sparse_checkout"] = self.sparse_checkout + if self.url is not None: + body["url"] = self.url return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetRepoResponse: """Deserializes the GetRepoResponse from a dictionary.""" - return cls(branch=d.get('branch', None), head_commit_id=d.get('head_commit_id', None), id=d.get('id', None), path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None)) - - - - - + return cls( + branch=d.get("branch", None), + head_commit_id=d.get("head_commit_id", None), + id=d.get("id", None), + path=d.get("path", None), + provider=d.get("provider", None), + sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), + url=d.get("url", None), + ) @dataclass class GetSecretResponse: key: Optional[str] = None """A unique name to identify the secret.""" - + value: Optional[str] = None """The value of the secret in its byte representation.""" - + def as_dict(self) -> dict: """Serializes the GetSecretResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: """Serializes the GetSecretResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: body['key'] = self.key - if self.value is not None: body['value'] = self.value + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> GetSecretResponse: """Deserializes the GetSecretResponse from a dictionary.""" - return cls(key=d.get('key', None), value=d.get('value', None)) - - - - - - - - + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass class GetWorkspaceObjectPermissionLevelsResponse: permission_levels: Optional[List[WorkspaceObjectPermissionsDescription]] = None """Specific permission levels""" - + def as_dict(self) -> dict: """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + if self.permission_levels: + body["permission_levels"] = [v.as_dict() for v in self.permission_levels] return body def as_shallow_dict(self) -> dict: """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission_levels: body['permission_levels'] = self.permission_levels + if self.permission_levels: + body["permission_levels"] = self.permission_levels return body @classmethod def from_dict(cls, d: 


 @dataclass
 class GetWorkspaceObjectPermissionLevelsResponse:
     permission_levels: Optional[List[WorkspaceObjectPermissionsDescription]] = None
     """Specific permission levels"""
-
+
     def as_dict(self) -> dict:
         """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
+        if self.permission_levels:
+            body["permission_levels"] = [v.as_dict() for v in self.permission_levels]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        if self.permission_levels:
+            body["permission_levels"] = self.permission_levels
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceObjectPermissionLevelsResponse:
         """Deserializes the GetWorkspaceObjectPermissionLevelsResponse from a dictionary."""
-        return cls(permission_levels=_repeated_dict(d, 'permission_levels', WorkspaceObjectPermissionsDescription))
-
-
-
-
-
+        return cls(permission_levels=_repeated_dict(d, "permission_levels", WorkspaceObjectPermissionsDescription))


 @dataclass
@@ -857,13 +910,13 @@ class Import:
     path: str
     """The absolute path of the object or directory. Importing a directory is only supported for the `DBC` and `SOURCE` formats."""
-
+
     content: Optional[str] = None
     """The base64-encoded content. This has a limit of 10 MB. If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. This parameter might be absent, and instead a posted file is used."""
-
+
     format: Optional[ImportFormat] = None
     """This specifies the format of the file to be imported.
@@ -876,52 +929,67 @@ class Import:
     as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format."""
-
+
     language: Optional[Language] = None
     """The language of the object. This value is set only if the object type is `NOTEBOOK`."""
-
+
     overwrite: Optional[bool] = None
     """The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC` format, `overwrite` is not supported since it may contain a directory."""
-
+
     def as_dict(self) -> dict:
         """Serializes the Import into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.format is not None: body['format'] = self.format.value
-        if self.language is not None: body['language'] = self.language.value
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.content is not None:
+            body["content"] = self.content
+        if self.format is not None:
+            body["format"] = self.format.value
+        if self.language is not None:
+            body["language"] = self.language.value
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the Import into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.content is not None: body['content'] = self.content
-        if self.format is not None: body['format'] = self.format
-        if self.language is not None: body['language'] = self.language
-        if self.overwrite is not None: body['overwrite'] = self.overwrite
-        if self.path is not None: body['path'] = self.path
+        if self.content is not None:
+            body["content"] = self.content
+        if self.format is not None:
+            body["format"] = self.format
+        if self.language is not None:
+            body["language"] = self.language
+        if self.overwrite is not None:
+            body["overwrite"] = self.overwrite
+        if self.path is not None:
+            body["path"] = self.path
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> Import:
         """Deserializes the Import from a dictionary."""
-        return cls(content=d.get('content', None), format=_enum(d, 'format', ImportFormat), language=_enum(d, 'language', Language), overwrite=d.get('overwrite', None), path=d.get('path', None))
-
-
+        return cls(
+            content=d.get("content", None),
+            format=_enum(d, "format", ImportFormat),
+            language=_enum(d, "language", Language),
+            overwrite=d.get("overwrite", None),
+            path=d.get("path", None),
+        )
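Since Import.content must be base64-encoded (per the field docstring above), callers encode before sending. A short sketch using the generated client method that takes this shape; the target path is hypothetical and the snippet is illustrative only:

    import base64

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.workspace import ImportFormat, Language

    w = WorkspaceClient()
    w.workspace.import_(
        path="/Users/someone@example.com/hello",
        format=ImportFormat.SOURCE,
        language=Language.PYTHON,
        content=base64.b64encode(b"print('hello')").decode("utf-8"),
        overwrite=True,  # not supported for DBC format, per the docstring above
    )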


 class ImportFormat(Enum):
     """The format for workspace import and export."""
-
-    AUTO = 'AUTO'
-    DBC = 'DBC'
-    HTML = 'HTML'
-    JUPYTER = 'JUPYTER'
-    RAW = 'RAW'
-    R_MARKDOWN = 'R_MARKDOWN'
-    SOURCE = 'SOURCE'
+
+    AUTO = "AUTO"
+    DBC = "DBC"
+    HTML = "HTML"
+    JUPYTER = "JUPYTER"
+    RAW = "RAW"
+    R_MARKDOWN = "R_MARKDOWN"
+    SOURCE = "SOURCE"
+

 @dataclass
 class ImportResponse:
@@ -939,72 +1007,65 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> ImportResponse:
         """Deserializes the ImportResponse from a dictionary."""
         return cls()
-
-

 class Language(Enum):
     """The language of notebook."""
-
-    PYTHON = 'PYTHON'
-    R = 'R'
-    SCALA = 'SCALA'
-    SQL = 'SQL'
-
+    PYTHON = "PYTHON"
+    R = "R"
+    SCALA = "SCALA"
+    SQL = "SQL"


 @dataclass
 class ListAclsResponse:
     items: Optional[List[AclItem]] = None
     """The associated ACLs rule applied to principals in the given scope."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListAclsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.items: body['items'] = [v.as_dict() for v in self.items]
+        if self.items:
+            body["items"] = [v.as_dict() for v in self.items]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListAclsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.items: body['items'] = self.items
+        if self.items:
+            body["items"] = self.items
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListAclsResponse:
         """Deserializes the ListAclsResponse from a dictionary."""
-        return cls(items=_repeated_dict(d, 'items', AclItem))
-
-
+        return cls(items=_repeated_dict(d, "items", AclItem))


 @dataclass
 class ListCredentialsResponse:
     credentials: Optional[List[CredentialInfo]] = None
     """List of credentials."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        if self.credentials:
+            body["credentials"] = [v.as_dict() for v in self.credentials]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credentials: body['credentials'] = self.credentials
+        if self.credentials:
+            body["credentials"] = self.credentials
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse:
         """Deserializes the ListCredentialsResponse from a dictionary."""
-        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo))
-
-
-
-
-
+        return cls(credentials=_repeated_dict(d, "credentials", CredentialInfo))


 @dataclass
@@ -1012,111 +1073,107 @@ class ListReposResponse:
     next_page_token: Optional[str] = None
     """Token that can be specified as a query parameter to the `GET /repos` endpoint to retrieve the next page of results."""
-
+
     repos: Optional[List[RepoInfo]] = None
     """List of Git folders (repos)."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListReposResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.repos: body['repos'] = [v.as_dict() for v in self.repos]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.repos:
+            body["repos"] = [v.as_dict() for v in self.repos]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListReposResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        if self.repos: body['repos'] = self.repos
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        if self.repos:
+            body["repos"] = self.repos
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListReposResponse:
         """Deserializes the ListReposResponse from a dictionary."""
-        return cls(next_page_token=d.get('next_page_token', None), repos=_repeated_dict(d, 'repos', RepoInfo))
-
-
+        return cls(next_page_token=d.get("next_page_token", None), repos=_repeated_dict(d, "repos", RepoInfo))


 @dataclass
 class ListResponse:
     objects: Optional[List[ObjectInfo]] = None
     """List of objects."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
+        if self.objects:
+            body["objects"] = [v.as_dict() for v in self.objects]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.objects: body['objects'] = self.objects
+        if self.objects:
+            body["objects"] = self.objects
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
-        return cls(objects=_repeated_dict(d, 'objects', ObjectInfo))
-
-
+        return cls(objects=_repeated_dict(d, "objects", ObjectInfo))


 @dataclass
 class ListScopesResponse:
     scopes: Optional[List[SecretScope]] = None
     """The available secret scopes."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListScopesResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes]
+        if self.scopes:
+            body["scopes"] = [v.as_dict() for v in self.scopes]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListScopesResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.scopes: body['scopes'] = self.scopes
+        if self.scopes:
+            body["scopes"] = self.scopes
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListScopesResponse:
         """Deserializes the ListScopesResponse from a dictionary."""
-        return cls(scopes=_repeated_dict(d, 'scopes', SecretScope))
-
-
-
-
-
+        return cls(scopes=_repeated_dict(d, "scopes", SecretScope))


 @dataclass
 class ListSecretsResponse:
     secrets: Optional[List[SecretMetadata]] = None
     """Metadata information of all secrets contained within the given scope."""
-
+
     def as_dict(self) -> dict:
         """Serializes the ListSecretsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
+        if self.secrets:
+            body["secrets"] = [v.as_dict() for v in self.secrets]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the ListSecretsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.secrets: body['secrets'] = self.secrets
+        if self.secrets:
+            body["secrets"] = self.secrets
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> ListSecretsResponse:
         """Deserializes the ListSecretsResponse from a dictionary."""
-        return cls(secrets=_repeated_dict(d, 'secrets', SecretMetadata))
-
-
-
-
-
+        return cls(secrets=_repeated_dict(d, "secrets", SecretMetadata))
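ListReposResponse is a classic token-paginated shape: next_page_token feeds the next `GET /repos` call until it comes back empty. The generated repos client consumes the token internally, but the raw loop looks roughly like this sketch, where fetch is a stand-in for whatever issues one request per page token:

    from typing import Callable, Iterator, Optional

    from databricks.sdk.service.workspace import ListReposResponse, RepoInfo

    def all_repos(fetch: Callable[[Optional[str]], ListReposResponse]) -> Iterator[RepoInfo]:
        """Drain every page of a repos listing."""
        token = None
        while True:
            page = fetch(token)
            for repo in page.repos or []:
                yield repo
            token = page.next_page_token
            if not token:
                return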
"""The absolute path of the directory. If the parent directories do not exist, it will also create them. If the directory already exists, this command will do nothing and succeed.""" - + def as_dict(self) -> dict: """Serializes the Mkdirs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body def as_shallow_dict(self) -> dict: """Serializes the Mkdirs into a shallow dictionary of its immediate attributes.""" body = {} - if self.path is not None: body['path'] = self.path + if self.path is not None: + body["path"] = self.path return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> Mkdirs: """Deserializes the Mkdirs from a dictionary.""" - return cls(path=d.get('path', None)) - - + return cls(path=d.get("path", None)) @dataclass @@ -1161,119 +1218,149 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> MkdirsResponse: """Deserializes the MkdirsResponse from a dictionary.""" return cls() - - @dataclass class ObjectInfo: """The information of the object in workspace. It will be returned by ``list`` and ``get-status``.""" - + created_at: Optional[int] = None """Only applicable to files. The creation UTC timestamp.""" - + language: Optional[Language] = None """The language of the object. This value is set only if the object type is ``NOTEBOOK``.""" - + modified_at: Optional[int] = None """Only applicable to files, the last modified UTC timestamp.""" - + object_id: Optional[int] = None """Unique identifier for the object.""" - + object_type: Optional[ObjectType] = None """The type of the object in workspace. - `NOTEBOOK`: document that contains runnable code, visualizations, and explanatory text. - `DIRECTORY`: directory - `LIBRARY`: library - `FILE`: file - `REPO`: repository - `DASHBOARD`: Lakeview dashboard""" - + path: Optional[str] = None """The absolute path of the object.""" - + resource_id: Optional[str] = None """A unique identifier for the object that is consistent across all Databricks APIs.""" - + size: Optional[int] = None """Only applicable to files. 
The file size in bytes can be returned.""" - + def as_dict(self) -> dict: """Serializes the ObjectInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.language is not None: body['language'] = self.language.value - if self.modified_at is not None: body['modified_at'] = self.modified_at - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type.value - if self.path is not None: body['path'] = self.path - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.size is not None: body['size'] = self.size + if self.created_at is not None: + body["created_at"] = self.created_at + if self.language is not None: + body["language"] = self.language.value + if self.modified_at is not None: + body["modified_at"] = self.modified_at + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type.value + if self.path is not None: + body["path"] = self.path + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.size is not None: + body["size"] = self.size return body def as_shallow_dict(self) -> dict: """Serializes the ObjectInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.language is not None: body['language'] = self.language - if self.modified_at is not None: body['modified_at'] = self.modified_at - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type - if self.path is not None: body['path'] = self.path - if self.resource_id is not None: body['resource_id'] = self.resource_id - if self.size is not None: body['size'] = self.size + if self.created_at is not None: + body["created_at"] = self.created_at + if self.language is not None: + body["language"] = self.language + if self.modified_at is not None: + body["modified_at"] = self.modified_at + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type + if self.path is not None: + body["path"] = self.path + if self.resource_id is not None: + body["resource_id"] = self.resource_id + if self.size is not None: + body["size"] = self.size return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> ObjectInfo: """Deserializes the ObjectInfo from a dictionary.""" - return cls(created_at=d.get('created_at', None), language=_enum(d, 'language', Language), modified_at=d.get('modified_at', None), object_id=d.get('object_id', None), object_type=_enum(d, 'object_type', ObjectType), path=d.get('path', None), resource_id=d.get('resource_id', None), size=d.get('size', None)) - - + return cls( + created_at=d.get("created_at", None), + language=_enum(d, "language", Language), + modified_at=d.get("modified_at", None), + object_id=d.get("object_id", None), + object_type=_enum(d, "object_type", ObjectType), + path=d.get("path", None), + resource_id=d.get("resource_id", None), + size=d.get("size", None), + ) class ObjectType(Enum): """The type of the object in workspace.""" - - DASHBOARD = 'DASHBOARD' - DIRECTORY = 'DIRECTORY' - FILE = 'FILE' - LIBRARY = 'LIBRARY' - NOTEBOOK = 'NOTEBOOK' - REPO = 'REPO' + + DASHBOARD = "DASHBOARD" + DIRECTORY = "DIRECTORY" + FILE = "FILE" + 
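ObjectInfo is the element type returned by the workspace list and get-status calls, and object_type is what consumers usually switch on. A small sketch of deserializing one entry; the path and payload are illustrative:

    from databricks.sdk.service.workspace import ObjectInfo, ObjectType

    info = ObjectInfo.from_dict({"path": "/Shared/report", "object_type": "NOTEBOOK", "language": "PYTHON"})
    if info.object_type == ObjectType.NOTEBOOK:
        # language is only populated for notebooks, per the field docstring above
        print(f"{info.path} is a {info.language.value} notebook")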
LIBRARY = "LIBRARY" + NOTEBOOK = "NOTEBOOK" + REPO = "REPO" + @dataclass class PutAcl: scope: str """The name of the scope to apply permissions to.""" - + principal: str """The principal in which the permission is applied.""" - + permission: AclPermission """The permission level applied to the principal.""" - + def as_dict(self) -> dict: """Serializes the PutAcl into a dictionary suitable for use as a JSON request body.""" body = {} - if self.permission is not None: body['permission'] = self.permission.value - if self.principal is not None: body['principal'] = self.principal - if self.scope is not None: body['scope'] = self.scope + if self.permission is not None: + body["permission"] = self.permission.value + if self.principal is not None: + body["principal"] = self.principal + if self.scope is not None: + body["scope"] = self.scope return body def as_shallow_dict(self) -> dict: """Serializes the PutAcl into a shallow dictionary of its immediate attributes.""" body = {} - if self.permission is not None: body['permission'] = self.permission - if self.principal is not None: body['principal'] = self.principal - if self.scope is not None: body['scope'] = self.scope + if self.permission is not None: + body["permission"] = self.permission + if self.principal is not None: + body["principal"] = self.principal + if self.scope is not None: + body["scope"] = self.scope return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutAcl: """Deserializes the PutAcl from a dictionary.""" - return cls(permission=_enum(d, 'permission', AclPermission), principal=d.get('principal', None), scope=d.get('scope', None)) - - + return cls( + permission=_enum(d, "permission", AclPermission), + principal=d.get("principal", None), + scope=d.get("scope", None), + ) @dataclass @@ -1292,48 +1379,57 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PutAclResponse: """Deserializes the PutAclResponse from a dictionary.""" return cls() - - @dataclass class PutSecret: scope: str """The name of the scope to which the secret will be associated with.""" - + key: str """A unique name to identify the secret.""" - + bytes_value: Optional[str] = None """If specified, value will be stored as bytes.""" - + string_value: Optional[str] = None """If specified, note that the value will be stored in UTF-8 (MB4) form.""" - + def as_dict(self) -> dict: """Serializes the PutSecret into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bytes_value is not None: body['bytes_value'] = self.bytes_value - if self.key is not None: body['key'] = self.key - if self.scope is not None: body['scope'] = self.scope - if self.string_value is not None: body['string_value'] = self.string_value + if self.bytes_value is not None: + body["bytes_value"] = self.bytes_value + if self.key is not None: + body["key"] = self.key + if self.scope is not None: + body["scope"] = self.scope + if self.string_value is not None: + body["string_value"] = self.string_value return body def as_shallow_dict(self) -> dict: """Serializes the PutSecret into a shallow dictionary of its immediate attributes.""" body = {} - if self.bytes_value is not None: body['bytes_value'] = self.bytes_value - if self.key is not None: body['key'] = self.key - if self.scope is not None: body['scope'] = self.scope - if self.string_value is not None: body['string_value'] = self.string_value + if self.bytes_value is not None: + body["bytes_value"] = self.bytes_value + if self.key is not None: + body["key"] = self.key + if self.scope is not 
None: + body["scope"] = self.scope + if self.string_value is not None: + body["string_value"] = self.string_value return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> PutSecret: """Deserializes the PutSecret from a dictionary.""" - return cls(bytes_value=d.get('bytes_value', None), key=d.get('key', None), scope=d.get('scope', None), string_value=d.get('string_value', None)) - - + return cls( + bytes_value=d.get("bytes_value", None), + key=d.get("key", None), + scope=d.get("scope", None), + string_value=d.get("string_value", None), + ) @dataclass @@ -1352,410 +1448,489 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> PutSecretResponse: """Deserializes the PutSecretResponse from a dictionary.""" return cls() - - @dataclass class RepoAccessControlRequest: group_name: Optional[str] = None """name of the group""" - + permission_level: Optional[RepoPermissionLevel] = None """Permission level""" - + service_principal_name: Optional[str] = None """application ID of a service principal""" - + user_name: Optional[str] = None """name of the user""" - + def as_dict(self) -> dict: """Serializes the RepoAccessControlRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level.value - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body def as_shallow_dict(self) -> dict: """Serializes the RepoAccessControlRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.group_name is not None: body['group_name'] = self.group_name - if self.permission_level is not None: body['permission_level'] = self.permission_level - if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name - if self.user_name is not None: body['user_name'] = self.user_name + if self.group_name is not None: + body["group_name"] = self.group_name + if self.permission_level is not None: + body["permission_level"] = self.permission_level + if self.service_principal_name is not None: + body["service_principal_name"] = self.service_principal_name + if self.user_name is not None: + body["user_name"] = self.user_name return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> RepoAccessControlRequest: """Deserializes the RepoAccessControlRequest from a dictionary.""" - return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', RepoPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None)) - - + return cls( + group_name=d.get("group_name", None), + permission_level=_enum(d, "permission_level", RepoPermissionLevel), + service_principal_name=d.get("service_principal_name", None), + user_name=d.get("user_name", None), + ) @dataclass class RepoAccessControlResponse: all_permissions: Optional[List[RepoPermission]] = None """All permissions.""" - + display_name: Optional[str] = None """Display 
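PutSecret carries the value in one of two fields: string_value for UTF-8 text or bytes_value for base64-encoded bytes; the service expects exactly one of the two to be set. A sketch of the text case, with placeholder scope and key names:

    from databricks.sdk.service.workspace import PutSecret

    req = PutSecret(scope="my-scope", key="db-password", string_value="hunter2")
    # Only the populated value field is serialized into the request body.
    assert "bytes_value" not in req.as_dict()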

 @dataclass
 class RepoAccessControlRequest:
     group_name: Optional[str] = None
     """name of the group"""
-
+
     permission_level: Optional[RepoPermissionLevel] = None
     """Permission level"""
-
+
     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoAccessControlRequest:
         """Deserializes the RepoAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', RepoPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", RepoPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )


 @dataclass
 class RepoAccessControlResponse:
     all_permissions: Optional[List[RepoPermission]] = None
     """All permissions."""
-
+
     display_name: Optional[str] = None
     """Display name of the user or service principal."""
-
+
     group_name: Optional[str] = None
     """name of the group"""
-
+
     service_principal_name: Optional[str] = None
     """Name of the service principal."""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoAccessControlResponse:
         """Deserializes the RepoAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', RepoPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", RepoPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )


 @dataclass
 class RepoInfo:
     """Git folder (repo) information."""
-
+
     branch: Optional[str] = None
     """Name of the current git branch of the git folder (repo)."""
-
+
     head_commit_id: Optional[str] = None
     """Current git commit id of the git folder (repo)."""
-
+
     id: Optional[int] = None
     """Id of the git folder (repo) in the Workspace."""
-
+
     path: Optional[str] = None
     """Root path of the git folder (repo) in the Workspace."""
-
+
     provider: Optional[str] = None
     """Git provider of the remote git repository, e.g. `gitHub`."""
-
+
     sparse_checkout: Optional[SparseCheckout] = None
     """Sparse checkout config for the git folder (repo)."""
-
+
     url: Optional[str] = None
     """URL of the remote git repository."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout.as_dict()
+        if self.url is not None:
+            body["url"] = self.url
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
-        if self.id is not None: body['id'] = self.id
-        if self.path is not None: body['path'] = self.path
-        if self.provider is not None: body['provider'] = self.provider
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
-        if self.url is not None: body['url'] = self.url
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.head_commit_id is not None:
+            body["head_commit_id"] = self.head_commit_id
+        if self.id is not None:
+            body["id"] = self.id
+        if self.path is not None:
+            body["path"] = self.path
+        if self.provider is not None:
+            body["provider"] = self.provider
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout
+        if self.url is not None:
+            body["url"] = self.url
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
-        return cls(branch=d.get('branch', None), head_commit_id=d.get('head_commit_id', None), id=d.get('id', None), path=d.get('path', None), provider=d.get('provider', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout), url=d.get('url', None))
-
-
+        return cls(
+            branch=d.get("branch", None),
+            head_commit_id=d.get("head_commit_id", None),
+            id=d.get("id", None),
+            path=d.get("path", None),
+            provider=d.get("provider", None),
+            sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout),
+            url=d.get("url", None),
+        )


 @dataclass
 class RepoPermission:
     inherited: Optional[bool] = None
-
+
     inherited_from_object: Optional[List[str]] = None
-
+
     permission_level: Optional[RepoPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoPermission:
         """Deserializes the RepoPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', RepoPermissionLevel))
-
-
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", RepoPermissionLevel),
+        )


 class RepoPermissionLevel(Enum):
     """Permission level"""
-
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_READ = 'CAN_READ'
-    CAN_RUN = 'CAN_RUN'
+
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_READ = "CAN_READ"
+    CAN_RUN = "CAN_RUN"
+

 @dataclass
 class RepoPermissions:
     access_control_list: Optional[List[RepoAccessControlResponse]] = None
-
+
     object_id: Optional[str] = None
-
+
     object_type: Optional[str] = None
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoPermissions into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermissions into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.object_id is not None: body['object_id'] = self.object_id
-        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.object_id is not None:
+            body["object_id"] = self.object_id
+        if self.object_type is not None:
+            body["object_type"] = self.object_type
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoPermissions:
         """Deserializes the RepoPermissions from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', RepoAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlResponse),
+            object_id=d.get("object_id", None),
+            object_type=d.get("object_type", None),
+        )


 @dataclass
 class RepoPermissionsDescription:
     description: Optional[str] = None
-
+
     permission_level: Optional[RepoPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoPermissionsDescription into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermissionsDescription into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.description is not None:
+            body["description"] = self.description
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsDescription:
         """Deserializes the RepoPermissionsDescription from a dictionary."""
-        return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', RepoPermissionLevel))
-
-
+        return cls(
+            description=d.get("description", None), permission_level=_enum(d, "permission_level", RepoPermissionLevel)
+        )


 @dataclass
 class RepoPermissionsRequest:
     access_control_list: Optional[List[RepoAccessControlRequest]] = None
-
+
     repo_id: Optional[str] = None
     """The repo for which to get or manage permissions."""
-
+
     def as_dict(self) -> dict:
         """Serializes the RepoPermissionsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
+        if self.access_control_list:
+            body["access_control_list"] = [v.as_dict() for v in self.access_control_list]
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_control_list: body['access_control_list'] = self.access_control_list
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
+        if self.access_control_list:
+            body["access_control_list"] = self.access_control_list
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsRequest:
         """Deserializes the RepoPermissionsRequest from a dictionary."""
-        return cls(access_control_list=_repeated_dict(d, 'access_control_list', RepoAccessControlRequest), repo_id=d.get('repo_id', None))
-
-
+        return cls(
+            access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlRequest),
+            repo_id=d.get("repo_id", None),
+        )


 class ScopeBackendType(Enum):
-
-
-    AZURE_KEYVAULT = 'AZURE_KEYVAULT'
-    DATABRICKS = 'DATABRICKS'
+
+    AZURE_KEYVAULT = "AZURE_KEYVAULT"
+    DATABRICKS = "DATABRICKS"
+

 @dataclass
 class SecretMetadata:
     key: Optional[str] = None
     """A unique name to identify the secret."""
-
+
     last_updated_timestamp: Optional[int] = None
     """The last updated timestamp (in milliseconds) for the secret."""
-
+
     def as_dict(self) -> dict:
         """Serializes the SecretMetadata into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.key is not None:
+            body["key"] = self.key
+        if self.last_updated_timestamp is not None:
+            body["last_updated_timestamp"] = self.last_updated_timestamp
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SecretMetadata into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.key is not None: body['key'] = self.key
-        if self.last_updated_timestamp is not None: body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.key is not None:
+            body["key"] = self.key
+        if self.last_updated_timestamp is not None:
+            body["last_updated_timestamp"] = self.last_updated_timestamp
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SecretMetadata:
         """Deserializes the SecretMetadata from a dictionary."""
-        return cls(key=d.get('key', None), last_updated_timestamp=d.get('last_updated_timestamp', None))
-
-
+        return cls(key=d.get("key", None), last_updated_timestamp=d.get("last_updated_timestamp", None))


 @dataclass
 class SecretScope:
     backend_type: Optional[ScopeBackendType] = None
     """The type of secret scope backend."""
-
+
     keyvault_metadata: Optional[AzureKeyVaultSecretScopeMetadata] = None
     """The metadata for the secret scope if the type is `AZURE_KEYVAULT`"""
-
+
     name: Optional[str] = None
     """A unique name to identify the secret scope."""
-
+
     def as_dict(self) -> dict:
         """Serializes the SecretScope into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.backend_type is not None: body['backend_type'] = self.backend_type.value
-        if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata.as_dict()
-        if self.name is not None: body['name'] = self.name
+        if self.backend_type is not None:
+            body["backend_type"] = self.backend_type.value
+        if self.keyvault_metadata:
+            body["keyvault_metadata"] = self.keyvault_metadata.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SecretScope into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.backend_type is not None: body['backend_type'] = self.backend_type
-        if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata
-        if self.name is not None: body['name'] = self.name
+        if self.backend_type is not None:
+            body["backend_type"] = self.backend_type
+        if self.keyvault_metadata:
+            body["keyvault_metadata"] = self.keyvault_metadata
+        if self.name is not None:
+            body["name"] = self.name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SecretScope:
         """Deserializes the SecretScope from a dictionary."""
-        return cls(backend_type=_enum(d, 'backend_type', ScopeBackendType), keyvault_metadata=_from_dict(d, 'keyvault_metadata', AzureKeyVaultSecretScopeMetadata), name=d.get('name', None))
-
-
+        return cls(
+            backend_type=_enum(d, "backend_type", ScopeBackendType),
+            keyvault_metadata=_from_dict(d, "keyvault_metadata", AzureKeyVaultSecretScopeMetadata),
+            name=d.get("name", None),
+        )


 @dataclass
 class SparseCheckout:
     """Sparse checkout configuration, it contains options like cone patterns."""
-
+
     patterns: Optional[List[str]] = None
     """List of sparse checkout cone patterns, see [cone mode handling] for details.

     [cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling"""
-
+
     def as_dict(self) -> dict:
         """Serializes the SparseCheckout into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.patterns: body['patterns'] = [v for v in self.patterns]
+        if self.patterns:
+            body["patterns"] = [v for v in self.patterns]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SparseCheckout into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.patterns: body['patterns'] = self.patterns
+        if self.patterns:
+            body["patterns"] = self.patterns
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SparseCheckout:
         """Deserializes the SparseCheckout from a dictionary."""
-        return cls(patterns=d.get('patterns', None))
-
-
+        return cls(patterns=d.get("patterns", None))


 @dataclass
 class SparseCheckoutUpdate:
     """Sparse checkout configuration, it contains options like cone patterns."""
-
+
     patterns: Optional[List[str]] = None
     """List of sparse checkout cone patterns, see [cone mode handling] for details.

     [cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling"""
-
+
     def as_dict(self) -> dict:
         """Serializes the SparseCheckoutUpdate into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.patterns: body['patterns'] = [v for v in self.patterns]
+        if self.patterns:
+            body["patterns"] = [v for v in self.patterns]
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the SparseCheckoutUpdate into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.patterns: body['patterns'] = self.patterns
+        if self.patterns:
+            body["patterns"] = self.patterns
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SparseCheckoutUpdate:
         """Deserializes the SparseCheckoutUpdate from a dictionary."""
-        return cls(patterns=d.get('patterns', None))
-
-
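SparseCheckout and SparseCheckoutUpdate share the same single patterns field; per the git documentation linked above, cone patterns name directories rather than arbitrary globs. A minimal sketch with illustrative directory names:

    from databricks.sdk.service.workspace import SparseCheckoutUpdate

    update = SparseCheckoutUpdate(patterns=["src", "docs/reference"])
    assert update.as_dict() == {"patterns": ["src", "docs/reference"]}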


 @dataclass
@@ -1764,47 +1939,58 @@ class UpdateCredentialsRequest:
     """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, `gitLabEnterpriseEdition` and `awsCodeCommit`."""
-
+
     credential_id: Optional[int] = None
     """The ID for the corresponding credential to access."""
-
+
     git_username: Optional[str] = None
     """The username or email provided with your Git provider account, depending on which provider you are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported."""
-
+
     personal_access_token: Optional[str] = None
     """The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more].

     [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateCredentialsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
-        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
+        if self.personal_access_token is not None:
+            body["personal_access_token"] = self.personal_access_token
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.git_provider is not None: body['git_provider'] = self.git_provider
-        if self.git_username is not None: body['git_username'] = self.git_username
-        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        if self.credential_id is not None:
+            body["credential_id"] = self.credential_id
+        if self.git_provider is not None:
+            body["git_provider"] = self.git_provider
+        if self.git_username is not None:
+            body["git_username"] = self.git_username
+        if self.personal_access_token is not None:
+            body["personal_access_token"] = self.personal_access_token
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialsRequest:
         """Deserializes the UpdateCredentialsRequest from a dictionary."""
-        return cls(credential_id=d.get('credential_id', None), git_provider=d.get('git_provider', None), git_username=d.get('git_username', None), personal_access_token=d.get('personal_access_token', None))
-
-
+        return cls(
+            credential_id=d.get("credential_id", None),
+            git_provider=d.get("git_provider", None),
+            git_username=d.get("git_username", None),
+            personal_access_token=d.get("personal_access_token", None),
+        )


 @dataclass
@@ -1823,51 +2009,60 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialsResponse:
         """Deserializes the UpdateCredentialsResponse from a dictionary."""
         return cls()
-
-

 @dataclass
 class UpdateRepoRequest:
     branch: Optional[str] = None
     """Branch that the local version of the repo is checked out to."""
-
+
     repo_id: Optional[int] = None
     """ID of the Git folder (repo) object in the workspace."""
-
+
     sparse_checkout: Optional[SparseCheckoutUpdate] = None
     """If specified, update the sparse checkout settings. The update will fail if sparse checkout is not enabled for the repo."""
-
+
     tag: Optional[str] = None
     """Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD."""
-
+
     def as_dict(self) -> dict:
         """Serializes the UpdateRepoRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
-        if self.tag is not None: body['tag'] = self.tag
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout.as_dict()
+        if self.tag is not None:
+            body["tag"] = self.tag
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.branch is not None: body['branch'] = self.branch
-        if self.repo_id is not None: body['repo_id'] = self.repo_id
-        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
-        if self.tag is not None: body['tag'] = self.tag
+        if self.branch is not None:
+            body["branch"] = self.branch
+        if self.repo_id is not None:
+            body["repo_id"] = self.repo_id
+        if self.sparse_checkout:
+            body["sparse_checkout"] = self.sparse_checkout
+        if self.tag is not None:
+            body["tag"] = self.tag
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> UpdateRepoRequest:
         """Deserializes the UpdateRepoRequest from a dictionary."""
-        return cls(branch=d.get('branch', None), repo_id=d.get('repo_id', None), sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckoutUpdate), tag=d.get('tag', None))
-
-
+        return cls(
+            branch=d.get("branch", None),
+            repo_id=d.get("repo_id", None),
+            sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckoutUpdate),
+            tag=d.get("tag", None),
+        )


 @dataclass
@@ -1886,261 +2081,303 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateRepoResponse:
         """Deserializes the UpdateRepoResponse from a dictionary."""
         return cls()
-
-
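The tag docstring above has an operational consequence: checking a repo out to a tag leaves it in a detached HEAD state, so a second update targeting a branch is needed before new commits. Sketched with the request dataclass; the repo ID, tag, and branch names are hypothetical:

    from databricks.sdk.service.workspace import UpdateRepoRequest

    pin = UpdateRepoRequest(repo_id=42, tag="v1.2.0")      # detached HEAD after this
    resume = UpdateRepoRequest(repo_id=42, branch="main")  # required before committing again
    assert "tag" in pin.as_dict() and "branch" in resume.as_dict()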

 @dataclass
 class WorkspaceObjectAccessControlRequest:
     group_name: Optional[str] = None
     """name of the group"""
-
+
     permission_level: Optional[WorkspaceObjectPermissionLevel] = None
     """Permission level"""
-
+
     service_principal_name: Optional[str] = None
     """application ID of a service principal"""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlRequest into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectAccessControlRequest:
         """Deserializes the WorkspaceObjectAccessControlRequest from a dictionary."""
-        return cls(group_name=d.get('group_name', None), permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            group_name=d.get("group_name", None),
+            permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )


 @dataclass
 class WorkspaceObjectAccessControlResponse:
     all_permissions: Optional[List[WorkspaceObjectPermission]] = None
     """All permissions."""
-
+
     display_name: Optional[str] = None
     """Display name of the user or service principal."""
-
+
     group_name: Optional[str] = None
     """name of the group"""
-
+
     service_principal_name: Optional[str] = None
     """Name of the service principal."""
-
+
     user_name: Optional[str] = None
     """name of the user"""
-
+
     def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectAccessControlResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.all_permissions: body['all_permissions'] = self.all_permissions
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.group_name is not None: body['group_name'] = self.group_name
-        if self.service_principal_name is not None: body['service_principal_name'] = self.service_principal_name
-        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.all_permissions:
+            body["all_permissions"] = self.all_permissions
+        if self.display_name is not None:
+            body["display_name"] = self.display_name
+        if self.group_name is not None:
+            body["group_name"] = self.group_name
+        if self.service_principal_name is not None:
+            body["service_principal_name"] = self.service_principal_name
+        if self.user_name is not None:
+            body["user_name"] = self.user_name
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectAccessControlResponse:
         """Deserializes the WorkspaceObjectAccessControlResponse from a dictionary."""
-        return cls(all_permissions=_repeated_dict(d, 'all_permissions', WorkspaceObjectPermission), display_name=d.get('display_name', None), group_name=d.get('group_name', None), service_principal_name=d.get('service_principal_name', None), user_name=d.get('user_name', None))
-
-
+        return cls(
+            all_permissions=_repeated_dict(d, "all_permissions", WorkspaceObjectPermission),
+            display_name=d.get("display_name", None),
+            group_name=d.get("group_name", None),
+            service_principal_name=d.get("service_principal_name", None),
+            user_name=d.get("user_name", None),
+        )


 @dataclass
 class WorkspaceObjectPermission:
     inherited: Optional[bool] = None
-
+
     inherited_from_object: Optional[List[str]] = None
-
+
     permission_level: Optional[WorkspaceObjectPermissionLevel] = None
     """Permission level"""
-
+
     def as_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermission into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
-        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level.value
         return body

     def as_shallow_dict(self) -> dict:
         """Serializes the WorkspaceObjectPermission into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.inherited is not None: body['inherited'] = self.inherited
-        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
-        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.inherited is not None:
+            body["inherited"] = self.inherited
+        if self.inherited_from_object:
+            body["inherited_from_object"] = self.inherited_from_object
+        if self.permission_level is not None:
+            body["permission_level"] = self.permission_level
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermission:
         """Deserializes the WorkspaceObjectPermission from a dictionary."""
-        return cls(inherited=d.get('inherited', None), inherited_from_object=d.get('inherited_from_object', None), permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel))
-
-
+        return cls(
+            inherited=d.get("inherited", None),
+            inherited_from_object=d.get("inherited_from_object", None),
+            permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel),
+        )


 class WorkspaceObjectPermissionLevel(Enum):
     """Permission level"""
-
-    CAN_EDIT = 'CAN_EDIT'
-    CAN_MANAGE = 'CAN_MANAGE'
-    CAN_READ = 'CAN_READ'
-    CAN_RUN = 'CAN_RUN'
+
+    CAN_EDIT = "CAN_EDIT"
+    CAN_MANAGE = "CAN_MANAGE"
+    CAN_READ = "CAN_READ"
+    CAN_RUN = "CAN_RUN"
+
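The access-control request entries are principal-plus-level pairs: conventionally each entry sets one of user_name, group_name, or service_principal_name alongside a permission_level. A sketch with an illustrative group name:

    from databricks.sdk.service.workspace import (
        WorkspaceObjectAccessControlRequest,
        WorkspaceObjectPermissionLevel,
    )

    acl = WorkspaceObjectAccessControlRequest(
        group_name="data-engineers",
        permission_level=WorkspaceObjectPermissionLevel.CAN_RUN,
    )
    assert acl.as_dict()["permission_level"] == "CAN_RUN"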
WorkspaceObjectPermissions into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectPermissions into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.object_id is not None: body['object_id'] = self.object_id - if self.object_type is not None: body['object_type'] = self.object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissions: """Deserializes the WorkspaceObjectPermissions from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', WorkspaceObjectAccessControlResponse), object_id=d.get('object_id', None), object_type=d.get('object_type', None)) - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlResponse), + object_id=d.get("object_id", None), + object_type=d.get("object_type", None), + ) @dataclass class WorkspaceObjectPermissionsDescription: description: Optional[str] = None - + permission_level: Optional[WorkspaceObjectPermissionLevel] = None """Permission level""" - + def as_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsDescription into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level.value + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level.value return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsDescription into a shallow dictionary of its immediate attributes.""" body = {} - if self.description is not None: body['description'] = self.description - if self.permission_level is not None: body['permission_level'] = self.permission_level + if self.description is not None: + body["description"] = self.description + if self.permission_level is not None: + body["permission_level"] = self.permission_level return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissionsDescription: """Deserializes the WorkspaceObjectPermissionsDescription from a dictionary.""" - return cls(description=d.get('description', None), permission_level=_enum(d, 'permission_level', WorkspaceObjectPermissionLevel)) - - + return cls( + description=d.get("description", None), + permission_level=_enum(d, "permission_level", WorkspaceObjectPermissionLevel), + ) @dataclass class WorkspaceObjectPermissionsRequest: access_control_list: 
Optional[List[WorkspaceObjectAccessControlRequest]] = None - + workspace_object_id: Optional[str] = None """The workspace object for which to get or manage permissions.""" - + workspace_object_type: Optional[str] = None """The workspace object type for which to get or manage permissions.""" - + def as_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.access_control_list: body['access_control_list'] = [v.as_dict() for v in self.access_control_list] - if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id - if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.workspace_object_id is not None: + body["workspace_object_id"] = self.workspace_object_id + if self.workspace_object_type is not None: + body["workspace_object_type"] = self.workspace_object_type return body def as_shallow_dict(self) -> dict: """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.access_control_list: body['access_control_list'] = self.access_control_list - if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id - if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.workspace_object_id is not None: + body["workspace_object_id"] = self.workspace_object_id + if self.workspace_object_type is not None: + body["workspace_object_type"] = self.workspace_object_type return body @classmethod def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissionsRequest: """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', WorkspaceObjectAccessControlRequest), workspace_object_id=d.get('workspace_object_id', None), workspace_object_type=d.get('workspace_object_type', None)) - - - - + return cls( + access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlRequest), + workspace_object_id=d.get("workspace_object_id", None), + workspace_object_type=d.get("workspace_object_type", None), + ) class GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user. - + See [more info]. - + [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , git_provider: str - , * - , git_username: Optional[str] = None, personal_access_token: Optional[str] = None) -> CreateCredentialsResponse: + def create( + self, git_provider: str, *, git_username: Optional[str] = None, personal_access_token: Optional[str] = None + ) -> CreateCredentialsResponse: """Create a credential entry. - + Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. - + :param git_provider: str Git provider. This field is case-insensitive. 
The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, @@ -2154,106 +2391,89 @@ def create(self :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - + :returns: :class:`CreateCredentialsResponse` """ body = {} - if git_provider is not None: body['git_provider'] = git_provider - if git_username is not None: body['git_username'] = git_username - if personal_access_token is not None: body['personal_access_token'] = personal_access_token - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/git-credentials', body=body - - , headers=headers - ) + if git_provider is not None: + body["git_provider"] = git_provider + if git_username is not None: + body["git_username"] = git_username + if personal_access_token is not None: + body["personal_access_token"] = personal_access_token + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/git-credentials", body=body, headers=headers) return CreateCredentialsResponse.from_dict(res) - - - - - def delete(self - , credential_id: int - ): + def delete(self, credential_id: int): """Delete a credential. - + Deletes the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/git-credentials/{credential_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } - def get(self - , credential_id: int - ) -> GetCredentialsResponse: + self._api.do("DELETE", f"/api/2.0/git-credentials/{credential_id}", headers=headers) + + def get(self, credential_id: int) -> GetCredentialsResponse: """Get a credential entry. - + Gets the Git credential with the specified credential ID. - + :param credential_id: int The ID for the corresponding credential to access. - + :returns: :class:`GetCredentialsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/git-credentials/{credential_id}' - - , headers=headers - ) - return GetCredentialsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/git-credentials/{credential_id}", headers=headers) + return GetCredentialsResponse.from_dict(res) def list(self) -> Iterator[CredentialInfo]: """Get Git credentials. - + Lists the calling user's Git credentials. One credential per user is supported. 
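A minimal sketch of the one-credential-per-user lifecycle enforced by this API, assuming a `WorkspaceClient` that picks up auth from the environment; the provider, username, and token values are placeholders:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Only one Git credential per user is supported: create it once...
    cred = w.git_credentials.create(
        git_provider="gitHub",
        git_username="octocat",
        personal_access_token="<personal-access-token>",
    )

    # ...then PATCH the same entry on token rotation instead of creating a second one.
    w.git_credentials.update(
        credential_id=cred.credential_id,
        git_provider="gitHub",
        git_username="octocat",
        personal_access_token="<rotated-token>",
    )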
- + :returns: Iterator over :class:`CredentialInfo` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/git-credentials' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/git-credentials", headers=headers) parsed = ListCredentialsResponse.from_dict(json).credentials return parsed if parsed is not None else [] - - - - - - def update(self - , credential_id: int, git_provider: str - , * - , git_username: Optional[str] = None, personal_access_token: Optional[str] = None): + def update( + self, + credential_id: int, + git_provider: str, + *, + git_username: Optional[str] = None, + personal_access_token: Optional[str] = None, + ): """Update a credential. - + Updates the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. :param git_provider: str @@ -2269,56 +2489,48 @@ def update(self :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - - + + """ body = {} - if git_provider is not None: body['git_provider'] = git_provider - if git_username is not None: body['git_username'] = git_username - if personal_access_token is not None: body['personal_access_token'] = personal_access_token - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/git-credentials/{credential_id}', body=body - - , headers=headers - ) - + if git_provider is not None: + body["git_provider"] = git_provider + if git_username is not None: + body["git_username"] = git_username + if personal_access_token is not None: + body["personal_access_token"] = personal_access_token + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/git-credentials/{credential_id}", body=body, headers=headers) + - - class ReposAPI: """The Repos API allows users to manage their git repos. Users can use the API to access all repos that they have manage permissions on. - + Databricks Repos is a visual Git client in Databricks. It supports common Git operations such a cloning a repository, committing and pushing, pulling, branch management, and visual comparison of diffs when committing. - + Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create(self - , url: str, provider: str - , * - , path: Optional[str] = None, sparse_checkout: Optional[SparseCheckout] = None) -> CreateRepoResponse: + def create( + self, url: str, provider: str, *, path: Optional[str] = None, sparse_checkout: Optional[SparseCheckout] = None + ) -> CreateRepoResponse: """Create a repo. - + Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created programmatically must be linked to a remote Git repo, unlike repos created in the browser. - + :param url: str URL of the Git repository to be linked. 
:param provider: str @@ -2331,135 +2543,103 @@ def create(self :param sparse_checkout: :class:`SparseCheckout` (optional) If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable sparse checkout after the repo is created. - + :returns: :class:`CreateRepoResponse` """ body = {} - if path is not None: body['path'] = path - if provider is not None: body['provider'] = provider - if sparse_checkout is not None: body['sparse_checkout'] = sparse_checkout.as_dict() - if url is not None: body['url'] = url - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('POST','/api/2.0/repos', body=body - - , headers=headers - ) + if path is not None: + body["path"] = path + if provider is not None: + body["provider"] = provider + if sparse_checkout is not None: + body["sparse_checkout"] = sparse_checkout.as_dict() + if url is not None: + body["url"] = url + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("POST", "/api/2.0/repos", body=body, headers=headers) return CreateRepoResponse.from_dict(res) - - - - - def delete(self - , repo_id: int - ): + def delete(self, repo_id: int): """Delete a repo. - + Deletes the specified repo. - + :param repo_id: int The ID for the corresponding repo to delete. - - + + """ - - headers = {'Accept': 'application/json',} - - self._api.do('DELETE',f'/api/2.0/repos/{repo_id}' - - , headers=headers - ) - - - - + headers = { + "Accept": "application/json", + } + + self._api.do("DELETE", f"/api/2.0/repos/{repo_id}", headers=headers) - def get(self - , repo_id: int - ) -> GetRepoResponse: + def get(self, repo_id: int) -> GetRepoResponse: """Get a repo. - + Returns the repo with the given repo ID. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. - + :returns: :class:`GetRepoResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/repos/{repo_id}' - - , headers=headers - ) - return GetRepoResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_permission_levels(self - , repo_id: str - ) -> GetRepoPermissionLevelsResponse: + res = self._api.do("GET", f"/api/2.0/repos/{repo_id}", headers=headers) + return GetRepoResponse.from_dict(res) + + def get_permission_levels(self, repo_id: str) -> GetRepoPermissionLevelsResponse: """Get repo permission levels. - + Gets the permission levels that a user can have on an object. - + :param repo_id: str The repo for which to get or manage permissions. - + :returns: :class:`GetRepoPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/repos/{repo_id}/permissionLevels' - - , headers=headers - ) - return GetRepoPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } - def get_permissions(self - , repo_id: str - ) -> RepoPermissions: + res = self._api.do("GET", f"/api/2.0/permissions/repos/{repo_id}/permissionLevels", headers=headers) + return GetRepoPermissionLevelsResponse.from_dict(res) + + def get_permissions(self, repo_id: str) -> RepoPermissions: """Get repo permissions. - + Gets the permissions of a repo. Repos can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. 
- + :returns: :class:`RepoPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/repos/{repo_id}' - - , headers=headers - ) - return RepoPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", f"/api/2.0/permissions/repos/{repo_id}", headers=headers) + return RepoPermissions.from_dict(res) - def list(self - - , * - , next_page_token: Optional[str] = None, path_prefix: Optional[str] = None) -> Iterator[RepoInfo]: + def list(self, *, next_page_token: Optional[str] = None, path_prefix: Optional[str] = None) -> Iterator[RepoInfo]: """Get repos. - + Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate through additional pages. - + :param next_page_token: str (optional) Token used to get the next page of results. If not specified, returns the first page of results as well as a next page token if there are more results. @@ -2467,72 +2647,66 @@ def list(self Filters repos that have paths starting with the given path prefix. If not provided or when provided an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will be served. - + :returns: Iterator over :class:`RepoInfo` """ - - query = {} - if next_page_token is not None: query['next_page_token'] = next_page_token - if path_prefix is not None: query['path_prefix'] = path_prefix - headers = {'Accept': 'application/json',} - - - - while True: - json = self._api.do('GET','/api/2.0/repos', query=query - - , headers=headers - ) - if 'repos' in json: - for v in json['repos']: - yield RepoInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['next_page_token'] = json['next_page_token'] - - - - + query = {} + if next_page_token is not None: + query["next_page_token"] = next_page_token + if path_prefix is not None: + query["path_prefix"] = path_prefix + headers = { + "Accept": "application/json", + } - def set_permissions(self - , repo_id: str - , * - , access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions: + while True: + json = self._api.do("GET", "/api/2.0/repos", query=query, headers=headers) + if "repos" in json: + for v in json["repos"]: + yield RepoInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["next_page_token"] = json["next_page_token"] + + def set_permissions( + self, repo_id: str, *, access_control_list: Optional[List[RepoAccessControlRequest]] = None + ) -> RepoPermissions: """Set repo permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. 
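A sketch of creating a repo and checking it out to a branch with the Repos API above; the URL, workspace path, and branch are illustrative, and the path format under `/Workspace/Repos` is an assumption based on the `path_prefix` description:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Programmatically created repos must be linked to a remote Git repo.
    repo = w.repos.create(
        url="https://github.com/databricks/databricks-sdk-py.git",
        provider="gitHub",
        path="/Workspace/Repos/someone@example.com/databricks-sdk-py",
    )
    w.repos.update(repo_id=repo.id, branch="main")

    # list() pages through results transparently via next_page_token.
    for r in w.repos.list(path_prefix="/Workspace/Repos"):
        print(r.id, r.path)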
:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/repos/{repo_id}', body=body - - , headers=headers - ) - return RepoPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do("PUT", f"/api/2.0/permissions/repos/{repo_id}", body=body, headers=headers) + return RepoPermissions.from_dict(res) - def update(self - , repo_id: int - , * - , branch: Optional[str] = None, sparse_checkout: Optional[SparseCheckoutUpdate] = None, tag: Optional[str] = None): + def update( + self, + repo_id: int, + *, + branch: Optional[str] = None, + sparse_checkout: Optional[SparseCheckoutUpdate] = None, + tag: Optional[str] = None, + ): """Update a repo. - + Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same branch. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. :param branch: str (optional) @@ -2544,82 +2718,75 @@ def update(self Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD. - - - """ - body = {} - if branch is not None: body['branch'] = branch - if sparse_checkout is not None: body['sparse_checkout'] = sparse_checkout.as_dict() - if tag is not None: body['tag'] = tag - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('PATCH',f'/api/2.0/repos/{repo_id}', body=body - - , headers=headers - ) - - - - - def update_permissions(self - , repo_id: str - , * - , access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions: + """ + body = {} + if branch is not None: + body["branch"] = branch + if sparse_checkout is not None: + body["sparse_checkout"] = sparse_checkout.as_dict() + if tag is not None: + body["tag"] = tag + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("PATCH", f"/api/2.0/repos/{repo_id}", body=body, headers=headers) + + def update_permissions( + self, repo_id: str, *, access_control_list: Optional[List[RepoAccessControlRequest]] = None + ) -> RepoPermissions: """Update repo permissions. - + Updates the permissions on a repo. Repos can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. 
:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/repos/{repo_id}', body=body - - , headers=headers - ) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do("PATCH", f"/api/2.0/permissions/repos/{repo_id}", body=body, headers=headers) return RepoPermissions.from_dict(res) - - + class SecretsAPI: """The Secrets API allows you to manage secrets, secret scopes, and access permissions. - + Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of directly entering your credentials into a notebook, use Databricks secrets to store your credentials and reference them in notebooks and jobs. - + Administrators, secret creators, and users granted permission can read Databricks secrets. While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def create_scope(self - , scope: str - , * - , backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None, initial_manage_principal: Optional[str] = None, scope_backend_type: Optional[ScopeBackendType] = None): + def create_scope( + self, + scope: str, + *, + backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None, + initial_manage_principal: Optional[str] = None, + scope_backend_type: Optional[ScopeBackendType] = None, + ): """Create a new secret scope. - + The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. - + :param scope: str Scope name requested by the user. Scope names are unique. :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional) @@ -2628,359 +2795,308 @@ def create_scope(self The principal that is initially granted `MANAGE` permission to the created scope. :param scope_backend_type: :class:`ScopeBackendType` (optional) The backend type the scope will be created with. 
If not specified, will default to `DATABRICKS` - - - """ - body = {} - if backend_azure_keyvault is not None: body['backend_azure_keyvault'] = backend_azure_keyvault.as_dict() - if initial_manage_principal is not None: body['initial_manage_principal'] = initial_manage_principal - if scope is not None: body['scope'] = scope - if scope_backend_type is not None: body['scope_backend_type'] = scope_backend_type.value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/secrets/scopes/create', body=body - - , headers=headers - ) - - - - - def delete_acl(self - , scope: str, principal: str - ): + """ + body = {} + if backend_azure_keyvault is not None: + body["backend_azure_keyvault"] = backend_azure_keyvault.as_dict() + if initial_manage_principal is not None: + body["initial_manage_principal"] = initial_manage_principal + if scope is not None: + body["scope"] = scope + if scope_backend_type is not None: + body["scope_backend_type"] = scope_backend_type.value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/secrets/scopes/create", body=body, headers=headers) + + def delete_acl(self, scope: str, principal: str): """Delete an ACL. - + Deletes the given ACL on the given scope. - + Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to remove permissions from. :param principal: str The principal to remove an existing ACL from. - - + + """ body = {} - if principal is not None: body['principal'] = principal - if scope is not None: body['scope'] = scope - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/secrets/acls/delete', body=body - - , headers=headers - ) - + if principal is not None: + body["principal"] = principal + if scope is not None: + body["scope"] = scope + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/secrets/acls/delete", body=body, headers=headers) - def delete_scope(self - , scope: str - ): + def delete_scope(self, scope: str): """Delete a secret scope. - + Deletes a secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str Name of the scope to delete. - - + + """ body = {} - if scope is not None: body['scope'] = scope - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/secrets/scopes/delete', body=body - - , headers=headers - ) - + if scope is not None: + body["scope"] = scope + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/secrets/scopes/delete", body=body, headers=headers) - def delete_secret(self - , scope: str, key: str - ): + def delete_secret(self, scope: str, key: str): """Delete a secret. - + Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. 
- + :param scope: str The name of the scope that contains the secret to delete. :param key: str Name of the secret to delete. - - + + """ body = {} - if key is not None: body['key'] = key - if scope is not None: body['scope'] = scope - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/secrets/delete', body=body - - , headers=headers - ) - + if key is not None: + body["key"] = key + if scope is not None: + body["scope"] = scope + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/secrets/delete", body=body, headers=headers) - def get_acl(self - , scope: str, principal: str - ) -> AclItem: + def get_acl(self, scope: str, principal: str) -> AclItem: """Get secret ACL details. - + Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to fetch ACL information from. :param principal: str The principal to fetch ACL information for. - + :returns: :class:`AclItem` """ - + query = {} - if principal is not None: query['principal'] = principal - if scope is not None: query['scope'] = scope - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/secrets/acls/get', query=query - - , headers=headers - ) + if principal is not None: + query["principal"] = principal + if scope is not None: + query["scope"] = scope + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/secrets/acls/get", query=query, headers=headers) return AclItem.from_dict(res) - - - - - def get_secret(self - , scope: str, key: str - ) -> GetSecretResponse: + def get_secret(self, scope: str, key: str) -> GetSecretResponse: """Get a secret. - + Gets the bytes representation of a secret value for the specified scope and key. - + Users need the READ permission to make this call. - + Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the caller in DBUtils and the type the data is decoded into. - + Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. - + :param scope: str The name of the scope to fetch secret information from. :param key: str The key to fetch secret for. - + :returns: :class:`GetSecretResponse` """ - + query = {} - if key is not None: query['key'] = key - if scope is not None: query['scope'] = scope - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/secrets/get', query=query - - , headers=headers - ) + if key is not None: + query["key"] = key + if scope is not None: + query["scope"] = scope + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/secrets/get", query=query, headers=headers) return GetSecretResponse.from_dict(res) - - - - - def list_acls(self - , scope: str - ) -> Iterator[AclItem]: + def list_acls(self, scope: str) -> Iterator[AclItem]: """Lists ACLs. - + List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. 
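Because `get_secret` returns the bytes representation, the value arrives base64-encoded in the JSON response; a reading-back sketch under that assumption (scope and key names are made up):

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    resp = w.secrets.get_secret(scope="my-scope", key="db-password")
    # The value field carries the base64-encoded secret bytes.
    plaintext = base64.b64decode(resp.value).decode("utf-8")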
- + :param scope: str The name of the scope to fetch ACL information from. - + :returns: Iterator over :class:`AclItem` """ - + query = {} - if scope is not None: query['scope'] = scope - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/secrets/acls/list', query=query - - , headers=headers - ) + if scope is not None: + query["scope"] = scope + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/secrets/acls/list", query=query, headers=headers) parsed = ListAclsResponse.from_dict(json).items return parsed if parsed is not None else [] - - - - - def list_scopes(self) -> Iterator[SecretScope]: """List all scopes. - + Lists all secret scopes available in the workspace. - + Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :returns: Iterator over :class:`SecretScope` """ - - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/secrets/scopes/list' - , headers=headers - ) + + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/secrets/scopes/list", headers=headers) parsed = ListScopesResponse.from_dict(json).scopes return parsed if parsed is not None else [] - - - - - - def list_secrets(self - , scope: str - ) -> Iterator[SecretMetadata]: + def list_secrets(self, scope: str) -> Iterator[SecretMetadata]: """List secret keys. - + Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. - + The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to list secrets within. - + :returns: Iterator over :class:`SecretMetadata` """ - + query = {} - if scope is not None: query['scope'] = scope - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/secrets/list', query=query - - , headers=headers - ) + if scope is not None: + query["scope"] = scope + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/secrets/list", query=query, headers=headers) parsed = ListSecretsResponse.from_dict(json).secrets return parsed if parsed is not None else [] - - - - - - def put_acl(self - , scope: str, principal: str, permission: AclPermission - ): + def put_acl(self, scope: str, principal: str, permission: AclPermission): """Create/update an ACL. - + Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. - + In general, a user or group will use the most powerful permission available to them, and permissions are ordered as follows: - + * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what secrets are available. - + Note that in general, secret values can only be read from within a command on a cluster (for example, through a notebook). There is no API to read the actual secret value material outside of a cluster. However, the user's permission will be applied based on who is executing the command, and they must have at least READ permission. - + Users must have the `MANAGE` permission to invoke this API. 
- + The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to apply permissions to. :param principal: str The principal in which the permission is applied. :param permission: :class:`AclPermission` The permission level applied to the principal. - - - """ - body = {} - if permission is not None: body['permission'] = permission.value - if principal is not None: body['principal'] = principal - if scope is not None: body['scope'] = scope - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/secrets/acls/put', body=body - - , headers=headers - ) - - - - - def put_secret(self - , scope: str, key: str - , * - , bytes_value: Optional[str] = None, string_value: Optional[str] = None): + """ + body = {} + if permission is not None: + body["permission"] = permission.value + if principal is not None: + body["principal"] = principal + if scope is not None: + body["scope"] = scope + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/secrets/acls/put", body=body, headers=headers) + + def put_secret( + self, scope: str, key: str, *, bytes_value: Optional[str] = None, string_value: Optional[str] = None + ): """Add a secret. - + Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using the secret scope's encryption settings before storing it. - + You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. - + The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine the value returned when the secret value is requested. Exactly one must be specified. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to which the secret will be associated with. :param key: str @@ -2989,232 +3105,207 @@ def put_secret(self If specified, value will be stored as bytes. :param string_value: str (optional) If specified, note that the value will be stored in UTF-8 (MB4) form. 
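A sketch of the scope/secret/ACL workflow these endpoints describe; the scope, key, and principal names are placeholders, and exactly one of `string_value`/`bytes_value` may be set per secret:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.workspace import AclPermission

    w = WorkspaceClient()

    w.secrets.create_scope(scope="my-scope")
    # string_value is stored UTF-8 encoded; use bytes_value for raw bytes.
    w.secrets.put_secret(scope="my-scope", key="db-password", string_value="s3cr3t")
    # Permissions are ordered READ < WRITE < MANAGE: grant the weakest level that suffices.
    w.secrets.put_acl(scope="my-scope", principal="data-scientists", permission=AclPermission.READ)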
- - + + """ body = {} - if bytes_value is not None: body['bytes_value'] = bytes_value - if key is not None: body['key'] = key - if scope is not None: body['scope'] = scope - if string_value is not None: body['string_value'] = string_value - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/secrets/put', body=body - - , headers=headers - ) - + if bytes_value is not None: + body["bytes_value"] = bytes_value + if key is not None: + body["key"] = key + if scope is not None: + body["scope"] = scope + if string_value is not None: + body["string_value"] = string_value + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/secrets/put", body=body, headers=headers) + - - class WorkspaceAPI: """The Workspace API allows you to list, import, export, and delete notebooks and folders. - + A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text.""" - + def __init__(self, api_client): self._api = api_client - - - - - - - - - def delete(self - , path: str - , * - , recursive: Optional[bool] = None): + def delete(self, path: str, *, recursive: Optional[bool] = None): """Delete a workspace object. - + Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. - + Object deletion cannot be undone and deleting a directory recursively is not atomic. - + :param path: str The absolute path of the notebook or directory. :param recursive: bool (optional) The flag that specifies whether to delete the object recursively. It is `false` by default. Please note this deleting directory is not atomic. If it fails in the middle, some of objects under this directory may be deleted and cannot be undone. - - + + """ body = {} - if path is not None: body['path'] = path - if recursive is not None: body['recursive'] = recursive - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/workspace/delete', body=body - - , headers=headers - ) - + if path is not None: + body["path"] = path + if recursive is not None: + body["recursive"] = recursive + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + self._api.do("POST", "/api/2.0/workspace/delete", body=body, headers=headers) - def export(self - , path: str - , * - , format: Optional[ExportFormat] = None) -> ExportResponse: + def export(self, path: str, *, format: Optional[ExportFormat] = None) -> ExportResponse: """Export a workspace object. - + Exports an object or the contents of an entire directory. - + If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + If the exported data would exceed size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`. Currently, this API does not support exporting a library. - + :param path: str The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`, `SOURCE`, and `AUTO` format. :param format: :class:`ExportFormat` (optional) This specifies the format of the exported file. By default, this is `SOURCE`. - + The value is case sensitive. - + - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook entries. 
- `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format. Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type. Directory exports will include notebooks and workspace files. - + :returns: :class:`ExportResponse` """ - + query = {} - if format is not None: query['format'] = format.value - if path is not None: query['path'] = path - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/workspace/export', query=query - - , headers=headers - ) + if format is not None: + query["format"] = format.value + if path is not None: + query["path"] = path + headers = { + "Accept": "application/json", + } + + res = self._api.do("GET", "/api/2.0/workspace/export", query=query, headers=headers) return ExportResponse.from_dict(res) - - - - - def get_permission_levels(self - , workspace_object_type: str, workspace_object_id: str - ) -> GetWorkspaceObjectPermissionLevelsResponse: + def get_permission_levels( + self, workspace_object_type: str, workspace_object_id: str + ) -> GetWorkspaceObjectPermissionLevelsResponse: """Get workspace object permission levels. - + Gets the permission levels that a user can have on an object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`GetWorkspaceObjectPermissionLevelsResponse` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}/permissionLevels' - - , headers=headers - ) - return GetWorkspaceObjectPermissionLevelsResponse.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", + f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}/permissionLevels", + headers=headers, + ) + return GetWorkspaceObjectPermissionLevelsResponse.from_dict(res) - def get_permissions(self - , workspace_object_type: str, workspace_object_id: str - ) -> WorkspaceObjectPermissions: + def get_permissions(self, workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions: """Get workspace object permissions. - + Gets the permissions of a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`WorkspaceObjectPermissions` """ - - headers = {'Accept': 'application/json',} - - res = self._api.do('GET',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}' - - , headers=headers - ) - return WorkspaceObjectPermissions.from_dict(res) - - - + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}", headers=headers + ) + return WorkspaceObjectPermissions.from_dict(res) - def get_status(self - , path: str - ) -> ObjectInfo: + def get_status(self, path: str) -> ObjectInfo: """Get status. - + Gets the status of an object or a directory. 
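A sketch of exporting a single notebook as source with the export endpoint above; the path is a placeholder, and the returned content is assumed to be base64-encoded as with import:

    import base64

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.workspace import ExportFormat

    w = WorkspaceClient()

    resp = w.workspace.export(path="/Users/someone@example.com/my-notebook", format=ExportFormat.SOURCE)
    # ExportResponse.content holds the base64-encoded notebook source.
    print(base64.b64decode(resp.content).decode("utf-8"))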
If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The absolute path of the notebook or directory. - + :returns: :class:`ObjectInfo` """ - + query = {} - if path is not None: query['path'] = path - headers = {'Accept': 'application/json',} - - res = self._api.do('GET','/api/2.0/workspace/get-status', query=query - - , headers=headers - ) - return ObjectInfo.from_dict(res) + if path is not None: + query["path"] = path + headers = { + "Accept": "application/json", + } - - - + res = self._api.do("GET", "/api/2.0/workspace/get-status", query=query, headers=headers) + return ObjectInfo.from_dict(res) - def import_(self - , path: str - , * - , content: Optional[str] = None, format: Optional[ImportFormat] = None, language: Optional[Language] = None, overwrite: Optional[bool] = None): + def import_( + self, + path: str, + *, + content: Optional[str] = None, + format: Optional[ImportFormat] = None, + language: Optional[Language] = None, + overwrite: Optional[bool] = None, + ): """Import a workspace object. - + Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you must set the `language` field. - + :param path: str The absolute path of the object or directory. Importing a directory is only supported for the `DBC` and `SOURCE` formats. :param content: str (optional) The base64-encoded content. This has a limit of 10 MB. - + If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. This parameter might be absent, and instead a posted file is used. :param format: :class:`ImportFormat` (optional) This specifies the format of the file to be imported. - + The value is case sensitive. - + - `AUTO`: The item is imported depending on an analysis of the item's extension and the header content provided in the request. If the item is imported as a notebook, then the item's extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`: @@ -3226,151 +3317,143 @@ def import_(self :param overwrite: bool (optional) The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC` format, `overwrite` is not supported since it may contain a directory. 
- - - """ - body = {} - if content is not None: body['content'] = content - if format is not None: body['format'] = format.value - if language is not None: body['language'] = language.value - if overwrite is not None: body['overwrite'] = overwrite - if path is not None: body['path'] = path - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/workspace/import', body=body - - , headers=headers - ) - - - - - def list(self - , path: str - , * - , notebooks_modified_after: Optional[int] = None) -> Iterator[ObjectInfo]: + """ + body = {} + if content is not None: + body["content"] = content + if format is not None: + body["format"] = format.value + if language is not None: + body["language"] = language.value + if overwrite is not None: + body["overwrite"] = overwrite + if path is not None: + body["path"] = path + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/workspace/import", body=body, headers=headers) + + def list(self, path: str, *, notebooks_modified_after: Optional[int] = None) -> Iterator[ObjectInfo]: """List contents. - + Lists the contents of a directory, or the object if it is not a directory. If the input path does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The absolute path of the notebook or directory. :param notebooks_modified_after: int (optional) UTC timestamp in milliseconds - + :returns: Iterator over :class:`ObjectInfo` """ - + query = {} - if notebooks_modified_after is not None: query['notebooks_modified_after'] = notebooks_modified_after - if path is not None: query['path'] = path - headers = {'Accept': 'application/json',} - - json = self._api.do('GET','/api/2.0/workspace/list', query=query - - , headers=headers - ) + if notebooks_modified_after is not None: + query["notebooks_modified_after"] = notebooks_modified_after + if path is not None: + query["path"] = path + headers = { + "Accept": "application/json", + } + + json = self._api.do("GET", "/api/2.0/workspace/list", query=query, headers=headers) parsed = ListResponse.from_dict(json).objects return parsed if parsed is not None else [] - - - - - - def mkdirs(self - , path: str - ): + def mkdirs(self, path: str): """Create a directory. - + Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`. - + Note that if this operation fails it may have succeeded in creating some of the necessary parent directories. - + :param path: str The absolute path of the directory. If the parent directories do not exist, it will also create them. If the directory already exists, this command will do nothing and succeed. 
- - - """ - body = {} - if path is not None: body['path'] = path - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - self._api.do('POST','/api/2.0/workspace/mkdirs', body=body - - , headers=headers - ) - - - - - def set_permissions(self - , workspace_object_type: str, workspace_object_id: str - , * - , access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None) -> WorkspaceObjectPermissions: + """ + body = {} + if path is not None: + body["path"] = path + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + self._api.do("POST", "/api/2.0/workspace/mkdirs", body=body, headers=headers) + + def set_permissions( + self, + workspace_object_type: str, + workspace_object_id: str, + *, + access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None, + ) -> WorkspaceObjectPermissions: """Set workspace object permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PUT',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}', body=body - - , headers=headers - ) - return WorkspaceObjectPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - - + res = self._api.do( + "PUT", f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}", body=body, headers=headers + ) + return WorkspaceObjectPermissions.from_dict(res) - def update_permissions(self - , workspace_object_type: str, workspace_object_id: str - , * - , access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None) -> WorkspaceObjectPermissions: + def update_permissions( + self, + workspace_object_type: str, + workspace_object_id: str, + *, + access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None, + ) -> WorkspaceObjectPermissions: """Update workspace object permissions. - + Updates the permissions on a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. 
:param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions` """ body = {} - if access_control_list is not None: body['access_control_list'] = [v.as_dict() for v in access_control_list] - headers = {'Accept': 'application/json','Content-Type': 'application/json',} - - res = self._api.do('PATCH',f'/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}', body=body - - , headers=headers - ) - return WorkspaceObjectPermissions.from_dict(res) + if access_control_list is not None: + body["access_control_list"] = [v.as_dict() for v in access_control_list] + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } - - \ No newline at end of file + res = self._api.do( + "PATCH", f"/api/2.0/permissions/{workspace_object_type}/{workspace_object_id}", body=body, headers=headers + ) + return WorkspaceObjectPermissions.from_dict(res) diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst index 181b91cc3..b3bda9c61 100644 --- a/docs/account/billing/billable_usage.rst +++ b/docs/account/billing/billable_usage.rst @@ -21,16 +21,16 @@ resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09") Return billable usage logs. - + Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see [CSV file schema]. Note that this method might take multiple minutes to complete. - + **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. - + [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema - + :param start_month: str Format: `YYYY-MM`. First month to return billable usage logs for. This field is required. :param end_month: str @@ -39,6 +39,6 @@ Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators. Handle this information with care. Defaults to false. - + :returns: :class:`DownloadResponse` \ No newline at end of file diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst index abf07b663..d77eaa4a2 100644 --- a/docs/account/billing/budget_policy.rst +++ b/docs/account/billing/budget_policy.rst @@ -9,9 +9,9 @@ .. py:method:: create( [, policy: Optional[BudgetPolicy], request_id: Optional[str]]) -> BudgetPolicy Create a budget policy. - + Creates a new policy. - + :param policy: :class:`BudgetPolicy` (optional) The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must be provided, custom_tags may need to be provided depending on the cloud provider. All other fields are @@ -19,40 +19,40 @@ :param request_id: str (optional) A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is recommended. This request is only idempotent if a `request_id` is provided. - + :returns: :class:`BudgetPolicy` .. py:method:: delete(policy_id: str) Delete a budget policy. - + Deletes a policy - + :param policy_id: str The Id of the policy. - - + + .. py:method:: get(policy_id: str) -> BudgetPolicy Get a budget policy. - + Retrieves a policy by it's ID. - + :param policy_id: str The Id of the policy. - + :returns: :class:`BudgetPolicy` .. 
py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy] List policies. - + Lists all policies. Policies are returned in the alphabetically ascending order of their names. - + :param filter_by: :class:`Filter` (optional) A filter to apply to the list of policies. :param page_size: int (optional) @@ -61,27 +61,27 @@ :param page_token: str (optional) A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the subsequent page. If unspecified, the first page will be returned. - + When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the call that provided the page token. :param sort_spec: :class:`SortSpec` (optional) The sort specification. - + :returns: Iterator over :class:`BudgetPolicy` .. py:method:: update(policy_id: str, policy: BudgetPolicy [, limit_config: Optional[LimitConfig]]) -> BudgetPolicy Update a budget policy. - + Updates a policy. - + :param policy_id: str The Id of the policy. This field is generated by Databricks and globally unique. :param policy: :class:`BudgetPolicy` Contains the BudgetPolicy details. :param limit_config: :class:`LimitConfig` (optional) DEPRECATED. This is a redundant field, as LimitConfig is part of the BudgetPolicy - + :returns: :class:`BudgetPolicy` \ No newline at end of file diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst index 6eba7d36e..cf87d1424 100644 --- a/docs/account/billing/budgets.rst +++ b/docs/account/billing/budgets.rst @@ -57,27 +57,27 @@ a.budgets.delete(budget_id=created.budget.budget_configuration_id) Create new budget. - + Create a new budget configuration for an account. For full details, see https://docs.databricks.com/en/admin/account-settings/budgets.html. - + :param budget: :class:`CreateBudgetConfigurationBudget` Properties of the new budget configuration. - + :returns: :class:`CreateBudgetConfigurationResponse` .. py:method:: delete(budget_id: str) Delete budget. - + Deletes a budget configuration for an account. Both account and budget configuration are specified by ID. This cannot be undone. - + :param budget_id: str The Databricks budget configuration ID. - - + + .. py:method:: get(budget_id: str) -> GetBudgetConfigurationResponse @@ -131,12 +131,12 @@ a.budgets.delete(budget_id=created.budget.budget_configuration_id) Get budget. - + Gets a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The budget configuration ID - + :returns: :class:`GetBudgetConfigurationResponse` @@ -155,13 +155,13 @@ all = a.budgets.list(billing.ListBudgetConfigurationsRequest()) Get all budgets. - + Gets all budgets associated with this account. - + :param page_token: str (optional) A page token received from a previous get all budget configurations call. This token can be used to retrieve the subsequent page. Requests first page if absent. - + :returns: Iterator over :class:`BudgetConfiguration` @@ -243,14 +243,14 @@ a.budgets.delete(budget_id=created.budget.budget_configuration_id) Modify budget. - + Updates a budget configuration for an account. Both account and budget configuration are specified by ID. - + :param budget_id: str The Databricks budget configuration ID. :param budget: :class:`UpdateBudgetConfigurationBudget` The updated budget. This will overwrite the budget specified by the budget ID. 
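A minimal sketch of the budget policy lifecycle documented above; `policy_id` is generated server-side, so only a (here illustrative) `policy_name` is supplied on create.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import billing

    a = AccountClient()

    created = a.budget_policy.create(policy=billing.BudgetPolicy(policy_name="sdk-example-policy"))

    # Policies are listed in ascending alphabetical order of their names.
    for p in a.budget_policy.list(page_size=50):
        print(p.policy_id, p.policy_name)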
- + :returns: :class:`UpdateBudgetConfigurationResponse` \ No newline at end of file diff --git a/docs/account/billing/log_delivery.rst b/docs/account/billing/log_delivery.rst index 93868765d..4f3baef60 100644 --- a/docs/account/billing/log_delivery.rst +++ b/docs/account/billing/log_delivery.rst @@ -53,31 +53,31 @@ ) Create a new log delivery configuration. - + Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you already created a [credential object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket). - + For full details, including the required IAM role policies and bucket policies, see [Deliver and access billable usage logs] or [Configure audit logging]. - + **Note**: There is a limit on the number of log delivery configurations available per account (each limit applies separately to each log type including billable usage and audit logs). You can create a maximum of two enabled account-level delivery configurations (configurations without a workspace filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per workspace for each log type, which means that the same workspace ID can occur in the workspace filter for no more than two delivery configurations per log type. - + You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery configuration](:method:LogDelivery/PatchStatus)). - + [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html - + :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` * Log Delivery Configuration - + :returns: :class:`WrappedLogDeliveryConfiguration` @@ -129,12 +129,12 @@ ) Get log delivery configuration. - + Gets a Databricks log delivery configuration object for an account, both specified by ID. - + :param log_delivery_configuration_id: str The log delivery configuration id of customer - + :returns: :class:`GetLogDeliveryConfigurationResponse` @@ -153,9 +153,9 @@ all = a.log_delivery.list(billing.ListLogDeliveryRequest()) Get all log delivery configurations. - + Gets all Databricks log delivery configurations associated with an account specified by ID. - + :param credentials_id: str (optional) The Credentials id to filter the search results with :param page_token: str (optional) @@ -165,19 +165,19 @@ The log delivery status to filter the search results with :param storage_configuration_id: str (optional) The Storage Configuration id to filter the search results with - + :returns: Iterator over :class:`LogDeliveryConfiguration` .. py:method:: patch_status(log_delivery_configuration_id: str, status: LogDeliveryConfigStatus) Enable or disable log delivery configuration. - + Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under [Create log delivery](:method:LogDelivery/Create). 
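A sketch of the log delivery flow described above: create a configuration, then disable it when no longer needed, since configurations cannot be deleted. The credentials and storage configuration IDs are placeholders for objects created via :method:Credentials/Create and :method:Storage/Create.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import billing

    a = AccountClient()

    created = a.log_delivery.create(
        log_delivery_configuration=billing.CreateLogDeliveryConfigurationParams(
            config_name="sdk-usage-logs",
            credentials_id="<credentials-id>",
            storage_configuration_id="<storage-configuration-id>",
            log_type=billing.LogType.BILLABLE_USAGE,
            output_format=billing.OutputFormat.CSV,
        )
    )

    # Deletion is not supported; disabling is the supported way to retire a configuration.
    a.log_delivery.patch_status(
        log_delivery_configuration_id=created.log_delivery_configuration.config_id,
        status=billing.LogDeliveryConfigStatus.DISABLED,
    )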
- + :param log_delivery_configuration_id: str The log delivery configuration id of customer :param status: :class:`LogDeliveryConfigStatus` @@ -185,6 +185,6 @@ to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. - - + + \ No newline at end of file diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst index 350ef1f08..a316bf232 100644 --- a/docs/account/billing/usage_dashboards.rst +++ b/docs/account/billing/usage_dashboards.rst @@ -11,29 +11,29 @@ .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse Create new usage dashboard. - + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. - + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. - + :returns: :class:`CreateBillingUsageDashboardResponse` .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse Get usage dashboard. - + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. - + :param dashboard_type: :class:`UsageDashboardType` (optional) Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage dashboard shows usage data for all workspaces in the account. :param workspace_id: int (optional) The workspace ID of the workspace in which the usage dashboard is created. - + :returns: :class:`GetBillingUsageDashboardResponse` \ No newline at end of file diff --git a/docs/account/catalog/metastore_assignments.rst b/docs/account/catalog/metastore_assignments.rst index f5b00c6b3..1bfeedca0 100644 --- a/docs/account/catalog/metastore_assignments.rst +++ b/docs/account/catalog/metastore_assignments.rst @@ -9,43 +9,43 @@ .. py:method:: create(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[CreateMetastoreAssignment]]) Assigns a workspace to a metastore. - + Creates an assignment to a metastore for a workspace. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional) - - + + .. py:method:: delete(workspace_id: int, metastore_id: str) Delete a metastore assignment. - + Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID - - + + .. py:method:: get(workspace_id: int) -> AccountsMetastoreAssignment Gets the metastore assignment for a workspace. - + Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mapping will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a 404 is returned. - + :param workspace_id: int Workspace ID. - + :returns: :class:`AccountsMetastoreAssignment` @@ -65,27 +65,27 @@ ws = a.metastore_assignments.list(metastore_id=os.environ["TEST_METASTORE_ID"]) Get all workspaces assigned to a metastore. 
- + Gets a list of all Databricks workspace IDs that have been assigned to a given metastore. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: Iterator over int .. py:method:: update(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[UpdateMetastoreAssignment]]) Updates a metastore assignment to a workspace. - + Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. - + :param workspace_id: int Workspace ID. :param metastore_id: str Unity Catalog metastore ID :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional) - - + + \ No newline at end of file diff --git a/docs/account/catalog/metastores.rst b/docs/account/catalog/metastores.rst index 15f39060d..36df616ea 100644 --- a/docs/account/catalog/metastores.rst +++ b/docs/account/catalog/metastores.rst @@ -10,58 +10,58 @@ .. py:method:: create( [, metastore_info: Optional[CreateMetastore]]) -> AccountsMetastoreInfo Create metastore. - + Creates a Unity Catalog metastore. - + :param metastore_info: :class:`CreateMetastore` (optional) - + :returns: :class:`AccountsMetastoreInfo` .. py:method:: delete(metastore_id: str [, force: Optional[bool]]) Delete a metastore. - + Deletes a Unity Catalog metastore for an account, both specified by ID. - + :param metastore_id: str Unity Catalog metastore ID :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - + + .. py:method:: get(metastore_id: str) -> AccountsMetastoreInfo Get a metastore. - + Gets a Unity Catalog metastore from an account, both specified by ID. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: :class:`AccountsMetastoreInfo` .. py:method:: list() -> Iterator[MetastoreInfo] Get all metastores associated with an account. - + Gets all Unity Catalog metastores associated with an account specified by ID. - + :returns: Iterator over :class:`MetastoreInfo` .. py:method:: update(metastore_id: str [, metastore_info: Optional[UpdateMetastore]]) -> AccountsMetastoreInfo Update a metastore. - + Updates an existing Unity Catalog metastore. - + :param metastore_id: str Unity Catalog metastore ID :param metastore_info: :class:`UpdateMetastore` (optional) - + :returns: :class:`AccountsMetastoreInfo` \ No newline at end of file diff --git a/docs/account/catalog/storage_credentials.rst b/docs/account/catalog/storage_credentials.rst index 453b3a1eb..0b9948015 100644 --- a/docs/account/catalog/storage_credentials.rst +++ b/docs/account/catalog/storage_credentials.rst @@ -9,78 +9,78 @@ .. py:method:: create(metastore_id: str [, credential_info: Optional[CreateStorageCredential]]) -> AccountsStorageCredentialInfo Create a storage credential. - + Creates a new storage credential. The request object is specific to the cloud: - + * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials * **GcpServiceAcountKey** for GCP credentials. - + The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the metastore. - + :param metastore_id: str Unity Catalog metastore ID :param credential_info: :class:`CreateStorageCredential` (optional) - + :returns: :class:`AccountsStorageCredentialInfo` .. py:method:: delete(metastore_id: str, storage_credential_name: str [, force: Optional[bool]]) Delete a storage credential. - + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. 
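A sketch of creating an account-level storage credential with the AWS request object named above; the metastore ID and role ARN are placeholders, and the `AwsIamRoleRequest` dataclass in `databricks.sdk.service.catalog` is an assumption about the current request shape.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import catalog

    a = AccountClient()

    cred = a.storage_credentials.create(
        metastore_id="<metastore-id>",
        credential_info=catalog.CreateStorageCredential(
            name="sdk-example-credential",
            aws_iam_role=catalog.AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/example"),
        ),
    )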
- + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. :param force: bool (optional) Force deletion even if the Storage Credential is not empty. Default is false. - - + + .. py:method:: get(metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo Gets the named storage credential. - + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have a level of privilege on the storage credential. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. - + :returns: :class:`AccountsStorageCredentialInfo` .. py:method:: list(metastore_id: str) -> Iterator[StorageCredentialInfo] Get all storage credentials assigned to a metastore. - + Gets a list of all storage credentials that have been assigned to given metastore. - + :param metastore_id: str Unity Catalog metastore ID - + :returns: Iterator over :class:`StorageCredentialInfo` .. py:method:: update(metastore_id: str, storage_credential_name: str [, credential_info: Optional[UpdateStorageCredential]]) -> AccountsStorageCredentialInfo Updates a storage credential. - + Updates a storage credential on the metastore. The caller must be the owner of the storage credential. If the caller is a metastore admin, only the __owner__ credential can be changed. - + :param metastore_id: str Unity Catalog metastore ID :param storage_credential_name: str Name of the storage credential. :param credential_info: :class:`UpdateStorageCredential` (optional) - + :returns: :class:`AccountsStorageCredentialInfo` \ No newline at end of file diff --git a/docs/account/iam/access_control.rst b/docs/account/iam/access_control.rst index bd3c05ec2..475d28c07 100644 --- a/docs/account/iam/access_control.rst +++ b/docs/account/iam/access_control.rst @@ -11,31 +11,31 @@ .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse Get assignable roles for a resource. - + Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse` .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse Get a rule set. - + Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account. `name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -48,24 +48,24 @@ modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. 
`etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoded a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse` .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse Update a rule set. - + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set. :param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse` \ No newline at end of file diff --git a/docs/account/iam/groups.rst b/docs/account/iam/groups.rst index 5400ede93..d005f7930 100644 --- a/docs/account/iam/groups.rst +++ b/docs/account/iam/groups.rst @@ -6,7 +6,7 @@ Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks account identities can be assigned as members of groups, and members inherit permissions that are assigned to their group. @@ -14,15 +14,15 @@ .. py:method:: create( [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) -> Group Create a new group. - + Creates a group in the Databricks account with a unique name, using the supplied group details. - + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -35,40 +35,40 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - + :returns: :class:`Group` .. py:method:: delete(id: str) Delete a group. - + Deletes a group from the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - - + + .. py:method:: get(id: str) -> Group Get group details. - + Gets the information for a specific group in the Databricks account. - + :param id: str Unique ID for a group in the Databricks account. - + :returns: :class:`Group` .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group] List group details. - + Gets all details of the groups associated with the Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -80,7 +80,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. 
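The etag-based get-modify-write pattern described above, as a sketch; the rule set name is a placeholder, and re-sending the fetched `grant_rules` unchanged stands in for a real modification.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    name = "accounts/<account-id>/groups/<group-id>/ruleSets/default"  # placeholder

    # An empty etag is only valid on GET and means "no freshness requirement".
    rule_set = a.access_control.get_rule_set(name=name, etag="")

    # Passing the fetched etag back lets the service detect concurrent updates.
    a.access_control.update_rule_set(
        name=name,
        rule_set=iam.RuleSetUpdateRequest(name=name, etag=rule_set.etag, grant_rules=rule_set.grant_rules),
    )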
- + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -88,31 +88,31 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`Group` .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]]) Update group details. - + Partially updates the details of a group. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) Replace a group. - + Updates the details of a group by replacing the entire group entity. - + :param id: str Databricks group ID :param display_name: str (optional) @@ -120,7 +120,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -131,6 +131,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - - + + \ No newline at end of file diff --git a/docs/account/iam/service_principals.rst b/docs/account/iam/service_principals.rst index 5075673ed..302cf5f79 100644 --- a/docs/account/iam/service_principals.rst +++ b/docs/account/iam/service_principals.rst @@ -26,9 +26,9 @@ spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}") Create a service principal. - + Creates a new service principal in the Databricks account. - + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -38,7 +38,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -48,20 +48,20 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - + :returns: :class:`ServicePrincipal` .. py:method:: delete(id: str) Delete a service principal. - + Delete a single service principal in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account. - - + + .. py:method:: get(id: str) -> ServicePrincipal @@ -85,12 +85,12 @@ a.service_principals.delete(id=sp_create.id) Get service principal details. - + Gets the details for a single service principal defined in the Databricks account. - + :param id: str Unique ID for a service principal in the Databricks account. 
- + :returns: :class:`ServicePrincipal` @@ -117,9 +117,9 @@ a.service_principals.delete(id=sp_create.id) List service principals. - + Gets the set of service principals associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -131,7 +131,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -139,7 +139,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`ServicePrincipal` @@ -171,16 +171,16 @@ a.service_principals.delete(id=sp_create.id) Update service principal details. - + Partially updates the details of a single service principal in the Databricks account. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) @@ -206,11 +206,11 @@ a.service_principals.delete(id=sp_create.id) Replace service principal. - + Updates the details of a single service principal. - + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -222,7 +222,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -230,6 +230,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - - + + \ No newline at end of file diff --git a/docs/account/iam/users.rst b/docs/account/iam/users.rst index ebfc8baa3..7e527ec45 100644 --- a/docs/account/iam/users.rst +++ b/docs/account/iam/users.rst @@ -5,7 +5,7 @@ .. py:class:: AccountUsersAPI User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in Databricks account and give them the proper level of @@ -36,23 +36,23 @@ a.users.delete(id=user.id) Create a new user. - + Creates a new user in the Databricks account. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. 
For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -66,7 +66,7 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` @@ -91,14 +91,14 @@ a.users.delete(id=user.id) Delete a user. - + Deletes a user. Deleting a user from a Databricks account also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks account. - - + + .. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User @@ -125,9 +125,9 @@ a.users.delete(id=user.id) Get user details. - + Gets information for a specific user in Databricks account. - + :param id: str Unique ID for a user in the Databricks account. :param attributes: str (optional) @@ -141,7 +141,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -150,16 +150,16 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User` .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[User] List users. - + Gets details for all the users associated with a Databricks account. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -171,7 +171,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -180,7 +180,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`User` @@ -218,24 +218,24 @@ a.users.delete(id=user.id) Update user details. - + Partially updates a user resource by applying the supplied operations on specific user attributes. 
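A sketch of the SCIM patch flow described above, assuming `Patch`, `PatchOp`, and `PatchSchema` from `databricks.sdk.service.iam`; the email address is illustrative.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    user = a.users.create(user_name="sdk-example@example.com", display_name="SDK Example")

    # Deactivate the user with a partial update instead of replacing the whole entity.
    a.users.patch(
        id=user.id,
        operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value=False)],
        schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
    )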
- + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]]) Replace a user. - + Replaces a user's information with the data supplied in request. - + :param id: str Databricks user ID. :param active: bool (optional) @@ -244,13 +244,13 @@ String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -262,6 +262,6 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - - + + \ No newline at end of file diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index a9ec2383c..745bd75da 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -10,27 +10,27 @@ .. py:method:: delete(workspace_id: int, principal_id: int) Delete permissions assignment. - + Deletes the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - - + + .. py:method:: get(workspace_id: int) -> WorkspacePermissions List workspace permissions. - + Get an array of workspace permissions for the specified account and workspace. - + :param workspace_id: int The workspace ID. - + :returns: :class:`WorkspacePermissions` @@ -52,12 +52,12 @@ all = a.workspace_assignment.list(workspace_id=workspace_id) Get permission assignments. - + Get the permission assignments for the specified Databricks account and Databricks workspace. - + :param workspace_id: int The workspace ID for the account. - + :returns: Iterator over :class:`PermissionAssignment` @@ -89,10 +89,10 @@ ) Create or update permissions assignment. - + Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. - + :param workspace_id: int The workspace ID. :param principal_id: int @@ -103,6 +103,6 @@ will be ignored. Note that excluding this field, or providing unsupported values, will have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal. 
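A sketch of the create-or-update assignment described above; both IDs are placeholders, and `USER` is one of the documented permission values.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    a.workspace_assignment.update(
        workspace_id=1234567890,  # placeholder workspace ID
        principal_id=9876543210,  # placeholder user, group, or service principal ID
        permissions=[iam.WorkspacePermission.USER],
    )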
- + :returns: :class:`PermissionAssignment` \ No newline at end of file diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst index 7043a343b..5110e70ad 100644 --- a/docs/account/oauth2/custom_app_integration.rst +++ b/docs/account/oauth2/custom_app_integration.rst @@ -10,11 +10,11 @@ .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) -> CreateCustomAppIntegrationOutput Create Custom OAuth App Integration. - + Create Custom OAuth App Integration. - + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param confidential: bool (optional) This field indicates whether an OAuth client secret is required to authenticate this client. :param name: str (optional) @@ -29,54 +29,54 @@ :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. Must be a subset of scopes. - + :returns: :class:`CreateCustomAppIntegrationOutput` .. py:method:: delete(integration_id: str) Delete Custom OAuth App Integration. - + Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param integration_id: str - - + + .. py:method:: get(integration_id: str) -> GetCustomAppIntegrationOutput Get OAuth Custom App Integration. - + Gets the Custom OAuth App Integration for the given integration id. - + :param integration_id: str The OAuth app integration ID. - + :returns: :class:`GetCustomAppIntegrationOutput` .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput] Get custom oauth app integrations. - + Get the list of custom OAuth app integrations for the specified Databricks account - + :param include_creator_username: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetCustomAppIntegrationOutput` .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) Updates Custom OAuth App Integration. - + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - + :param integration_id: str :param redirect_urls: List[str] (optional) List of OAuth redirect urls to be updated in the custom OAuth app integration @@ -88,6 +88,6 @@ :param user_authorized_scopes: List[str] (optional) Scopes that will need to be consented by end user to mint the access token. If the user does not authorize the access token will not be minted. Must be a subset of scopes. - - + + \ No newline at end of file diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst index 9eed9396b..a8957e5f2 100644 --- a/docs/account/oauth2/federation_policy.rst +++ b/docs/account/oauth2/federation_policy.rst @@ -5,20 +5,20 @@ .. py:class:: AccountFederationPolicyAPI These APIs manage account federation policies. 
- + Account federation policies allow users and service principals in your Databricks account to securely access Databricks APIs using tokens from your trusted identity providers (IdPs). - + With token federation, your users and service principals can exchange tokens from your IdP for Databricks OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage Databricks secrets, and allows you to centralize management of token issuance policies in your IdP. Databricks token federation is typically used in combination with [SCIM], so users in your IdP are synchronized into your Databricks account. - + Token federation is configured in your Databricks account using an account federation policy. An account federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from * how to determine which Databricks user, or subject, a token is issued for - + To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to @@ -29,68 +29,68 @@ public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for discovering public keys. - + An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"] subject_claim: "sub" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks as user `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub": "username@mycompany.com" } ``` - + You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if your users do not already have the ability to generate tokens that are compatible with your federation policy. - + You do not need to configure an OAuth application in Databricks to use token federation. - + [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html .. py:method:: create(policy: FederationPolicy [, policy_id: Optional[str]]) -> FederationPolicy Create account federation policy. - + :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` .. py:method:: delete(policy_id: str) Delete account federation policy. - + :param policy_id: str The identifier for the federation policy. - - + + .. py:method:: get(policy_id: str) -> FederationPolicy Get account federation policy. - + :param policy_id: str The identifier for the federation policy. - + :returns: :class:`FederationPolicy` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] List account federation policies. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` .. py:method:: update(policy_id: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy Update account federation policy. 
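A sketch mirroring the example policy above, assuming the `FederationPolicy` and `OidcFederationPolicy` dataclasses in `databricks.sdk.service.oauth2` carry the issuer, audiences, and subject-claim fields; the issuer URL is a placeholder for your IdP.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import oauth2

    a = AccountClient()

    policy = a.federation_policy.create(
        policy=oauth2.FederationPolicy(
            oidc_policy=oauth2.OidcFederationPolicy(
                issuer="https://idp.mycompany.com/oidc",
                audiences=["databricks"],
                subject_claim="sub",
            )
        )
    )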
- + :param policy_id: str The identifier for the federation policy. :param policy: :class:`FederationPolicy` @@ -100,6 +100,6 @@ should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. - + :returns: :class:`FederationPolicy` \ No newline at end of file diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst index 18c07c326..873d8a650 100644 --- a/docs/account/oauth2/o_auth_published_apps.rst +++ b/docs/account/oauth2/o_auth_published_apps.rst @@ -11,13 +11,13 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PublishedAppOutput] Get all the published OAuth apps. - + Get all the available published OAuth apps in Databricks. - + :param page_size: int (optional) The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. - + :returns: Iterator over :class:`PublishedAppOutput` \ No newline at end of file diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst index f59f2c4aa..fd61c58fa 100644 --- a/docs/account/oauth2/published_app_integration.rst +++ b/docs/account/oauth2/published_app_integration.rst @@ -10,64 +10,64 @@ .. py:method:: create( [, app_id: Optional[str], token_access_policy: Optional[TokenAccessPolicy]]) -> CreatePublishedAppIntegrationOutput Create Published OAuth App Integration. - + Create Published OAuth App Integration. - + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. - + :param app_id: str (optional) App id of the OAuth published app integration. For example power-bi, tableau-desktop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy - + :returns: :class:`CreatePublishedAppIntegrationOutput` .. py:method:: delete(integration_id: str) Delete Published OAuth App Integration. - + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. - + :param integration_id: str - - + + .. py:method:: get(integration_id: str) -> GetPublishedAppIntegrationOutput Get OAuth Published App Integration. - + Gets the Published OAuth App Integration for the given integration id. - + :param integration_id: str - + :returns: :class:`GetPublishedAppIntegrationOutput` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput] Get published oauth app integrations. - + Get the list of published OAuth app integrations for the specified Databricks account - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` .. py:method:: update(integration_id: str [, token_access_policy: Optional[TokenAccessPolicy]]) Updates Published OAuth App Integration. - + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. 
- + :param integration_id: str :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy to be updated in the published OAuth app integration - - + + \ No newline at end of file diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst index 2f6292687..f3335d87a 100644 --- a/docs/account/oauth2/service_principal_federation_policy.rst +++ b/docs/account/oauth2/service_principal_federation_policy.rst @@ -5,22 +5,22 @@ .. py:class:: ServicePrincipalFederationPolicyAPI These APIs manage service principal federation policies. - + Service principal federation, also known as Workload Identity Federation, allows your automated workloads running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets. With Workload Identity Federation, your application (or workload) authenticates to Databricks as a Databricks service principal, using tokens provided by the workload runtime. - + Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever possible. Workload Identity Federation is supported by many popular services, including Github Actions, Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others. - + Workload identity federation is configured in your Databricks account using a service principal federation policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the Databricks service principal - + To configure a federation policy, you provide the following: * The required token __issuer__, as specified in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the workload identity provider. * The required token __subject__, as specified in the “sub” claim of @@ -32,73 +32,73 @@ of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on the issuer’s well known endpoint for discovering public keys. - + An example service principal federation policy, for a Github Actions workload, is: ``` issuer: "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject: "repo:my-github-org/my-repo:environment:prod" ``` - + An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ``` { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub": "repo:my-github-org/my-repo:environment:prod" } ``` - + You may also need to configure the workload runtime to generate tokens for your workloads. - + You do not need to configure an OAuth application in Databricks to use token federation. .. py:method:: create(service_principal_id: int, policy: FederationPolicy [, policy_id: Optional[str]]) -> FederationPolicy Create service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy: :class:`FederationPolicy` :param policy_id: str (optional) The identifier for the federation policy. The identifier must contain only lowercase alphanumeric characters, numbers, hyphens, and slashes. 
If unspecified, the id will be assigned by Databricks. - + :returns: :class:`FederationPolicy` .. py:method:: delete(service_principal_id: int, policy_id: str) Delete service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. - - + + .. py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy Get service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str The identifier for the federation policy. - + :returns: :class:`FederationPolicy` .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] List service principal federation policies. - + :param service_principal_id: int The service principal id for the federation policy. :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` .. py:method:: update(service_principal_id: int, policy_id: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy Update service principal federation policy. - + :param service_principal_id: int The service principal id for the federation policy. :param policy_id: str @@ -110,6 +110,6 @@ should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'description,oidc_policy.audiences'. - + :returns: :class:`FederationPolicy` \ No newline at end of file diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst index d72b4e8f7..01965a19a 100644 --- a/docs/account/oauth2/service_principal_secrets.rst +++ b/docs/account/oauth2/service_principal_secrets.rst @@ -5,53 +5,54 @@ .. py:class:: ServicePrincipalSecretsAPI These APIs enable administrators to manage service principal secrets. - + You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using OAuth tokens for service principals], - + In addition, the generated secrets can be used to configure the Databricks Terraform Provider to authenticate with the service principal. For more information, see [Databricks Terraform Provider]. - + [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal + .. py:method:: create(service_principal_id: int [, lifetime: Optional[str]]) -> CreateServicePrincipalSecretResponse Create service principal secret. - + Create a secret for the given service principal. - + :param service_principal_id: int The service principal ID. :param lifetime: str (optional) The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a default lifetime of 730 days (63072000s). - + :returns: :class:`CreateServicePrincipalSecretResponse` .. py:method:: delete(service_principal_id: int, secret_id: str) Delete service principal secret. - + Delete a secret from the given service principal. - + :param service_principal_id: int The service principal ID. 
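A sketch of minting and listing service principal secrets per the methods above; the service principal ID is a placeholder, and `lifetime` uses the documented `<seconds>s` string format.

    from databricks.sdk import AccountClient

    a = AccountClient()

    secret = a.service_principal_secrets.create(service_principal_id=12345, lifetime="3600s")

    # Listing returns metadata only; secret values are never included.
    for s in a.service_principal_secrets.list(service_principal_id=12345):
        print(s.id, s.status)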
:param secret_id: str The secret ID. - - + + .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo] List service principal secrets. - + List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the secret values. - + :param service_principal_id: int The service principal ID. :param page_token: str (optional) @@ -61,6 +62,6 @@ previous request. To list all of the secrets for a service principal, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete. - + :returns: Iterator over :class:`SecretInfo` \ No newline at end of file diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst index 75abd62d4..e307588f1 100644 --- a/docs/account/provisioning/credentials.rst +++ b/docs/account/provisioning/credentials.rst @@ -35,38 +35,38 @@ a.credentials.delete(credentials_id=creds.credentials_id) Create credential configuration. - + Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account ID) in the returned credential object, and configure the required access policy. - + Save the response's `credentials_id` field, which is the ID for your new credential configuration object. - + For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param credentials_name: str The human-readable name of the credential configuration object. :param aws_credentials: :class:`CreateCredentialAwsCredentials` - + :returns: :class:`Credential` .. py:method:: delete(credentials_id: str) Delete credential configuration. - + Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any workspace. - + :param credentials_id: str Databricks Account API credential configuration ID - - + + .. py:method:: get(credentials_id: str) -> Credential @@ -97,12 +97,12 @@ a.credentials.delete(credentials_id=role.credentials_id) Get credential configuration. - + Gets a Databricks credential configuration object for an account, both specified by ID. - + :param credentials_id: str Databricks Account API credential configuration ID - + :returns: :class:`Credential` @@ -120,8 +120,8 @@ configs = a.credentials.list() Get all credential configurations. - + Gets all Databricks credential configurations associated with an account specified by ID. - + :returns: Iterator over :class:`Credential` \ No newline at end of file diff --git a/docs/account/provisioning/encryption_keys.rst b/docs/account/provisioning/encryption_keys.rst index baf076a8a..1c00a2914 100644 --- a/docs/account/provisioning/encryption_keys.rst +++ b/docs/account/provisioning/encryption_keys.rst @@ -7,11 +7,11 @@ These APIs manage encryption key configurations for this workspace (optional). A key configuration encapsulates the AWS KMS key information and some information about how the key configuration can be used. 
There are two possible uses for key configurations: - + * Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can be used to encrypt a workspace's DBFS and EBS data in the data plane. - + In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview feature is available if your account is on the E2 version of the platform. Updating a running workspace with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you @@ -44,7 +44,7 @@ a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) Create encryption key configuration. - + Creates a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks @@ -52,32 +52,32 @@ specified as a workspace's customer-managed key for workspace storage, the key encrypts the workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume data. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions that currently support creation of Databricks workspaces. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :param use_cases: List[:class:`KeyUseCase`] The cases that the key can be used for. :param aws_key_info: :class:`CreateAwsKeyInfo` (optional) :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional) - + :returns: :class:`CustomerManagedKey` .. py:method:: delete(customer_managed_key_id: str) Delete encryption key configuration. - + Deletes a customer-managed key configuration object for an account. You cannot delete a configuration that is associated with a running workspace. - + :param customer_managed_key_id: str Databricks encryption key configuration ID. - - + + .. py:method:: get(customer_managed_key_id: str) -> CustomerManagedKey @@ -108,7 +108,7 @@ a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id) Get encryption key configuration. - + Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks @@ -116,15 +116,15 @@ specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume data. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions. - + This operation is available only if your account is on the E2 version of the platform. - + :param customer_managed_key_id: str Databricks encryption key configuration ID. - + :returns: :class:`CustomerManagedKey` @@ -142,17 +142,17 @@ all = a.encryption_keys.list() Get all encryption key configurations. - + Gets all customer-managed key configuration objects for an account. 
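A sketch of registering a customer-managed key for managed services as described above; the KMS key ARN and alias are placeholders.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import provisioning

    a = AccountClient()

    key = a.encryption_keys.create(
        use_cases=[provisioning.KeyUseCase.MANAGED_SERVICES],
        aws_key_info=provisioning.CreateAwsKeyInfo(
            key_arn="arn:aws:kms:us-west-2:123456789012:key/<key-id>",  # placeholder ARN
            key_alias="alias/example-key",  # placeholder alias
        ),
    )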
If the key is specified as a workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history. If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane. - + **Important**: Customer-managed keys are supported only for some deployment types, subscription types, and AWS regions. - + This operation is available only if your account is on the E2 version of the platform. - + :returns: Iterator over :class:`CustomerManagedKey` \ No newline at end of file diff --git a/docs/account/provisioning/networks.rst b/docs/account/provisioning/networks.rst index 8ac512a72..46bccd872 100644 --- a/docs/account/provisioning/networks.rst +++ b/docs/account/provisioning/networks.rst @@ -28,10 +28,10 @@ ) Create network configuration. - + Creates a Databricks network configuration that represents a VPC and its resources. The VPC will be used for new Databricks clusters. This requires a pre-existing VPC and subnets. - + :param network_name: str The human-readable name of the network configuration. :param gcp_network_info: :class:`GcpNetworkInfo` (optional) @@ -46,28 +46,28 @@ :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional) If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink/ :param vpc_id: str (optional) The ID of the VPC associated with this network. VPC IDs can be used in multiple network configurations. - + :returns: :class:`Network` .. py:method:: delete(network_id: str) Delete a network configuration. - + Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. - + This operation is available only if your account is on the E2 version of the platform. - + :param network_id: str Databricks Account API network configuration ID. - - + + .. py:method:: get(network_id: str) -> Network @@ -93,12 +93,12 @@ by_id = a.networks.get(network_id=netw.network_id) Get a network configuration. - + Gets a Databricks network configuration, which represents a cloud VPC and its resources. - + :param network_id: str Databricks Account API network configuration ID. - + :returns: :class:`Network` @@ -116,10 +116,10 @@ configs = a.networks.list() Get all network configurations. - + Gets a list of all Databricks network configurations for an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform. - + :returns: Iterator over :class:`Network` \ No newline at end of file diff --git a/docs/account/provisioning/private_access.rst b/docs/account/provisioning/private_access.rst index 5a1266de8..e30ed2585 100644 --- a/docs/account/provisioning/private_access.rst +++ b/docs/account/provisioning/private_access.rst @@ -29,20 +29,20 @@ a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Create private access settings. - + Creates a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property.
- + You can share one private access settings object with multiple workspaces in a single account. However, private access settings are specific to AWS regions, so only workspaces in the same AWS region can use a given private access settings object. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_name: str The human-readable name of the private access settings object. :param region: str @@ -51,14 +51,14 @@ An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. - + Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints in your account that can connect to your workspace over AWS PrivateLink. - + If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this control only works for PrivateLink connections. To control how your workspace is accessed via public internet, see [IP access lists]. - + [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) The private access level controls which VPC endpoints can connect to the UI or API of any workspace @@ -70,26 +70,26 @@ Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. - + :returns: :class:`PrivateAccessSettings` .. py:method:: delete(private_access_settings_id: str) Delete a private access settings object. - + Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. - - + + .. py:method:: get(private_access_settings_id: str) -> PrivateAccessSettings @@ -117,18 +117,18 @@ a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Get a private access settings object. - + Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. - + :returns: :class:`PrivateAccessSettings` @@ -146,9 +146,9 @@ all = a.private_access.list() Get all private access settings objects. - + Gets a list of all private access settings objects for an account, specified by ID.
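A minimal sketch of the list call described above, assuming a configured `AccountClient` named `a`:

.. code-block:: python

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Enumerate every private access settings object in the account.
    for pas in a.private_access.list():
        print(pas.private_access_settings_id, pas.region)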
- + :returns: Iterator over :class:`PrivateAccessSettings` @@ -181,25 +181,25 @@ a.private_access.delete(private_access_settings_id=created.private_access_settings_id) Replace private access settings. - + Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object referenced by ID in the workspace's `private_access_settings_id` property. - + This operation completely overwrites your existing private access settings object attached to your workspaces. All workspaces attached to the private access settings are affected by any change. If `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of these changes might take several minutes to propagate to the workspace API. - + You can share one private access settings object with multiple workspaces in a single account. However, private access settings are specific to AWS regions, so only workspaces in the same AWS region can use a given private access settings object. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param private_access_settings_id: str Databricks Account API private access settings ID. :param private_access_settings_name: str @@ -210,14 +210,14 @@ An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in AWS. - + Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints in your account that can connect to your workspace over AWS PrivateLink. - + If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this control only works for PrivateLink connections. To control how your workspace is accessed via public internet, see [IP access lists]. - + [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html :param private_access_level: :class:`PrivateAccessLevel` (optional) The private access level controls which VPC endpoints can connect to the UI or API of any workspace @@ -229,6 +229,6 @@ Determines if the workspace can be accessed over public internet. For fully private workspaces, you can optionally specify `false`, but only if you implement both the front-end and the back-end PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled. - - + + \ No newline at end of file diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index baf760a82..010795885 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -32,36 +32,36 @@ a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) Create new storage configuration. - + Creates a new storage configuration for an account, specified by ID. Uploads a storage configuration object that represents the root AWS S3 bucket in your account. Databricks stores related workspace assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the required bucket policy.
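A hedged sketch of this create call, assuming a configured `AccountClient`; the `RootBucketInfo` field name `bucket_name` and all values are illustrative assumptions:

.. code-block:: python

    from databricks.sdk import AccountClient
    from databricks.sdk.service.provisioning import RootBucketInfo

    a = AccountClient()

    # Register the root S3 bucket as a storage configuration.
    bucket = a.storage.create(
        storage_configuration_name="main-storage",
        root_bucket_info=RootBucketInfo(bucket_name="my-root-bucket"),  # assumed field name
    )
    print(bucket.storage_configuration_id)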
- + For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param storage_configuration_name: str The human-readable name of the storage configuration. :param root_bucket_info: :class:`RootBucketInfo` Root S3 bucket information. - + :returns: :class:`StorageConfiguration` .. py:method:: delete(storage_configuration_id: str) Delete storage configuration. - + Deletes a Databricks storage configuration. You cannot delete a storage configuration that is associated with any workspace. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - - + + .. py:method:: get(storage_configuration_id: str) -> StorageConfiguration @@ -86,12 +86,12 @@ by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id) Get storage configuration. - + Gets a Databricks storage configuration for an account, both specified by ID. - + :param storage_configuration_id: str Databricks Account API storage configuration ID. - + :returns: :class:`StorageConfiguration` @@ -109,8 +109,8 @@ configs = a.storage.list() Get all storage configurations. - + Gets a list of all Databricks storage configurations for your account, specified by ID. - + :returns: Iterator over :class:`StorageConfiguration` \ No newline at end of file diff --git a/docs/account/provisioning/vpc_endpoints.rst b/docs/account/provisioning/vpc_endpoints.rst index 5d050ca63..fecfbec5d 100644 --- a/docs/account/provisioning/vpc_endpoints.rst +++ b/docs/account/provisioning/vpc_endpoints.rst @@ -30,20 +30,20 @@ a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) Create VPC endpoint configuration. - + Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically accepts the VPC endpoint. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html - + :param vpc_endpoint_name: str The human-readable name of the VPC endpoint configuration. :param aws_vpc_endpoint_id: str (optional) @@ -52,27 +52,27 @@ The Google Cloud specific information for this Private Service Connect endpoint. :param region: str (optional) The AWS region in which this VPC endpoint object exists. - + :returns: :class:`VpcEndpoint` .. py:method:: delete(vpc_endpoint_id: str) Delete VPC endpoint configuration. - + Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink].
- + [AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - - + + .. py:method:: get(vpc_endpoint_id: str) -> VpcEndpoint @@ -101,16 +101,16 @@ a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id) Get a VPC endpoint configuration. - + Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - + :param vpc_endpoint_id: str Databricks VPC endpoint ID. - + :returns: :class:`VpcEndpoint` @@ -128,12 +128,12 @@ all = a.vpc_endpoints.list() Get all VPC endpoint configurations. - + Gets a list of all VPC endpoints for an account, specified by ID. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html - + :returns: Iterator over :class:`VpcEndpoint` \ No newline at end of file diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst index 6962dcdcd..26ec685e5 100644 --- a/docs/account/provisioning/workspaces.rst +++ b/docs/account/provisioning/workspaces.rst @@ -7,7 +7,7 @@ These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into folders, and provides access to data and computational resources such as clusters and jobs. - + These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. @@ -51,16 +51,16 @@ a.workspaces.delete(workspace_id=waiter.workspace_id) Create a new workspace. - + Creates a new workspace. - + **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request has been accepted and is in progress, but does not mean that the workspace deployed successfully and is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + :param workspace_name: str The workspace's human-readable name. :param aws_region: str (optional) @@ -82,22 +82,22 @@ deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the set of characters that are allowed in a subdomain. - + To set this value, you must have a deployment name prefix. Contact your Databricks account team to add an account deployment name prefix to your account. - + Workspace deployment names follow the account prefix and a hyphen. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be `acme-workspace-1.cloud.databricks.com`. 
- + You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment name to only include the deployment prefix. For example, if your account's deployment prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and the workspace URL is `acme.cloud.databricks.com`. - + This value must be unique across all non-deleted deployments across all AWS regions. - + If a new workspace omits this property, the server generates a unique deployment name for you with the pattern `dbc-xxxxxxxx-xxxx`. :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional) @@ -105,19 +105,19 @@ is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP range configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks detects an IP range overlap. - + Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`, `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`. - + The sizes of these IP ranges affect the maximum number of nodes for the workspace. - + **Important**: Confirm the IP ranges used by your Databricks workspace before creating the workspace. You cannot change them after your workspace is deployed. If the IP address ranges for your Databricks workspace are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To determine the address range sizes that you need, Databricks provides a calculator as a Microsoft Excel spreadsheet. See [calculate subnet sizes for a new workspace]. - + [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html :param gke_config: :class:`GkeConfig` (optional) The configurations for the GKE cluster of a Databricks workspace. @@ -132,15 +132,15 @@ :param network_id: str (optional) :param pricing_tier: :class:`PricingTier` (optional) The pricing tier of the workspace. For pricing tier information, see [AWS Pricing]. - + [AWS Pricing]: https://databricks.com/product/aws-pricing :param private_access_settings_id: str (optional) ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection), back-end (data plane to control plane connection), or both connection types. - + Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - + [AWS PrivateLink]: https://aws.amazon.com/privatelink/ [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html :param storage_configuration_id: str (optional) @@ -149,7 +149,7 @@ The ID of the workspace's storage encryption key configuration object. This is used to encrypt the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The provided key configuration object property `use_cases` must contain `STORAGE`. - + :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. @@ -161,18 +161,18 @@ .. py:method:: delete(workspace_id: int) Delete a workspace. - + Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
However, it might take a few minutes for all workspace resources to be deleted, depending on the size and number of workspace resources. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :param workspace_id: int Workspace ID. - - + + .. py:method:: get(workspace_id: int) -> Workspace @@ -191,23 +191,23 @@ by_id = a.workspaces.get(workspace_id=created.workspace_id) Get a workspace. - + Gets information including status for a Databricks workspace, specified by ID. In the response, the `workspace_status` field indicates the current status. After initial workspace creation (which is asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace becomes available when the status changes to `RUNNING`. - + For information about how to create a new workspace with this API **including error handling**, see [Create a new workspace using the Account API]. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. - + :returns: :class:`Workspace` @@ -225,12 +225,12 @@ all = a.workspaces.list() Get all workspaces. - + Gets a list of all workspaces associated with an account, specified by ID. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + :returns: Iterator over :class:`Workspace` @@ -267,10 +267,10 @@ a.credentials.delete(credentials_id=update_role.credentials_id) Update workspace configuration. - + Updates a workspace configuration for either a running workspace or a failed workspace. The elements that can be updated vary between these two use cases. - + ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace deployment for some fields, but not all fields. For a failed workspace, this request supports updates to the following fields only: - Credential configuration ID - Storage configuration ID - Network @@ -292,14 +292,14 @@ update the network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from the workspace once attached; you can only switch to another one. - + After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` requests with the workspace ID and check the workspace status. The workspace is successful if the status changes to `RUNNING`. - + For information about how to create a new workspace with this API **including error handling**, see [Create a new workspace using the Account API]. - + ### Update a running workspace You can update a Databricks workspace configuration for running workspaces for some fields, but not all fields. For a running workspace, this request supports updating the following fields only: - Credential configuration ID - Network configuration ID. Used @@ -325,12 +325,12 @@ network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources.
You cannot remove a network connectivity configuration from the workspace once attached; you can only switch to another one. - + **Important**: To update a running workspace, your workspace must have no running compute resources that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not terminate all cluster instances in the workspace before calling this API, the request will fail. - + ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace configuration, make repeated `GET` requests with the workspace ID and check the workspace status and the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes @@ -346,22 +346,22 @@ silently to its original configuration. After the workspace has been updated, you cannot use or create clusters for another 20 minutes. If you create or use clusters before this time interval elapses, clusters might not launch successfully, might fail, or could cause other unexpected behavior. - + If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20 minute wait. - + **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment types and subscription types. If you have questions about availability, contact your Databricks representative. - + This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account. - + [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html - + :param workspace_id: int Workspace ID. :param aws_region: str (optional) @@ -391,7 +391,7 @@ :param storage_customer_managed_key_id: str (optional) The ID of the key configuration object for workspace storage. This parameter is available for updating both failed and running workspaces. - + :returns: Long-running operation waiter for :class:`Workspace`. See :method:wait_get_workspace_running for more details. diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst index 885aae89f..a2b8cb91a 100644 --- a/docs/account/settings/csp_enablement_account.rst +++ b/docs/account/settings/csp_enablement_account.rst @@ -7,32 +7,32 @@ The compliance security profile settings at the account level control whether to enable it for new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable the compliance security profile individually for each workspace. - + This setting can be disabled so that new workspaces do not have compliance security profile enabled by default. .. py:method:: get( [, etag: Optional[str]]) -> CspEnablementAccountSetting Get the compliance security profile setting for new workspaces. - + Gets the compliance security profile setting for new workspaces. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided.
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`CspEnablementAccountSetting` .. py:method:: update(allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str) -> CspEnablementAccountSetting Update the compliance security profile setting for new workspaces. - + Updates the value of the compliance security profile setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`CspEnablementAccountSetting` @@ -42,10 +42,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`CspEnablementAccountSetting` \ No newline at end of file diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst index b10d7e2dc..212e3f98e 100644 --- a/docs/account/settings/disable_legacy_features.rst +++ b/docs/account/settings/disable_legacy_features.rst @@ -5,7 +5,7 @@ .. py:class:: DisableLegacyFeaturesAPI Disable legacy features for new Databricks workspaces. - + For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions prior to 13.3LTS. @@ -13,41 +13,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse Delete the disable legacy features setting. - + Deletes the disable legacy features setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyFeaturesResponse` .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures Get the disable legacy features setting. - + Gets the value of the disable legacy features setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyFeatures` .. 
py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures Update the disable legacy features setting. - + Updates the value of the disable legacy features setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyFeatures` @@ -57,10 +57,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyFeatures` \ No newline at end of file diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst index 9485b7332..b570b2e37 100644 --- a/docs/account/settings/enable_ip_access_lists.rst +++ b/docs/account/settings/enable_ip_access_lists.rst @@ -10,41 +10,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse Delete the account IP access toggle setting. - + Reverts the value of the account IP access toggle setting to default (ON) - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAccountIpAccessEnableResponse` .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable Get the account IP access toggle setting. - + Gets the value of the account IP access toggle setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AccountIpAccessEnable` .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable Update the account IP access toggle setting. - + Updates the value of the account IP access toggle setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AccountIpAccessEnable` @@ -54,10 +54,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`AccountIpAccessEnable` \ No newline at end of file diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst index e9359d907..e14d1a71f 100644 --- a/docs/account/settings/esm_enablement_account.rst +++ b/docs/account/settings/esm_enablement_account.rst @@ -11,25 +11,25 @@ .. py:method:: get( [, etag: Optional[str]]) -> EsmEnablementAccountSetting Get the enhanced security monitoring setting for new workspaces. - + Gets the enhanced security monitoring setting for new workspaces. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`EsmEnablementAccountSetting` .. py:method:: update(allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str) -> EsmEnablementAccountSetting Update the enhanced security monitoring setting for new workspaces. - + Updates the value of the enhanced security monitoring setting for new workspaces. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EsmEnablementAccountSetting` @@ -39,10 +39,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EsmEnablementAccountSetting` \ No newline at end of file diff --git a/docs/account/settings/ip_access_lists.rst b/docs/account/settings/ip_access_lists.rst index 7718d0c54..031354b15 100644 --- a/docs/account/settings/ip_access_lists.rst +++ b/docs/account/settings/ip_access_lists.rst @@ -6,92 +6,92 @@ The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console. - + Account IP Access Lists affect web application access and REST API access to the account console and account APIs. If the feature is disabled for the account, all access is allowed for this account. There is support for allow lists (inclusion) and block lists (exclusion). - + When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the account, the connection is allowed only if the IP address matches an allow list. If there are no allow lists for the account, all IP addresses are allowed. - + For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the account-level IP access lists, it can take a few minutes for changes to take effect. .. 
py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse Create access list. - + Creates an IP access list for the account. - + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` .. py:method:: delete(ip_access_list_id: str) Delete access list. - + Deletes an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + .. py:method:: get(ip_access_list_id: str) -> GetIpAccessListResponse Get IP access list. - + Gets an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`GetIpAccessListResponse` .. py:method:: list() -> Iterator[IpAccessListInfo] Get access lists. - + Gets all IP access lists for the specified account. - + :returns: Iterator over :class:`IpAccessListInfo` .. py:method:: replace(ip_access_list_id: str, label: str, list_type: ListType, enabled: bool [, ip_addresses: Optional[List[str]]]) Replace access list. - + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -99,41 +99,41 @@ `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) - - + + .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]]) Update access list. - + Updates an existing IP access list, specified by its ID. 
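As a concrete sketch of the create and update calls in this file, assuming a configured `AccountClient`; the response shape `created.ip_access_list.list_id` and all values are illustrative assumptions:

.. code-block:: python

    from databricks.sdk import AccountClient
    from databricks.sdk.service.settings import ListType

    a = AccountClient()

    # Create an allow list for the account console (CIDR value is illustrative).
    created = a.ip_access_lists.create(
        label="office",
        list_type=ListType.ALLOW,
        ip_addresses=["192.168.100.0/22"],
    )

    # Later, disable the list without replacing its contents.
    a.ip_access_lists.update(
        ip_access_list_id=created.ip_access_list.list_id,  # assumed response shape
        enabled=False,
    )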
- + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -143,9 +143,9 @@ Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. - - + + \ No newline at end of file diff --git a/docs/account/settings/llm_proxy_partner_powered_account.rst b/docs/account/settings/llm_proxy_partner_powered_account.rst index f0affb743..fe5a55183 100644 --- a/docs/account/settings/llm_proxy_partner_powered_account.rst +++ b/docs/account/settings/llm_proxy_partner_powered_account.rst @@ -9,25 +9,25 @@ .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredAccount Get the enable partner powered AI features account setting. - + Gets the enable partner powered AI features account setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredAccount` .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredAccount, field_mask: str) -> LlmProxyPartnerPoweredAccount Update the enable partner powered AI features account setting. - + Updates the enable partner powered AI features account setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredAccount` @@ -37,10 +37,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredAccount` \ No newline at end of file diff --git a/docs/account/settings/llm_proxy_partner_powered_enforce.rst b/docs/account/settings/llm_proxy_partner_powered_enforce.rst index ae07c81db..084b744e0 100644 --- a/docs/account/settings/llm_proxy_partner_powered_enforce.rst +++ b/docs/account/settings/llm_proxy_partner_powered_enforce.rst @@ -10,25 +10,25 @@ .. 
py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredEnforce Get the enforcement status of partner powered AI features account setting. - + Gets the enforcement status of partner powered AI features account setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredEnforce, field_mask: str) -> LlmProxyPartnerPoweredEnforce Update the enforcement status of partner powered AI features account setting. - + Updates the enforcement status of partner powered AI features account setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredEnforce` @@ -38,10 +38,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredEnforce` \ No newline at end of file diff --git a/docs/account/settings/network_connectivity.rst b/docs/account/settings/network_connectivity.rst index 88f52ea0f..8b3a9d704 100644 --- a/docs/account/settings/network_connectivity.rst +++ b/docs/account/settings/network_connectivity.rst @@ -9,142 +9,143 @@ your Azure Storage accounts to allow access from Databricks. You can also use the API to provision private endpoints for Databricks to privately connect serverless compute resources to your Azure resources using Azure Private Link. See [configure serverless secure connectivity]. - + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security + .. py:method:: create_network_connectivity_configuration(network_connectivity_config: CreateNetworkConnectivityConfiguration) -> NetworkConnectivityConfiguration Create a network connectivity configuration. - + Creates a network connectivity configuration (NCC), which provides stable Azure service subnets when accessing your Azure Storage accounts. You can also use a network connectivity configuration to create Databricks managed private endpoints so that Databricks serverless compute resources privately access your resources. - + **IMPORTANT**: After you create the network connectivity configuration, you must assign one or more workspaces to the new network connectivity configuration. You can share one network connectivity configuration with multiple workspaces from the same Azure region within the same Databricks account. See [configure serverless secure connectivity].
- + [configure serverless secure connectivity]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security - + :param network_connectivity_config: :class:`CreateNetworkConnectivityConfiguration` Properties of the new network connectivity configuration. - + :returns: :class:`NetworkConnectivityConfiguration` .. py:method:: create_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule: CreatePrivateEndpointRule) -> NccPrivateEndpointRule Create a private endpoint rule. - + Create a private endpoint rule for the specified network connectivity config object. Once the object is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure resource. - + **IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to complete the connection. To get the information of the private endpoint created, make a `GET` request on the new private endpoint rule. See [serverless private link]. - + [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule: :class:`CreatePrivateEndpointRule` Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure portal after initialization. - + :returns: :class:`NccPrivateEndpointRule` .. py:method:: delete_network_connectivity_configuration(network_connectivity_config_id: str) Delete a network connectivity configuration. - + Deletes a network connectivity configuration. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. - - + + .. py:method:: delete_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccPrivateEndpointRule Delete a private endpoint rule. - + Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is set to `true` and the private endpoint is not available to your serverless compute resources. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule_id: str Your private endpoint rule ID. - + :returns: :class:`NccPrivateEndpointRule` .. py:method:: get_network_connectivity_configuration(network_connectivity_config_id: str) -> NetworkConnectivityConfiguration Get a network connectivity configuration. - + Gets a network connectivity configuration. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. - + :returns: :class:`NetworkConnectivityConfiguration` .. py:method:: get_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccPrivateEndpointRule Gets a private endpoint rule. - + Gets the private endpoint rule. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param private_endpoint_rule_id: str Your private endpoint rule ID. - + :returns: :class:`NccPrivateEndpointRule` .. py:method:: list_network_connectivity_configurations( [, page_token: Optional[str]]) -> Iterator[NetworkConnectivityConfiguration] List network connectivity configurations. - + Gets an array of network connectivity configurations.
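A minimal sketch of this list call, assuming a configured `AccountClient`; the `network_connectivity_config_id` field name is inferred from the parameter naming in this file:

.. code-block:: python

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Page through every network connectivity configuration in the account.
    for ncc in a.network_connectivity.list_network_connectivity_configurations():
        print(ncc.network_connectivity_config_id)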
- + :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`NetworkConnectivityConfiguration` .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_token: Optional[str]]) -> Iterator[NccPrivateEndpointRule] List private endpoint rules. - + Gets an array of private endpoint rules. - + :param network_connectivity_config_id: str Your Network Connectivity Configuration ID. :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`NccPrivateEndpointRule` .. py:method:: update_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str, private_endpoint_rule: UpdatePrivateEndpointRule, update_mask: str) -> NccPrivateEndpointRule Update a private endpoint rule. - + Updates a private endpoint rule. Currently, only private endpoint rules to customer-managed resources can be updated. - + :param network_connectivity_config_id: str The ID of a network connectivity configuration, which is the parent resource of this private endpoint rule object. @@ -159,6 +160,6 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + :returns: :class:`NccPrivateEndpointRule` \ No newline at end of file diff --git a/docs/account/settings/network_policies.rst b/docs/account/settings/network_policies.rst index c9969393d..7eb489bb8 100644 --- a/docs/account/settings/network_policies.rst +++ b/docs/account/settings/network_policies.rst @@ -14,60 +14,60 @@ .. py:method:: create_network_policy_rpc(network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy Create a network policy. - + Creates a new network policy to manage which network destinations can be accessed from the Databricks environment. - + :param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` .. py:method:: delete_network_policy_rpc(network_policy_id: str) Delete a network policy. - + Deletes a network policy. Cannot be called on 'default-policy'. - + :param network_policy_id: str The unique identifier of the network policy to delete. - - + + .. py:method:: get_network_policy_rpc(network_policy_id: str) -> AccountNetworkPolicy Get a network policy. - + Gets a network policy. - + :param network_policy_id: str The unique identifier of the network policy to retrieve. - + :returns: :class:`AccountNetworkPolicy` .. py:method:: list_network_policies_rpc( [, page_token: Optional[str]]) -> Iterator[AccountNetworkPolicy] List network policies. - + Gets an array of network policies. - + :param page_token: str (optional) Pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`AccountNetworkPolicy` .. py:method:: update_network_policy_rpc(network_policy_id: str, network_policy: AccountNetworkPolicy) -> AccountNetworkPolicy Update a network policy. - + Updates a network policy. This allows you to modify the configuration of a network policy. - + :param network_policy_id: str The unique identifier for the network policy.
:param network_policy: :class:`AccountNetworkPolicy` - + :returns: :class:`AccountNetworkPolicy` \ No newline at end of file diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst index 54e958a28..58b35e7f7 100644 --- a/docs/account/settings/personal_compute.rst +++ b/docs/account/settings/personal_compute.rst @@ -7,7 +7,7 @@ The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can change the setting to instead let individual workspaces configure access control (DELEGATE). - + There is only one instance of this setting per account. Since this setting has a default value, this setting is present on all accounts even though it's never set on a given account. Deletion reverts the value of the setting back to the default value. @@ -15,41 +15,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeletePersonalComputeSettingResponse Delete Personal Compute setting. - + Reverts the Personal Compute setting value to default (ON) - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeletePersonalComputeSettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> PersonalComputeSetting Get Personal Compute setting. - + Gets the value of the Personal Compute setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`PersonalComputeSetting` .. py:method:: update(allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting Update Personal Compute setting. - + Updates the value of the Personal Compute setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`PersonalComputeSetting` @@ -59,10 +59,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future.
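A hedged read-modify-write sketch for this update; the `a.settings.personal_compute` accessor path and the `field_mask` value are assumptions based on the SDK's usual layout:

.. code-block:: python

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Fetch the current setting, then write it back, replacing only the value field.
    setting = a.settings.personal_compute.get()
    updated = a.settings.personal_compute.update(
        allow_missing=True,
        setting=setting,
        field_mask="personal_compute.value",  # assumed field path
    )
    print(updated.etag)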
- + :returns: :class:`PersonalComputeSetting` \ No newline at end of file diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst index 2efa1fd09..e96e06a7c 100644 --- a/docs/account/settings/settings.rst +++ b/docs/account/settings/settings.rst @@ -12,7 +12,7 @@ The compliance security profile settings at the account level control whether to enable it for new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace creation, account admins can enable the compliance security profile individually for each workspace. - + This setting can be disabled so that new workspaces do not have the compliance security profile enabled by default. @@ -20,7 +20,7 @@ :type: DisableLegacyFeaturesAPI Disable legacy features for new Databricks workspaces. - + For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions prior to 13.3LTS. @@ -55,7 +55,7 @@ The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can change the setting to instead let individual workspaces configure access control (DELEGATE). - + There is only one instance of this setting per account. Since this setting has a default value, this setting is present on all accounts even though it's never set on a given account. Deletion reverts the value of the setting back to the default value. \ No newline at end of file diff --git a/docs/account/settings/workspace_network_configuration.rst b/docs/account/settings/workspace_network_configuration.rst index 63a61c9a4..3ed40313f 100644 --- a/docs/account/settings/workspace_network_configuration.rst +++ b/docs/account/settings/workspace_network_configuration.rst @@ -13,26 +13,26 @@ .. py:method:: get_workspace_network_option_rpc(workspace_id: int) -> WorkspaceNetworkOption Get workspace network option. - + Gets the network option for a workspace. Every workspace has exactly one network policy binding, with 'default-policy' used if no explicit assignment exists. - + :param workspace_id: int The workspace ID. - + :returns: :class:`WorkspaceNetworkOption` .. py:method:: update_workspace_network_option_rpc(workspace_id: int, workspace_network_option: WorkspaceNetworkOption) -> WorkspaceNetworkOption Update workspace network option. - + Updates the network option for a workspace. This operation associates the workspace with the specified network policy. To revert to the default policy, specify 'default-policy' as the network_policy_id. - + :param workspace_id: int The workspace ID. :param workspace_network_option: :class:`WorkspaceNetworkOption` - + :returns: :class:`WorkspaceNetworkOption` \ No newline at end of file diff --git a/docs/dbdataclasses/aibuilder.rst b/docs/dbdataclasses/aibuilder.rst index a8f37542e..cb5400647 100644 --- a/docs/dbdataclasses/aibuilder.rst +++ b/docs/dbdataclasses/aibuilder.rst @@ -50,10 +50,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - ..
autoclass:: UpdateCustomLlmRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index 78ef0c339..bbd625c62 100644 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -226,7 +226,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: StopAppRequest :members: :undoc-members: - -.. autoclass:: Token - :members: - :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index 625c93a80..ca8408bdf 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -208,10 +208,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: POLICY_NAME :value: "POLICY_NAME" -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateBudgetConfigurationBudget :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 172eb0478..5fd115b65 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -1436,10 +1436,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TriggeredUpdateStatus :members: :undoc-members: diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst index d2c1cd609..b07745b6f 100644 --- a/docs/dbdataclasses/cleanrooms.rst +++ b/docs/dbdataclasses/cleanrooms.rst @@ -176,10 +176,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateCleanRoomRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index d78a9d4f1..54b17b745 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -1656,10 +1656,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UninstallLibraries :members: :undoc-members: diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index 16912e3f7..c2ddc82f5 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -356,10 +356,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TrashDashboardResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/database.rst b/docs/dbdataclasses/database.rst index 28d9e99c5..86340b5ef 100644 --- a/docs/dbdataclasses/database.rst +++ b/docs/dbdataclasses/database.rst @@ -167,7 +167,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: SyncedTableTriggeredUpdateStatus :members: :undoc-members: - -.. autoclass:: Token - :members: - :undoc-members: diff --git a/docs/dbdataclasses/files.rst b/docs/dbdataclasses/files.rst index acc4920d9..2b0d9845d 100644 --- a/docs/dbdataclasses/files.rst +++ b/docs/dbdataclasses/files.rst @@ -100,10 +100,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. 
autoclass:: UploadResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index a170fe6c3..a471503a7 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -299,10 +299,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateObjectPermissions :members: :undoc-members: diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 0e22f457a..4046dabe3 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -1101,10 +1101,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TriggerInfo :members: :undoc-members: diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index 47a72865a..02e48c381 100644 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -527,10 +527,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TokenDetail :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index 3ffa046e3..0891291c4 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -871,10 +871,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TransitionModelVersionStageDatabricks :members: :undoc-members: diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst index b0219a5ff..4097add9e 100644 --- a/docs/dbdataclasses/oauth2.rst +++ b/docs/dbdataclasses/oauth2.rst @@ -84,10 +84,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TokenAccessPolicy :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index ec7d7a80b..f4618951e 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -422,10 +422,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SCD_TYPE_2 :value: "SCD_TYPE_2" -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateInfo :members: :undoc-members: diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst index 41d5e2f71..4c909d488 100644 --- a/docs/dbdataclasses/provisioning.rst +++ b/docs/dbdataclasses/provisioning.rst @@ -221,10 +221,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/qualitymonitorv2.rst b/docs/dbdataclasses/qualitymonitorv2.rst index 6787d6a09..fbe2746ce 100644 --- a/docs/dbdataclasses/qualitymonitorv2.rst +++ b/docs/dbdataclasses/qualitymonitorv2.rst @@ -47,7 +47,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: QualityMonitor :members: :undoc-members: - -.. 
autoclass:: Token - :members: - :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 80dec3c18..01249dced 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -532,10 +532,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: MULTIGPU_MEDIUM :value: "MULTIGPU_MEDIUM" -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TrafficConfig :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index f65058b9e..0f97314d2 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -764,10 +764,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TokenAccessControlRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index ed208ebf6..cd1cc8b92 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -515,10 +515,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VIEW :value: "VIEW" -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateProvider :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 913da36c3..2c2578d90 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -1441,10 +1441,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: TransferOwnershipObjectId :members: :undoc-members: diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index b443acae0..5433f2673 100644 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -169,10 +169,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateEndpointCustomTagsRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index 0b0e3a16f..e20f4ac7d 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -331,10 +331,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Token - :members: - :undoc-members: - .. autoclass:: UpdateCredentialsRequest :members: :undoc-members: diff --git a/docs/workspace/aibuilder/custom_llms.rst b/docs/workspace/aibuilder/custom_llms.rst index 19a41fdc6..4f7035869 100644 --- a/docs/workspace/aibuilder/custom_llms.rst +++ b/docs/workspace/aibuilder/custom_llms.rst @@ -9,36 +9,36 @@ .. py:method:: cancel(id: str) Cancel a Custom LLM Optimization Run. - + :param id: str - - + + .. py:method:: create(id: str) -> CustomLlm Start a Custom LLM Optimization Run. - + :param id: str The Id of the tile. - + :returns: :class:`CustomLlm` .. py:method:: get(id: str) -> CustomLlm Get a Custom LLM. - + :param id: str The id of the custom llm - + :returns: :class:`CustomLlm` .. py:method:: update(id: str, custom_llm: CustomLlm, update_mask: str) -> CustomLlm Update a Custom LLM. 
- + :param id: str The id of the custom llm :param custom_llm: :class:`CustomLlm` @@ -46,16 +46,16 @@ :param update_mask: str The list of the CustomLlm fields to update. These should correspond to the values (or lack thereof) present in `custom_llm`. - + The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`CustomLlm` \ No newline at end of file diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst index 34aa3f7c4..be094be30 100644 --- a/docs/workspace/apps/apps.rst +++ b/docs/workspace/apps/apps.rst @@ -10,13 +10,13 @@ .. py:method:: create(app: App [, no_compute: Optional[bool]]) -> Wait[App] Create an app. - + Creates a new app. - + :param app: :class:`App` :param no_compute: bool (optional) If true, the app will not be started after creation. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. @@ -28,25 +28,25 @@ .. py:method:: delete(name: str) -> App Delete an app. - + Deletes an app. - + :param name: str The name of the app. - + :returns: :class:`App` .. py:method:: deploy(app_name: str, app_deployment: AppDeployment) -> Wait[AppDeployment] Create an app deployment. - + Creates an app deployment for the app with the supplied name. - + :param app_name: str The name of the app. :param app_deployment: :class:`AppDeployment` - + :returns: Long-running operation waiter for :class:`AppDeployment`. See :method:wait_get_deployment_app_succeeded for more details. @@ -58,106 +58,106 @@ .. py:method:: get(name: str) -> App Get an app. - + Retrieves information for the app with the supplied name. - + :param name: str The name of the app. - + :returns: :class:`App` .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment Get an app deployment. - + Retrieves information for the app deployment with the supplied name and deployment id. - + :param app_name: str The name of the app. :param deployment_id: str The unique id of the deployment. - + :returns: :class:`AppDeployment` .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse Get app permission levels. - + Gets the permission levels that a user can have on an object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`GetAppPermissionLevelsResponse` .. py:method:: get_permissions(app_name: str) -> AppPermissions Get app permissions. - + Gets the permissions of an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. - + :returns: :class:`AppPermissions` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App] List apps. - + Lists all apps in the workspace. - + :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. - + :returns: Iterator over :class:`App` .. 
py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment] List app deployments. - + Lists all app deployments for the app with the supplied name. - + :param app_name: str The name of the app. :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of apps. Requests first page if absent. - + :returns: Iterator over :class:`AppDeployment` .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions Set app permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` .. py:method:: start(name: str) -> Wait[App] Start an app. - + Start the last active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_active for more details. @@ -169,12 +169,12 @@ .. py:method:: stop(name: str) -> Wait[App] Stop an app. - + Stops the active deployment of the app in the workspace. - + :param name: str The name of the app. - + :returns: Long-running operation waiter for :class:`App`. See :method:wait_get_app_stopped for more details. @@ -186,27 +186,27 @@ .. py:method:: update(name: str, app: App) -> App Update an app. - + Updates the app with the supplied name. - + :param name: str The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace. :param app: :class:`App` - + :returns: :class:`App` .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions Update app permissions. - + Updates the permissions on an app. Apps can inherit permissions from their root object. - + :param app_name: str The app for which to get or manage permissions. :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) - + :returns: :class:`AppPermissions` diff --git a/docs/workspace/catalog/artifact_allowlists.rst b/docs/workspace/catalog/artifact_allowlists.rst index d84666398..f153dee79 100644 --- a/docs/workspace/catalog/artifact_allowlists.rst +++ b/docs/workspace/catalog/artifact_allowlists.rst @@ -10,24 +10,24 @@ .. py:method:: get(artifact_type: ArtifactType) -> ArtifactAllowlistInfo Get an artifact allowlist. - + Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. - + :param artifact_type: :class:`ArtifactType` The artifact type of the allowlist. - + :returns: :class:`ArtifactAllowlistInfo` .. py:method:: update(artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher] [, created_at: Optional[int], created_by: Optional[str], metastore_id: Optional[str]]) -> ArtifactAllowlistInfo Set an artifact allowlist. - + Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore. 
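Because the update replaces the whole allowlist, a safe pattern is to read the current list first. A minimal sketch, assuming ``LIBRARY_JAR`` is one of the :class:`ArtifactType` values:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # Fetch the current JAR allowlist, then write it back unchanged;
    # a real call would append or remove ArtifactMatcher entries first.
    allowlist = w.artifact_allowlists.get(artifact_type=catalog.ArtifactType.LIBRARY_JAR)
    w.artifact_allowlists.update(
        artifact_type=catalog.ArtifactType.LIBRARY_JAR,
        artifact_matchers=allowlist.artifact_matchers or [],
    )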
- + :param artifact_type: :class:`ArtifactType` The artifact type of the allowlist. :param artifact_matchers: List[:class:`ArtifactMatcher`] @@ -38,6 +38,6 @@ Username of the user who set the artifact allowlist. :param metastore_id: str (optional) Unique identifier of parent metastore. - + :returns: :class:`ArtifactAllowlistInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 51ad5ca8e..2505551cd 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -6,7 +6,7 @@ A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission. - + In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of the workspaces in a Databricks account. Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog. @@ -30,10 +30,10 @@ w.catalogs.delete(name=created_catalog.name, force=True) Create a catalog. - + Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. - + :param name: str Name of catalog. :param comment: str (optional) @@ -46,29 +46,29 @@ A map of key-value properties attached to the securable. :param provider_name: str (optional) The name of delta sharing provider. - + A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server. :param share_name: str (optional) The name of the share under the share provider. :param storage_root: str (optional) Storage root URL for managed tables within catalog. - + :returns: :class:`CatalogInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete a catalog. - + Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner of the catalog. - + :param name: str The name of the catalog. :param force: bool (optional) Force deletion even if the catalog is not empty. - - + + .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> CatalogInfo @@ -92,16 +92,16 @@ w.catalogs.delete(name=created.name, force=True) Get a catalog. - + Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** privilege set for their account. - + :param name: str The name of the catalog. :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata for - + :returns: :class:`CatalogInfo` @@ -120,12 +120,12 @@ all = w.catalogs.list(catalog.ListCatalogsRequest()) List catalogs. - + Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include catalogs in the response for which the principal can only access selective metadata for @@ -139,7 +139,7 @@ response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CatalogInfo` @@ -164,10 +164,10 @@ w.catalogs.delete(name=created.name, force=True) Update a catalog. 
- + Updates the catalog that matches the supplied name. The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field of the catalog). - + :param name: str The name of the catalog. :param comment: str (optional) @@ -184,6 +184,6 @@ Username of current owner of catalog. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. - + :returns: :class:`CatalogInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst index 8aa4ebf7f..50785cd23 100644 --- a/docs/workspace/catalog/connections.rst +++ b/docs/workspace/catalog/connections.rst @@ -5,7 +5,7 @@ .. py:class:: ConnectionsAPI Connections allow for creating a connection to an external data source. - + A connection is an abstraction of an external data source that can be connected from Databricks Compute. Creating a connection object is the first step to managing external data sources within Unity Catalog, with the second step being creating a data object (catalog, schema, or table) using the connection. Data @@ -42,12 +42,12 @@ w.connections.delete(name=conn_create.name) Create a connection. - + Creates a new connection to an external data source. It allows users to specify connection details and configurations for interaction with the external server. - + :param name: str Name of the connection. :param connection_type: :class:`ConnectionType` @@ -60,20 +60,20 @@ A map of key-value properties attached to the securable. :param read_only: bool (optional) If the connection is read only. - + :returns: :class:`ConnectionInfo` .. py:method:: delete(name: str) Delete a connection. - + Deletes the connection that matches the supplied name. - + :param name: str The name of the connection to be deleted. - - + + .. py:method:: get(name: str) -> ConnectionInfo @@ -116,12 +116,12 @@ w.connections.delete(name=conn_create.name) Get a connection. - + Gets a connection from its name. - + :param name: str Name of the connection. - + :returns: :class:`ConnectionInfo` @@ -140,9 +140,9 @@ conn_list = w.connections.list(catalog.ListConnectionsRequest()) List connections. - + List all connections. - + :param max_results: int (optional) Maximum number of connections to return. - If not set, all connections are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of this value and @@ -150,7 +150,7 @@ (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ConnectionInfo` @@ -192,9 +192,9 @@ w.connections.delete(name=conn_create.name) Update a connection. - + Updates the connection that matches the supplied name. - + :param name: str Name of the connection. :param options: Dict[str,str] @@ -203,6 +203,6 @@ New name for the connection. :param owner: str (optional) Username of current owner of the connection. - + :returns: :class:`ConnectionInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst index 829bd174f..661d955b0 100644 --- a/docs/workspace/catalog/credentials.rst +++ b/docs/workspace/catalog/credentials.rst @@ -7,7 +7,7 @@ A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant.
Each credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. - + To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL` privilege. The user who creates the credential can delegate ownership to another user or group to manage permissions on it. @@ -15,13 +15,13 @@ .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo Create a credential. - + Creates a new credential. The type of credential to be created is determined by the **purpose** field, which should be either **SERVICE** or **STORAGE**. - + The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials. - + :param name: str The credential name. The name must be unique among storage and service credentials within the metastore. @@ -42,66 +42,66 @@ **STORAGE**. :param skip_validation: bool (optional) Optional. Supplying true to this argument skips validation of the created set of credentials. - + :returns: :class:`CredentialInfo` .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]]) Delete a credential. - + Deletes a service or storage credential from the metastore. The caller must be an owner of the credential. - + :param name_arg: str Name of the credential. :param force: bool (optional) Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**). - - + + .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials Generate a temporary service credential. - + Returns a set of temporary credentials generated using the specified service credential. The caller must be a metastore admin or have the metastore privilege **ACCESS** on the service credential. - + :param credential_name: str The name of the service credential used to generate a temporary credential :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional) The Azure cloud options to customize the requested temporary credential :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional) The GCP cloud options to customize the requested temporary credential - + :returns: :class:`TemporaryCredentials` .. py:method:: get_credential(name_arg: str) -> CredentialInfo Get a credential. - + Gets a service or storage credential from the metastore. The caller must be a metastore admin, the owner of the credential, or have any permission on the credential. - + :param name_arg: str Name of the credential. - + :returns: :class:`CredentialInfo` .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo] List credentials. - + Gets an array of credentials (as __CredentialInfo__ objects). 
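A sketch of listing credentials filtered by purpose, assuming the workspace client exposes this service as ``w.credentials``:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # Only credentials the caller may access are returned; filter to service credentials.
    for cred in w.credentials.list_credentials(purpose=catalog.CredentialPurpose.SERVICE):
        print(cred.name)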
- + The array is limited to only the credentials that the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of credentials to return. - If not set, the default max page size is used. - When set to a value greater than 0, the page length is the minimum of this value and a server-configured @@ -111,19 +111,19 @@ Opaque token to retrieve the next page of results. :param purpose: :class:`CredentialPurpose` (optional) Return only credentials for the specified purpose. - + :returns: Iterator over :class:`CredentialInfo` .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo Update a credential. - + Updates a service or storage credential on the metastore. - + The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission. If the caller is a metastore admin, only the __owner__ field can be changed. - + :param name_arg: str Name of the credential. :param aws_iam_role: :class:`AwsIamRole` (optional) @@ -150,28 +150,28 @@ **STORAGE**. :param skip_validation: bool (optional) Supply true to this argument to skip validation of the updated credential. - + :returns: :class:`CredentialInfo` .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse Validate a credential. - + Validates a credential. - + For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific credential must be provided. - + For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used for validation. And if both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the credential owner or have the required permission on the metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**). - + :param aws_iam_role: :class:`AwsIamRole` (optional) The AWS IAM role configuration :param azure_managed_identity: :class:`AzureManagedIdentity` (optional) @@ -190,6 +190,6 @@ (purpose is **STORAGE**.) :param url: str (optional) The external location url to validate. Only applicable when purpose is **STORAGE**. 
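A sketch of validating a storage credential against a candidate URL before creating an external location on it; the credential name and bucket path are placeholders, and the ``w.credentials`` attribute is assumed:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # When both a credential name and a URL are supplied, the URL is what gets validated.
    result = w.credentials.validate_credential(
        credential_name="my_storage_cred",
        purpose=catalog.CredentialPurpose.STORAGE,
        url="s3://my-bucket/prefix",
    )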
- + :returns: :class:`ValidateCredentialResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index 164c21a86..91d9af27f 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -9,9 +9,9 @@ access-control policies that control which users and groups can access the credential. If a user does not have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. - + To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege. @@ -46,11 +46,11 @@ w.external_locations.delete(name=created.name) Create an external location. - + Creates a new external location entry in the metastore. The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage credential. - + :param name: str Name of the external location. :param url: str @@ -73,23 +73,23 @@ Indicates whether the external location is read-only. :param skip_validation: bool (optional) Skips validation of the storage credential associated with the external location. - + :returns: :class:`ExternalLocationInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete an external location. - + Deletes the specified external location from the metastore. The caller must be the owner of the external location. - + :param name: str Name of the external location. :param force: bool (optional) Force deletion even if there are dependent external tables or mounts. - - + + .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> ExternalLocationInfo @@ -125,16 +125,16 @@ w.external_locations.delete(name=created.name) Get an external location. - + Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. - + :param name: str Name of the external location. :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for - + :returns: :class:`ExternalLocationInfo` @@ -152,11 +152,11 @@ all = w.external_locations.list() List external locations. - + Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is no guarantee of a specific ordering of the elements in the array. - + :param include_browse: bool (optional) Whether to include external locations in the response for which the principal can only access selective metadata for @@ -167,7 +167,7 @@ value (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ExternalLocationInfo` @@ -208,11 +208,11 @@ w.external_locations.delete(name=created.name) Update an external location. - + Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. 
In the second case, the admin can only update the name of the external location. - + :param name: str Name of the external location. :param comment: str (optional) @@ -242,6 +242,6 @@ Skips validation of the storage credential associated with the external location. :param url: str (optional) Path URL of the external location. - + :returns: :class:`ExternalLocationInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/functions.rst b/docs/workspace/catalog/functions.rst index 646488074..3c736e714 100644 --- a/docs/workspace/catalog/functions.rst +++ b/docs/workspace/catalog/functions.rst @@ -5,7 +5,7 @@ .. py:class:: FunctionsAPI Functions implement User-Defined Functions (UDFs) in Unity Catalog. - + The function implementation can be any SQL expression or Query, and it can be invoked wherever a table reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it can be referenced with the form __catalog_name__.__schema_name__.__function_name__. @@ -13,71 +13,71 @@ .. py:method:: create(function_info: CreateFunction) -> FunctionInfo Create a function. - + **WARNING: This API is experimental and will change in future versions** - + Creates a new function - + The user must have the following permissions in order for the function to be created: - **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the function's parent schema - + :param function_info: :class:`CreateFunction` Partial __FunctionInfo__ specifying the function to be created. - + :returns: :class:`FunctionInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete a function. - + Deletes the function that matches the supplied name. For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog and the **USE_SCHEMA** privilege on its parent schema - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function_name__). :param force: bool (optional) Force deletion even if the function is not empty. - - + + .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> FunctionInfo Get a function. - + Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the **USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the function itself - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function_name__). :param include_browse: bool (optional) Whether to include functions in the response for which the principal can only access selective metadata for - + :returns: :class:`FunctionInfo` .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FunctionInfo] List functions. - + List functions within the specified parent catalog and schema.
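A minimal sketch of this list call; the catalog and schema names are placeholders:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Returns only functions the caller owns or can EXECUTE.
    for fn in w.functions.list(catalog_name="main", schema_name="default"):
        print(fn.full_name)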
If the user is a metastore admin, all functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for functions of interest. :param schema_name: str @@ -92,26 +92,26 @@ (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`FunctionInfo` .. py:method:: update(name: str [, owner: Optional[str]]) -> FunctionInfo Update a function. - + Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must be a member of the group that is the new function owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the **USE_SCHEMA** privilege on the function's parent schema. - + :param name: str The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__). :param owner: str (optional) Username of current owner of function. - + :returns: :class:`FunctionInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/grants.rst b/docs/workspace/catalog/grants.rst index 4a46cb5a6..603a20584 100644 --- a/docs/workspace/catalog/grants.rst +++ b/docs/workspace/catalog/grants.rst @@ -8,7 +8,7 @@ Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. - + Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. This means that granting a privilege on the catalog automatically grants the privilege to all current and future objects within the catalog. Similarly, privileges granted on a schema are inherited by all current and future @@ -61,9 +61,9 @@ w.tables.delete(full_name=table_full_name) Get permissions. - + Gets the permissions for a securable. Does not include inherited permissions. - + :param securable_type: str Type of securable. :param full_name: str @@ -72,7 +72,7 @@ Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment present in a single page response is guaranteed to contain all the privileges granted on the requested Securable for the respective principal. - + If not set, all the permissions are returned. If set to - lesser than 0: invalid parameter error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: invalid parameter error (this is to ensure that server is able to return at least one complete @@ -82,7 +82,7 @@ Opaque pagination token to go to next page based on previous query. :param principal: str (optional) If provided, only the permissions for the specified principal (user or group) are returned. 
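A sketch of fetching direct grants narrowed to a single principal; the securable name and principal are placeholders:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Direct grants only; pass principal to narrow the result to one user or group.
    grants = w.grants.get(
        securable_type="table",
        full_name="main.default.my_table",
        principal="data-readers",
    )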
- + :returns: :class:`GetPermissionsResponse` @@ -133,10 +133,10 @@ w.tables.delete(full_name=table_full_name) Get effective permissions. - + Gets the effective permissions for a securable. Includes inherited permissions from any parent securables. - + :param securable_type: str Type of securable. :param full_name: str @@ -146,7 +146,7 @@ EffectivePrivilegeAssignment present in a single page response is guaranteed to contain all the effective privileges granted on (or inherited by) the requested Securable for the respective principal. - + If not set, all the effective permissions are returned. If set to - lesser than 0: invalid parameter error - 0: page length is set to a server configured value - lesser than 150 but greater than 0: invalid parameter error (this is to ensure that server is able to return at least one complete @@ -157,7 +157,7 @@ :param principal: str (optional) If provided, only the effective permissions for the specified principal (user or group) are returned. - + :returns: :class:`EffectivePermissionsList` @@ -216,15 +216,15 @@ w.tables.delete(full_name=table_full_name) Update permissions. - + Updates the permissions for a securable. - + :param securable_type: str Type of securable. :param full_name: str Full name of securable. :param changes: List[:class:`PermissionsChange`] (optional) Array of permissions change objects. - + :returns: :class:`UpdatePermissionsResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst index 628fe13f1..cf35cc01b 100644 --- a/docs/workspace/catalog/metastores.rst +++ b/docs/workspace/catalog/metastores.rst @@ -8,10 +8,10 @@ views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use Unity Catalog, it must have a Unity Catalog metastore attached. - + Each metastore is configured with a root storage location in a cloud storage account. This storage location is used for metadata and managed tables data. - + NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is available in a catalog named hive_metastore. @@ -43,11 +43,11 @@ w.metastores.delete(id=created.metastore_id, force=True) Create an assignment. - + Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin. - + :param workspace_id: int A workspace ID. :param metastore_id: str @@ -55,8 +55,8 @@ :param default_catalog_name: str The name of the default catalog in the metastore. This field is deprecated. Please use "Default Namespace API" to configure the default catalog for a Databricks workspace. - - + + .. py:method:: create(name: str [, region: Optional[str], storage_root: Optional[str]]) -> MetastoreInfo @@ -82,19 +82,19 @@ w.metastores.delete(id=created.metastore_id, force=True) Create a metastore. - + Creates a new metastore based on a provided name and optional storage root path. By default (if the __owner__ field is not set), the owner of the new metastore is the user calling the __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is assigned to the System User instead. 
- + :param name: str The user-specified name of the metastore. :param region: str (optional) Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). :param storage_root: str (optional) The storage root URL for metastore - + :returns: :class:`MetastoreInfo` @@ -112,24 +112,24 @@ current_metastore = w.metastores.current() Get metastore assignment for workspace. - + Gets the metastore assignment for the workspace being accessed. - + :returns: :class:`MetastoreAssignment` .. py:method:: delete(id: str [, force: Optional[bool]]) Delete a metastore. - + Deletes a metastore. The caller must be a metastore admin. - + :param id: str Unique ID of the metastore. :param force: bool (optional) Force deletion even if the metastore is not empty. Default is false. - - + + .. py:method:: get(id: str) -> MetastoreInfo @@ -157,13 +157,13 @@ w.metastores.delete(id=created.metastore_id, force=True) Get a metastore. - + Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info. - + :param id: str Unique ID of the metastore. - + :returns: :class:`MetastoreInfo` @@ -181,10 +181,10 @@ all = w.metastores.list() List metastores. - + Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of metastores to return. - when set to a value greater than 0, the page length is the minimum of this value and a server configured value; - when set to 0, the page length is set to a @@ -195,7 +195,7 @@ from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`MetastoreInfo` @@ -213,10 +213,10 @@ summary = w.metastores.summary() Get a metastore summary. - + Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID. - + :returns: :class:`GetMetastoreSummaryResponse` @@ -247,15 +247,15 @@ w.metastores.delete(id=created.metastore_id, force=True) Delete an assignment. - + Deletes a metastore assignment. The caller must be an account administrator. - + :param workspace_id: int A workspace ID. :param metastore_id: str Query for the ID of the metastore to delete. - - + + .. py:method:: update(id: str [, delta_sharing_organization_name: Optional[str], delta_sharing_recipient_token_lifetime_in_seconds: Optional[int], delta_sharing_scope: Optional[DeltaSharingScopeEnum], new_name: Optional[str], owner: Optional[str], privilege_model_version: Optional[str], storage_root_credential_id: Optional[str]]) -> MetastoreInfo @@ -283,10 +283,10 @@ w.metastores.delete(id=created.metastore_id, force=True) Update a metastore. - + Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__ field is set to the empty string (**""**), the ownership is updated to the System User. - + :param id: str Unique ID of the metastore. :param delta_sharing_organization_name: str (optional) @@ -304,19 +304,19 @@ Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`). :param storage_root_credential_id: str (optional) UUID of storage credential to access the metastore storage_root. - + :returns: :class:`MetastoreInfo` .. py:method:: update_assignment(workspace_id: int [, default_catalog_name: Optional[str], metastore_id: Optional[str]]) Update an assignment. 
- + Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a Workspace admin. - + :param workspace_id: int A workspace ID. :param default_catalog_name: str (optional) @@ -324,6 +324,6 @@ Namespace API" to configure the default catalog for a Databricks workspace. :param metastore_id: str (optional) The unique ID of the metastore. - - + + \ No newline at end of file diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst index bae6f25f8..99b62ae03 100644 --- a/docs/workspace/catalog/model_versions.rst +++ b/docs/workspace/catalog/model_versions.rst @@ -7,39 +7,39 @@ Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more details, see the [registered models API docs](/api/workspace/registeredmodels). .. py:method:: delete(full_name: str, version: int) Delete a Model Version. - + Deletes a model version from the specified registered model. Any aliases assigned to the model version will also be deleted. - + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version - - + + .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo Get a Model Version. - + Get a model version. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int @@ -49,46 +49,46 @@ :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - + :returns: :class:`ModelVersionInfo` .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo Get Model Version By Alias. - + Get a model version by alias. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias :param include_aliases: bool (optional) Whether to include aliases associated with the model version in the response - + :returns: :class:`ModelVersionInfo` .. 
py:method:: list(full_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ModelVersionInfo] List Model Versions. - + List model versions. You can list model versions under a particular schema, or list all model versions in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the model versions. A regular user needs to be the owner or have the **EXECUTE** privilege on the parent registered model to receive the model versions in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. The elements in the response will not contain any aliases or tags. - + :param full_name: str The full three-level name of the registered model under which to list model versions :param include_browse: bool (optional) @@ -102,28 +102,28 @@ value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ModelVersionInfo` .. py:method:: update(full_name: str, version: int [, comment: Optional[str]]) -> ModelVersionInfo Update a Model Version. - + Updates the specified model version. - + The caller must be a metastore admin or an owner of the parent registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the comment of the model version can be updated. - + :param full_name: str The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version :param comment: str (optional) The comment attached to the model version - + :returns: :class:`ModelVersionInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst index 6cc5f20ae..898d00eb3 100644 --- a/docs/workspace/catalog/online_tables.rst +++ b/docs/workspace/catalog/online_tables.rst @@ -9,12 +9,12 @@ .. py:method:: create(table: OnlineTable) -> Wait[OnlineTable] Create an Online Table. - + Create a new Online Table. - + :param table: :class:`OnlineTable` Online Table information. - + :returns: Long-running operation waiter for :class:`OnlineTable`. See :method:wait_get_online_table_active for more details. @@ -26,26 +26,26 @@ .. py:method:: delete(name: str) Delete an Online Table. - + Delete an online table. Warning: This will delete all the data in the online table. If the source Delta table was deleted or modified since this Online Table was created, this will lose the data forever! - + :param name: str Full three-part (catalog, schema, table) name of the table. - - + + .. py:method:: get(name: str) -> OnlineTable Get an Online Table. - + Get information about an existing online table and its status. - + :param name: str Full three-part (catalog, schema, table) name of the table.
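A sketch of reading an online table and its status; the table name is a placeholder:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Fetch the online table and inspect its provisioning/sync status.
    table = w.online_tables.get(name="main.default.my_table_online")
    print(table.status)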
- + :returns: :class:`OnlineTable` diff --git a/docs/workspace/catalog/quality_monitors.rst b/docs/workspace/catalog/quality_monitors.rst index 93f05b69a..255076aac 100644 --- a/docs/workspace/catalog/quality_monitors.rst +++ b/docs/workspace/catalog/quality_monitors.rst @@ -6,7 +6,7 @@ A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics tables and a dashboard that you can use to monitor table health and set alerts. - + Most write operations require the user to be the owner of the table (or its parent schema or parent catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**). @@ -14,38 +14,38 @@ .. py:method:: cancel_refresh(table_name: str, refresh_id: str) Cancel refresh. - + Cancel an active monitor refresh for the given refresh ID. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. - - + + .. py:method:: create(table_name: str, assets_dir: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], skip_builtin_dashboard: Optional[bool], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries], warehouse_id: Optional[str]]) -> MonitorInfo Create a table monitor. - + Creates a new monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Workspace assets, such as the dashboard, will be created in the workspace where this call was made. - + :param table_name: str Full name of the table. :param assets_dir: str @@ -79,152 +79,152 @@ :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard creation. If not specified, the first running warehouse will be used. - + :returns: :class:`MonitorInfo` .. py:method:: delete(table_name: str) Delete a table monitor. - + Deletes a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created. 
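A minimal sketch of this create/delete lifecycle, assuming a ``WorkspaceClient`` named ``w`` and placeholder table, directory, and schema names (all illustrative, not part of the generated reference):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import MonitorSnapshot

    w = WorkspaceClient()
    table = "main.default.my_table"  # placeholder three-level name

    # Create a snapshot-profile monitor; the dashboard and metric tables are
    # created in the workspace this call is made from.
    info = w.quality_monitors.create(
        table_name=table,
        assets_dir="/Workspace/Shared/monitors/my_table",  # placeholder
        output_schema_name="main.default",
        snapshot=MonitorSnapshot(),
    )

    # Deleting the monitor must happen from that same workspace.
    w.quality_monitors.delete(table_name=table)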
- + Note that the metric tables and dashboard will not be deleted as part of this call; those assets must be manually cleaned up (if desired). - + :param table_name: str Full name of the table. - - + + .. py:method:: get(table_name: str) -> MonitorInfo Get a table monitor. - + Gets a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + The returned information includes configuration values, as well as information on assets created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different workspace than where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorInfo` .. py:method:: get_refresh(table_name: str, refresh_id: str) -> MonitorRefreshInfo Get refresh. - + Gets info about a specific monitor refresh using the given refresh ID. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. :param refresh_id: str ID of the refresh. - + :returns: :class:`MonitorRefreshInfo` .. py:method:: list_refreshes(table_name: str) -> MonitorRefreshListResponse List refreshes. - + Gets an array containing the history of the most recent refreshes (up to 25) for this table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table. - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorRefreshListResponse` .. py:method:: regenerate_dashboard(table_name: str [, warehouse_id: Optional[str]]) -> RegenerateDashboardResponse Regenerate a monitoring dashboard. - + Regenerates the monitoring dashboard for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + The call must be made from the workspace where the monitor was created. The dashboard will be regenerated in the assets directory that was specified when the monitor was created. - + :param table_name: str Full name of the table. :param warehouse_id: str (optional) Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first running warehouse will be used. - + :returns: :class:`RegenerateDashboardResponse` .. py:method:: run_refresh(table_name: str) -> MonitorRefreshInfo Queue a metric refresh for a monitor. 
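The refresh endpoints above compose into a simple queue-and-poll flow. A sketch, assuming a ``WorkspaceClient`` named ``w`` and a placeholder table name:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    table = "main.default.my_table"  # placeholder

    # Queue a refresh; it executes in the background.
    refresh = w.quality_monitors.run_refresh(table_name=table)

    # Poll a single refresh by ID, or scan the recent history (up to 25 entries).
    info = w.quality_monitors.get_refresh(table_name=table, refresh_id=str(refresh.refresh_id))
    print(info.state)

    for r in w.quality_monitors.list_refreshes(table_name=table).refreshes or []:
        print(r.refresh_id, r.state)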
- + Queues a metric refresh on the monitor for the specified table. The refresh will execute in the background. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table - + Additionally, the call must be made from the workspace where the monitor was created. - + :param table_name: str Full name of the table. - + :returns: :class:`MonitorRefreshInfo` .. py:method:: update(table_name: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], dashboard_id: Optional[str], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries]]) -> MonitorInfo Update a table monitor. - + Updates a monitor for the specified table. - + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. - + Additionally, the call must be made from the workspace where the monitor was created, and the caller must be the original creator of the monitor. - + Certain configuration fields, such as output asset identifiers, cannot be updated. - + :param table_name: str Full name of the table. :param output_schema_name: str @@ -254,6 +254,6 @@ Configuration for monitoring snapshot tables. :param time_series: :class:`MonitorTimeSeries` (optional) Configuration for monitoring time series tables. - + :returns: :class:`MonitorInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst index b05a702b5..3f7ced621 100644 --- a/docs/workspace/catalog/registered_models.rst +++ b/docs/workspace/catalog/registered_models.rst @@ -7,17 +7,17 @@ Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog provide centralized access control, auditing, lineage, and discovery of ML models across Databricks workspaces. - + An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace. Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating new model versions currently requires use of the MLflow Python client. Once model versions are created, you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time serving using Databricks Model Serving. - + All operations on registered models and model versions require USE_CATALOG permissions on the enclosing catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional privileges are required for various operations: - + * To create a registered model, users must additionally have the CREATE_MODEL permission on the target schema. 
* To view registered model or model version metadata, model version data files, or invoke a model version, users must additionally have the EXECUTE permission on the registered model * To update @@ -25,24 +25,24 @@ registered model * To update other registered model or model version metadata (comments, aliases), create a new model version, or update permissions on the registered model, users must be owners of the registered model. - + Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that specify a securable type, use "FUNCTION" as the securable type. .. py:method:: create(catalog_name: str, schema_name: str, name: str [, comment: Optional[str], storage_location: Optional[str]]) -> RegisteredModelInfo Create a Registered Model. - + Creates a new registered model in Unity Catalog. - + File storage for model versions in the registered model will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For registered model creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema. - + :param catalog_name: str The name of the catalog where the schema and the registered model reside :param schema_name: str @@ -53,54 +53,54 @@ The comment attached to the registered model :param storage_location: str (optional) The storage location on the cloud under which model version data files are stored - + :returns: :class:`RegisteredModelInfo` .. py:method:: delete(full_name: str) Delete a Registered Model. - + Deletes a registered model and all its model versions from the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model - - + + .. py:method:: delete_alias(full_name: str, alias: str) Delete a Registered Model Alias. - + Deletes a registered model alias. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str The three-level (fully qualified) name of the registered model :param alias: str The name of the alias - - + + .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo Get a Registered Model. - + Get a registered model. - + The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
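A sketch of the get call just described, assuming a ``WorkspaceClient`` named ``w`` and a placeholder model name:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # include_aliases pulls alias metadata into the response.
    model = w.registered_models.get("main.default.my_model", include_aliases=True)
    print(model.full_name, [a.alias_name for a in (model.aliases or [])])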
- + :param full_name: str The three-level (fully qualified) name of the registered model :param include_aliases: bool (optional) @@ -108,25 +108,25 @@ :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for - + :returns: :class:`RegisteredModelInfo` .. py:method:: list( [, catalog_name: Optional[str], include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name: Optional[str]]) -> Iterator[RegisteredModelInfo] List Registered Models. - + List registered models. You can list registered models under a particular schema, or list all registered models in the current metastore. - + The returned models are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the registered models. A regular user needs to be the owner or have the **EXECUTE** privilege on the registered model to receive the registered models in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the response. - + :param catalog_name: str (optional) The identifier of the catalog under which to list registered models. If specified, schema_name must be specified. @@ -135,13 +135,13 @@ selective metadata for :param max_results: int (optional) Max number of registered models to return. - + If both catalog and schema are specified: - when max_results is not specified, the page length is set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); - when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when set to a value less than 0, an invalid parameter error is returned; - + If neither schema nor catalog is specified: - when max_results is not specified, the page length is set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); - @@ -152,42 +152,42 @@ :param schema_name: str (optional) The identifier of the schema under which to list registered models. If specified, catalog_name must be specified. - + :returns: Iterator over :class:`RegisteredModelInfo` .. py:method:: set_alias(full_name: str, alias: str, version_num: int) -> RegisteredModelAlias Set a Registered Model Alias. - + Set an alias on the specified registered model. - + The caller must be a metastore admin or an owner of the registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the registered model :param alias: str The name of the alias :param version_num: int The version number of the model version to which the alias points - + :returns: :class:`RegisteredModelAlias` .. py:method:: update(full_name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str]]) -> RegisteredModelInfo Update a Registered Model. - + Updates the specified registered model. - + The caller must be a metastore admin or an owner of the registered model.
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the name, the owner or the comment of the registered model can be updated. - + :param full_name: str The three-level (fully qualified) name of the registered model :param comment: str (optional) @@ -196,6 +196,6 @@ New name for the registered model. :param owner: str (optional) The identifier of the user who owns the registered model - + :returns: :class:`RegisteredModelInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst index 3396011f0..c1e14687c 100644 --- a/docs/workspace/catalog/resource_quotas.rst +++ b/docs/workspace/catalog/resource_quotas.rst @@ -8,38 +8,39 @@ can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and limits. For more information on resource quotas see the [Unity Catalog documentation]. - + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas + .. py:method:: get_quota(parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse Get information for a single resource quota. - + The GetQuota API returns usage information for a single resource quota, defined as a child-parent pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered asynchronously. The updated count might not be returned in the first call. - + :param parent_securable_type: str Securable type of the quota parent. :param parent_full_name: str Full name of the parent resource. Provide the metastore ID if the parent is a metastore. :param quota_name: str Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. - + :returns: :class:`GetQuotaResponse` .. py:method:: list_quotas( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QuotaInfo] List all resource quotas under a metastore. - + ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the counts returned. This API does not trigger a refresh of quota counts. - + :param max_results: int (optional) The number of quotas to return. :param page_token: str (optional) Opaque token for the next page of results. - + :returns: Iterator over :class:`QuotaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst index 35f556239..7c4a84e53 100644 --- a/docs/workspace/catalog/schemas.rst +++ b/docs/workspace/catalog/schemas.rst @@ -31,10 +31,10 @@ w.schemas.delete(full_name=created_schema.full_name) Create a schema. - + Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent catalog. - + :param name: str Name of schema, relative to parent catalog. :param catalog_name: str @@ -45,23 +45,23 @@ A map of key-value properties attached to the securable. :param storage_root: str (optional) Storage root URL for managed tables within schema. - + :returns: :class:`SchemaInfo` .. py:method:: delete(full_name: str [, force: Optional[bool]]) Delete a schema. - + Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an owner of the parent catalog.
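A sketch of the create/delete round trip for schemas, in the same style as the usage examples elsewhere in these docs; the catalog and schema names are placeholders, and ``w`` is assumed to be a ``WorkspaceClient``:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Requires the CREATE_SCHEMA privilege on the parent catalog.
    created = w.schemas.create(name="my_schema", catalog_name="main")
    w.schemas.delete(full_name=created.full_name)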
- + :param full_name: str Full name of the schema. :param force: bool (optional) Force deletion even if the schema is not empty. - - + + .. py:method:: get(full_name: str [, include_browse: Optional[bool]]) -> SchemaInfo @@ -88,16 +88,16 @@ w.schemas.delete(full_name=created.full_name) Get a schema. - + Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** privilege on the schema. - + :param full_name: str Full name of the schema. :param include_browse: bool (optional) Whether to include schemas in the response for which the principal can only access selective metadata for - + :returns: :class:`SchemaInfo` @@ -122,12 +122,12 @@ w.catalogs.delete(name=new_catalog.name, force=True) List schemas. - + Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Parent catalog for schemas of interest. :param include_browse: bool (optional) @@ -140,7 +140,7 @@ (recommended); - when set to a value less than 0, an invalid parameter error is returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`SchemaInfo` @@ -168,12 +168,12 @@ w.schemas.delete(full_name=created.full_name) Update a schema. - + Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** privilege on the parent catalog. - + :param full_name: str Full name of the schema. :param comment: str (optional) @@ -186,6 +186,6 @@ Username of current owner of schema. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. - + :returns: :class:`SchemaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index 947174a01..9a5ed0a46 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -9,9 +9,9 @@ control which users and groups can access the credential. If a user does not have access to a storage credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your cloud tenant on the user’s behalf. - + Databricks recommends using external locations rather than using storage credentials directly. - + To create storage credentials, you must be a Databricks account admin. The account admin who creates the storage credential can delegate ownership to another user or group to manage permissions on it. @@ -39,9 +39,9 @@ w.storage_credentials.delete(name=credential.name) Create a storage credential. - + Creates a new storage credential. - + :param name: str The credential name. The name must be unique within the metastore. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -60,23 +60,23 @@ Whether the storage credential is only usable for read operations. 
:param skip_validation: bool (optional) Supplying true to this argument skips validation of the created credential. - + :returns: :class:`StorageCredentialInfo` .. py:method:: delete(name: str [, force: Optional[bool]]) Delete a credential. - + Deletes a storage credential from the metastore. The caller must be an owner of the storage credential. - + :param name: str Name of the storage credential. :param force: bool (optional) Force deletion even if there are dependent external locations or external tables. - - + + .. py:method:: get(name: str) -> StorageCredentialInfo @@ -105,13 +105,13 @@ w.storage_credentials.delete(delete=created.name) Get a credential. - + Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. - + :param name: str Name of the storage credential. - + :returns: :class:`StorageCredentialInfo` @@ -130,12 +130,12 @@ all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) List credentials. - + Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of storage credentials to return. If not set, all the storage credentials are returned (not recommended). - when set to a value greater than 0, the page length is the minimum of @@ -144,7 +144,7 @@ returned; :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`StorageCredentialInfo` @@ -178,9 +178,9 @@ w.storage_credentials.delete(delete=created.name) Update a credential. - + Updates a storage credential on the metastore. - + :param name: str Name of the storage credential. :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) @@ -206,24 +206,24 @@ Whether the storage credential is only usable for read operations. :param skip_validation: bool (optional) Supplying true to this argument skips validation of the updated credential. - + :returns: :class:`StorageCredentialInfo` .. py:method:: validate( [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityRequest], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], external_location_name: Optional[str], read_only: Optional[bool], storage_credential_name: Optional[str], url: Optional[str]]) -> ValidateStorageCredentialResponse Validate a storage credential. - + Validates a storage credential. At least one of __external_location_name__ and __url__ needs to be provided. If only one of them is provided, it will be used for validation. If both are provided, the __url__ will be used for validation, and __external_location_name__ will be ignored when checking overlapping URLs. - + Either the __storage_credential_name__ or the cloud-specific credential must be provided. - + The caller must be a metastore admin or the storage credential owner or have the **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential. - + :param aws_iam_role: :class:`AwsIamRoleRequest` (optional) The AWS IAM role configuration.
:param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional) @@ -242,6 +242,6 @@ The name of the storage credential to validate. :param url: str (optional) The external location url to validate. - + :returns: :class:`ValidateStorageCredentialResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst index 97debf034..545a3b2e2 100644 --- a/docs/workspace/catalog/system_schemas.rst +++ b/docs/workspace/catalog/system_schemas.rst @@ -10,42 +10,42 @@ .. py:method:: disable(metastore_id: str, schema_name: str) Disable a system schema. - + Disables the system schema and removes it from the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. - - + + .. py:method:: enable(metastore_id: str, schema_name: str [, catalog_name: Optional[str]]) Enable a system schema. - + Enables the system schema and adds it to the system catalog. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The metastore ID under which the system schema lives. :param schema_name: str Full name of the system schema. :param catalog_name: str (optional) the catalog in which the system schema is to be enabled - - + + .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo] List system schemas. - + Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore admin. - + :param metastore_id: str The ID for the metastore in which the system schema resides. :param max_results: int (optional) @@ -55,6 +55,6 @@ is returned; - If not set, all the schemas are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`SystemSchemaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/table_constraints.rst b/docs/workspace/catalog/table_constraints.rst index dd46c42f3..6b974c463 100644 --- a/docs/workspace/catalog/table_constraints.rst +++ b/docs/workspace/catalog/table_constraints.rst @@ -5,51 +5,51 @@ .. py:class:: TableConstraintsAPI Primary key and foreign key constraints encode relationships between fields in tables. - + Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a primary key in another table. This primary key is the parent constraint of the foreign key and the table this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child constraint of its referenced primary key; the table of the foreign key is the child table of the primary key. - + You can declare primary keys and foreign keys as part of the table specification during table creation. You can also add or drop constraints on existing tables. .. py:method:: create(full_name_arg: str, constraint: TableConstraint) -> TableConstraint Create a table constraint. - + Creates a new table constraint. - + For the table constraint creation to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table.
- if the new constraint is a __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the owner of the referenced parent table. - + :param full_name_arg: str The full name of the table referenced by the constraint. :param constraint: :class:`TableConstraint` A table constraint, as defined by *one* of the following fields being set: __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__. - + :returns: :class:`TableConstraint` .. py:method:: delete(full_name: str, constraint_name: str, cascade: bool) Delete a table constraint. - + Deletes a table constraint. - + For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA** privilege on the table's parent schema, and be the owner of the table. - if the __cascade__ argument is **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG** privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner of the table. - + :param full_name: str Full name of the table referenced by the constraint. :param constraint_name: str @@ -57,6 +57,6 @@ :param cascade: bool If true, try deleting all child constraints of the current constraint. If false, reject this operation if the current constraint has any child constraints. - - + + \ No newline at end of file diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index 1c292eb5c..4bbd3faad 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -9,39 +9,39 @@ have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the USE_SCHEMA permission on its parent schema. - + A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table (rather than a managed or external table). .. py:method:: delete(full_name: str) Delete a table. - + Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. - - + + .. py:method:: exists(full_name: str) -> TableExistsResponse Get a boolean reflecting whether the table exists. - + Checks whether a table exists in the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent schema. - + :param full_name: str Full name of the table. - + :returns: :class:`TableExistsResponse` @@ -86,13 +86,13 @@ w.tables.delete(full_name=table_full_name) Get a table.
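A sketch combining the exists and get calls documented here, assuming a ``WorkspaceClient`` named ``w`` and a placeholder table name:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    full_name = "main.default.my_table"  # placeholder

    # exists() is a lightweight check; get() returns the full metadata.
    if w.tables.exists(full_name=full_name).table_exists:
        table = w.tables.get(full_name=full_name)
        print(table.table_type)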
- + Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be the table owner or have the SELECT privilege on the table. - + :param full_name: str Full name of the table. :param include_browse: bool (optional) @@ -102,7 +102,7 @@ Whether delta metadata should be included in the response. :param include_manifest_capabilities: bool (optional) Whether to include a manifest containing capabilities the table has. - + :returns: :class:`TableInfo` @@ -130,13 +130,13 @@ w.catalogs.delete(name=created_catalog.name, force=True) List tables. - + Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. :param schema_name: str @@ -162,7 +162,7 @@ not. :param page_token: str (optional) Opaque token to send for the next page of results (pagination). - + :returns: Iterator over :class:`TableInfo` @@ -190,18 +190,18 @@ w.catalogs.delete(name=created_catalog.name, force=True) List table summaries. - + Gets an array of summaries for tables for a schema and catalog within the metastore. The table summaries returned are either: - + * summaries for tables (within the current metastore and parent catalog and schema), when the user is a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the **USE_CATALOG** privilege on the parent catalog. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str Name of parent catalog for tables of interest. :param include_manifest_capabilities: bool (optional) @@ -218,22 +218,22 @@ A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty. :param table_name_pattern: str (optional) A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty. - + :returns: Iterator over :class:`TableSummary` .. py:method:: update(full_name: str [, owner: Optional[str]]) Update a table owner. - + Change the owner of the table. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param full_name: str Full name of the table. 
:param owner: str (optional) - - + + \ No newline at end of file diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst index 1acd462b7..b6ebbe819 100644 --- a/docs/workspace/catalog/temporary_table_credentials.rst +++ b/docs/workspace/catalog/temporary_table_credentials.rst @@ -20,17 +20,17 @@ .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse Generate a temporary table credential. - + Get a short-lived credential for directly accessing the table data on cloud storage. The metastore must have the external_access_enabled flag set to true (default false). The caller must have the EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog owners. - + :param operation: :class:`TableOperation` (optional) The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is specified, the credentials returned will have write permissions, otherwise, it will be read only. :param table_id: str (optional) UUID of the table to read or write. - + :returns: :class:`GenerateTemporaryTableCredentialResponse` \ No newline at end of file diff --git a/docs/workspace/catalog/volumes.rst b/docs/workspace/catalog/volumes.rst index cd9234587..5b6662f48 100644 --- a/docs/workspace/catalog/volumes.rst +++ b/docs/workspace/catalog/volumes.rst @@ -59,23 +59,23 @@ w.volumes.delete(name=created_volume.full_name) Create a Volume. - + Creates a new volume. - + The user can create either an external volume or a managed volume. An external volume will be created in the specified external location, while a managed volume will be located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. - + For the volume creation to succeed, the user must satisfy the following conditions: - The caller must be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller must have **CREATE VOLUME** privilege on the parent schema. - + For an external volume, the following conditions must also be satisfied: - The caller must have **CREATE EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes existing in the specified storage location. - The specified storage location is not under the location of other tables, nor volumes, or catalogs or schemas. - + :param catalog_name: str The name of the catalog where the schema and the volume are :param schema_name: str @@ -86,30 +86,30 @@ The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. [Learn more] - + [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external :param comment: str (optional) The comment attached to the volume :param storage_location: str (optional) The storage location on the cloud - + :returns: :class:`VolumeInfo` .. py:method:: delete(name: str) Delete a Volume. - + Deletes a volume from the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the volume.
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param name: str The three-level (fully qualified) name of the volume - - + + .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[VolumeInfo] @@ -136,17 +136,17 @@ w.catalogs.delete(name=created_catalog.name, force=True) List Volumes. - + Gets an array of volumes for the current metastore under the parent catalog and schema. - + The returned volumes are filtered based on the privileges of the calling user. For example, the metastore admin is able to list all the volumes. A regular user needs to be the owner or have the **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + There is no guarantee of a specific ordering of the elements in the array. - + :param catalog_name: str The identifier of the catalog :param schema_name: str @@ -156,20 +156,20 @@ metadata for :param max_results: int (optional) Maximum number of volumes to return (page length). - + If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set to a value greater than 0, the page length is the minimum of this value and a server configured value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter error is returned; - + Note: this parameter controls only the maximum number of volumes to return. The actual number of volumes returned in a page may be smaller than this value, including 0, even if there are more pages. :param page_token: str (optional) Opaque token returned by a previous request. It must be included in the request to retrieve the next page of results (pagination). - + :returns: Iterator over :class:`VolumeInfo` @@ -223,19 +223,19 @@ w.volumes.delete(name=created_volume.full_name) Get a Volume. - + Gets a volume from the metastore for a specific catalog and schema. - + The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + :param name: str The three-level (fully qualified) name of the volume :param include_browse: bool (optional) Whether to include volumes in the response for which the principal can only access selective metadata for - + :returns: :class:`VolumeInfo` @@ -291,15 +291,15 @@ w.volumes.delete(name=created_volume.full_name) Update a Volume. - + Updates the specified volume under the specified parent catalog and schema. - + The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - + Currently only the name, the owner or the comment of the volume can be updated. - + :param name: str The three-level (fully qualified) name of the volume :param comment: str (optional) @@ -308,6 +308,6 @@ New name for the volume.
:param owner: str (optional) The identifier of the user who owns the volume - + :returns: :class:`VolumeInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst index 6571d97b7..c507d4c78 100644 --- a/docs/workspace/catalog/workspace_bindings.rst +++ b/docs/workspace/catalog/workspace_bindings.rst @@ -7,16 +7,16 @@ A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable can be accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list of workspaces. This API allows you to configure (bind) securables to workspaces. - + NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__. - + A securable's workspace bindings can be configured by a metastore admin or the owner of the securable. - + The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). - + Securable types that support binding: - catalog - storage_credential - credential - external_location .. py:method:: get(name: str) -> GetCatalogWorkspaceBindingsResponse @@ -40,23 +40,23 @@ w.catalogs.delete(name=created.name, force=True) Get catalog workspace bindings. - + Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. - + :returns: :class:`GetCatalogWorkspaceBindingsResponse` .. py:method:: get_bindings(securable_type: str, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding] Get securable workspace bindings. - + Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -69,7 +69,7 @@ error is returned; - If not set, all the workspace bindings are returned (not recommended). :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`WorkspaceBinding` @@ -97,27 +97,27 @@ w.catalogs.delete(name=created.name, force=True) Update catalog workspace bindings. - + Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the catalog. - + :param name: str The name of the catalog. :param assign_workspaces: List[int] (optional) A list of workspace IDs. :param unassign_workspaces: List[int] (optional) A list of workspace IDs. - + :returns: :class:`UpdateCatalogWorkspaceBindingsResponse` .. py:method:: update_bindings(securable_type: str, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> UpdateWorkspaceBindingsResponse Update securable workspace bindings. - + Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - + :param securable_type: str The type of the securable to bind to a workspace (catalog, storage_credential, credential, or external_location). @@ -127,6 +127,6 @@ List of workspace bindings. 
:param remove: List[:class:`WorkspaceBinding`] (optional) List of workspace bindings. - + :returns: :class:`UpdateWorkspaceBindingsResponse` \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst index 4dced9ce2..1fabe51cb 100644 --- a/docs/workspace/cleanrooms/clean_room_assets.rst +++ b/docs/workspace/cleanrooms/clean_room_assets.rst @@ -10,71 +10,71 @@ .. py:method:: create(clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset Create an asset. - + Create a clean room asset - share an asset like a notebook or table into the clean room. For each UC asset that is added through this method, the clean room owner must also have enough privilege on the asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to access the asset. Typically, you should use a group as the clean room owner. - + :param clean_room_name: str Name of the clean room. :param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) Delete an asset. - + Delete a clean room asset - unshare/remove the asset from the clean room - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. :param name: str The fully qualified name of the asset, it is the same as the name field in CleanRoomAsset. - - + + .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str) -> CleanRoomAsset Get an asset. - + Get the details of a clean room asset by its type and full name. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` The type of the asset. :param name: str The fully qualified name of the asset, it is the same as the name field in CleanRoomAsset. - + :returns: :class:`CleanRoomAsset` .. py:method:: list(clean_room_name: str [, page_token: Optional[str]]) -> Iterator[CleanRoomAsset] List assets. - + :param clean_room_name: str Name of the clean room. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomAsset` .. py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str, asset: CleanRoomAsset) -> CleanRoomAsset Update an asset. - + Update a clean room asset. For example, updating the content of a notebook; changing the shared partitions of a table; etc. - + :param clean_room_name: str Name of the clean room. :param asset_type: :class:`CleanRoomAssetAssetType` @@ -82,13 +82,13 @@ :param name: str A fully qualified name that uniquely identifies the asset within the clean room. This is also the name displayed in the clean room UI. - + For UC securable assets (tables, volumes, etc.), the format is *shared_catalog*.*shared_schema*.*asset_name* - + For notebooks, the name is the notebook file name. :param asset: :class:`CleanRoomAsset` Metadata of the clean room asset - + :returns: :class:`CleanRoomAsset` \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst index 0a22a1f3e..b78bf2c2a 100644 --- a/docs/workspace/cleanrooms/clean_room_task_runs.rst +++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst @@ -9,9 +9,9 @@ ..
py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun] List notebook task runs. - + List all the historical notebook task runs in a clean room. - + :param clean_room_name: str Name of the clean room. :param notebook_name: str (optional) @@ -20,6 +20,6 @@ The maximum number of task runs to return. Currently ignored - all runs will be returned. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoomNotebookTaskRun` \ No newline at end of file diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst index a711120d5..45981bd9c 100644 --- a/docs/workspace/cleanrooms/clean_rooms.rst +++ b/docs/workspace/cleanrooms/clean_rooms.rst @@ -11,85 +11,85 @@ .. py:method:: create(clean_room: CleanRoom) -> CleanRoom Create a clean room. - + Create a new clean room with the specified collaborators. This method is asynchronous; the returned name field inside the clean_room field can be used to poll the clean room status, using the :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING state, with only name, owner, comment, created_at and status populated. The clean room will be usable once it enters an ACTIVE state. - + The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore. - + :param clean_room: :class:`CleanRoom` - + :returns: :class:`CleanRoom` .. py:method:: create_output_catalog(clean_room_name: str, output_catalog: CleanRoomOutputCatalog) -> CreateCleanRoomOutputCatalogResponse Create an output catalog. - + Create the output catalog of the clean room. - + :param clean_room_name: str Name of the clean room. :param output_catalog: :class:`CleanRoomOutputCatalog` - + :returns: :class:`CreateCleanRoomOutputCatalogResponse` .. py:method:: delete(name: str) Delete a clean room. - + Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other collaborators have not deleted the clean room, they will still have the clean room in their metastore, but it will be in a DELETED state and no operations other than deletion can be performed on it. - + :param name: str Name of the clean room. - - + + .. py:method:: get(name: str) -> CleanRoom Get a clean room. - + Get the details of a clean room given its name. - + :param name: str - + :returns: :class:`CleanRoom` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom] List clean rooms. - + Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are returned. - + :param page_size: int (optional) Maximum number of clean rooms to return (i.e., the page length). Defaults to 100. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`CleanRoom` .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom Update a clean room. - + Update a clean room. The caller must be the owner of the clean room, have the **MODIFY_CLEAN_ROOM** privilege, or be a metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + :param name: str Name of the clean room.
:param clean_room: :class:`CleanRoom` (optional) - + :returns: :class:`CleanRoom` \ No newline at end of file diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst index bf4c8ab61..790315fd9 100644 --- a/docs/workspace/compute/cluster_policies.rst +++ b/docs/workspace/compute/cluster_policies.rst @@ -7,18 +7,18 @@ You can use cluster policies to control users' ability to configure clusters based on a set of rules. These rules specify which attributes or attribute values can be used during cluster creation. Cluster policies have ACLs that limit their use to specific users and groups. - + With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding some fields. - Manage costs by setting limits on attributes that impact the hourly rate. - + Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted policy and create fully-configurable clusters. - A user who has both unrestricted cluster create permission and access to cluster policies can select the Unrestricted policy and policies they have access to. - A user who has access only to cluster policies can select the policies they have access to. - + If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create, edit, and delete policies. Admin users also have access to all policies. @@ -50,12 +50,12 @@ w.cluster_policies.delete(policy_id=created.policy_id) Create a new policy. - + Creates a new policy with prescribed settings. - + :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - + [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param description: str (optional) Additional human-readable description of the cluster policy. @@ -71,31 +71,31 @@ :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. - + You can use this to customize the policy definition inherited from the policy family. Policy rules specified here are merged into the inherited policy definition. - + [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param policy_family_id: str (optional) ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. - + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition. - + :returns: :class:`CreatePolicyResponse` .. py:method:: delete(policy_id: str) Delete a cluster policy. - + Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited. - + :param policy_id: str The ID of the policy to delete. - - + + ..
py:method:: edit(policy_id: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) @@ -140,15 +140,15 @@ w.cluster_policies.delete(policy_id=created.policy_id) Update a cluster policy. - + Update an existing policy for cluster. This operation may make some clusters governed by the previous policy invalid. - + :param policy_id: str The ID of the policy to update. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - + [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param description: str (optional) Additional human-readable description of the cluster policy. @@ -164,19 +164,19 @@ :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. - + You can use this to customize the policy definition inherited from the policy family. Policy rules specified here are merged into the inherited policy definition. - + [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html :param policy_family_id: str (optional) ID of the policy family. The cluster policy's policy definition inherits the policy family's policy definition. - + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the policy definition. - - + + .. py:method:: get(policy_id: str) -> Policy @@ -209,37 +209,37 @@ w.cluster_policies.delete(policy_id=created.policy_id) Get a cluster policy. - + Get a cluster policy entity. Creation and editing is available to admins only. - + :param policy_id: str Canonical unique identifier for the Cluster Policy. - + :returns: :class:`Policy` .. py:method:: get_permission_levels(cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse Get cluster policy permission levels. - + Gets the permission levels that a user can have on an object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. - + :returns: :class:`GetClusterPolicyPermissionLevelsResponse` .. py:method:: get_permissions(cluster_policy_id: str) -> ClusterPolicyPermissions Get cluster policy permissions. - + Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. - + :returns: :class:`ClusterPolicyPermissions` @@ -258,43 +258,43 @@ all = w.cluster_policies.list(compute.ListClusterPoliciesRequest()) List cluster policies. - + Returns a list of policies accessible by the requesting user. - + :param sort_column: :class:`ListSortColumn` (optional) The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy creation time. * `POLICY_NAME` - Sort result list by policy name. :param sort_order: :class:`ListSortOrder` (optional) The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC` - Sort result list in ascending order. - + :returns: Iterator over :class:`Policy` .. 
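A sketch of listing policies sorted by name, using the `sort_column` and `sort_order` enums documented above; the iterator yields `Policy` objects:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()

    # List all policies visible to the caller, sorted by name ascending.
    for policy in w.cluster_policies.list(
        sort_column=compute.ListSortColumn.POLICY_NAME,
        sort_order=compute.ListSortOrder.ASC,
    ):
        print(policy.name)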
py:method:: set_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions Set cluster policy permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional) - + :returns: :class:`ClusterPolicyPermissions` .. py:method:: update_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions Update cluster policy permissions. - + Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root object. - + :param cluster_policy_id: str The cluster policy for which to get or manage permissions. :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional) - + :returns: :class:`ClusterPolicyPermissions` \ No newline at end of file diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index 80a6609e9..961b0472a 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -5,22 +5,22 @@ .. py:class:: ClustersExt The Clusters API allows you to create, start, edit, list, terminate, and delete clusters. - + Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing page for a list of the supported instance types and their corresponding DBUs. - + A Databricks cluster is a set of computation resources and configurations on which you run data engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming analytics, ad-hoc analytics, and machine learning. - + You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs. - + You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list. @@ -60,16 +60,16 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Change cluster owner. - + Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform this operation. The service principal application ID can be supplied as an argument to `owner_username`. - + :param cluster_id: str :param owner_username: str New owner of the cluster_id after this RPC. - - + + .. 
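A sketch of reassigning a terminated cluster to a service principal, as described above; both IDs are placeholders:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # The cluster must be terminated and the caller must be an admin.
    w.clusters.change_owner(
        cluster_id="0123-456789-abcdefgh",  # placeholder cluster ID
        owner_username="6f1a12b3-...",  # placeholder service principal application ID
    )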
py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails] @@ -102,22 +102,22 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Create new cluster. - + Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud provider limitations (account limits, spot price, etc.) or transient network issues. - + If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed. Otherwise the cluster will terminate with an informative error message. - + Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out the [create compute UI] and then copying the generated JSON definition from the UI. - + [create compute UI]: https://docs.databricks.com/compute/configure.html - + :param spark_version: str The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be retrieved by using the :method:clusters/sparkVersions API call. @@ -152,18 +152,18 @@ :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. - + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. 
* `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -172,10 +172,10 @@ fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -189,7 +189,7 @@ :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -210,22 +210,22 @@ The optional ID of the instance pool to which the cluster belongs. :param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -235,7 +235,7 @@ :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -245,10 +245,10 @@ The ID of the cluster policy used to create the cluster if applicable. :param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. 
:param single_user_name: str (optional) @@ -261,11 +261,11 @@ An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -275,12 +275,12 @@ specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -321,14 +321,14 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Terminate cluster. - + Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a `TERMINATING` or `TERMINATED` state, nothing will happen. - + :param cluster_id: str The cluster to be terminated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_terminated for more details. @@ -376,19 +376,19 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Update cluster configuration. - + Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. - + Clusters created by the Databricks Jobs service cannot be edited. - + :param cluster_id: str ID of the cluster :param spark_version: str @@ -423,18 +423,18 @@ :param custom_tags: Dict[str,str] (optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags - + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags :param data_security_mode: :class:`DataSecurityMode` (optional) Data security mode decides what data governance model to use when accessing data from a cluster. - + The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`. 
- + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. @@ -443,10 +443,10 @@ fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited. - + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions: - + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on @@ -460,7 +460,7 @@ :param driver_node_type_id: str (optional) The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above. - + This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id and node_type_id take precedence. @@ -481,22 +481,22 @@ The optional ID of the instance pool to which the cluster belongs. :param is_single_node: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`, and `num_workers` :param kind: :class:`Kind` (optional) The kind of compute described by this compute specification. - + Depending on `kind`, different validations and default values will be applied. - + Clusters with `kind = CLASSIC_PREVIEW` support the following fields, whereas clusters with no specified `kind` do not. * [is_single_node](/api/workspace/clusters/create#is_single_node) * [use_ml_runtime](/api/workspace/clusters/create#use_ml_runtime) * [data_security_mode](/api/workspace/clusters/create#data_security_mode) set to `DATA_SECURITY_MODE_AUTO`, `DATA_SECURITY_MODE_DEDICATED`, or `DATA_SECURITY_MODE_STANDARD` - + By using the [simple form], your clusters are automatically using `kind = CLASSIC_PREVIEW`. - + [simple form]: https://docs.databricks.com/compute/simple-form.html :param node_type_id: str (optional) This field encodes, through a single value, the resources available to each of the Spark nodes in @@ -506,7 +506,7 @@ :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas @@ -516,10 +516,10 @@ The ID of the cluster policy used to create the cluster if applicable. :param runtime_engine: :class:`RuntimeEngine` (optional) Determines the cluster's runtime engine, either standard or Photon. - + This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. 
- + If left unspecified, the runtime engine defaults to standard unless the spark_version contains -photon-, in which case Photon will be used. :param single_user_name: str (optional) @@ -532,11 +532,11 @@ An object containing a set of optional, user-specified environment variable key-value pairs. Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while launching the driver and workers. - + In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks managed environmental variables are included as well. - + Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true"}` @@ -546,12 +546,12 @@ specified. :param use_ml_runtime: bool (optional) This field can only be used when `kind = CLASSIC_PREVIEW`. - + `effective_spark_version` is determined by `spark_version` (DBR release), this field `use_ml_runtime`, and whether `node_type_id` is gpu node or not. :param workload_type: :class:`WorkloadType` (optional) Cluster Attributes showing for clusters workload types. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -617,10 +617,10 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) List cluster activity events. - + Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more events to read, the response includes all the parameters necessary to request the next page of events. - + :param cluster_id: str The ID of the cluster to retrieve events about. :param end_time: int (optional) @@ -629,12 +629,12 @@ An optional set of event types to filter on. If empty, all event types are returned. :param limit: int (optional) Deprecated: use page_token in combination with page_size instead. - + The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed value is 500. :param offset: int (optional) Deprecated: use page_token in combination with page_size instead. - + The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results are requested in descending order, the end_time field is required. :param order: :class:`GetEventsOrder` (optional) @@ -649,7 +649,7 @@ previous page of events respectively. If page_token is empty, the first page is returned. :param start_time: int (optional) The start time in epoch milliseconds. If empty, returns events starting from the beginning of time. - + :returns: Iterator over :class:`ClusterEvent` @@ -685,37 +685,37 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Get cluster info. - + Retrieves the information for a cluster given its identifier. Clusters can be described while they are running, or up to 60 days after they are terminated. - + :param cluster_id: str The cluster about which to retrieve information. - + :returns: :class:`ClusterDetails` .. py:method:: get_permission_levels(cluster_id: str) -> GetClusterPermissionLevelsResponse Get cluster permission levels. - + Gets the permission levels that a user can have on an object. - + :param cluster_id: str The cluster for which to get or manage permissions. - + :returns: :class:`GetClusterPermissionLevelsResponse` .. 
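A sketch of paging through a cluster's activity events, assuming a placeholder cluster ID; the returned iterator requests additional pages as needed:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Iterate over a cluster's activity events, most recent first by default.
    for event in w.clusters.events(cluster_id="0123-456789-abcdefgh"):  # placeholder ID
        print(event.timestamp, event.type)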
py:method:: get_permissions(cluster_id: str) -> ClusterPermissions Get cluster permissions. - + Gets the permissions of a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. - + :returns: :class:`ClusterPermissions` @@ -733,10 +733,10 @@ nodes = w.clusters.list_node_types() List clusters. - + Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. - + :param filter_by: :class:`ListClustersFilterBy` (optional) Filters to apply to the list of clusters. :param page_size: int (optional) @@ -747,7 +747,7 @@ previous page of clusters respectively. :param sort_by: :class:`ListClustersSortBy` (optional) Sort the list of clusters by a specific criteria. - + :returns: Iterator over :class:`ClusterDetails` @@ -765,36 +765,36 @@ nodes = w.clusters.list_node_types() List node types. - + Returns a list of supported Spark node types. These node types can be used to launch a cluster. - + :returns: :class:`ListNodeTypesResponse` .. py:method:: list_zones() -> ListAvailableZonesResponse List availability zones. - + Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These zones can be used to launch a cluster. - + :returns: :class:`ListAvailableZonesResponse` .. py:method:: permanent_delete(cluster_id: str) Permanently delete cluster. - + Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. - + In addition, users will no longer see permanently deleted clusters in the cluster list, and API users can no longer perform any action on permanently deleted clusters. - + :param cluster_id: str The cluster to be deleted. - - + + .. py:method:: pin(cluster_id: str) @@ -829,13 +829,13 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Pin cluster. - + Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + .. py:method:: resize(cluster_id: str [, autoscale: Optional[AutoScale], num_workers: Optional[int]]) -> Wait[ClusterDetails] @@ -870,10 +870,10 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Resize cluster. - + Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a `RUNNING` state. - + :param cluster_id: str The cluster to be resized. :param autoscale: :class:`AutoScale` (optional) @@ -882,13 +882,13 @@ :param num_workers: int (optional) Number of worker nodes that this cluster should have. A cluster has one Spark Driver and `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - + Note: When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are provisioned. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -929,14 +929,14 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Restart cluster. - + Restarts a Spark cluster with the supplied ID. 
If the cluster is not currently in a `RUNNING` state, nothing will happen. - + :param cluster_id: str The cluster to be started. :param restart_user: str (optional) - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -1009,23 +1009,23 @@ .. py:method:: set_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions Set cluster permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` .. py:method:: spark_versions() -> GetSparkVersionsResponse List available Spark versions. - + Returns the list of available Spark versions. These versions can be used to launch a cluster. - + :returns: :class:`GetSparkVersionsResponse` @@ -1061,16 +1061,16 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Start terminated cluster. - + Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster starts with the last specified cluster size. - If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job cannot be started. - + :param cluster_id: str The cluster to be started. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -1111,20 +1111,20 @@ w.clusters.permanent_delete(cluster_id=clstr.cluster_id) Unpin cluster. - + Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace admins. - + :param cluster_id: str - - + + .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails] Update cluster configuration (partial). - + Updates the configuration of a cluster to match the partial set of attributes and size. Denote which fields to update using the `update_mask` field in the request body. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be @@ -1133,25 +1133,25 @@ is started using the `clusters/start` API. Attempts to update a cluster in any other state will be rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be updated. - + :param cluster_id: str ID of the cluster. :param update_mask: str Used to specify which cluster attributes and size fields to update. See https://google.aip.dev/161 for more details. - + The field mask must be a single string, with multiple fields separated by commas (no spaces). The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. 
- + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. :param cluster: :class:`UpdateClusterResource` (optional) The cluster to be updated. - + :returns: Long-running operation waiter for :class:`ClusterDetails`. See :method:wait_get_cluster_running for more details. @@ -1163,13 +1163,13 @@ .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions Update cluster permissions. - + Updates the permissions on a cluster. Clusters can inherit permissions from their root object. - + :param cluster_id: str The cluster for which to get or manage permissions. :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional) - + :returns: :class:`ClusterPermissions` diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst index f51daabe8..c96d044a2 100644 --- a/docs/workspace/compute/command_execution.rst +++ b/docs/workspace/compute/command_execution.rst @@ -10,15 +10,15 @@ .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse] Cancel a command. - + Cancels a currently running command within an execution context. - + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str (optional) :param command_id: str (optional) :param context_id: str (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_cancelled for more details. @@ -30,27 +30,27 @@ .. py:method:: command_status(cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse Get command info. - + Gets the status of and, if available, the results from a currently executing command. - + The command ID is obtained from a prior successful call to __execute__. - + :param cluster_id: str :param context_id: str :param command_id: str - + :returns: :class:`CommandStatusResponse` .. py:method:: context_status(cluster_id: str, context_id: str) -> ContextStatusResponse Get status. - + Gets the status for an execution context. - + :param cluster_id: str :param context_id: str - + :returns: :class:`ContextStatusResponse` @@ -76,15 +76,15 @@ w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id) Create an execution context. - + Creates an execution context for running cluster commands. - + If successful, this method returns the ID of the new execution context. - + :param cluster_id: str (optional) Running cluster id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`ContextStatusResponse`. See :method:wait_context_status_command_execution_running for more details. @@ -96,13 +96,13 @@ .. py:method:: destroy(cluster_id: str, context_id: str) Delete an execution context. - + Deletes an execution context. - + :param cluster_id: str :param context_id: str - - + + .. py:method:: execute( [, cluster_id: Optional[str], command: Optional[str], context_id: Optional[str], language: Optional[Language]]) -> Wait[CommandStatusResponse] @@ -134,11 +134,11 @@ w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id) Run a command. - + Runs a cluster command in the given execution context, using the provided language. 
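As a sketch of the full command-execution flow covered in this section (create a context, run a command, clean up), assuming a placeholder ID of a running cluster:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    cluster_id = "0123-456789-abcdefgh"  # placeholder: a running cluster

    # Create an execution context and wait for it to become available.
    context = w.command_execution.create(
        cluster_id=cluster_id, language=compute.Language.PYTHON
    ).result()

    # Run a command in that context and wait for it to finish.
    result = w.command_execution.execute(
        cluster_id=cluster_id,
        context_id=context.id,
        language=compute.Language.PYTHON,
        command="print(1 + 1)",
    ).result()
    print(result.results.data)

    # Tear down the execution context.
    w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)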
- + If successful, it returns an ID for tracking the status of the command's execution. - + :param cluster_id: str (optional) Running cluster id :param command: str (optional) @@ -146,7 +146,7 @@ :param context_id: str (optional) Running context id :param language: :class:`Language` (optional) - + :returns: Long-running operation waiter for :class:`CommandStatusResponse`. See :method:wait_command_status_command_execution_finished_or_error for more details. diff --git a/docs/workspace/compute/global_init_scripts.rst b/docs/workspace/compute/global_init_scripts.rst index b4c044b95..e2eba7604 100644 --- a/docs/workspace/compute/global_init_scripts.rst +++ b/docs/workspace/compute/global_init_scripts.rst @@ -6,7 +6,7 @@ The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace. These scripts run on every node in every cluster in the workspace. - + **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts. Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark container fails to launch and init scripts with later position are skipped. If enough containers fail, the @@ -37,9 +37,9 @@ w.global_init_scripts.delete(script_id=created.script_id) Create init script. - + Creates a new global init script in this workspace. - + :param name: str The name of the script :param script: str @@ -49,27 +49,27 @@ :param position: int (optional) The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order. - + If you omit the numeric position for a new global init script, it defaults to last position. It will run after all current scripts. Setting any value greater than the position of the last script is equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2. Any position of (3) or greater puts the script in the last position. If an explicit position value conflicts with an existing script value, your request succeeds, but the original script at that position and all later scripts have their positions incremented by 1. - + :returns: :class:`CreateResponse` .. py:method:: delete(script_id: str) Delete init script. - + Deletes a global init script. - + :param script_id: str The ID of the global init script. - - + + .. py:method:: get(script_id: str) -> GlobalInitScriptDetailsWithContent @@ -99,12 +99,12 @@ w.global_init_scripts.delete(script_id=created.script_id) Get an init script. - + Gets all the details of a script, including its Base64-encoded contents. - + :param script_id: str The ID of the global init script. - + :returns: :class:`GlobalInitScriptDetailsWithContent` @@ -122,11 +122,11 @@ all = w.global_init_scripts.list() Get init scripts. - + Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. To retrieve the contents of a script, use the [get a global init script](:method:globalinitscripts/get) operation. - + :returns: Iterator over :class:`GlobalInitScriptDetails` @@ -161,10 +161,10 @@ w.global_init_scripts.delete(script_id=created.script_id) Update init script. - + Updates a global init script, specifying only the fields to change. All fields are optional. Unspecified fields retain their current value. - + :param script_id: str The ID of the global init script. 
:param name: str
@@ -176,13 +176,13 @@
      :param position: int (optional)
        The position of a script, where 0 represents the first script to run, 1 is the second script to
        run, in ascending order. To move the script to run first, set its position to 0.

        To move the script to the end, set its position to any value greater than or equal to the
        position of the last script. For example, with three existing scripts at positions 0, 1, and 2,
        any position value of 2 or greater puts the script in the last position (2).

        If an explicit position value conflicts with an existing script, your request succeeds, but the
        original script at that position and all later scripts have their positions incremented by 1.
\ No newline at end of file
diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst
index 8fb46dbc9..0614f2101 100644
--- a/docs/workspace/compute/instance_pools.rst
+++ b/docs/workspace/compute/instance_pools.rst
@@ -6,16 +6,16 @@
    The Instance Pools API is used to create, edit, delete, and list instance pools. Pools use
    ready-to-use cloud instances to reduce cluster start and auto-scaling times.

    Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle,
    ready-to-use instances. When a cluster is attached to a pool, cluster nodes are created using the
    pool’s idle instances. If the pool has no idle instances, the pool expands by allocating a new
    instance from the instance provider in order to accommodate the cluster’s request. When a cluster
    releases an instance, it returns to the pool and is free for another cluster to use. Only clusters
    attached to a pool can use that pool’s idle instances.

    You can specify a different pool for the driver node and worker nodes, or use the same pool for both.

    Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does
    apply. See pricing.
@@ -40,9 +40,9 @@
        w.instance_pools.delete(instance_pool_id=created.instance_pool_id)

      Create a new instance pool.

      Creates a new instance pool using idle and ready-to-use cloud instances.

      :param instance_pool_name: str
        Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
        characters.
@@ -60,7 +60,7 @@
      :param custom_tags: Dict[str,str] (optional)
        Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances
        and EBS volumes) with these tags in addition to `default_tags`. Notes:

        - Currently, Databricks allows at most 45 custom tags
      :param disk_spec: :class:`DiskSpec` (optional)
        Defines the specification of the disks that will be attached to all Spark containers.
@@ -89,20 +89,20 @@
        A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters
        started with the preloaded Spark version will start faster. A list of available Spark versions can
        be retrieved by using the :method:clusters/sparkVersions API call.

      :returns: :class:`CreateInstancePoolResponse`

   .. py:method:: delete(instance_pool_id: str)

      Delete an instance pool.

      Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously.

      :param instance_pool_id: str
        The instance pool to be terminated.

   ..
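A sketch of the create/delete round trip for an instance pool, mirroring the usage shown above; the pool name is a placeholder and the node type is picked with the SDK's selector helper:

.. code-block:: python

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Create a small pool on the smallest local-disk node type, then delete it.
    created = w.instance_pools.create(
        instance_pool_name=f"sdk-example-{time.time_ns()}",
        node_type_id=w.clusters.select_node_type(local_disk=True),
    )
    w.instance_pools.delete(instance_pool_id=created.instance_pool_id)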
py:method:: edit(instance_pool_id: str, instance_pool_name: str, node_type_id: str [, custom_tags: Optional[Dict[str, str]], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int]]) @@ -132,9 +132,9 @@ w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Edit an existing instance pool. - + Modifies the configuration of an existing instance pool. - + :param instance_pool_id: str Instance pool ID :param instance_pool_name: str @@ -148,7 +148,7 @@ :param custom_tags: Dict[str,str] (optional) Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - + - Currently, Databricks allows at most 45 custom tags :param idle_instance_autotermination_minutes: int (optional) Automatically terminates the extra instances in the pool cache after they are inactive for this time @@ -162,8 +162,8 @@ upsize requests. :param min_idle_instances: int (optional) Minimum number of idle instances to keep in the instance pool - - + + .. py:method:: get(instance_pool_id: str) -> GetInstancePool @@ -189,37 +189,37 @@ w.instance_pools.delete(instance_pool_id=created.instance_pool_id) Get instance pool information. - + Retrieve the information for an instance pool based on its identifier. - + :param instance_pool_id: str The canonical unique identifier for the instance pool. - + :returns: :class:`GetInstancePool` .. py:method:: get_permission_levels(instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse Get instance pool permission levels. - + Gets the permission levels that a user can have on an object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. - + :returns: :class:`GetInstancePoolPermissionLevelsResponse` .. py:method:: get_permissions(instance_pool_id: str) -> InstancePoolPermissions Get instance pool permissions. - + Gets the permissions of an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. - + :returns: :class:`InstancePoolPermissions` @@ -237,36 +237,36 @@ all = w.instance_pools.list() List instance pool info. - + Gets a list of instance pools with their statistics. - + :returns: Iterator over :class:`InstancePoolAndStats` .. py:method:: set_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions Set instance pool permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` .. py:method:: update_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions Update instance pool permissions. - + Updates the permissions on an instance pool. Instance pools can inherit permissions from their root object. - + :param instance_pool_id: str The instance pool for which to get or manage permissions. 
:param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional) - + :returns: :class:`InstancePoolPermissions` \ No newline at end of file diff --git a/docs/workspace/compute/instance_profiles.rst b/docs/workspace/compute/instance_profiles.rst index abf959324..182e1aa79 100644 --- a/docs/workspace/compute/instance_profiles.rst +++ b/docs/workspace/compute/instance_profiles.rst @@ -7,8 +7,9 @@ The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 buckets] using instance profiles for more information. - + [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html + .. py:method:: add(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool], skip_validation: Optional[bool]]) @@ -30,21 +31,21 @@ ) Register an instance profile. - + Registers an instance profile in Databricks. In the UI, you can then give users the permission to use this instance profile when launching clusters. - + This API is only available to admin users. - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough @@ -57,8 +58,8 @@ fails with an error message that does not indicate an IAM related permission issue, (e.g. “Your requested instance type is not supported in your requested availability zone”), you can pass this flag to skip the validation and forcibly add the instance profile. - - + + .. py:method:: edit(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool]]) @@ -80,37 +81,37 @@ ) Edit an instance profile. - + The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both of the following are true: - + * Your role name and instance profile name do not match. The name is the part after the last slash in each ARN. * You want to use the instance profile with [Databricks SQL Serverless]. - + To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses]. - + This API is only available to admin users. - + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html - + :param instance_profile_arn: str The AWS ARN of the instance profile to register with Databricks. This field is required. :param iam_role_arn: str (optional) The AWS IAM role ARN of the role associated with the instance profile. This field is required if your role name and instance profile name do not match and you want to use the instance profile with [Databricks SQL Serverless]. - + Otherwise, this field is optional. 
- + [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html :param is_meta_instance_profile: bool (optional) Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios. If true, it means the instance profile contains an meta IAM role which could assume a wide range of roles. Therefore it should always be used with authorization. This field is optional, the default value is `false`. - - + + .. py:method:: list() -> Iterator[InstanceProfile] @@ -127,25 +128,25 @@ all = w.instance_profiles.list() List available instance profiles. - + List the instance profiles that the calling user can use to launch a cluster. - + This API is available to all users. - + :returns: Iterator over :class:`InstanceProfile` .. py:method:: remove(instance_profile_arn: str) Remove the instance profile. - + Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. - + This API is only accessible to admin users. - + :param instance_profile_arn: str The ARN of the instance profile to remove. This field is required. - - + + \ No newline at end of file diff --git a/docs/workspace/compute/libraries.rst b/docs/workspace/compute/libraries.rst index 64f688fdc..339f54de2 100644 --- a/docs/workspace/compute/libraries.rst +++ b/docs/workspace/compute/libraries.rst @@ -6,70 +6,70 @@ The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster. - + To make third-party or custom code available to notebooks and jobs running on your clusters, you can install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories. - + Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library directly from a public repository such as PyPI or Maven, using a previously installed workspace library, or using an init script. - + When you uninstall a library from a cluster, the library is removed only when you restart the cluster. Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart. .. py:method:: all_cluster_statuses() -> Iterator[ClusterLibraryStatuses] Get all statuses. - + Get the status of all libraries on all clusters. A status is returned for all libraries installed on this cluster via the API or the libraries UI. - + :returns: Iterator over :class:`ClusterLibraryStatuses` .. py:method:: cluster_status(cluster_id: str) -> Iterator[LibraryFullStatus] Get status. - + Get the status of libraries on a cluster. A status is returned for all libraries installed on this cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries set to be installed on this cluster, in the order that the libraries were added to the cluster, are returned first. 2. Libraries that were previously requested to be installed on this cluster or, but are now marked for removal, in no particular order, are returned last. - + :param cluster_id: str Unique identifier of the cluster whose status should be retrieved. - + :returns: Iterator over :class:`LibraryFullStatus` .. py:method:: install(cluster_id: str, libraries: List[Library]) Add a library. - + Add libraries to install on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. 
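A sketch of requesting a PyPI library installation, assuming a placeholder cluster ID; the parameters are described just below:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()

    # Request installation of a PyPI package on a running cluster; the
    # installation itself happens asynchronously after this call returns.
    w.libraries.install(
        cluster_id="0123-456789-abcdefgh",  # placeholder cluster ID
        libraries=[compute.Library(pypi=compute.PythonPyPiLibrary(package="simplejson"))],
    )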
- + :param cluster_id: str Unique identifier for the cluster on which to install these libraries. :param libraries: List[:class:`Library`] The libraries to install. - - + + .. py:method:: uninstall(cluster_id: str, libraries: List[Library]) Uninstall libraries. - + Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is restarted. A request to uninstall a library that is not currently installed is ignored. - + :param cluster_id: str Unique identifier for the cluster on which to uninstall these libraries. :param libraries: List[:class:`Library`] The libraries to uninstall. - - + + \ No newline at end of file diff --git a/docs/workspace/compute/policy_compliance_for_clusters.rst b/docs/workspace/compute/policy_compliance_for_clusters.rst index 90c3aeb98..fea7a08f9 100644 --- a/docs/workspace/compute/policy_compliance_for_clusters.rst +++ b/docs/workspace/compute/policy_compliance_for_clusters.rst @@ -6,58 +6,58 @@ The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace. - + A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce compliance API allows you to update a cluster to be compliant with the current version of its policy. .. py:method:: enforce_compliance(cluster_id: str [, validate_only: Optional[bool]]) -> EnforceClusterComplianceResponse Enforce cluster policy compliance. - + Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if it is in a `RUNNING` or `TERMINATED` state. - + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes can take effect. - + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the cluster is started, the new attributes will take effect. - + Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API. Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. - + :param cluster_id: str The ID of the cluster you want to enforce policy compliance on. :param validate_only: bool (optional) If set, previews the changes that would be made to a cluster to enforce compliance but does not update the cluster. - + :returns: :class:`EnforceClusterComplianceResponse` .. py:method:: get_compliance(cluster_id: str) -> GetClusterComplianceResponse Get cluster policy compliance. - + Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param cluster_id: str The ID of the cluster to get the compliance status - + :returns: :class:`GetClusterComplianceResponse` .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ClusterCompliance] List cluster policy compliance. - + Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of compliance if their policy was updated after the cluster was last edited. - + :param policy_id: str Canonical unique identifier for the cluster policy. 
:param page_size: int (optional)
@@ -66,6 +66,6 @@
      :param page_token: str (optional)
        A page token that can be used to navigate to the next page or previous page as returned by
        `next_page_token` or `prev_page_token`.

      :returns: Iterator over :class:`ClusterCompliance`
\ No newline at end of file
diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst
index 56e4f4275..8bbcd039f 100644
--- a/docs/workspace/compute/policy_families.rst
+++ b/docs/workspace/compute/policy_families.rst
@@ -6,10 +6,10 @@
    View available policy families. A policy family contains a policy definition providing best practices
    for configuring clusters for a particular use case.

    Databricks manages and provides policy families for several common cluster use cases. You cannot
    create, edit, or delete policy families.

    Policy families cannot be used directly to create clusters. Instead, you create cluster policies
    using a policy family. Cluster policies created using a policy family inherit the policy family's
    policy definition.
@@ -31,14 +31,14 @@
        first_family = w.policy_families.get(policy_family_id=all[0].policy_family_id)

      Get policy family information.

      Retrieve the information for a policy family based on its identifier and version.

      :param policy_family_id: str
        The family ID about which to retrieve information.
      :param version: int (optional)
        The version number for the family to fetch. Defaults to the latest version.

      :returns: :class:`PolicyFamily`
@@ -57,14 +57,14 @@
        all = w.policy_families.list(compute.ListPolicyFamiliesRequest())

      List policy families.

      Returns the list of policy definition types available to use at their latest version. This API is
      paginated.

      :param max_results: int (optional)
        Maximum number of policy families to return.
      :param page_token: str (optional)
        A token that can be used to get the next page of results.

      :returns: Iterator over :class:`PolicyFamily`
\ No newline at end of file
diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst
index a3f0e1ccd..fde42d405 100644
--- a/docs/workspace/dashboards/genie.rst
+++ b/docs/workspace/dashboards/genie.rst
@@ -12,17 +12,17 @@
   .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]

      Create conversation message.

      Create a new message in a [conversation](:method:genie/startconversation). The AI response uses all
      previously created messages in the conversation to respond.

      :param space_id: str
        The ID associated with the Genie space where the conversation is started.
      :param conversation_id: str
        The ID associated with the conversation.
      :param content: str
        User message content.

      :returns: Long-running operation waiter for :class:`GenieMessage`. See
      :method:wait_get_message_genie_completed for more details.
@@ -34,10 +34,10 @@
   .. py:method:: execute_message_attachment_query(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse

      Execute message attachment SQL query.

      Execute the SQL for a message query attachment. Use this API when the query attachment has expired
      and needs to be re-executed.

      :param space_id: str
        Genie space ID
      :param conversation_id: str
@@ -46,36 +46,36 @@
        Message ID
      :param attachment_id: str
        Attachment ID

      :returns: :class:`GenieGetMessageQueryResultResponse`

   ..
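A conversational round trip through the Genie API, shown as a sketch with a placeholder space ID; both calls block on the long-running waiter:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    space_id = "01ef00000000000000000000"  # placeholder Genie space ID

    # Start a conversation and wait for the AI-generated reply.
    message = w.genie.start_conversation(
        space_id=space_id, content="What was last month's revenue?"
    ).result()

    # Ask a follow-up question in the same conversation.
    follow_up = w.genie.create_message(
        space_id=space_id,
        conversation_id=message.conversation_id,
        content="Break that down by region.",
    ).result()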
.. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse [Deprecated] Execute SQL query in a conversation message. - + Execute the SQL query in the message. - + :param space_id: str Genie space ID :param conversation_id: str Conversation ID :param message_id: str Message ID - + :returns: :class:`GenieGetMessageQueryResultResponse`
.. py:method:: generate_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGenerateDownloadFullQueryResultResponse Generate full query result download. - + Initiates a new SQL execution and returns a `download_id` that you can use to track the progress of the download. The query result is stored in an external link and can be retrieved using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API. Warning: Databricks strongly recommends that you protect the URLs that are returned by the `EXTERNAL_LINKS` disposition. See [Execute Statement](:method:statementexecution/executestatement) for more details. - + :param space_id: str Genie space ID :param conversation_id: str @@ -84,14 +84,14 @@ Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGenerateDownloadFullQueryResultResponse`
.. py:method:: get_download_full_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str, download_id: str) -> GenieGetDownloadFullQueryResultResponse Get download full query result. - + After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and successfully receiving a `download_id`, use this API to poll the download progress. When the download is complete, the API returns one or more external links to the query result files. Warning: Databricks @@ -99,7 +99,7 @@ You must not set an Authorization header in download requests. When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant temporary access to data. See [Execute Statement](:method:statementexecution/executestatement) for more details. - + :param space_id: str Genie space ID :param conversation_id: str @@ -111,33 +111,33 @@ :param download_id: str Download ID. This ID is provided by the [Generate Download endpoint](:method:genie/generateDownloadFullQueryResult) - + :returns: :class:`GenieGetDownloadFullQueryResultResponse`
.. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage Get conversation message. - + Get a message from a conversation. - + :param space_id: str The ID associated with the Genie space where the target conversation is located. :param conversation_id: str The ID associated with the target conversation. :param message_id: str The ID associated with the target message from the identified conversation. - + :returns: :class:`GenieMessage`
.. py:method:: get_message_attachment_query_result(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse Get message attachment SQL query result. - + Get the result of the SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` or `COMPLETED`. - + :param space_id: str Genie space ID :param conversation_id: str @@ -146,34 +146,34 @@ Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse`
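A rough sketch of the two-step download flow, assuming the generate response exposes a `download_id` field (all IDs are placeholders):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    ids = dict(
        space_id="<space-id>",
        conversation_id="<conversation-id>",
        message_id="<message-id>",
        attachment_id="<attachment-id>",
    )

    # Step 1: start a new SQL execution for the attachment.
    generated = w.genie.generate_download_full_query_result(**ids)

    # Step 2: poll with the returned download_id until external links are available.
    result = w.genie.get_download_full_query_result(**ids, download_id=generated.download_id)
    print(result)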
.. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse [Deprecated] Get conversation message SQL query result. - + Get the result of the SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY`. - + :param space_id: str Genie space ID :param conversation_id: str Conversation ID :param message_id: str Message ID - + :returns: :class:`GenieGetMessageQueryResultResponse`
.. py:method:: get_message_query_result_by_attachment(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse [Deprecated] Get conversation message SQL query result. - + Get the result of the SQL query if the message has a query attachment. This is only available if a message has a query attachment and the message status is `EXECUTING_QUERY` or `COMPLETED`. - + :param space_id: str Genie space ID :param conversation_id: str @@ -182,47 +182,47 @@ Message ID :param attachment_id: str Attachment ID - + :returns: :class:`GenieGetMessageQueryResultResponse`
.. py:method:: get_space(space_id: str) -> GenieSpace Get Genie Space. - + Get details of a Genie Space. - + :param space_id: str The ID associated with the Genie space - + :returns: :class:`GenieSpace`
.. py:method:: list_spaces( [, page_size: Optional[int], page_token: Optional[str]]) -> GenieListSpacesResponse List Genie spaces. - + Get a list of Genie Spaces. - + :param page_size: int (optional) Maximum number of spaces to return per page :param page_token: str (optional) Pagination token for getting the next page of results - + :returns: :class:`GenieListSpacesResponse`
.. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage] Start conversation. - + Start a new conversation. - + :param space_id: str The ID associated with the Genie space where you want to start a conversation. :param content: str The text of the message that starts the conversation. - + :returns: Long-running operation waiter for :class:`GenieMessage`. See :method:wait_get_message_genie_completed for more details.
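A brief sketch of starting a conversation and waiting for the first response (the space ID is a placeholder):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Start a conversation; the waiter resolves once the first message completes.
    first = w.genie.start_conversation(
        space_id="<space-id>",
        content="Which customers had the highest spend last quarter?",
    ).result()
    print(first.conversation_id, first.status)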
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst index 944a95f15..4becea5a7 100644 --- a/docs/workspace/dashboards/lakeview.rst +++ b/docs/workspace/dashboards/lakeview.rst @@ -10,42 +10,42 @@ .. py:method:: create(dashboard: Dashboard) -> Dashboard Create dashboard. - + Create a draft dashboard. - + :param dashboard: :class:`Dashboard` - + :returns: :class:`Dashboard`
.. py:method:: create_schedule(dashboard_id: str, schedule: Schedule) -> Schedule Create dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule: :class:`Schedule` - + :returns: :class:`Schedule`
.. py:method:: create_subscription(dashboard_id: str, schedule_id: str, subscription: Subscription) -> Subscription Create schedule subscription. - + :param dashboard_id: str UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str UUID identifying the schedule to which the subscription belongs. :param subscription: :class:`Subscription` - + :returns: :class:`Subscription`
.. py:method:: delete_schedule(dashboard_id: str, schedule_id: str [, etag: Optional[str]]) Delete dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str @@ -53,14 +53,14 @@ :param etag: str (optional) The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been modified from its last retrieval. - - + +
.. py:method:: delete_subscription(dashboard_id: str, schedule_id: str, subscription_id: str [, etag: Optional[str]]) Delete schedule subscription. - + :param dashboard_id: str UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str @@ -70,64 +70,64 @@ :param etag: str (optional) The etag for the subscription. Can be optionally provided to ensure that the subscription has not been modified since the last read. - - + +
.. py:method:: get(dashboard_id: str) -> Dashboard Get dashboard. - + Get a draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard. - + :returns: :class:`Dashboard`
.. py:method:: get_published(dashboard_id: str) -> PublishedDashboard Get published dashboard. - + Get the current published dashboard. - + :param dashboard_id: str UUID identifying the published dashboard. - + :returns: :class:`PublishedDashboard`
.. py:method:: get_schedule(dashboard_id: str, schedule_id: str) -> Schedule Get dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str UUID identifying the schedule. - + :returns: :class:`Schedule`
.. py:method:: get_subscription(dashboard_id: str, schedule_id: str, subscription_id: str) -> Subscription Get schedule subscription. - + :param dashboard_id: str UUID identifying the dashboard to which the subscription belongs. :param schedule_id: str UUID identifying the schedule to which the subscription belongs. :param subscription_id: str UUID identifying the subscription. - + :returns: :class:`Subscription`
.. py:method:: list( [, page_size: Optional[int], page_token: Optional[str], show_trashed: Optional[bool], view: Optional[DashboardView]]) -> Iterator[Dashboard] List dashboards. - + :param page_size: int (optional) The number of dashboards to return per page. :param page_token: str (optional) @@ -138,14 +138,14 @@ returned. :param view: :class:`DashboardView` (optional) `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard. - + :returns: Iterator over :class:`Dashboard`
.. py:method:: list_schedules(dashboard_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Schedule] List dashboard schedules. - + :param dashboard_id: str UUID identifying the dashboard to which the schedules belong. :param page_size: int (optional) @@ -153,14 +153,14 @@ :param page_token: str (optional) A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent page. - + :returns: Iterator over :class:`Schedule`
.. py:method:: list_subscriptions(dashboard_id: str, schedule_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Subscription] List schedule subscriptions. - + :param dashboard_id: str UUID identifying the dashboard to which the subscriptions belong. :param schedule_id: str @@ -170,16 +170,16 @@ :param page_token: str (optional) A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent page. - + :returns: Iterator over :class:`Subscription`
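As a rough sketch (assuming the `dashboards` service module exposes `Schedule` and `CronSchedule` as below; the dashboard ID is a placeholder), a daily refresh schedule could be created and then listed:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import dashboards

    w = WorkspaceClient()

    # Create a schedule that refreshes the dashboard every day at 08:00 UTC.
    schedule = w.lakeview.create_schedule(
        dashboard_id="<dashboard-id>",
        schedule=dashboards.Schedule(
            cron_schedule=dashboards.CronSchedule(
                quartz_cron_expression="0 0 8 * * ?",
                timezone_id="UTC",
            )
        ),
    )

    for s in w.lakeview.list_schedules(dashboard_id="<dashboard-id>"):
        print(s.schedule_id)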
.. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard Migrate dashboard. - + Migrates a classic SQL dashboard to Lakeview. - + :param source_dashboard_id: str UUID of the dashboard to be migrated. :param display_name: str (optional) @@ -189,16 +189,16 @@ :param update_parameter_syntax: bool (optional) Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax (:param) when converting datasets in the dashboard. - + :returns: :class:`Dashboard`
.. py:method:: publish(dashboard_id: str [, embed_credentials: Optional[bool], warehouse_id: Optional[str]]) -> PublishedDashboard Publish dashboard. - + Publish the current draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard to be published. :param embed_credentials: bool (optional) @@ -206,56 +206,56 @@ embedded credentials will be used to execute the published dashboard's queries. :param warehouse_id: str (optional) The ID of the warehouse that can be used to override the warehouse which was set in the draft. - + :returns: :class:`PublishedDashboard`
.. py:method:: trash(dashboard_id: str) Trash dashboard. - + Trash a dashboard. - + :param dashboard_id: str UUID identifying the dashboard. - - + +
.. py:method:: unpublish(dashboard_id: str) Unpublish dashboard. - + Unpublish the dashboard. - + :param dashboard_id: str UUID identifying the published dashboard. - - + +
.. py:method:: update(dashboard_id: str, dashboard: Dashboard) -> Dashboard Update dashboard. - + Update a draft dashboard. - + :param dashboard_id: str UUID identifying the dashboard. :param dashboard: :class:`Dashboard` - + :returns: :class:`Dashboard`
.. py:method:: update_schedule(dashboard_id: str, schedule_id: str, schedule: Schedule) -> Schedule Update dashboard schedule. - + :param dashboard_id: str UUID identifying the dashboard to which the schedule belongs. :param schedule_id: str UUID identifying the schedule. :param schedule: :class:`Schedule` - + :returns: :class:`Schedule`
\ No newline at end of file
diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst index ea9efe244..ce7cc9248 100644 --- a/docs/workspace/dashboards/lakeview_embedded.rst +++ b/docs/workspace/dashboards/lakeview_embedded.rst @@ -9,20 +9,20 @@ .. py:method:: get_published_dashboard_token_info(dashboard_id: str [, external_value: Optional[str], external_viewer_id: Optional[str]]) -> GetPublishedDashboardTokenInfoResponse Read the information of a published dashboard required to mint an OAuth token. - + Get the authorization details and scopes of a published dashboard that are required to mint an OAuth token. The `authorization_details` can be enriched to apply additional restrictions. - + Example: Adding the following `authorization_details` object downscopes the viewer permission to a specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], object_type: "TABLE", object_full_path: "main.default.testdata" } ``` - + :param dashboard_id: str UUID identifying the published dashboard. :param external_value: str (optional) Provided external value to be included in the custom claim. :param external_viewer_id: str (optional) Provided external viewer id to be included in the custom claim. - + :returns: :class:`GetPublishedDashboardTokenInfoResponse`
\ No newline at end of file
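A minimal sketch of fetching the token information for a published dashboard, assuming the response exposes an `authorization_details` field (the dashboard ID and viewer ID are placeholders):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Retrieve the authorization details needed to mint a downscoped OAuth token.
    info = w.lakeview_embedded.get_published_dashboard_token_info(
        dashboard_id="<dashboard-id>",
        external_viewer_id="viewer-42",
    )
    print(info.authorization_details)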
diff --git a/docs/workspace/database/database.rst b/docs/workspace/database/database.rst index d4a5d0864..46a9dccab 100644 --- a/docs/workspace/database/database.rst +++ b/docs/workspace/database/database.rst @@ -9,55 +9,55 @@ .. py:method:: create_database_catalog(catalog: DatabaseCatalog) -> DatabaseCatalog Create a Database Catalog. - + :param catalog: :class:`DatabaseCatalog` - + :returns: :class:`DatabaseCatalog`
.. py:method:: create_database_instance(database_instance: DatabaseInstance) -> DatabaseInstance Create a Database Instance. - + :param database_instance: :class:`DatabaseInstance` A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. - + :returns: :class:`DatabaseInstance`
.. py:method:: create_database_table(table: DatabaseTable) -> DatabaseTable Create a Database Table. - + :param table: :class:`DatabaseTable` Next field marker: 13 - + :returns: :class:`DatabaseTable`
.. py:method:: create_synced_database_table(synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable Create a Synced Database Table. - + :param synced_table: :class:`SyncedDatabaseTable` Next field marker: 12 - + :returns: :class:`SyncedDatabaseTable`
.. py:method:: delete_database_catalog(name: str) Delete a Database Catalog. - + :param name: str - - + +
.. py:method:: delete_database_instance(name: str [, force: Optional[bool], purge: Optional[bool]]) Delete a Database Instance. - + :param name: str Name of the instance to delete. :param force: bool (optional) @@ -68,108 +68,108 @@ deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by calling the undelete API for a limited time. If true, the database instance is hard deleted and cannot be undeleted. - - + +
.. py:method:: delete_database_table(name: str) Delete a Database Table. - + :param name: str - - + +
.. py:method:: delete_synced_database_table(name: str) Delete a Synced Database Table. - + :param name: str - - + +
.. py:method:: find_database_instance_by_uid( [, uid: Optional[str]]) -> DatabaseInstance Find a Database Instance by uid. - + :param uid: str (optional) UID of the instance to get. - + :returns: :class:`DatabaseInstance`
.. py:method:: generate_database_credential( [, instance_names: Optional[List[str]], request_id: Optional[str]]) -> DatabaseCredential Generates a credential that can be used to access database instances. - + :param instance_names: List[str] (optional) Instances to which the token will be scoped. :param request_id: str (optional) - + :returns: :class:`DatabaseCredential`
.. py:method:: get_database_catalog(name: str) -> DatabaseCatalog Get a Database Catalog. - + :param name: str - + :returns: :class:`DatabaseCatalog`
.. py:method:: get_database_instance(name: str) -> DatabaseInstance Get a Database Instance. - + :param name: str Name of the instance to get. - + :returns: :class:`DatabaseInstance`
.. py:method:: get_database_table(name: str) -> DatabaseTable Get a Database Table. - + :param name: str - + :returns: :class:`DatabaseTable`
.. py:method:: get_synced_database_table(name: str) -> SyncedDatabaseTable Get a Synced Database Table. - + :param name: str - + :returns: :class:`SyncedDatabaseTable`
.. py:method:: list_database_instances( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DatabaseInstance] List Database Instances. - + :param page_size: int (optional) Upper bound for items returned. :param page_token: str (optional) Pagination token to go to the next page of Database Instances. Requests first page if absent. - + :returns: Iterator over :class:`DatabaseInstance`
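As a rough sketch, instances can be enumerated and a scoped credential requested for one of them (the instance name is a placeholder):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Page through all Database Instances in the workspace.
    for instance in w.database.list_database_instances():
        print(instance.name)

    # Mint a short-lived credential scoped to a single instance.
    cred = w.database.generate_database_credential(instance_names=["<instance-name>"])
    print(cred)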
.. py:method:: update_database_instance(name: str, database_instance: DatabaseInstance, update_mask: str) -> DatabaseInstance Update a Database Instance. - + :param name: str The name of the instance. This is the unique identifier for the instance. :param database_instance: :class:`DatabaseInstance` A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage. :param update_mask: str The list of fields to update. - + :returns: :class:`DatabaseInstance`
\ No newline at end of file
diff --git a/docs/workspace/files/dbfs.rst b/docs/workspace/files/dbfs.rst index e6b31273a..3f214908d 100644 --- a/docs/workspace/files/dbfs.rst +++ b/docs/workspace/files/dbfs.rst @@ -10,31 +10,31 @@ .. py:method:: add_block(handle: int, data: str) Append data block. - + Appends a block of data to the stream specified by the input handle. If the handle does not exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. - + If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``. - + :param handle: int The handle on an open stream. :param data: str The base64-encoded data to append to the stream. This has a limit of 1 MB. - - + +
.. py:method:: close(handle: int) Close the stream. - + Closes the stream specified by the input handle. If the handle does not exist, this call throws an exception with ``RESOURCE_DOES_NOT_EXIST``. - + :param handle: int The handle on an open stream. - - + +
.. py:method:: copy(src: str, dst: str [, recursive: bool = False, overwrite: bool = False]) @@ -44,21 +44,21 @@ .. py:method:: create(path: str [, overwrite: Optional[bool]]) -> CreateResponse Open a stream. - + Opens a stream to write to a file and returns a handle to this stream. There is a 10-minute idle timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``. - + A typical workflow for file upload would be: - + 1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with the handle you have. 3. Issue a ``close`` call with the handle you have. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing files. - + :returns: :class:`CreateResponse`
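The three-step streaming workflow above maps directly onto the SDK; a minimal sketch (the DBFS path is a placeholder):

.. code-block::

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # 1. Open a stream and keep the handle.
    handle = w.dbfs.create(path="/tmp/example.txt", overwrite=True).handle

    # 2. Append base64-encoded blocks of at most 1 MB each.
    for chunk in [b"hello ", b"world"]:
        w.dbfs.add_block(handle=handle, data=base64.b64encode(chunk).decode())

    # 3. Close the stream to finish the upload.
    w.dbfs.close(handle=handle)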
@@ -98,13 +98,13 @@ .. py:method:: get_status(path: str) -> FileInfo Get the information of a file or directory. - + Gets the file information for a file or directory. If the file or directory does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The path of the file or directory. The path should be the absolute DBFS path. - + :returns: :class:`FileInfo` @@ -130,18 +130,18 @@ .. py:method:: move(source_path: str, destination_path: str) Move a file. - + Moves a file from one location to another location within DBFS. If the source file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source path is a directory, this call always recursively moves all files. - + :param source_path: str The source path of the file or directory. The path should be the absolute DBFS path. :param destination_path: str The destination path of the file or directory. The path should be the absolute DBFS path. - - + +
.. py:method:: move_(src: str, dst: str [, recursive: bool = False, overwrite: bool = False]) @@ -154,40 +154,40 @@ .. py:method:: put(path: str [, contents: Optional[str], overwrite: Optional[bool]]) Upload a file. - + Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but can also be used as a convenient single call for data upload. - + Alternatively, you can pass the contents as a base64-encoded string. - + The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded. - + If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create, :method:dbfs/addBlock, :method:dbfs/close. - + :param path: str The path of the new file. The path should be the absolute DBFS path. :param contents: str (optional) This parameter might be absent, and instead a posted file will be used. :param overwrite: bool (optional) The flag that specifies whether to overwrite existing files. - - + +
.. py:method:: read(path: str [, length: Optional[int], offset: Optional[int]]) -> ReadResponse Get the contents of a file. - + Returns the contents of a file. If the file does not exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`. - + If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of file. - + :param path: str The path of the file to read. The path should be the absolute DBFS path. :param length: int (optional) @@ -195,7 +195,7 @@ of 0.5 MB. :param offset: int (optional) The offset to read from in bytes. - + :returns: :class:`ReadResponse`
diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst index a6f1d938a..f3e4ae304 100644 --- a/docs/workspace/files/files.rst +++ b/docs/workspace/files/files.rst @@ -7,124 +7,124 @@ The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI. The API makes working with file content as raw bytes easier and more efficient. - + The API supports [Unity Catalog volumes], where files and directories to operate on are specified using their volume URI path, which follows the format /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>. - + The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT, and DELETE to manage files and directories specified using their URI path. The path is always absolute. - + Some Files API client features are currently experimental. To enable them, set `enable_experimental_files_api_client = True` in your configuration profile or use the environment variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`. - + Use of the Files API may incur Databricks data transfer charges. - + [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
.. py:method:: create_directory(directory_path: str) Create a directory. - + Creates an empty directory. If necessary, also creates any parent directories of the new, empty directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success response; this method is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + +
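A short sketch of the idempotent directory calls described above (the volume path is a placeholder):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    volume_dir = "/Volumes/main/default/my_volume/raw"

    # Behaves like `mkdir -p`: parents are created, and repeating the call succeeds.
    w.files.create_directory(directory_path=volume_dir)
    w.files.create_directory(directory_path=volume_dir)

    # Raises an error if the directory is missing or inaccessible; otherwise no response body.
    w.files.get_directory_metadata(directory_path=volume_dir)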
.. py:method:: delete(file_path: str) Delete a file. - + Deletes a file. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. - - + +
.. py:method:: delete_directory(directory_path: str) Delete a directory. - + Deletes an empty directory. - + To delete a non-empty directory, first delete all of its contents. This can be done by listing the directory contents and deleting each file and subdirectory recursively. - + :param directory_path: str The absolute path of a directory. - - + +
.. py:method:: download(file_path: str) -> DownloadResponse Download a file. - + Downloads a file. The file contents are the response body. This is a standard HTTP file download, not a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers. - + :param file_path: str The absolute path of the file. - + :returns: :class:`DownloadResponse`
.. py:method:: get_directory_metadata(directory_path: str) Get directory metadata. - + Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response body. - + This method is useful to check if a directory exists and the caller has access to it. - + If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory if it does not exist, and is idempotent (it will succeed if the directory already exists). - + :param directory_path: str The absolute path of a directory. - - + +
.. py:method:: get_metadata(file_path: str) -> GetMetadataResponse Get file metadata. - + Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body. - + :param file_path: str The absolute path of the file. - + :returns: :class:`GetMetadataResponse`
.. py:method:: list_directory_contents(directory_path: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DirectoryEntry] List directory contents. - + Returns the contents of a directory. If there is no directory at the specified path, the API returns an HTTP 404 error. - + :param directory_path: str The absolute path of a directory. :param page_size: int (optional) The maximum number of directory entries to return. The response may contain fewer entries. If the response contains a `next_page_token`, there may be more entries, even if fewer than `page_size` entries are in the response. - + We recommend not setting this value unless you intentionally want to list less than the complete directory contents. - + If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values above 1000 will be coerced to 1000. :param page_token: str (optional) @@ -134,25 +134,25 @@ request. To list all of the entries in a directory, it is necessary to continue requesting pages of entries until the response contains no `next_page_token`. Note that the number of entries returned must not be used to determine when the listing is complete. - + :returns: Iterator over :class:`DirectoryEntry`
.. py:method:: upload(file_path: str, contents: BinaryIO [, overwrite: Optional[bool]]) Upload a file. - + Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an octet stream); do not encode or otherwise modify the bytes before sending. The contents of the resulting file will be exactly the bytes sent in the request body. If the request is successful, there is no response body. - + :param file_path: str The absolute path of the file. :param contents: BinaryIO :param overwrite: bool (optional) If true or unspecified, an existing file will be overwritten. If false, an error will be returned if the path points to an existing file. - - + +
\ No newline at end of file
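A minimal round-trip sketch for the upload and download endpoints (the volume path is a placeholder):

.. code-block::

    import io

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    path = "/Volumes/main/default/my_volume/hello.txt"

    # Upload raw bytes; the resulting file is exactly the bytes sent.
    w.files.upload(file_path=path, contents=io.BytesIO(b"hello world"), overwrite=True)

    # Download returns a response whose contents are a readable binary stream.
    data = w.files.download(file_path=path).contents.read()
    assert data == b"hello world"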
diff --git a/docs/workspace/iam/access_control.rst b/docs/workspace/iam/access_control.rst index a5f1feeda..930af105a 100644 --- a/docs/workspace/iam/access_control.rst +++ b/docs/workspace/iam/access_control.rst @@ -9,7 +9,7 @@ .. py:method:: check_policy(actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity [, resource_info: Optional[ResourceInfo]]) -> CheckPolicyResponse Check access policy to a resource. - + :param actor: :class:`Actor` :param permission: str :param resource: str @@ -18,6 +18,6 @@ :param consistency_token: :class:`ConsistencyToken` :param authz_identity: :class:`RequestAuthzIdentity` :param resource_info: :class:`ResourceInfo` (optional) - + :returns: :class:`CheckPolicyResponse`
\ No newline at end of file
diff --git a/docs/workspace/iam/account_access_control_proxy.rst b/docs/workspace/iam/account_access_control_proxy.rst index 1b92995c6..66c396be5 100644 --- a/docs/workspace/iam/account_access_control_proxy.rst +++ b/docs/workspace/iam/account_access_control_proxy.rst @@ -11,31 +11,31 @@ .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse Get assignable roles for a resource. - + Gets all the roles that can be granted on an account-level resource. A role is grantable if the rule set on the resource can contain an access rule of the role. - + :param resource: str The resource name for which assignable roles will be listed. - + Examples | Summary :--- | :--- `resource=accounts/` | A resource name for the account. `resource=accounts//groups/` | A resource name for the group. `resource=accounts//servicePrincipals/` | A resource name for the service principal. - + :returns: :class:`GetAssignableRolesForResourceResponse`
.. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse Get a rule set. - + Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on that resource. Currently only a default rule set for each resource is supported. - + :param name: str The ruleset name associated with the request. - + Examples | Summary :--- | :--- `name=accounts//ruleSets/default` | A name for a rule set on the account. `name=accounts//groups//ruleSets/default` | A name for a rule set on the group. @@ -48,24 +48,24 @@ modify -> write pattern to perform rule set updates in order to avoid race conditions: that is, get an etag from a GET rule set request, and pass it with the PUT update request to identify the rule set version you are updating. - + Examples | Summary :--- | :--- `etag=` | An empty etag can only be used in GET to indicate no freshness requirements. `etag=RENUAAABhSweA4NvVmmUYdiU717H3Tgy0UJdor3gE4a+mq/oj9NjAf8ZsQ==` | An etag encoding a specific version of the rule set to get or to be updated. - + :returns: :class:`RuleSetResponse`
.. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse Update a rule set. - + Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between concurrent updates. - + :param name: str Name of the rule set. :param rule_set: :class:`RuleSetUpdateRequest` - + :returns: :class:`RuleSetResponse`
\ No newline at end of file
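A sketch of the read -> modify -> write pattern with etags (the rule set name is a placeholder, and the rules are passed back unchanged for brevity):

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    name = "accounts/<account-id>/groups/<group-id>/ruleSets/default"

    # Read: an empty etag means no freshness requirement on the GET.
    current = w.account_access_control_proxy.get_rule_set(name=name, etag="")

    # Modify + write: pass the etag back so a concurrent update is detected.
    w.account_access_control_proxy.update_rule_set(
        name=name,
        rule_set=iam.RuleSetUpdateRequest(
            name=name,
            etag=current.etag,
            grant_rules=current.grant_rules,
        ),
    )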
diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst index cff427451..bf739025c 100644 --- a/docs/workspace/iam/current_user.rst +++ b/docs/workspace/iam/current_user.rst @@ -20,8 +20,8 @@ me = w.current_user.me() Get current user info. - + Get details about the current method caller's identity. - + :returns: :class:`User`
\ No newline at end of file
diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst index c187eb9c3..fe0187cd6 100644 --- a/docs/workspace/iam/groups.rst +++ b/docs/workspace/iam/groups.rst @@ -6,7 +6,7 @@ Groups simplify identity management, making it easier to assign access to Databricks workspaces, data, and other securable objects. - + It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks workspace identities can be assigned as members of groups, and members inherit permissions that are assigned to their group. @@ -30,15 +30,15 @@ w.groups.delete(id=group.id) Create a new group. - + Creates a group in the Databricks workspace with a unique name, using the supplied group details. - + :param display_name: str (optional) String that represents a human-readable group name :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -51,7 +51,7 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - + :returns: :class:`Group` @@ -76,13 +76,13 @@ w.groups.delete(id=group.id) Delete a group. - + Deletes a group from the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. - - + +
.. py:method:: get(id: str) -> Group @@ -106,21 +106,21 @@ w.groups.delete(id=group.id) Get group details. - + Gets the information for a specific group in the Databricks workspace. - + :param id: str Unique ID for a group in the Databricks workspace. - + :returns: :class:`Group`
.. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group] List group details. - + Gets all details of the groups associated with the Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -132,7 +132,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -140,7 +140,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`Group` @@ -186,24 +186,24 @@ w.groups.delete(id=group.id) Update group details. - + Partially updates the details of a group.
- + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + + .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) Replace a group. - + Updates the details of a group by replacing the entire group entity. - + :param id: str Databricks group ID :param display_name: str (optional) @@ -211,7 +211,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the group. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -222,6 +222,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`GroupSchema`] (optional) The schema of the group. - - + + \ No newline at end of file diff --git a/docs/workspace/iam/permission_migration.rst b/docs/workspace/iam/permission_migration.rst index 8eef6e0e1..248b1b80d 100644 --- a/docs/workspace/iam/permission_migration.rst +++ b/docs/workspace/iam/permission_migration.rst @@ -9,7 +9,7 @@ .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> MigratePermissionsResponse Migrate Permissions. - + :param workspace_id: int WorkspaceId of the associated workspace where the permission migration will occur. :param from_workspace_group_name: str @@ -18,6 +18,6 @@ The name of the account group that permissions will be migrated to. :param size: int (optional) The maximum number of permissions that will be migrated. - + :returns: :class:`MigratePermissionsResponse` \ No newline at end of file diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 7d8b7eb26..0c3ef26fc 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -24,7 +24,7 @@ the required permissions for specific actions or abilities and other important information, see [Access Control]. Note that to manage access control on service principals, use **[Account Access Control Proxy](:service:accountaccesscontrolproxy)**. - + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html .. py:method:: get(request_object_type: str, request_object_id: str) -> ObjectPermissions @@ -47,17 +47,17 @@ _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) Get object permissions. - + Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. 
- + :returns: :class:`ObjectPermissions` @@ -81,15 +81,15 @@ levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) Get object permission levels. - + Gets the permission levels that a user can have on an object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str - + :returns: :class:`GetPermissionLevelsResponse` @@ -128,11 +128,11 @@ w.groups.delete(id=group.id) Set object permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -140,17 +140,17 @@ :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` .. py:method:: update(request_object_type: str, request_object_id: str [, access_control_list: Optional[List[AccessControlRequest]]]) -> ObjectPermissions Update object permissions. - + Updates the permissions on an object. Objects can inherit permissions from their parent objects or root object. - + :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, @@ -158,6 +158,6 @@ :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) - + :returns: :class:`ObjectPermissions` \ No newline at end of file diff --git a/docs/workspace/iam/service_principals.rst b/docs/workspace/iam/service_principals.rst index 29ee1ba3f..74a498b00 100644 --- a/docs/workspace/iam/service_principals.rst +++ b/docs/workspace/iam/service_principals.rst @@ -35,9 +35,9 @@ w.service_principals.delete(id=spn.id) Create a service principal. - + Creates a new service principal in the Databricks workspace. - + :param active: bool (optional) If this user is active :param application_id: str (optional) @@ -47,7 +47,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -57,20 +57,20 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - + :returns: :class:`ServicePrincipal` .. py:method:: delete(id: str) Delete a service principal. - + Delete a single service principal in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. - - + + .. 
py:method:: get(id: str) -> ServicePrincipal @@ -94,12 +94,12 @@ w.service_principals.delete(id=created.id) Get service principal details. - + Gets the details for a single service principal defined in the Databricks workspace. - + :param id: str Unique ID for a service principal in the Databricks workspace. - + :returns: :class:`ServicePrincipal` @@ -118,9 +118,9 @@ all = w.service_principals.list(iam.ListServicePrincipalsRequest()) List service principals. - + Gets the set of service principals associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response. :param count: int (optional) @@ -132,7 +132,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. @@ -140,7 +140,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`ServicePrincipal` @@ -172,16 +172,16 @@ w.service_principals.delete(id=created.id) Update service principal details. - + Partially updates the details of a single service principal in the Databricks workspace. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + +
.. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) @@ -210,11 +210,11 @@ w.service_principals.delete(id=created.id) Replace service principal. - + Updates the details of a single service principal. - + This action replaces the existing service principal with the same name. - + :param id: str Databricks service principal ID. :param active: bool (optional) @@ -226,7 +226,7 @@ :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the service principal. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) :param groups: List[:class:`ComplexValue`] (optional) @@ -234,6 +234,6 @@ Corresponds to AWS instance profile/arn role. :param schemas: List[:class:`ServicePrincipalSchema`] (optional) The schema of the List response. - - + +
\ No newline at end of file
diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst index 2956c4c73..76837ac54 100644 --- a/docs/workspace/iam/users.rst +++ b/docs/workspace/iam/users.rst @@ -5,7 +5,7 @@ .. py:class:: UsersAPI User identities recognized by Databricks and represented by email addresses. - + Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks workspace.
SCIM streamlines onboarding a new employee or team by using your identity provider to create users and groups in the Databricks workspace and give them the proper level of @@ -33,23 +33,23 @@ ) Create a new user. - + Creates a new user in the Databricks workspace. This new user will also be added to the Databricks account. - + :param active: bool (optional) If this user is active :param display_name: str (optional) String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -63,7 +63,7 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - + :returns: :class:`User` @@ -85,14 +85,14 @@ w.users.delete(id=other_owner.id) Delete a user. - + Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the user. - + :param id: str Unique ID for a user in the Databricks workspace. - - + +
.. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User @@ -116,9 +116,9 @@ fetch = w.users.get(id=user.id) Get user details. - + Gets information for a specific user in the Databricks workspace. - + :param id: str Unique ID for a user in the Databricks workspace. :param attributes: str (optional) @@ -132,7 +132,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -141,25 +141,25 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: :class:`User`
.. py:method:: get_permission_levels() -> GetPasswordPermissionLevelsResponse Get password permission levels. - + Gets the permission levels that a user can have on an object. - + :returns: :class:`GetPasswordPermissionLevelsResponse`
.. py:method:: get_permissions() -> PasswordPermissions Get password permissions. - + Gets the permissions of all passwords. Passwords can inherit permissions from their root object. - + :returns: :class:`PasswordPermissions` @@ -182,9 +182,9 @@ ) List users. - + Gets details for all the users associated with a Databricks workspace. - + :param attributes: str (optional) Comma-separated list of attributes to return in response.
:param count: int (optional) @@ -196,7 +196,7 @@ contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently only support simple expressions. - + [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2 :param sort_by: str (optional) Attribute to sort the results. Multi-part paths are supported. For example, `userName`, @@ -205,7 +205,7 @@ The order to sort the results. :param start_index: int (optional) Specifies the index of the first result. First item is number 1. - + :returns: Iterator over :class:`User` @@ -235,27 +235,27 @@ ) Update user details. - + Partially updates a user resource by applying the supplied operations on specific user attributes. - + :param id: str Unique ID in the Databricks workspace. :param operations: List[:class:`Patch`] (optional) :param schemas: List[:class:`PatchSchema`] (optional) The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"]. - - + +
.. py:method:: set_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions Set password permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions` @@ -280,9 +280,9 @@ w.users.update(id=user.id, user_name=user.user_name, active=True) Replace a user. - + Replaces a user's information with the data supplied in the request. - + :param id: str Databricks user ID. :param active: bool (optional) @@ -291,13 +291,13 @@ String that represents a concatenation of given and family names. For example `John Smith`. This field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use Account SCIM APIs to update `displayName`. - + [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation :param emails: List[:class:`ComplexValue`] (optional) All the emails associated with the Databricks user. :param entitlements: List[:class:`ComplexValue`] (optional) Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values. - + [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements :param external_id: str (optional) External ID is not currently supported. It is reserved for future use. @@ -309,17 +309,17 @@ The schema of the user. :param user_name: str (optional) Email address of the Databricks user. - - + +
.. py:method:: update_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions Update password permissions. - + Updates the permissions on all passwords. Passwords can inherit permissions from their root object. - + :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional) - + :returns: :class:`PasswordPermissions`
\ No newline at end of file
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index a59f4155c..89ea5e2ae 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -5,17 +5,17 @@ .. py:class:: JobsExt The Jobs API allows you to create, edit, and delete jobs.
- + You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use scheduling system. You can implement job tasks using notebooks, JARs, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java applications. - + You should never hard-code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs. - + [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets @@ -60,17 +60,17 @@ w.jobs.delete(job_id=created_job.job_id) Cancel all runs of a job. - + Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. - + :param all_queued_runs: bool (optional) Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in the workspace are canceled. :param job_id: int (optional) The canonical identifier of the job to cancel all runs of. - - + +
.. py:method:: cancel_run(run_id: int) -> Wait[Run] @@ -115,13 +115,13 @@ w.jobs.delete(job_id=created_job.job_id) Cancel a run. - + Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when this request completes. - + :param run_id: int This field is required. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -168,9 +168,9 @@ w.jobs.delete(job_id=created_job.job_id) Create a new job. - + Create a new job. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param budget_policy_id: str (optional) @@ -186,7 +186,7 @@ An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. - + * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in an editable state and can be modified. :param email_notifications: :class:`JobEmailNotifications` (optional) @@ -203,10 +203,10 @@ :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -233,7 +233,7 @@ :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. This field determines the level of compute performance or cost-efficiency for the run. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -242,7 +242,7 @@ :param run_as: :class:`JobRunAs` (optional) Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job. - + Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when @@ -265,32 +265,32 @@ `runNow`. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when runs of this job begin or complete. - + :returns: :class:`CreateResponse`
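A minimal single-task sketch of the create call (the notebook path and cluster ID are placeholders):

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Create a one-task job that runs a notebook on an existing cluster.
    created = w.jobs.create(
        name="example-job",
        tasks=[
            jobs.Task(
                task_key="main",
                notebook_task=jobs.NotebookTask(notebook_path="/Users/<me>/my-notebook"),
                existing_cluster_id="<cluster-id>",
            )
        ],
    )
    print(created.job_id)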
.. py:method:: delete(job_id: int) Delete a job. - + Deletes a job. - + :param job_id: int The canonical identifier of the job to delete. This field is required. - - + +
.. py:method:: delete_run(run_id: int) Delete a job run. - + Deletes a non-active run. Returns an error if the run is active. - + :param run_id: int ID of the run to delete. - - + +
.. py:method:: export_run(run_id: int [, views_to_export: Optional[ViewsToExport]]) -> ExportRunOutput @@ -335,14 +335,14 @@ w.jobs.delete(job_id=created_job.job_id) Export and retrieve a job run. - + Export and retrieve the job run task. - + :param run_id: int The canonical identifier for the run. This field is required. :param views_to_export: :class:`ViewsToExport` (optional) Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE. - + :returns: :class:`ExportRunOutput` @@ -402,24 +402,24 @@ .. py:method:: get_permission_levels(job_id: str) -> GetJobPermissionLevelsResponse Get job permission levels. - + Gets the permission levels that a user can have on an object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`GetJobPermissionLevelsResponse`
.. py:method:: get_permissions(job_id: str) -> JobPermissions Get job permissions. - + Gets the permissions of a job. Jobs can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. - + :returns: :class:`JobPermissions` @@ -515,19 +515,19 @@ w.jobs.delete_run(run_id=run.run_id) Get the output for a single run. - + Retrieve the output and metadata of a single task run. When a notebook task returns a value through the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks restricts this API to returning the first 5 MB of the output. To return a larger result, you can store job results in a cloud storage service. - + This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you want to reference them beyond 60 days, you must save old run results before they expire. - + :param run_id: int The canonical identifier for the run. - + :returns: :class:`RunOutput`
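A short sketch of fetching the value a notebook returned via `dbutils.notebook.exit()` for a single-task job run (the job ID is a placeholder):

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Trigger the job and wait for it to finish.
    run = w.jobs.run_now(job_id=123).result()

    # For a single-task run, read the output of its only task.
    output = w.jobs.get_run_output(run_id=run.tasks[0].run_id)
    print(output.notebook_output)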
If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` @@ -745,23 +745,23 @@ A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param performance_target: :class:`PerformanceTarget` (optional) The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -773,15 +773,15 @@ The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param rerun_all_failed_tasks: bool (optional) If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used. @@ -796,20 +796,20 @@ as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. 
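For a concrete picture of the repair flow described above, a minimal sketch follows (the run ID is hypothetical, and `repair_run` returns a long-running-operation waiter per the signature above)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Re-run only the failed tasks of a terminated run; the repaired tasks
    # are recorded in the history of the original job run.
    repaired = w.jobs.repair_run(
        run_id=1234567890,            # hypothetical run ID; the run must not be in progress
        rerun_all_failed_tasks=True,  # mutually exclusive with rerun_tasks
    ).result()                        # block until the repaired run terminates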
- + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -865,19 +865,19 @@ w.jobs.delete(job_id=created_job.job_id) Update all job settings (reset). - + Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update job settings partially. - + :param job_id: int The canonical identifier of the job to reset. This field is required. :param new_settings: :class:`JobSettings` The new settings of the job. These settings completely replace the old settings. - + Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. - - + + .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run] @@ -920,9 +920,9 @@ w.jobs.delete(job_id=created_job.job_id) Trigger a new job run. - + Run a job and return the `run_id` of the triggered run. - + :param job_id: int The ID of the job to be executed :param dbt_commands: List[str] (optional) @@ -932,14 +932,14 @@ An optional token to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. - + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param jar_params: List[str] (optional) A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. @@ -947,9 +947,9 @@ task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified in conjunction with notebook_params. The JSON representation of this field (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param job_parameters: Dict[str,str] (optional) Job-level parameters used in the run. for example `"param": "overriding_val"` @@ -957,16 +957,16 @@ A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the [dbutils.widgets.get] function. - + If not specified upon `run-now`, the triggered run uses the job’s base parameters. - + notebook_params cannot be specified in conjunction with jar_params. - + Use [Task parameter variables] to set parameters containing information about job runs. - + The JSON representation of this field (for example `{"notebook_params":{"name":"john doe","age":"35"}}`) cannot exceed 10,000 bytes. 
- + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html :param only: List[str] (optional) @@ -976,7 +976,7 @@ The performance mode on a serverless job. The performance target determines the level of compute performance or cost-efficiency for the run. This field overrides the performance target defined on the job level. - + * `STANDARD`: Enables cost-efficient execution of serverless workloads. * `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and optimized cluster performance. @@ -988,15 +988,15 @@ The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs. - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param queue: :class:`QueueSettings` (optional) The queue settings of the run. @@ -1006,20 +1006,20 @@ as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified in job setting. The JSON representation of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - + Use [Task parameter variables] to set parameters containing information about job runs - + Important - + These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis. - + [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables :param sql_params: Dict[str,str] (optional) A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe", "age": "35"}`. The SQL alert task does not support custom parameters. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -1031,14 +1031,14 @@ .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions Set job permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` @@ -1078,11 +1078,11 @@ w.jobs.delete_run(run_id=run.run_id) Create and trigger a one-time run. - + Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. - + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. 
:param budget_policy_id: str (optional) @@ -1095,10 +1095,10 @@ :param git_source: :class:`GitSource` (optional) An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - + If `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - + Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job. :param health: :class:`JobsHealthRules` (optional) @@ -1107,14 +1107,14 @@ An optional token that can be used to guarantee the idempotency of job run requests. If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. - + If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. - + This token must have at most 64 characters. - + For more information, see [How to ensure idempotency for jobs]. - + [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html :param notification_settings: :class:`JobNotificationSettings` (optional) Optional notification settings that are used when sending notifications to each of the @@ -1131,7 +1131,7 @@ An optional timeout applied to each run of this job. A value of `0` means no timeout. :param webhook_notifications: :class:`WebhookNotifications` (optional) A collection of system notification IDs to notify when the run begins or completes. - + :returns: Long-running operation waiter for :class:`Run`. See :method:wait_get_run_job_terminated_or_skipped for more details. @@ -1185,10 +1185,10 @@ w.jobs.delete(job_id=created_job.job_id) Update job settings partially. - + Add, update, or remove specific settings of an existing job. Use the [_Reset_ endpoint](:method:jobs/reset) to overwrite all job settings. - + :param job_id: int The canonical identifier of the job to update. This field is required. :param fields_to_remove: List[str] (optional) @@ -1196,29 +1196,29 @@ tasks and job clusters (`tasks/task_1`). This field is optional. :param new_settings: :class:`JobSettings` (optional) The new settings for the job. - + Top-level fields specified in `new_settings` are completely replaced, except for arrays which are merged. That is, new and existing entries are completely replaced based on the respective key fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept. - + Partially updating nested fields is not supported. - + Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other fields are applied to future runs only. - - + + .. py:method:: update_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions Update job permissions. - + Updates the permissions on a job. Jobs can inherit permissions from their root object. - + :param job_id: str The job for which to get or manage permissions. 
:param access_control_list: List[:class:`JobAccessControlRequest`] (optional) - + :returns: :class:`JobPermissions` diff --git a/docs/workspace/jobs/policy_compliance_for_jobs.rst b/docs/workspace/jobs/policy_compliance_for_jobs.rst index 69f211552..b75a73eab 100644 --- a/docs/workspace/jobs/policy_compliance_for_jobs.rst +++ b/docs/workspace/jobs/policy_compliance_for_jobs.rst @@ -6,53 +6,53 @@ The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace. This API currently only supports compliance controls for cluster policies. - + A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last edited. The job is considered out of compliance if any of its clusters no longer comply with their updated policies. - + The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce compliance API allows you to update a job so that it becomes compliant with all of its policies. .. py:method:: enforce_compliance(job_id: int [, validate_only: Optional[bool]]) -> EnforcePolicyComplianceResponse Enforce job policy compliance. - + Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) are compliant with the current versions of their respective cluster policies. All-purpose clusters used in the job will not be updated. - + :param job_id: int The ID of the job you want to enforce policy compliance on. :param validate_only: bool (optional) If set, previews changes made to the job to comply with its policy, but does not update the job. - + :returns: :class:`EnforcePolicyComplianceResponse` .. py:method:: get_compliance(job_id: int) -> GetPolicyComplianceResponse Get job policy compliance. - + Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and some of its job clusters no longer comply with their updated policies. - + :param job_id: int The ID of the job whose compliance status you are requesting. - + :returns: :class:`GetPolicyComplianceResponse` .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[JobCompliance] List job policy compliance. - + Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of compliance if a cluster policy they use was updated after the job was last edited and its job clusters no longer comply with the updated policy. - + :param policy_id: str Canonical unique identifier for the cluster policy. :param page_size: int (optional) @@ -61,6 +61,6 @@ :param page_token: str (optional) A page token that can be used to navigate to the next page or previous page as returned by `next_page_token` or `prev_page_token`. - + :returns: Iterator over :class:`JobCompliance` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_fulfillments.rst b/docs/workspace/marketplace/consumer_fulfillments.rst index 4ea7a9c29..149ec6451 100644 --- a/docs/workspace/marketplace/consumer_fulfillments.rst +++ b/docs/workspace/marketplace/consumer_fulfillments.rst @@ -9,28 +9,28 @@ .. py:method:: get(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SharedDataObject] Get listing content metadata. - + Get a high level preview of the metadata of listing installable content. 
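As an illustrative sketch of paging through that preview (the listing ID is a placeholder; the method returns an iterator over :class:`SharedDataObject` per the signature above)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Iterate the content preview for a listing; the iterator handles
    # page_token bookkeeping internally.
    for obj in w.consumer_fulfillments.get(listing_id="<listing-id>"):
        print(obj)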
- + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`SharedDataObject` .. py:method:: list(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListingFulfillment] List all listing fulfillments. - + Get all listing fulfillments associated with a listing. A _fulfillment_ is a potential installation. Standard installations contain metadata about the attached share or git repo. Only one of these fields will be present. Personalized installations contain metadata about the attached share or git repo, as well as the Delta Sharing recipient type. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListingFulfillment` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_installations.rst b/docs/workspace/marketplace/consumer_installations.rst index 3cdb00a5a..a9539ad1f 100644 --- a/docs/workspace/marketplace/consumer_installations.rst +++ b/docs/workspace/marketplace/consumer_installations.rst @@ -9,9 +9,9 @@ .. py:method:: create(listing_id: str [, accepted_consumer_terms: Optional[ConsumerTerms], catalog_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType], repo_detail: Optional[RepoInstallation], share_name: Optional[str]]) -> Installation Install from a listing. - + Install payload associated with a Databricks Marketplace listing. - + :param listing_id: str :param accepted_consumer_terms: :class:`ConsumerTerms` (optional) :param catalog_name: str (optional) @@ -19,60 +19,60 @@ :param repo_detail: :class:`RepoInstallation` (optional) for git repo installations :param share_name: str (optional) - + :returns: :class:`Installation` .. py:method:: delete(listing_id: str, installation_id: str) Uninstall from a listing. - + Uninstall an installation associated with a Databricks Marketplace listing. - + :param listing_id: str :param installation_id: str - - + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail] List all installations. - + List all installations across all listings. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`InstallationDetail` .. py:method:: list_listing_installations(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail] List installations for a listing. - + List all installations for a particular listing. - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`InstallationDetail` .. py:method:: update(listing_id: str, installation_id: str, installation: InstallationDetail [, rotate_token: Optional[bool]]) -> UpdateInstallationResponse Update an installation. - + This update API modifies the fields defined in the installation table and interacts with external services according to the fields not included in the installation table: 1. the token will be rotated if the rotateToken flag is true; 2. the token will be forcibly rotated if the rotateToken flag is true and the tokenInfo field is empty - + :param listing_id: str :param installation_id: str :param installation: :class:`InstallationDetail` :param rotate_token: bool (optional) - + :returns: :class:`UpdateInstallationResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst index 242a8fce7..15ec3790e 100644 --- a/docs/workspace/marketplace/consumer_listings.rst +++ b/docs/workspace/marketplace/consumer_listings.rst @@ -10,31 +10,31 @@ .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetListingsResponse Get one batch of listings. One may specify up to 50 IDs per request. - + Batch get a published listing in the Databricks Marketplace that the consumer has access to. - + :param ids: List[str] (optional) - + :returns: :class:`BatchGetListingsResponse` .. py:method:: get(id: str) -> GetListingResponse Get listing. - + Get a published listing in the Databricks Marketplace that the consumer has access to. - + :param id: str - + :returns: :class:`GetListingResponse` .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing] List listings. - + List all published listings in the Databricks Marketplace that the consumer has access to. - + :param assets: List[:class:`AssetType`] (optional) Matches any of the following asset types :param categories: List[:class:`Category`] (optional) @@ -51,17 +51,17 @@ Matches any of the following provider ids :param tags: List[:class:`ListingTag`] (optional) Matches any of the following tags - + :returns: Iterator over :class:`Listing` .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing] Search listings. - + Search published listings in the Databricks Marketplace that the consumer has access to. This query supports a variety of different search parameters and performs fuzzy matching. - + :param query: str Fuzzy matches query :param assets: List[:class:`AssetType`] (optional) Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories :param is_free: bool (optional) Filters each listing based on whether it is free. :param is_private_exchange: bool (optional) Filters each listing based on whether it is a private exchange :param page_size: int (optional) @@ -74,6 +74,6 @@ :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - + :returns: Iterator over :class:`Listing` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_personalization_requests.rst b/docs/workspace/marketplace/consumer_personalization_requests.rst index 63ead75d3..8624871ca 100644 --- a/docs/workspace/marketplace/consumer_personalization_requests.rst +++ b/docs/workspace/marketplace/consumer_personalization_requests.rst @@ -9,9 +9,9 @@ .. py:method:: create(listing_id: str, intended_use: str, accepted_consumer_terms: ConsumerTerms [, comment: Optional[str], company: Optional[str], first_name: Optional[str], is_from_lighthouse: Optional[bool], last_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType]]) -> CreatePersonalizationRequestResponse Create a personalization request. - + Create a personalization request for a listing.
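A hedged sketch of the call (the listing ID and terms version are placeholders, and the `ConsumerTerms` import path and its `version` field are assumptions based on the signature above)::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.marketplace import ConsumerTerms  # assumed import path

    w = WorkspaceClient()

    # Ask the provider for personalized access to a listing.
    resp = w.consumer_personalization_requests.create(
        listing_id="<listing-id>",                             # placeholder
        intended_use="evaluating the dataset for internal analytics",
        accepted_consumer_terms=ConsumerTerms(version="1.0"),  # assumed field
    )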
- + :param listing_id: str :param intended_use: str :param accepted_consumer_terms: :class:`ConsumerTerms` @@ -21,30 +21,30 @@ :param is_from_lighthouse: bool (optional) :param last_name: str (optional) :param recipient_type: :class:`DeltaSharingRecipientType` (optional) - + :returns: :class:`CreatePersonalizationRequestResponse` .. py:method:: get(listing_id: str) -> GetPersonalizationRequestResponse Get the personalization request for a listing. - + Get the personalization request for a listing. Each consumer can make at *most* one personalization request for a listing. - + :param listing_id: str - + :returns: :class:`GetPersonalizationRequestResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest] List all personalization requests. - + List personalization requests for a consumer across all listings. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` \ No newline at end of file diff --git a/docs/workspace/marketplace/consumer_providers.rst b/docs/workspace/marketplace/consumer_providers.rst index 13cca357e..615bf0752 100644 --- a/docs/workspace/marketplace/consumer_providers.rst +++ b/docs/workspace/marketplace/consumer_providers.rst @@ -9,34 +9,34 @@ .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetProvidersResponse Get one batch of providers. One may specify up to 50 IDs per request. - + Batch get a provider in the Databricks Marketplace with at least one visible listing. - + :param ids: List[str] (optional) - + :returns: :class:`BatchGetProvidersResponse` .. py:method:: get(id: str) -> GetProviderResponse Get a provider. - + Get a provider in the Databricks Marketplace with at least one visible listing. - + :param id: str - + :returns: :class:`GetProviderResponse` .. py:method:: list( [, is_featured: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] List providers. - + List all providers in the Databricks Marketplace with at least one visible listing. - + :param is_featured: bool (optional) :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_exchange_filters.rst b/docs/workspace/marketplace/provider_exchange_filters.rst index ceca51e63..6c2254acd 100644 --- a/docs/workspace/marketplace/provider_exchange_filters.rst +++ b/docs/workspace/marketplace/provider_exchange_filters.rst @@ -9,46 +9,46 @@ .. py:method:: create(filter: ExchangeFilter) -> CreateExchangeFilterResponse Create a new exchange filter. - + Add an exchange filter. - + :param filter: :class:`ExchangeFilter` - + :returns: :class:`CreateExchangeFilterResponse` .. py:method:: delete(id: str) Delete an exchange filter. - + Delete an exchange filter - + :param id: str - - + + .. py:method:: list(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeFilter] List exchange filters. - + List exchange filter - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeFilter` .. py:method:: update(id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse Update exchange filter. - + Update an exchange filter. 
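A minimal provider-side sketch of the list-then-update round trip (the exchange ID is a placeholder, and it is an assumption that each :class:`ExchangeFilter` returned by `list` carries its own `id`)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Re-submit each filter unchanged; in practice you would modify the
    # ExchangeFilter payload before calling update.
    for f in w.provider_exchange_filters.list(exchange_id="<exchange-id>"):
        w.provider_exchange_filters.update(id=f.id, filter=f)  # id assumed on the dataclass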
- + :param id: str :param filter: :class:`ExchangeFilter` - + :returns: :class:`UpdateExchangeFilterResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_exchanges.rst b/docs/workspace/marketplace/provider_exchanges.rst index d53fd823d..edaae76e1 100644 --- a/docs/workspace/marketplace/provider_exchanges.rst +++ b/docs/workspace/marketplace/provider_exchanges.rst @@ -9,105 +9,105 @@ .. py:method:: add_listing_to_exchange(listing_id: str, exchange_id: str) -> AddExchangeForListingResponse Add an exchange for listing. - + Associate an exchange with a listing - + :param listing_id: str :param exchange_id: str - + :returns: :class:`AddExchangeForListingResponse` .. py:method:: create(exchange: Exchange) -> CreateExchangeResponse Create an exchange. - + Create an exchange - + :param exchange: :class:`Exchange` - + :returns: :class:`CreateExchangeResponse` .. py:method:: delete(id: str) Delete an exchange. - + This removes a listing from marketplace. - + :param id: str - - + + .. py:method:: delete_listing_from_exchange(id: str) Remove an exchange for listing. - + Disassociate an exchange with a listing - + :param id: str - - + + .. py:method:: get(id: str) -> GetExchangeResponse Get an exchange. - + Get an exchange. - + :param id: str - + :returns: :class:`GetExchangeResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Exchange] List exchanges. - + List exchanges visible to provider - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Exchange` .. py:method:: list_exchanges_for_listing(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing] List exchanges for listing. - + List exchanges associated with a listing - + :param listing_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` .. py:method:: list_listings_for_exchange(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing] List listings for exchange. - + List listings associated with an exchange - + :param exchange_id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ExchangeListing` .. py:method:: update(id: str, exchange: Exchange) -> UpdateExchangeResponse Update exchange. - + Update an exchange - + :param id: str :param exchange: :class:`Exchange` - + :returns: :class:`UpdateExchangeResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_files.rst b/docs/workspace/marketplace/provider_files.rst index f719ca65f..413936020 100644 --- a/docs/workspace/marketplace/provider_files.rst +++ b/docs/workspace/marketplace/provider_files.rst @@ -9,48 +9,48 @@ .. py:method:: create(file_parent: FileParent, marketplace_file_type: MarketplaceFileType, mime_type: str [, display_name: Optional[str]]) -> CreateFileResponse Create a file. - + Create a file. Currently, only provider icons and attached notebooks are supported. - + :param file_parent: :class:`FileParent` :param marketplace_file_type: :class:`MarketplaceFileType` :param mime_type: str :param display_name: str (optional) - + :returns: :class:`CreateFileResponse` .. py:method:: delete(file_id: str) Delete a file. - + Delete a file - + :param file_id: str - - + + .. py:method:: get(file_id: str) -> GetFileResponse Get a file. - + Get a file - + :param file_id: str - + :returns: :class:`GetFileResponse` .. 
py:method:: list(file_parent: FileParent [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FileInfo] List files. - + List files attached to a parent entity. - + :param file_parent: :class:`FileParent` :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FileInfo` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_listings.rst b/docs/workspace/marketplace/provider_listings.rst index d26c5293e..dcfd45dd8 100644 --- a/docs/workspace/marketplace/provider_listings.rst +++ b/docs/workspace/marketplace/provider_listings.rst @@ -10,56 +10,56 @@ .. py:method:: create(listing: Listing) -> CreateListingResponse Create a listing. - + Create a new listing - + :param listing: :class:`Listing` - + :returns: :class:`CreateListingResponse` .. py:method:: delete(id: str) Delete a listing. - + Delete a listing - + :param id: str - - + + .. py:method:: get(id: str) -> GetListingResponse Get a listing. - + Get a listing - + :param id: str - + :returns: :class:`GetListingResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Listing] List listings. - + List listings owned by this provider - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Listing` .. py:method:: update(id: str, listing: Listing) -> UpdateListingResponse Update listing. - + Update a listing - + :param id: str :param listing: :class:`Listing` - + :returns: :class:`UpdateListingResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_personalization_requests.rst b/docs/workspace/marketplace/provider_personalization_requests.rst index 32cdbdbb3..b9b5a0174 100644 --- a/docs/workspace/marketplace/provider_personalization_requests.rst +++ b/docs/workspace/marketplace/provider_personalization_requests.rst @@ -10,27 +10,27 @@ .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest] All personalization requests across all listings. - + List personalization requests to this provider. This will return all personalization requests, regardless of which listing they are for. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`PersonalizationRequest` .. py:method:: update(listing_id: str, request_id: str, status: PersonalizationRequestStatus [, reason: Optional[str], share: Optional[ShareInfo]]) -> UpdatePersonalizationRequestResponse Update personalization request status. - + Update personalization request. This method only permits updating the status of the request. - + :param listing_id: str :param request_id: str :param status: :class:`PersonalizationRequestStatus` :param reason: str (optional) :param share: :class:`ShareInfo` (optional) - + :returns: :class:`UpdatePersonalizationRequestResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst index cc29e089f..f77b9d436 100644 --- a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst +++ b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst @@ -9,42 +9,42 @@ .. py:method:: create() -> ProviderAnalyticsDashboard Create provider analytics dashboard. - + Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the Lakeview dashboard id. 
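A small sketch of the create-then-inspect flow (it is an assumption that the returned dataclass exposes the Marketplace-specific `id` as an attribute)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Create the provider analytics dashboard and read back its
    # Marketplace-specific id (not a Lakeview dashboard id).
    dashboard = w.provider_provider_analytics_dashboards.create()
    print(dashboard.id)  # assumed attribute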
- + :returns: :class:`ProviderAnalyticsDashboard` .. py:method:: get() -> ListProviderAnalyticsDashboardResponse Get provider analytics dashboard. - + Get provider analytics dashboard. - + :returns: :class:`ListProviderAnalyticsDashboardResponse` .. py:method:: get_latest_version() -> GetLatestVersionProviderAnalyticsDashboardResponse Get latest version of provider analytics dashboard. - + Get latest version of provider analytics dashboard. - + :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse` .. py:method:: update(id: str [, version: Optional[int]]) -> UpdateProviderAnalyticsDashboardResponse Update provider analytics dashboard. - + Update provider analytics dashboard. - + :param id: str id is an immutable property and can't be updated. :param version: int (optional) The version of the dashboard template to update the user to. The current expectation is that it should be equal to the latest version of the dashboard template. - + :returns: :class:`UpdateProviderAnalyticsDashboardResponse` \ No newline at end of file diff --git a/docs/workspace/marketplace/provider_providers.rst b/docs/workspace/marketplace/provider_providers.rst index 610c9602e..ac8a4fdc3 100644 --- a/docs/workspace/marketplace/provider_providers.rst +++ b/docs/workspace/marketplace/provider_providers.rst @@ -9,56 +9,56 @@ .. py:method:: create(provider: ProviderInfo) -> CreateProviderResponse Create a provider. - + Create a provider - + :param provider: :class:`ProviderInfo` - + :returns: :class:`CreateProviderResponse` .. py:method:: delete(id: str) Delete provider. - + Delete provider - + :param id: str - - + + .. py:method:: get(id: str) -> GetProviderResponse Get provider. - + Get provider profile - + :param id: str - + :returns: :class:`GetProviderResponse` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] List providers. - + List provider profiles for account. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ProviderInfo` .. py:method:: update(id: str, provider: ProviderInfo) -> UpdateProviderResponse Update provider. - + Update provider profile - + :param id: str :param provider: :class:`ProviderInfo` - + :returns: :class:`UpdateProviderResponse` \ No newline at end of file diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst index d0ca3e0f6..791931167 100644 --- a/docs/workspace/ml/experiments.rst +++ b/docs/workspace/ml/experiments.rst @@ -7,7 +7,7 @@ Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server. - + Experiments are located in the workspace file tree. You manage experiments using the same tools you use to manage other workspace objects such as folders, notebooks, and libraries. @@ -30,13 +30,13 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) Create experiment. - + Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that another experiment with the same name does not already exist and fails if one does. - + Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists. - + :param name: str Experiment name.
:param artifact_location: str (optional) @@ -47,14 +47,14 @@ depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to support up to 20 tags per request. - + :returns: :class:`CreateExperimentResponse` .. py:method:: create_logged_model(experiment_id: str [, model_type: Optional[str], name: Optional[str], params: Optional[List[LoggedModelParameter]], source_run_id: Optional[str], tags: Optional[List[LoggedModelTag]]]) -> CreateLoggedModelResponse Create a logged model. - + :param experiment_id: str The ID of the experiment that owns the model. :param model_type: str (optional) @@ -67,7 +67,7 @@ The ID of the run that created the model. :param tags: List[:class:`LoggedModelTag`] (optional) Tags attached to the model. - + :returns: :class:`CreateLoggedModelResponse` @@ -97,11 +97,11 @@ w.experiments.delete_run(run_id=created.run.info.run_id) Create a run. - + Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric`, and `mlflowRunTag` associated with a single execution. - + :param experiment_id: str (optional) ID of the associated experiment. :param run_name: str (optional) @@ -113,65 +113,65 @@ :param user_id: str (optional) ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in a future MLflow release. Use 'mlflow.user' tag instead. - + :returns: :class:`CreateRunResponse` .. py:method:: delete_experiment(experiment_id: str) Delete an experiment. - + Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with the experiment are also deleted. - + :param experiment_id: str ID of the associated experiment. - - + + .. py:method:: delete_logged_model(model_id: str) Delete a logged model. - + :param model_id: str The ID of the logged model to delete. - - + + .. py:method:: delete_logged_model_tag(model_id: str, tag_key: str) Delete a tag on a logged model. - + :param model_id: str The ID of the logged model to delete the tag from. :param tag_key: str The tag key. - - + + .. py:method:: delete_run(run_id: str) Delete a run. - + Marks a run for deletion. - + :param run_id: str ID of the run to delete. - - + + .. py:method:: delete_runs(experiment_id: str, max_timestamp_millis: int [, max_runs: Optional[int]]) -> DeleteRunsResponse Delete runs by creation time. - + Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to delete. :param max_timestamp_millis: int @@ -180,53 +180,53 @@ :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to delete. The maximum allowed value for max_runs is 10000. - + :returns: :class:`DeleteRunsResponse` .. py:method:: delete_tag(run_id: str, key: str) Delete a tag on a run. - + Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. - + :param run_id: str ID of the run that the tag was logged under. Must be provided. :param key: str Name of the tag. Maximum size is 255 bytes. Must be provided. - - + + .. 
py:method:: finalize_logged_model(model_id: str, status: LoggedModelStatus) -> FinalizeLoggedModelResponse Finalize a logged model. - + :param model_id: str The ID of the logged model to finalize. :param status: :class:`LoggedModelStatus` Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that something went wrong when logging the model weights / agent code. - + :returns: :class:`FinalizeLoggedModelResponse` .. py:method:: get_by_name(experiment_name: str) -> GetExperimentByNameResponse Get an experiment by name. - + Gets metadata for an experiment. - + This endpoint will return deleted experiments, but prefers the active experiment if an active and deleted experiment share the same name. If multiple deleted experiments share the same name, the API will return one of them. - + Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists. - + :param experiment_name: str Name of the associated experiment. - + :returns: :class:`GetExperimentByNameResponse` @@ -251,21 +251,21 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) Get an experiment. - + Gets metadata for an experiment. This method works on deleted experiments. - + :param experiment_id: str ID of the associated experiment. - + :returns: :class:`GetExperimentResponse` .. py:method:: get_history(metric_key: str [, max_results: Optional[int], page_token: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[Metric] Get metric history for a run. - + Gets a list of all values for the specified metric for a given run. - + :param metric_key: str Name of the metric. :param max_results: int (optional) @@ -278,72 +278,72 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run from which to fetch metric values. This field will be removed in a future MLflow version. - + :returns: Iterator over :class:`Metric` .. py:method:: get_logged_model(model_id: str) -> GetLoggedModelResponse Get a logged model. - + :param model_id: str The ID of the logged model to retrieve. - + :returns: :class:`GetLoggedModelResponse` .. py:method:: get_permission_levels(experiment_id: str) -> GetExperimentPermissionLevelsResponse Get experiment permission levels. - + Gets the permission levels that a user can have on an object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`GetExperimentPermissionLevelsResponse` .. py:method:: get_permissions(experiment_id: str) -> ExperimentPermissions Get experiment permissions. - + Gets the permissions of an experiment. Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. - + :returns: :class:`ExperimentPermissions` .. py:method:: get_run(run_id: str [, run_uuid: Optional[str]]) -> GetRunResponse Get a run. - + Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. - + If there are multiple values with the latest timestamp, return the maximum of these values. - + :param run_id: str ID of the run to fetch. Must be provided. :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run to fetch. This field will be removed in a future MLflow version. - + :returns: :class:`GetRunResponse` .. 
py:method:: list_artifacts( [, page_token: Optional[str], path: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[FileInfo] List artifacts. - + List artifacts for a run. Takes an optional `artifact_path` prefix which if specified, the response contains only artifacts with the specified prefix. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). - + :param page_token: str (optional) The token indicating the page of artifact results to fetch. `page_token` is not supported when listing artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. @@ -357,7 +357,7 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run whose artifacts to list. This field will be removed in a future MLflow version. - + :returns: Iterator over :class:`FileInfo` @@ -376,9 +376,9 @@ all = w.experiments.list_experiments(ml.ListExperimentsRequest()) List experiments. - + Gets a list of all experiments. - + :param max_results: int (optional) Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are @@ -387,55 +387,55 @@ Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. - + :returns: Iterator over :class:`Experiment` .. py:method:: log_batch( [, metrics: Optional[List[Metric]], params: Optional[List[Param]], run_id: Optional[str], tags: Optional[List[RunTag]]]) Log a batch of metrics/params/tags for a run. - + Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server will respond with an error (non-200 status code). - + In case of error (due to internal server error or an invalid request), partial data may be written. - + You can write metrics, params, and tags in interleaving fashion, but within a given entity type are guaranteed to follow the order specified in the request body. - + The overwrite behavior for metrics, params, and tags is as follows: - + * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to the set of values for the metric with the provided key. - + * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple tag values with the same key are provided in the same API request, the last-provided tag value is written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent. - + * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will result in an error). However, logging the same param (key, value) is permitted. Specifically, logging a param is idempotent. - + Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB in size and contain: - + * No more than 1000 metrics, params, and tags in total - + * Up to 1000 metrics - + * Up to 100 params - + * Up to 100 tags - + For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900 metrics, 50 params, and 51 tags is invalid. 
- + The following limits also apply to metric, param, and tag keys and values: - + * Metric keys, param keys, and tag keys can be up to 250 characters in length - + * Parameter and tag values can be up to 250 characters in length - + :param metrics: List[:class:`Metric`] (optional) Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and tags in total. @@ -447,52 +447,52 @@ :param tags: List[:class:`RunTag`] (optional) Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags in total. - - + + .. py:method:: log_inputs(run_id: str [, datasets: Optional[List[DatasetInput]], models: Optional[List[ModelInput]]]) Log inputs to a run. - + **NOTE:** Experimental: This API may change or be removed in a future release without warning. - + Logs inputs, such as datasets and models, to an MLflow Run. - + :param run_id: str ID of the run to log under :param datasets: List[:class:`DatasetInput`] (optional) Dataset inputs :param models: List[:class:`ModelInput`] (optional) Model inputs - - + + .. py:method:: log_logged_model_params(model_id: str [, params: Optional[List[LoggedModelParameter]]]) Log params for a logged model. - + Logs params for a logged model. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training. A param can be logged only once for a logged model, and attempting to overwrite an existing param with a different value will result in an error - + :param model_id: str The ID of the logged model to log params for. :param params: List[:class:`LoggedModelParameter`] (optional) Parameters to attach to the model. - - + + .. py:method:: log_metric(key: str, value: float, timestamp: int [, dataset_digest: Optional[str], dataset_name: Optional[str], model_id: Optional[str], run_id: Optional[str], run_uuid: Optional[str], step: Optional[int]]) Log a metric for a run. - + Log a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be logged multiple times. - + :param key: str Name of the metric. :param value: float @@ -514,48 +514,48 @@ removed in a future MLflow version. :param step: int (optional) Step at which to log the metric - - + + .. py:method:: log_model( [, model_json: Optional[str], run_id: Optional[str]]) Log a model. - + **NOTE:** Experimental: This API may change or be removed in a future release without warning. - + :param model_json: str (optional) MLmodel file in json format. :param run_id: str (optional) ID of the run to log under - - + + .. py:method:: log_outputs(run_id: str [, models: Optional[List[ModelOutput]]]) Log outputs from a run. - + **NOTE**: Experimental: This API may change or be removed in a future release without warning. - + Logs outputs, such as models, from an MLflow Run. - + :param run_id: str The ID of the Run from which to log outputs. :param models: List[:class:`ModelOutput`] (optional) The model outputs from the Run. - - + + .. py:method:: log_param(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]]) Log a param for a run. - + Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only once for a run. - + :param key: str Name of the param. Maximum size is 255 bytes. 
:param value: str @@ -565,48 +565,48 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the param. This field will be removed in a future MLflow version. - - + + .. py:method:: restore_experiment(experiment_id: str) Restore an experiment. - + Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are also restored. - + Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted. - + :param experiment_id: str ID of the associated experiment. - - + + .. py:method:: restore_run(run_id: str) Restore a run. - + Restores a deleted run. This also restores associated metadata, runs, metrics, params, and tags. - + Throws `RESOURCE_DOES_NOT_EXIST` if the run was never created or was permanently deleted. - + :param run_id: str ID of the run to restore. - - + + .. py:method:: restore_runs(experiment_id: str, min_timestamp_millis: int [, max_runs: Optional[int]]) -> RestoreRunsResponse Restore runs by deletion time. - + Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the client code snippet on - + :param experiment_id: str The ID of the experiment containing the runs to restore. :param min_timestamp_millis: int @@ -615,16 +615,16 @@ :param max_runs: int (optional) An optional positive integer indicating the maximum number of runs to restore. The maximum allowed value for max_runs is 10000. - + :returns: :class:`RestoreRunsResponse` .. py:method:: search_experiments( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], view_type: Optional[ViewType]]) -> Iterator[Experiment] Search experiments. - + Searches for experiments that satisfy specified search criteria. - + :param filter: str (optional) String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'") :param max_results: int (optional) @@ -637,16 +637,16 @@ Token indicating the page of experiments to fetch :param view_type: :class:`ViewType` (optional) Qualifier for type of experiments to be returned. If unspecified, return only active experiments. - + :returns: Iterator over :class:`Experiment` .. py:method:: search_logged_models( [, datasets: Optional[List[SearchLoggedModelsDataset]], experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[SearchLoggedModelsOrderBy]], page_token: Optional[str]]) -> SearchLoggedModelsResponse Search logged models. - + Search for Logged Models that satisfy specified search criteria. - + :param datasets: List[:class:`SearchLoggedModelsDataset`] (optional) List of datasets on which to apply the metrics filter clauses. For example, a filter with `metrics.accuracy > 0.9` and dataset info with name "test_dataset" means we will return all logged @@ -658,7 +658,7 @@ :param filter: str (optional) A filter expression over logged model info and data that allows returning a subset of logged models. The syntax is a subset of SQL that supports AND'ing together binary operations. - + Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``. :param max_results: int (optional) The maximum number of Logged Models to return. The maximum limit is 50. 
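To make the filter syntax concrete, a hedged sketch (the experiment ID is a placeholder, and it is an assumption that the response exposes a `models` list alongside the page token)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Search logged models with the SQL-subset filter described above.
    resp = w.experiments.search_logged_models(
        experiment_ids=["<experiment-id>"],  # placeholder
        filter="params.alpha < 0.3 AND metrics.accuracy > 0.9",
        max_results=50,                      # documented maximum
    )
    for model in resp.models or []:          # models attribute assumed
        print(model)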
@@ -666,30 +666,30 @@ The list of columns for ordering the results, with additional fields for sorting criteria. :param page_token: str (optional) The token indicating the page of logged models to fetch. - + :returns: :class:`SearchLoggedModelsResponse` .. py:method:: search_runs( [, experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], run_view_type: Optional[ViewType]]) -> Iterator[Run] Search for runs. - + Searches for runs that satisfy expressions. - + Search expressions can use `mlflowMetric` and `mlflowParam` keys. - + :param experiment_ids: List[str] (optional) List of experiment IDs to search over. :param filter: str (optional) A filter expression over params, metrics, and tags, that allows returning a subset of runs. The syntax is a subset of SQL that supports ANDing together binary operations between a param, metric, or tag and a constant. - + Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'` - + You can select columns with special characters (hyphen, space, period, etc.) by using double quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` - + Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`. :param max_results: int (optional) Maximum number of runs desired. Max threshold is 50000 @@ -703,58 +703,58 @@ Token for the current page of runs. :param run_view_type: :class:`ViewType` (optional) Whether to display only active, only deleted, or all runs. Defaults to only active runs. - + :returns: Iterator over :class:`Run` .. py:method:: set_experiment_tag(experiment_id: str, key: str, value: str) Set a tag for an experiment. - + Sets a tag on an experiment. Experiment tags are metadata that can be updated. - + :param experiment_id: str ID of the experiment under which to log the tag. Must be provided. :param key: str Name of the tag. Keys up to 250 bytes in size are supported. :param value: str String value of the tag being logged. Values up to 64KB in size are supported. - - + + .. py:method:: set_logged_model_tags(model_id: str [, tags: Optional[List[LoggedModelTag]]]) Set a tag for a logged model. - + :param model_id: str The ID of the logged model to set the tags on. :param tags: List[:class:`LoggedModelTag`] (optional) The tags to set on the logged model. - - + + .. py:method:: set_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions Set experiment permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` .. py:method:: set_tag(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]]) Set a tag for a run. - + Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes. - + :param key: str Name of the tag. Keys up to 250 bytes in size are supported. :param value: str @@ -764,8 +764,8 @@ :param run_uuid: str (optional) [Deprecated, use `run_id` instead] ID of the run under which to log the tag. This field will be removed in a future MLflow version. - - + + .. 
py:method:: update_experiment(experiment_id: str [, new_name: Optional[str]]) @@ -789,27 +789,27 @@ w.experiments.delete_experiment(experiment_id=experiment.experiment_id) Update an experiment. - + Updates experiment metadata. - + :param experiment_id: str ID of the associated experiment. :param new_name: str (optional) If provided, the experiment's name is changed to the new name. The new name must be unique. - - + + .. py:method:: update_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions Update experiment permissions. - + Updates the permissions on an experiment. Experiments can inherit permissions from their root object. - + :param experiment_id: str The experiment for which to get or manage permissions. :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional) - + :returns: :class:`ExperimentPermissions` @@ -841,9 +841,9 @@ w.experiments.delete_run(run_id=created.run.info.run_id) Update a run. - + Updates run metadata. - + :param end_time: int (optional) Unix timestamp in milliseconds of when the run ended. :param run_id: str (optional) @@ -855,6 +855,6 @@ MLflow version. :param status: :class:`UpdateRunStatus` (optional) Updated status of the run. - + :returns: :class:`UpdateRunResponse` \ No newline at end of file diff --git a/docs/workspace/ml/forecasting.rst b/docs/workspace/ml/forecasting.rst index 79694a709..79fca0ffe 100644 --- a/docs/workspace/ml/forecasting.rst +++ b/docs/workspace/ml/forecasting.rst @@ -9,9 +9,9 @@ .. py:method:: create_experiment(train_data_path: str, target_column: str, time_column: str, forecast_granularity: str, forecast_horizon: int [, custom_weights_column: Optional[str], experiment_path: Optional[str], future_feature_data_path: Optional[str], holiday_regions: Optional[List[str]], include_features: Optional[List[str]], max_runtime: Optional[int], prediction_data_path: Optional[str], primary_metric: Optional[str], register_to: Optional[str], split_column: Optional[str], timeseries_identifier_columns: Optional[List[str]], training_frameworks: Optional[List[str]]]) -> Wait[ForecastingExperiment] Create a forecasting experiment. - + Creates a serverless forecasting experiment. Returns the experiment ID. - + :param train_data_path: str The fully qualified path of a Unity Catalog table, formatted as catalog_name.schema_name.table_name, used as training data for the forecasting model. @@ -60,7 +60,7 @@ :param training_frameworks: List[str] (optional) List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', 'DeepAR'. An empty list includes all supported frameworks. - + :returns: Long-running operation waiter for :class:`ForecastingExperiment`. See :method:wait_get_experiment_forecasting_succeeded for more details. @@ -72,12 +72,12 @@ .. py:method:: get_experiment(experiment_id: str) -> ForecastingExperiment Get a forecasting experiment. - + Public RPC to get forecasting experiment - + :param experiment_id: str The unique ID of a forecasting experiment - + :returns: :class:`ForecastingExperiment` diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 2d93defa9..4c1b3d917 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -8,35 +8,35 @@ [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides centralized model governance, cross-workspace access, lineage, and deployment. 
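A minimal sketch of the forecasting flow documented above, assuming a Unity Catalog table of daily sales; the table name, column names, and the granularity value are all placeholders rather than verified inputs::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Train a forecasting model on a UC table and block until the experiment finishes.
    experiment = w.forecasting.create_experiment(
        train_data_path="main.default.sales_history",  # placeholder catalog.schema.table
        target_column="units_sold",
        time_column="sale_date",
        forecast_granularity="daily",  # granularity value is illustrative
        forecast_horizon=30,
    ).result()

    print(experiment.experiment_id)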
Workspace Model Registry will be deprecated in the future. - + The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models. .. py:method:: approve_transition_request(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> ApproveTransitionRequestResponse Approve transition request. - + Approves a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`ApproveTransitionRequestResponse` @@ -67,17 +67,17 @@ w.model_registry.delete_comment(id=created.comment.id) Post a comment. - + Posts a comment on a model version. A comment can be submitted either by a user or programmatically to display relevant information about the model. For example, test results or deployment errors. - + :param name: str Name of the model. :param version: str Version of the model. :param comment: str User-provided comment on the action. - + :returns: :class:`CreateCommentResponse` @@ -97,18 +97,18 @@ created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") Create a model. - + Creates a new registered model with the name specified in the request body. - + Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. - + :param name: str Register models under this name :param description: str (optional) Optional description for registered model. :param tags: List[:class:`ModelTag`] (optional) Additional metadata for registered model. - + :returns: :class:`CreateModelResponse` @@ -130,9 +130,9 @@ created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Create a model version. - + Creates a model version. - + :param name: str Register model under this name :param source: str @@ -147,33 +147,33 @@ hosted at another instance of MLflow. :param tags: List[:class:`ModelVersionTag`] (optional) Additional metadata for model version. - + :returns: :class:`CreateModelVersionResponse` .. py:method:: create_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> CreateTransitionRequestResponse Make a transition request. - + Creates a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`CreateTransitionRequestResponse` @@ -201,39 +201,39 @@ w.model_registry.delete_webhook(id=created.webhook.id) Create a webhook. - + **NOTE**: This endpoint is in Public Preview. - + Creates a registry webhook. - + :param events: List[:class:`RegistryWebhookEvent`] Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. 
- + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param description: str (optional) User-specified description for the webhook. @@ -245,74 +245,74 @@ :param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. - + :returns: :class:`CreateWebhookResponse` .. py:method:: delete_comment(id: str) Delete a comment. - + Deletes a comment on a model version. - + :param id: str Unique identifier of an activity - - + + .. py:method:: delete_model(name: str) Delete a model. - + Deletes a registered model. - + :param name: str Registered model unique name identifier. - - + + .. py:method:: delete_model_tag(name: str, key: str) Delete a model tag. - + Deletes the tag for a registered model. - + :param name: str Name of the registered model that the tag was logged under. :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. - - + + .. py:method:: delete_model_version(name: str, version: str) Delete a model version. - + Deletes a model version. - + :param name: str Name of the registered model :param version: str Model version number - - + + .. py:method:: delete_model_version_tag(name: str, version: str, key: str) Delete a model version tag. - + Deletes a model version tag. - + :param name: str Name of the registered model that the tag was logged under. :param version: str @@ -320,64 +320,64 @@ :param key: str Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size is 250 bytes. - - + + .. py:method:: delete_transition_request(name: str, version: str, stage: DeleteTransitionRequestStage, creator: str [, comment: Optional[str]]) Delete a transition request. - + Cancels a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`DeleteTransitionRequestStage` Target stage of the transition request. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. 
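The transition-request methods documented here form a request/approve lifecycle; a hedged sketch, where the model name and version are placeholders and `ml.Stage` is assumed to expose the stage values listed above::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # Ask for version 1 of "demo-model" (a placeholder) to be moved to Staging.
    w.model_registry.create_transition_request(name="demo-model", version="1",
                                               stage=ml.Stage.STAGING,
                                               comment="Ready for pre-production checks")

    # A reviewer can then approve, archiving any versions already in Staging.
    w.model_registry.approve_transition_request(name="demo-model", version="1",
                                                stage=ml.Stage.STAGING,
                                                archive_existing_versions=True)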
:param creator: str Username of the user who created this request. Of the transition requests matching the specified details, only the one transition created by this user will be deleted. :param comment: str (optional) User-provided comment on the action. - - + + .. py:method:: delete_webhook( [, id: Optional[str]]) Delete a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Deletes a registry webhook. - + :param id: str (optional) Webhook ID required to delete a registry webhook. - - + + .. py:method:: get_latest_versions(name: str [, stages: Optional[List[str]]]) -> Iterator[ModelVersion] Get the latest version. - + Gets the latest version of a registered model. - + :param name: str Registered model unique name identifier. :param stages: List[str] (optional) List of stages. - + :returns: Iterator over :class:`ModelVersion` @@ -399,69 +399,69 @@ model = w.model_registry.get_model(name=created.registered_model.name) Get model. - + Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel - + :param name: str Registered model unique name identifier. - + :returns: :class:`GetModelResponse` .. py:method:: get_model_version(name: str, version: str) -> GetModelVersionResponse Get a model version. - + Get a model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionResponse` .. py:method:: get_model_version_download_uri(name: str, version: str) -> GetModelVersionDownloadUriResponse Get a model version URI. - + Gets a URI to download the model version. - + :param name: str Name of the registered model :param version: str Model version number - + :returns: :class:`GetModelVersionDownloadUriResponse` .. py:method:: get_permission_levels(registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse Get registered model permission levels. - + Gets the permission levels that a user can have on an object. - + :param registered_model_id: str The registered model for which to get or manage permissions. - + :returns: :class:`GetRegisteredModelPermissionLevelsResponse` .. py:method:: get_permissions(registered_model_id: str) -> RegisteredModelPermissions Get registered model permissions. - + Gets the permissions of a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. - + :returns: :class:`RegisteredModelPermissions` @@ -480,28 +480,28 @@ all = w.model_registry.list_models(ml.ListModelsRequest()) List models. - + Lists all available registered models, up to the limit specified in __max_results__. - + :param max_results: int (optional) Maximum number of registered models desired. Max threshold is 1000. :param page_token: str (optional) Pagination token to go to the next page based on a previous query. - + :returns: Iterator over :class:`Model` .. py:method:: list_transition_requests(name: str, version: str) -> Iterator[Activity] List transition requests. - + Gets a list of all open stage transition requests for the model version. - + :param name: str Name of the model. :param version: str Version of the model. 
- + :returns: Iterator over :class:`Activity` @@ -520,11 +520,11 @@ all = w.model_registry.list_webhooks(ml.ListWebhooksRequest()) List registry webhooks. - + **NOTE:** This endpoint is in Public Preview. - + Lists all registry webhooks. - + :param events: List[:class:`RegistryWebhookEvent`] (optional) If `events` is specified, any webhook with one or more of the specified trigger events is included in the output. If `events` is not specified, webhooks of all event types are included in the output. @@ -533,56 +533,56 @@ associated model. :param page_token: str (optional) Token indicating the page of artifact results to fetch - + :returns: Iterator over :class:`RegistryWebhook` .. py:method:: reject_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> RejectTransitionRequestResponse Reject a transition request. - + Rejects a model version stage transition request. - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`RejectTransitionRequestResponse` .. py:method:: rename_model(name: str [, new_name: Optional[str]]) -> RenameModelResponse Rename a model. - + Renames a registered model. - + :param name: str Registered model unique name identifier. :param new_name: str (optional) If provided, updates the name for this `registered_model`. - + :returns: :class:`RenameModelResponse` .. py:method:: search_model_versions( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[ModelVersion] Searches model versions. - + Searches for specific model versions based on the supplied __filter__. - + :param filter: str (optional) String filter condition, like "name='my-model-name'". Must be a single boolean condition, with string values wrapped in single quotes. @@ -594,16 +594,16 @@ timestamp, followed by name ASC, followed by version DESC. :param page_token: str (optional) Pagination token to go to next page based on previous search query. - + :returns: Iterator over :class:`ModelVersion` .. py:method:: search_models( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[Model] Search models. - + Search for registered models based on the specified __filter__. - + :param filter: str (optional) String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single @@ -616,16 +616,16 @@ name ASC. :param page_token: str (optional) Pagination token to go to the next page based on a previous search query. - + :returns: Iterator over :class:`Model` .. py:method:: set_model_tag(name: str, key: str, value: str) Set a tag. - + Sets a tag on a registered model. - + :param name: str Unique name of the model. :param key: str @@ -635,16 +635,16 @@ :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. - - + + .. py:method:: set_model_version_tag(name: str, version: str, key: str, value: str) Set a version tag. 
- + Sets a model version tag. - + :param name: str Unique name of the model. :param version: str @@ -656,69 +656,69 @@ :param value: str String value of the tag being logged. Maximum size depends on storage backend. All storage backends are guaranteed to support key values up to 5000 bytes in size. - - + + .. py:method:: set_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions Set registered model permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` .. py:method:: test_registry_webhook(id: str [, event: Optional[RegistryWebhookEvent]]) -> TestRegistryWebhookResponse Test a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Tests a registry webhook. - + :param id: str Webhook ID :param event: :class:`RegistryWebhookEvent` (optional) If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the test trigger uses a randomly chosen event associated with the webhook. - + :returns: :class:`TestRegistryWebhookResponse` .. py:method:: transition_stage(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> TransitionStageResponse Transition a stage. - + Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded.", - + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage - + :param name: str Name of the model. :param version: str Version of the model. :param stage: :class:`Stage` Target stage of the transition. Valid values are: - + * `None`: The initial stage of a model version. - + * `Staging`: Staging or pre-production stage. - + * `Production`: Production stage. - + * `Archived`: Archived stage. :param archive_existing_versions: bool Specifies whether to archive all current model versions in the target stage. :param comment: str (optional) User-provided comment on the action. - + :returns: :class:`TransitionStageResponse` @@ -751,14 +751,14 @@ w.model_registry.delete_comment(id=created.comment.id) Update a comment. - + Post an edit to a comment on a model version. - + :param id: str Unique identifier of an activity :param comment: str User-provided comment on the action. - + :returns: :class:`UpdateCommentResponse` @@ -785,15 +785,15 @@ ) Update model. - + Updates a registered model. - + :param name: str Registered model unique name identifier. :param description: str (optional) If provided, updates the description for this `registered_model`. - - + + .. py:method:: update_model_version(name: str, version: str [, description: Optional[str]]) @@ -820,30 +820,30 @@ ) Update model version. - + Updates the model version. - + :param name: str Name of the registered model :param version: str Model version number :param description: str (optional) If provided, updates the description for this `registered_model`. - - + + .. 
py:method:: update_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions Update registered model permissions. - + Updates the permissions on a registered model. Registered models can inherit permissions from their root object. - + :param registered_model_id: str The registered model for which to get or manage permissions. :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional) - + :returns: :class:`RegisteredModelPermissions` @@ -873,11 +873,11 @@ w.model_registry.delete_webhook(id=created.webhook.id) Update a webhook. - + **NOTE:** This endpoint is in Public Preview. - + Updates a registry webhook. - + :param id: str Webhook ID :param description: str (optional) @@ -885,42 +885,42 @@ :param events: List[:class:`RegistryWebhookEvent`] (optional) Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was created for the associated model. - + * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - + * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - + * `COMMENT_CREATED`: A user wrote a comment on a registered model. - + * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be specified for a registry-wide webhook, which can be created by not specifying a model name in the create request. - + * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - + * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - + * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - + * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - + * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to staging. - + * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to production. - + * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived. :param http_url_spec: :class:`HttpUrlSpec` (optional) :param job_spec: :class:`JobSpec` (optional) :param status: :class:`RegistryWebhookStatus` (optional) Enable or disable triggering the webhook, or put the webhook into test mode. The default is `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - + * `DISABLED`: Webhook is not triggered. - + * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real event. - - + + \ No newline at end of file diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index bf85f808e..92d8582db 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -5,11 +5,11 @@ .. py:class:: PipelinesAPI The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines. - + Delta Live Tables is a framework for building reliable, maintainable, and testable data processing pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task orchestration, cluster management, monitoring, data quality, and error handling. - + Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables manages how your data is transformed based on a target schema you define for each processing step. 
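To tie together the registry webhook methods documented above, a minimal sketch of creating, testing, and removing a webhook; the model name and target URL are placeholders you would replace with your own::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # Fire whenever a new version of "demo-model" is created; the URL is a placeholder.
    created = w.model_registry.create_webhook(
        model_name="demo-model",
        events=[ml.RegistryWebhookEvent.MODEL_VERSION_CREATED],
        http_url_spec=ml.HttpUrlSpec(url="https://example.com/mlflow-hook"),
        description="Notify on new model versions",
    )

    # Send a synthetic event to verify delivery, then clean up.
    w.model_registry.test_registry_webhook(id=created.webhook.id)
    w.model_registry.delete_webhook(id=created.webhook.id)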
You can also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected @@ -52,10 +52,10 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) Create a pipeline. - + Creates a new data processing pipeline based on the requested configuration. If successful, this method returns the ID of the new pipeline. - + :param allow_duplicate_names: bool (optional) If false, deployment will fail if name conflicts with that of another pipeline. :param budget_policy_id: str (optional) @@ -107,7 +107,7 @@ :param run_as: :class:`RunAs` (optional) Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. :param schema: str (optional) @@ -125,20 +125,20 @@ for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. - + :returns: :class:`CreatePipelineResponse` .. py:method:: delete(pipeline_id: str) Delete a pipeline. - + Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the pipeline and its tables. You cannot undo this action. - + :param pipeline_id: str - - + + .. py:method:: get(pipeline_id: str) -> GetPipelineResponse @@ -180,47 +180,47 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) Get a pipeline. - + :param pipeline_id: str - + :returns: :class:`GetPipelineResponse` .. py:method:: get_permission_levels(pipeline_id: str) -> GetPipelinePermissionLevelsResponse Get pipeline permission levels. - + Gets the permission levels that a user can have on an object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. - + :returns: :class:`GetPipelinePermissionLevelsResponse` .. py:method:: get_permissions(pipeline_id: str) -> PipelinePermissions Get pipeline permissions. - + Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. - + :returns: :class:`PipelinePermissions` .. py:method:: get_update(pipeline_id: str, update_id: str) -> GetUpdateResponse Get a pipeline update. - + Gets an update from an active pipeline. - + :param pipeline_id: str The ID of the pipeline. :param update_id: str The ID of the update. - + :returns: :class:`GetUpdateResponse` @@ -263,16 +263,16 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) List pipeline events. - + Retrieves events for a pipeline. - + :param pipeline_id: str The pipeline to return events for. :param filter: str (optional) Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp > 'TIMESTAMP' (or >=,<,<=,=) - + Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp> '2021-07-22T06:37:33.083Z' :param max_results: int (optional) @@ -286,7 +286,7 @@ Page token returned by previous call. This field is mutually exclusive with all fields in this request except max_results. An error is returned if any fields other than max_results are set when this field is set. 
- + :returns: Iterator over :class:`PipelineEvent` @@ -305,16 +305,16 @@ all = w.pipelines.list_pipelines(pipelines.ListPipelinesRequest()) List pipelines. - + Lists pipelines defined in the Delta Live Tables system. - + :param filter: str (optional) Select a subset of results based on the specified criteria. The supported filters are: - + * `notebook=''` to select pipelines that reference the provided notebook path. * `name LIKE '[pattern]'` to select pipelines with a name that matches pattern. Wildcards are supported, for example: `name LIKE '%shopping%'` - + Composite filters are not supported. This field is optional. :param max_results: int (optional) The maximum number of entries to return in a single page. The system may return fewer than @@ -326,16 +326,16 @@ default is id asc. This field is optional. :param page_token: str (optional) Page token returned by previous call - + :returns: Iterator over :class:`PipelineStateInfo` .. py:method:: list_updates(pipeline_id: str [, max_results: Optional[int], page_token: Optional[str], until_update_id: Optional[str]]) -> ListUpdatesResponse List pipeline updates. - + List updates for an active pipeline. - + :param pipeline_id: str The pipeline to return updates for. :param max_results: int (optional) @@ -344,31 +344,31 @@ Page token returned by previous call :param until_update_id: str (optional) If present, returns updates until and including this update_id. - + :returns: :class:`ListUpdatesResponse` .. py:method:: set_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions Set pipeline permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional) - + :returns: :class:`PipelinePermissions` .. py:method:: start_update(pipeline_id: str [, cause: Optional[StartUpdateCause], full_refresh: Optional[bool], full_refresh_selection: Optional[List[str]], refresh_selection: Optional[List[str]], validate_only: Optional[bool]]) -> StartUpdateResponse Start a pipeline. - + Starts a new update for the pipeline. If there is already an active update for the pipeline, the request will fail and the active update will remain running. - + :param pipeline_id: str :param cause: :class:`StartUpdateCause` (optional) What triggered this update. @@ -385,19 +385,19 @@ :param validate_only: bool (optional) If true, this update only validates the correctness of pipeline source code but does not materialize or publish any datasets. - + :returns: :class:`StartUpdateResponse` .. py:method:: stop(pipeline_id: str) -> Wait[GetPipelineResponse] Stop a pipeline. - + Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this request is a no-op. - + :param pipeline_id: str - + :returns: Long-running operation waiter for :class:`GetPipelineResponse`. See :method:wait_get_pipeline_idle for more details. @@ -459,9 +459,9 @@ w.pipelines.delete(pipeline_id=created.pipeline_id) Edit a pipeline. - + Updates a pipeline with the supplied configuration. - + :param pipeline_id: str Unique identifier for this pipeline. :param allow_duplicate_names: bool (optional) @@ -517,7 +517,7 @@ :param run_as: :class:`RunAs` (optional) Write-only setting, available only in Create/Update calls. 
Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline. - + Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown. :param schema: str (optional) @@ -535,20 +535,20 @@ for pipeline creation in favor of the `schema` field. :param trigger: :class:`PipelineTrigger` (optional) Which pipeline trigger to use. Deprecated: Use `continuous` instead. - - + + .. py:method:: update_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions Update pipeline permissions. - + Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object. - + :param pipeline_id: str The pipeline for which to get or manage permissions. :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional) - + :returns: :class:`PipelinePermissions` diff --git a/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst b/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst index 129e75c14..50f647795 100644 --- a/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst +++ b/docs/workspace/qualitymonitorv2/quality_monitor_v2.rst @@ -9,65 +9,65 @@ .. py:method:: create_quality_monitor(quality_monitor: QualityMonitor) -> QualityMonitor Create a quality monitor. - + Create a quality monitor on UC object - + :param quality_monitor: :class:`QualityMonitor` - + :returns: :class:`QualityMonitor` .. py:method:: delete_quality_monitor(object_type: str, object_id: str) Delete a quality monitor. - + Delete a quality monitor on UC object - + :param object_type: str The type of the monitored object. Can be one of the following: schema. :param object_id: str The uuid of the request object. For example, schema id. - - + + .. py:method:: get_quality_monitor(object_type: str, object_id: str) -> QualityMonitor Read a quality monitor. - + Read a quality monitor on UC object - + :param object_type: str The type of the monitored object. Can be one of the following: schema. :param object_id: str The uuid of the request object. For example, schema id. - + :returns: :class:`QualityMonitor` .. py:method:: list_quality_monitor( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[QualityMonitor] List quality monitors. - + (Unimplemented) List quality monitors - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`QualityMonitor` .. py:method:: update_quality_monitor(object_type: str, object_id: str, quality_monitor: QualityMonitor) -> QualityMonitor Update a quality monitor. - + (Unimplemented) Update a quality monitor on UC object - + :param object_type: str The type of the monitored object. Can be one of the following: schema. :param object_id: str The uuid of the request object. For example, schema id. :param quality_monitor: :class:`QualityMonitor` - + :returns: :class:`QualityMonitor` \ No newline at end of file diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst index ad7da939a..1e2e32884 100644 --- a/docs/workspace/serving/serving_endpoints.rst +++ b/docs/workspace/serving/serving_endpoints.rst @@ -5,7 +5,7 @@ .. py:class:: ServingEndpointsExt The Serving Endpoints API allows you to create, update, and delete model serving endpoints. - + You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog. 
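A hedged sketch of the quality monitor lifecycle documented above, assuming the monitored object is a schema; the UUID is a placeholder for a real schema ID::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.qualitymonitorv2 import QualityMonitor

    w = WorkspaceClient()

    # Create a monitor on a UC schema; the object ID below is a placeholder UUID.
    monitor = w.quality_monitor_v2.create_quality_monitor(
        quality_monitor=QualityMonitor(object_type="schema",
                                       object_id="00000000-0000-0000-0000-000000000000"))

    # Read it back, then page through every monitor in the workspace.
    w.quality_monitor_v2.get_quality_monitor(object_type="schema", object_id=monitor.object_id)
    for m in w.quality_monitor_v2.list_quality_monitor():
        print(m.object_type, m.object_id)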
Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means the endpoints and associated compute resources are fully managed by Databricks and will not appear in your @@ -18,21 +18,21 @@ .. py:method:: build_logs(name: str, served_model_name: str) -> BuildLogsResponse Get build logs for a served model. - + Retrieves the build logs associated with the provided served model. - + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. :param served_model_name: str The name of the served model that build logs will be retrieved for. This field is required. - + :returns: :class:`BuildLogsResponse` .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] Create a new serving endpoint. - + :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -51,7 +51,7 @@ Enable route optimization for the serving endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. @@ -63,7 +63,7 @@ .. py:method:: create_provisioned_throughput_endpoint(name: str, config: PtEndpointCoreConfig [, ai_gateway: Optional[AiGatewayConfig], budget_policy_id: Optional[str], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed] Create a new PT serving endpoint. - + :param name: str The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. @@ -75,7 +75,7 @@ The budget policy associated with the endpoint. :param tags: List[:class:`EndpointTag`] (optional) Tags to be attached to the serving endpoint and automatically propagated to billing logs. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. @@ -87,34 +87,34 @@ .. py:method:: delete(name: str) Delete a serving endpoint. - + :param name: str - - + + .. py:method:: export_metrics(name: str) -> ExportMetricsResponse Get metrics of a serving endpoint. - + Retrieves the metrics associated with the provided serving endpoint in either Prometheus or OpenMetrics exposition format. - + :param name: str The name of the serving endpoint to retrieve metrics for. This field is required. - + :returns: :class:`ExportMetricsResponse` .. py:method:: get(name: str) -> ServingEndpointDetailed Get a single serving endpoint. - + Retrieves the details for a single serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. - + :returns: :class:`ServingEndpointDetailed` @@ -127,38 +127,38 @@ .. py:method:: get_open_api(name: str) -> GetOpenApiResponse Get the schema for a serving endpoint. - + Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for the supported paths, input and output format and datatypes. 
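Creating an endpoint as documented above is a long-running operation; a minimal sketch, assuming a Unity Catalog model and placeholder names throughout (the `ServedEntityInput` field names are taken from the SDK's serving service and are an assumption here, since they are not shown above)::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.serving import EndpointCoreConfigInput, ServedEntityInput

    w = WorkspaceClient()

    # Serve version 1 of a UC model on a small, scale-to-zero endpoint; block until ready.
    endpoint = w.serving_endpoints.create(
        name="my-endpoint",  # placeholder endpoint name
        config=EndpointCoreConfigInput(served_entities=[
            ServedEntityInput(entity_name="main.default.my_model",
                              entity_version="1",
                              workload_size="Small",
                              scale_to_zero_enabled=True)
        ]),
    ).result()

    print(endpoint.state)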
- + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. - + :returns: :class:`GetOpenApiResponse` .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse Get serving endpoint permission levels. - + Gets the permission levels that a user can have on an object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. - + :returns: :class:`GetServingEndpointPermissionLevelsResponse` .. py:method:: get_permissions(serving_endpoint_id: str) -> ServingEndpointPermissions Get serving endpoint permissions. - + Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. - + :returns: :class:`ServingEndpointPermissions` @@ -185,61 +185,61 @@ .. py:method:: list() -> Iterator[ServingEndpoint] Get all serving endpoints. - + :returns: Iterator over :class:`ServingEndpoint` .. py:method:: logs(name: str, served_model_name: str) -> ServerLogsResponse Get the latest logs for a served model. - + Retrieves the service logs associated with the provided served model. - + :param name: str The name of the serving endpoint that the served model belongs to. This field is required. :param served_model_name: str The name of the served model that logs will be retrieved for. This field is required. - + :returns: :class:`ServerLogsResponse` .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags Update tags of a serving endpoint. - + Used to batch add and delete tags from a serving endpoint with a single API call. - + :param name: str The name of the serving endpoint who's tags to patch. This field is required. :param add_tags: List[:class:`EndpointTag`] (optional) List of endpoint tags to add :param delete_tags: List[str] (optional) List of tag keys to delete - + :returns: :class:`EndpointTags` .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse Update rate limits of a serving endpoint. - + Deprecated: Please use AI Gateway to manage rate limits instead. - + :param name: str The name of the serving endpoint whose rate limits are being updated. This field is required. :param rate_limits: List[:class:`RateLimit`] (optional) The list of endpoint rate limits. - + :returns: :class:`PutResponse` .. py:method:: put_ai_gateway(name: str [, fallback_config: Optional[FallbackConfig], guardrails: Optional[AiGatewayGuardrails], inference_table_config: Optional[AiGatewayInferenceTableConfig], rate_limits: Optional[List[AiGatewayRateLimit]], usage_tracking_config: Optional[AiGatewayUsageTrackingConfig]]) -> PutAiGatewayResponse Update AI Gateway of a serving endpoint. - + Used to update the AI Gateway of a serving endpoint. NOTE: External model, provisioned throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only support inference tables. - + :param name: str The name of the serving endpoint whose AI Gateway is being updated. This field is required. :param fallback_config: :class:`FallbackConfig` (optional) @@ -255,14 +255,14 @@ :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional) Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs. - + :returns: :class:`PutAiGatewayResponse` .. 
py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse Query a serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. :param dataframe_records: List[Any] (optional) @@ -306,32 +306,32 @@ The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. - + :returns: :class:`QueryEndpointResponse` .. py:method:: set_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions Set serving endpoint permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` .. py:method:: update_config(name: str [, auto_capture_config: Optional[AutoCaptureConfigInput], served_entities: Optional[List[ServedEntityInput]], served_models: Optional[List[ServedModelInput]], traffic_config: Optional[TrafficConfig]]) -> Wait[ServingEndpointDetailed] Update config of a serving endpoint. - + Updates any combination of the serving endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. An endpoint that already has an update in progress can not be updated until the current update completes or fails. - + :param name: str The name of the serving endpoint to update. This field is required. :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional) @@ -346,7 +346,7 @@ config. :param traffic_config: :class:`TrafficConfig` (optional) The traffic configuration associated with the serving endpoint config. - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. @@ -358,29 +358,29 @@ .. py:method:: update_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions Update serving endpoint permissions. - + Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their root object. - + :param serving_endpoint_id: str The serving endpoint for which to get or manage permissions. :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional) - + :returns: :class:`ServingEndpointPermissions` .. py:method:: update_provisioned_throughput_endpoint_config(name: str, config: PtEndpointCoreConfig) -> Wait[ServingEndpointDetailed] Update config of a PT serving endpoint. - + Updates any combination of the pt endpoint's served entities, the compute configuration of those served entities, and the endpoint's traffic config. 
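The `query` method documented above accepts chat-style fields directly; a minimal sketch against a hypothetical chat-capable endpoint (the endpoint name is a placeholder)::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

    w = WorkspaceClient()

    # "my-chat-endpoint" stands in for a real chat/completions serving endpoint.
    response = w.serving_endpoints.query(
        name="my-chat-endpoint",
        messages=[ChatMessage(role=ChatMessageRole.USER,
                              content="Summarize Delta Live Tables in one sentence.")],
        max_tokens=128,
        temperature=0.2,
    )
    print(response.choices[0].message.content)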
Updates are instantaneous and endpoint should be updated instantly - + :param name: str The name of the pt endpoint to update. This field is required. :param config: :class:`PtEndpointCoreConfig` - + :returns: Long-running operation waiter for :class:`ServingEndpointDetailed`. See :method:wait_get_serving_endpoint_not_updating for more details. diff --git a/docs/workspace/serving/serving_endpoints_data_plane.rst b/docs/workspace/serving/serving_endpoints_data_plane.rst index 8fb09e7ff..bb22c3dd7 100644 --- a/docs/workspace/serving/serving_endpoints_data_plane.rst +++ b/docs/workspace/serving/serving_endpoints_data_plane.rst @@ -10,7 +10,7 @@ .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse Query a serving endpoint. - + :param name: str The name of the serving endpoint. This field is required. :param dataframe_records: List[Any] (optional) @@ -54,6 +54,6 @@ The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with other chat/completions query fields. - + :returns: :class:`QueryEndpointResponse` \ No newline at end of file diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst index 66c621997..1d5244f0a 100644 --- a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst +++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst @@ -10,42 +10,42 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse Delete the AI/BI dashboard embedding access policy. - + Delete the AI/BI dashboard embedding access policy, reverting back to the default. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting Retrieve the AI/BI dashboard embedding access policy. - + Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS, permitting AI/BI dashboards to be embedded on approved domains. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. 
That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting Update the AI/BI dashboard embedding access policy. - + Updates the AI/BI dashboard embedding access policy at the workspace level. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting` @@ -55,10 +55,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting` \ No newline at end of file diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst index 0c9294130..546d9ad7d 100644 --- a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst +++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst @@ -10,43 +10,43 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse Delete AI/BI dashboard embedding approved domains. - + Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default empty list. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting Retrieve the list of domains approved to host embedded AI/BI dashboards. - + Retrieves the list of domains approved to host embedded AI/BI dashboards. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting Update the list of domains approved to host embedded AI/BI dashboards. 
- + Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the current workspace access policy is not ALLOW_APPROVED_DOMAINS. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` @@ -56,10 +56,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting` \ No newline at end of file diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst index 350e0e713..748cf428a 100644 --- a/docs/workspace/settings/automatic_cluster_update.rst +++ b/docs/workspace/settings/automatic_cluster_update.rst @@ -10,28 +10,28 @@ .. py:method:: get( [, etag: Optional[str]]) -> AutomaticClusterUpdateSetting Get the automatic cluster update setting. - + Gets the automatic cluster update setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`AutomaticClusterUpdateSetting` .. py:method:: update(allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str) -> AutomaticClusterUpdateSetting Update the automatic cluster update setting. - + Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`AutomaticClusterUpdateSetting` @@ -41,10 +41,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AutomaticClusterUpdateSetting` \ No newline at end of file diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst index 855451b82..807dcc1c6 100644 --- a/docs/workspace/settings/compliance_security_profile.rst +++ b/docs/workspace/settings/compliance_security_profile.rst @@ -6,34 +6,34 @@ Controls whether to enable the compliance security profile for the current workspace. 
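The settings APIs above share a read, modify, update pattern; a hedged sketch using the approved-domains setting, where the domain and the exact field-mask path are assumptions rather than values confirmed by the docs above::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Read the current setting, modify it locally, and write it back with an explicit mask.
    setting = w.settings.aibi_dashboard_embedding_approved_domains.get()
    setting.aibi_dashboard_embedding_approved_domains.approved_domains = ["example.com"]

    updated = w.settings.aibi_dashboard_embedding_approved_domains.update(
        allow_missing=True,
        setting=setting,
        field_mask="aibi_dashboard_embedding_approved_domains.approved_domains",  # assumed path
    )
    print(updated.etag)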
Enabling it on a workspace is permanent. By default, it is turned off. - + This setting can NOT be disabled once it is enabled. .. py:method:: get( [, etag: Optional[str]]) -> ComplianceSecurityProfileSetting Get the compliance security profile setting. - + Gets the compliance security profile setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`ComplianceSecurityProfileSetting` .. py:method:: update(allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str) -> ComplianceSecurityProfileSetting Update the compliance security profile setting. - + Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`ComplianceSecurityProfileSetting` @@ -43,10 +43,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`ComplianceSecurityProfileSetting` \ No newline at end of file diff --git a/docs/workspace/settings/credentials_manager.rst b/docs/workspace/settings/credentials_manager.rst index c8bfa4f30..ea3162f6c 100644 --- a/docs/workspace/settings/credentials_manager.rst +++ b/docs/workspace/settings/credentials_manager.rst @@ -10,16 +10,16 @@ .. py:method:: exchange_token(partition_id: PartitionId, token_type: List[TokenType], scopes: List[str]) -> ExchangeTokenResponse Exchange token. - + Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to determine token permissions. - + :param partition_id: :class:`PartitionId` The partition of the Credentials store :param token_type: List[:class:`TokenType`] A list of token types being requested :param scopes: List[str] Array of scopes for the token request. - + :returns: :class:`ExchangeTokenResponse` \ No newline at end of file diff --git a/docs/workspace/settings/dashboard_email_subscriptions.rst b/docs/workspace/settings/dashboard_email_subscriptions.rst index bdb4777eb..22da502f6 100644 --- a/docs/workspace/settings/dashboard_email_subscriptions.rst +++ b/docs/workspace/settings/dashboard_email_subscriptions.rst @@ -11,41 +11,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDashboardEmailSubscriptionsResponse Delete the Dashboard Email Subscriptions setting. - + Reverts the Dashboard Email Subscriptions setting to its default value.
- + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDashboardEmailSubscriptionsResponse` .. py:method:: get( [, etag: Optional[str]]) -> DashboardEmailSubscriptions Get the Dashboard Email Subscriptions setting. - + Gets the Dashboard Email Subscriptions setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DashboardEmailSubscriptions` .. py:method:: update(allow_missing: bool, setting: DashboardEmailSubscriptions, field_mask: str) -> DashboardEmailSubscriptions Update the Dashboard Email Subscriptions setting. - + Updates the Dashboard Email Subscriptions setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DashboardEmailSubscriptions` @@ -55,10 +55,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DashboardEmailSubscriptions` \ No newline at end of file diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst index 960949930..a98d09b41 100644 --- a/docs/workspace/settings/default_namespace.rst +++ b/docs/workspace/settings/default_namespace.rst @@ -6,61 +6,61 @@ The default namespace setting API allows users to configure the default namespace for a Databricks workspace. - + Through this API, users can retrieve, set, or modify the default namespace used when queries do not reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the object 'retail_prod.default.myTable' (the schema 'default' is always assumed). - + This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDefaultNamespaceSettingResponse Delete the default namespace setting. - + Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the `DELETE` request. 
If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDefaultNamespaceSettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> DefaultNamespaceSetting Get the default namespace setting. - + Gets the default namespace setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DefaultNamespaceSetting` .. py:method:: update(allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting Update the default namespace setting. - + Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the etag is present in the error response, which should be set in the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DefaultNamespaceSetting` @@ -77,10 +77,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DefaultNamespaceSetting` \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst index 214fd7dfb..b72398c44 100644 --- a/docs/workspace/settings/disable_legacy_access.rst +++ b/docs/workspace/settings/disable_legacy_access.rst @@ -5,7 +5,7 @@ .. py:class:: DisableLegacyAccessAPI 'Disabling legacy access' has the following impacts: - + 1. Disables direct access to Hive Metastores from the workspace. However, you can still access a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external location access from the workspace. 3. Disables Databricks Runtime versions prior to 13.3LTS. @@ -13,41 +13,41 @@ .. 
py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse Delete Legacy Access Disablement Status. - + Deletes legacy access disablement status. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyAccessResponse` .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess Retrieve Legacy Access Disablement Status. - + Retrieves the legacy access disablement status. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyAccess` .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess Update Legacy Access Disablement Status. - + Updates legacy access disablement status. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyAccess` @@ -57,10 +57,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyAccess` \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst index f340c193d..8d56e058c 100644 --- a/docs/workspace/settings/disable_legacy_dbfs.rst +++ b/docs/workspace/settings/disable_legacy_dbfs.rst @@ -5,10 +5,10 @@ .. py:class:: DisableLegacyDbfsAPI Disabling legacy DBFS has the following implications: - + 1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). 2. Disables Databricks Runtime versions prior to 13.3LTS. - + When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a manual restart of all-purpose compute clusters and SQL warehouses. @@ -16,41 +16,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyDbfsResponse Delete the disable legacy DBFS setting. - + Deletes the disable legacy DBFS setting for a workspace, reverting to the default. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided.
This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteDisableLegacyDbfsResponse` .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyDbfs Get the disable legacy DBFS setting. - + Gets the disable legacy DBFS setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DisableLegacyDbfs` .. py:method:: update(allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs Update the disable legacy DBFS setting. - + Updates the disable legacy DBFS setting for the workspace. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`DisableLegacyDbfs` @@ -60,10 +60,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`DisableLegacyDbfs` \ No newline at end of file diff --git a/docs/workspace/settings/enable_export_notebook.rst b/docs/workspace/settings/enable_export_notebook.rst index 4f4e61784..30d17e46a 100644 --- a/docs/workspace/settings/enable_export_notebook.rst +++ b/docs/workspace/settings/enable_export_notebook.rst @@ -10,19 +10,19 @@ .. py:method:: get_enable_export_notebook() -> EnableExportNotebook Get the Notebook and File exporting setting. - + Gets the Notebook and File exporting setting. - + :returns: :class:`EnableExportNotebook` .. py:method:: patch_enable_export_notebook(allow_missing: bool, setting: EnableExportNotebook, field_mask: str) -> EnableExportNotebook Update the Notebook and File exporting setting. - + Updates the Notebook and File exporting setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableExportNotebook` @@ -32,10 +32,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
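As a hedged illustration of the field-mask advice above, the sketch below updates only the boolean value of the Notebook and File exporting setting and names that field explicitly instead of passing `*`. The `boolean_val` field and `BooleanMessage` wrapper are assumptions modeled on other simple settings in this SDK, not confirmed by this page:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # Disable notebook and file export, touching only the field named in
    # field_mask; other fields of the setting are left as they are.
    w.settings.enable_export_notebook.patch_enable_export_notebook(
        allow_missing=True,
        setting=settings.EnableExportNotebook(
            boolean_val=settings.BooleanMessage(value=False)
        ),
        field_mask="boolean_val.value",
    )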
- + :returns: :class:`EnableExportNotebook` \ No newline at end of file diff --git a/docs/workspace/settings/enable_notebook_table_clipboard.rst b/docs/workspace/settings/enable_notebook_table_clipboard.rst index 6305058c3..2a9c394a0 100644 --- a/docs/workspace/settings/enable_notebook_table_clipboard.rst +++ b/docs/workspace/settings/enable_notebook_table_clipboard.rst @@ -10,19 +10,19 @@ .. py:method:: get_enable_notebook_table_clipboard() -> EnableNotebookTableClipboard Get the Results Table Clipboard features setting. - + Gets the Results Table Clipboard features setting. - + :returns: :class:`EnableNotebookTableClipboard` .. py:method:: patch_enable_notebook_table_clipboard(allow_missing: bool, setting: EnableNotebookTableClipboard, field_mask: str) -> EnableNotebookTableClipboard Update the Results Table Clipboard features setting. - + Updates the Results Table Clipboard features setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableNotebookTableClipboard` @@ -32,10 +32,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnableNotebookTableClipboard` \ No newline at end of file diff --git a/docs/workspace/settings/enable_results_downloading.rst b/docs/workspace/settings/enable_results_downloading.rst index ba2954df5..0769eca22 100644 --- a/docs/workspace/settings/enable_results_downloading.rst +++ b/docs/workspace/settings/enable_results_downloading.rst @@ -9,19 +9,19 @@ .. py:method:: get_enable_results_downloading() -> EnableResultsDownloading Get the Notebook results download setting. - + Gets the Notebook results download setting. - + :returns: :class:`EnableResultsDownloading` .. py:method:: patch_enable_results_downloading(allow_missing: bool, setting: EnableResultsDownloading, field_mask: str) -> EnableResultsDownloading Update the Notebook results download setting. - + Updates the Notebook results download setting. The model follows eventual consistency, which means the get after the update operation might receive stale values for some time. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnableResultsDownloading` @@ -31,10 +31,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
- + :returns: :class:`EnableResultsDownloading` \ No newline at end of file diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst index c9dfb547d..d0f9eee3d 100644 --- a/docs/workspace/settings/enhanced_security_monitoring.rst +++ b/docs/workspace/settings/enhanced_security_monitoring.rst @@ -7,35 +7,35 @@ Controls whether enhanced security monitoring is enabled for the current workspace. By default, it is disabled. However, if the compliance security profile is enabled, this is automatically enabled. - + If the compliance security profile is disabled, you can enable or disable this setting and it is not permanent. .. py:method:: get( [, etag: Optional[str]]) -> EnhancedSecurityMonitoringSetting Get the enhanced security monitoring setting. - + Gets the enhanced security monitoring setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`EnhancedSecurityMonitoringSetting` .. py:method:: update(allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str) -> EnhancedSecurityMonitoringSetting Update the enhanced security monitoring setting. - + Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`EnhancedSecurityMonitoringSetting` @@ -45,10 +45,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`EnhancedSecurityMonitoringSetting` \ No newline at end of file diff --git a/docs/workspace/settings/ip_access_lists.rst b/docs/workspace/settings/ip_access_lists.rst index deed40548..03061165d 100644 --- a/docs/workspace/settings/ip_access_lists.rst +++ b/docs/workspace/settings/ip_access_lists.rst @@ -5,21 +5,21 @@ .. py:class:: IpAccessListsAPI The IP Access List API enables admins to configure IP access lists. - + IP access lists affect web application access and REST API access to this workspace only. If the feature is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists (inclusion) and block lists (exclusion). - + When a connection is attempted: 1.
**First, all block lists are checked.** If the connection IP address matches any block list, the connection is rejected. 2. **If the connection was not rejected by block lists**, the IP address is compared with the allow lists. - + If there is at least one allow list for the workspace, the connection is allowed only if the IP address matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed. - + For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. - + After changes to the IP access list feature, it can take a few minutes for changes to take effect. .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse @@ -46,44 +46,44 @@ w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Create access list. - + Creates an IP access list for this workspace. - + A list can be an allow list or a block list. See the top of this file for a description of how the server treats allow lists and block lists at runtime. - + When creating or updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus - + :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param ip_addresses: List[str] (optional) - + :returns: :class:`CreateIpAccessListResponse` .. py:method:: delete(ip_access_list_id: str) Delete access list. - + Deletes an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - - + + .. py:method:: get(ip_access_list_id: str) -> FetchIpAccessListResponse @@ -112,12 +112,12 @@ w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Get access list. - + Gets an IP access list, specified by its list ID. - + :param ip_access_list_id: str The ID for the corresponding IP access list - + :returns: :class:`FetchIpAccessListResponse` @@ -135,9 +135,9 @@ all = w.ip_access_lists.list() Get access lists. - + Gets all IP access lists for the specified workspace. - + :returns: Iterator over :class:`IpAccessListInfo` @@ -173,9 +173,9 @@ w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id) Replace access list. - + Replaces an IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. When replacing an IP access list: * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one @@ -184,42 +184,42 @@ returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take effect. 
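For example, a full replacement might look like the hedged sketch below; the list ID and addresses are placeholders, and the call shape simply mirrors the signature documented here:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # Replace the list's label, type, enablement, and addresses in one call.
    w.ip_access_lists.replace(
        ip_access_list_id="<list-id>",  # placeholder, not a real ID
        label="corp-vpn",
        list_type=settings.ListType.ALLOW,
        enabled=True,
        ip_addresses=["1.0.0.0/16"],
    )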
Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param label: str Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. :param enabled: bool Specifies whether this IP access list is enabled. :param ip_addresses: List[str] (optional) - - + + .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]]) Update access list. - + Updates an existing IP access list, specified by its ID. - + A list can include allow lists and block lists. See the top of this file for a description of how the server treats allow lists and block lists at run time. - + When updating an IP access list: - + * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP, error 400 is returned with `error_code` value `INVALID_STATE`. - + It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See :method:workspaceconf/setStatus. - + :param ip_access_list_id: str The ID for the corresponding IP access list :param enabled: bool (optional) @@ -229,9 +229,9 @@ Label for the IP access list. This **cannot** be empty. :param list_type: :class:`ListType` (optional) Type of IP access list. Valid values are as follows and are case-sensitive: - + * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even if they are included in an allow list. - - + + \ No newline at end of file diff --git a/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst b/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst index 2e6ff8626..6f464addb 100644 --- a/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst +++ b/docs/workspace/settings/llm_proxy_partner_powered_workspace.rst @@ -9,41 +9,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteLlmProxyPartnerPoweredWorkspaceResponse Delete the enable partner powered AI features workspace setting. - + Reverts the enable partner powered AI features workspace setting to its default value. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteLlmProxyPartnerPoweredWorkspaceResponse` .. py:method:: get( [, etag: Optional[str]]) -> LlmProxyPartnerPoweredWorkspace Get the enable partner powered AI features workspace setting. 
- + Gets the enable partner powered AI features workspace setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`LlmProxyPartnerPoweredWorkspace` .. py:method:: update(allow_missing: bool, setting: LlmProxyPartnerPoweredWorkspace, field_mask: str) -> LlmProxyPartnerPoweredWorkspace Update the enable partner powered AI features workspace setting. - + Updates the enable partner powered AI features workspace setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`LlmProxyPartnerPoweredWorkspace` @@ -53,10 +53,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`LlmProxyPartnerPoweredWorkspace` \ No newline at end of file diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst index 8fb2d0c3c..45c8abea1 100644 --- a/docs/workspace/settings/notification_destinations.rst +++ b/docs/workspace/settings/notification_destinations.rst @@ -12,64 +12,64 @@ .. py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination Create a notification destination. - + Creates a notification destination. Requires workspace admin permissions. - + :param config: :class:`Config` (optional) The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. - + :returns: :class:`NotificationDestination` .. py:method:: delete(id: str) Delete a notification destination. - + Deletes a notification destination. Requires workspace admin permissions. - + :param id: str - - + + .. py:method:: get(id: str) -> NotificationDestination Get a notification destination. - + Gets a notification destination. - + :param id: str - + :returns: :class:`NotificationDestination` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult] List notification destinations. - + Lists notification destinations. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListNotificationDestinationsResult` .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination Update a notification destination. - + Updates a notification destination. Requires workspace admin permissions. At least one field is required in the request body. - + :param id: str UUID identifying notification destination. :param config: :class:`Config` (optional) The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs. :param display_name: str (optional) The display name for the notification destination. - + :returns: :class:`NotificationDestination` \ No newline at end of file diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst index b025112cc..c2853d133 100644 --- a/docs/workspace/settings/restrict_workspace_admins.rst +++ b/docs/workspace/settings/restrict_workspace_admins.rst @@ -17,47 +17,47 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteRestrictWorkspaceAdminsSettingResponse Delete the restrict workspace admins setting. - + Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse` .. py:method:: get( [, etag: Optional[str]]) -> RestrictWorkspaceAdminsSetting Get the restrict workspace admins setting. - + Gets the restrict workspace admins setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`RestrictWorkspaceAdminsSetting` .. py:method:: update(allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str) -> RestrictWorkspaceAdminsSetting Update the restrict workspace admins setting. - + Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 response. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`RestrictWorkspaceAdminsSetting` @@ -67,10 +67,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. 
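To make the etag/409 contract concrete, here is a minimal retry sketch. It assumes HTTP 409 surfaces as `databricks.sdk.errors.ResourceConflict` and that the setting carries a `restrict_workspace_admins.status` field matching the field mask; both are assumptions based on the pattern this page describes, not guarantees of the generated client:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.errors import ResourceConflict
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    for _ in range(2):
        # Each GET returns the setting with a fresh etag embedded in it.
        current = w.settings.restrict_workspace_admins.get()
        # Field and enum names assumed from this page's conventions.
        current.restrict_workspace_admins.status = (
            settings.RestrictWorkspaceAdminsMessageStatus.ALLOW_ALL
        )
        try:
            w.settings.restrict_workspace_admins.update(
                allow_missing=True,
                setting=current,
                field_mask="restrict_workspace_admins.status",
            )
            break
        except ResourceConflict:
            # A concurrent writer won the race; re-read for a fresh etag.
            continue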
- + :returns: :class:`RestrictWorkspaceAdminsSetting` \ No newline at end of file diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst index 0d6647301..783f55b6f 100644 --- a/docs/workspace/settings/settings.rst +++ b/docs/workspace/settings/settings.rst @@ -29,7 +29,7 @@ Controls whether to enable the compliance security profile for the current workspace. Enabling it on a workspace is permanent. By default, it is turned off. - + This setting can NOT be disabled once it is enabled. .. py:property:: dashboard_email_subscriptions @@ -44,12 +44,12 @@ The default namespace setting API allows users to configure the default namespace for a Databricks workspace. - + Through this API, users can retrieve, set, or modify the default namespace used when queries do not reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the object 'retail_prod.default.myTable' (the schema 'default' is always assumed). - + This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. @@ -57,7 +57,7 @@ :type: DisableLegacyAccessAPI 'Disabling legacy access' has the following impacts: - + 1. Disables direct access to Hive Metastores from the workspace. However, you can still access a Hive Metastore through Hive Metastore federation. 2. Disables fallback mode on external location access from the workspace. 3. Disables Databricks Runtime versions prior to 13.3LTS. @@ -66,10 +66,10 @@ :type: DisableLegacyDbfsAPI Disabling legacy DBFS has the following implications: - + 1. Access to DBFS root and DBFS mounts is disallowed (as well as the creation of new mounts). 2. Disables Databricks Runtime versions prior to 13.3LTS. - + When the setting is off, all DBFS functionality is enabled and no restrictions are imposed on Databricks Runtime versions. This setting can take up to 20 minutes to take effect and requires a manual restart of all-purpose compute clusters and SQL warehouses. @@ -97,7 +97,7 @@ Controls whether enhanced security monitoring is enabled for the current workspace. By default, it is disabled. However, if the compliance security profile is enabled, this is automatically enabled. - + If the compliance security profile is disabled, you can enable or disable this setting and it is not permanent. diff --git a/docs/workspace/settings/sql_results_download.rst b/docs/workspace/settings/sql_results_download.rst index 1e378f530..8cf1cc13a 100644 --- a/docs/workspace/settings/sql_results_download.rst +++ b/docs/workspace/settings/sql_results_download.rst @@ -10,41 +10,41 @@ .. py:method:: delete( [, etag: Optional[str]]) -> DeleteSqlResultsDownloadResponse Delete the SQL Results Download setting. - + Reverts the SQL Results Download setting to its default value. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting.
- + :returns: :class:`DeleteSqlResultsDownloadResponse` .. py:method:: get( [, etag: Optional[str]]) -> SqlResultsDownload Get the SQL Results Download setting. - + Gets the SQL Results Download setting. - + :param etag: str (optional) etag used for versioning. The response is at least as fresh as the eTag provided. This is used for optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET request, and pass it with the DELETE request to identify the rule set version you are deleting. - + :returns: :class:`SqlResultsDownload` .. py:method:: update(allow_missing: bool, setting: SqlResultsDownload, field_mask: str) -> SqlResultsDownload Update the SQL Results Download setting. - + Updates the SQL Results Download setting. - + :param allow_missing: bool This should always be set to true for Settings API. Added for AIP compliance. :param setting: :class:`SqlResultsDownload` @@ -54,10 +54,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`SqlResultsDownload` \ No newline at end of file diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst index b36f08396..ceaa64cc6 100644 --- a/docs/workspace/settings/token_management.rst +++ b/docs/workspace/settings/token_management.rst @@ -35,29 +35,29 @@ w.token_management.delete(token_id=obo.token_info.token_id) Create on-behalf token. - + Creates a token on behalf of a service principal. - + :param application_id: str Application ID of the service principal. :param comment: str (optional) Comment that describes the purpose of the token. :param lifetime_seconds: int (optional) The number of seconds before the token expires. - + :returns: :class:`CreateOboTokenResponse` .. py:method:: delete(token_id: str) Delete a token. - + Deletes a token, specified by its ID. - + :param token_id: str The ID of the token to revoke. - - + + .. py:method:: get(token_id: str) -> GetTokenResponse @@ -90,30 +90,30 @@ w.token_management.delete(token_id=obo.token_info.token_id) Get token info. - + Gets information about a token, specified by its ID. - + :param token_id: str The ID of the token to get. - + :returns: :class:`GetTokenResponse` .. py:method:: get_permission_levels() -> GetTokenPermissionLevelsResponse Get token permission levels. - + Gets the permission levels that a user can have on an object. - + :returns: :class:`GetTokenPermissionLevelsResponse` .. py:method:: get_permissions() -> TokenPermissions Get token permissions. - + Gets the permissions of all tokens. Tokens can inherit permissions from their root object. - + :returns: :class:`TokenPermissions` @@ -132,36 +132,36 @@ all = w.token_management.list(settings.ListTokenManagementRequest()) List all tokens. - + Lists all tokens associated with the specified workspace or user. - + :param created_by_id: int (optional) User ID of the user that created the token. 
:param created_by_username: str (optional) Username of the user that created the token. - + :returns: Iterator over :class:`TokenInfo` .. py:method:: set_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions Set token permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` .. py:method:: update_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions Update token permissions. - + Updates the permissions on all tokens. Tokens can inherit permissions from their root object. - + :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional) - + :returns: :class:`TokenPermissions` \ No newline at end of file diff --git a/docs/workspace/settings/tokens.rst b/docs/workspace/settings/tokens.rst index 966f1093d..200eb9c83 100644 --- a/docs/workspace/settings/tokens.rst +++ b/docs/workspace/settings/tokens.rst @@ -26,33 +26,33 @@ w.tokens.delete(token_id=token.token_info.token_id) Create a user token. - + Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns an error **QUOTA_EXCEEDED**. - + :param comment: str (optional) Optional description to attach to the token. :param lifetime_seconds: int (optional) The lifetime of the token, in seconds. - + If the lifetime is not specified, this token remains valid indefinitely. - + :returns: :class:`CreateTokenResponse` .. py:method:: delete(token_id: str) Revoke token. - + Revokes an access token. - + If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**. - + :param token_id: str The ID of the token to be revoked. - - + + .. py:method:: list() -> Iterator[PublicTokenInfo] @@ -69,8 +69,8 @@ all = w.tokens.list() List tokens. - + Lists all the valid tokens for a user-workspace pair. - + :returns: Iterator over :class:`PublicTokenInfo` \ No newline at end of file diff --git a/docs/workspace/settings/workspace_conf.rst b/docs/workspace/settings/workspace_conf.rst index 3759de043..d73b16180 100644 --- a/docs/workspace/settings/workspace_conf.rst +++ b/docs/workspace/settings/workspace_conf.rst @@ -20,20 +20,20 @@ conf = w.workspace_conf.get_status(keys="enableWorkspaceFilesystem") Check configuration status. - + Gets the configuration status for a workspace. - + :param keys: str - + :returns: Dict[str,str] .. py:method:: set_status(contents: Dict[str, str]) Enable/disable features. - + Sets the configuration status for a workspace, including enabling or disabling it. - - - + + + \ No newline at end of file diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index 90e922180..d78dd62a0 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -33,10 +33,10 @@ w.providers.delete(name=created.name) Create an auth provider. - + Creates a new authentication provider minimally based on a name and authentication type. The caller must be an admin on the metastore. - + :param name: str The name of the Provider. 
:param authentication_type: :class:`AuthenticationType` @@ -46,21 +46,21 @@ :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. - + :returns: :class:`ProviderInfo` .. py:method:: delete(name: str) Delete a provider. - + Deletes an authentication provider, if the caller is a metastore admin or is the owner of the provider. - + :param name: str Name of the provider. - - + + .. py:method:: get(name: str) -> ProviderInfo @@ -91,13 +91,13 @@ w.providers.delete(name=created.name) Get a provider. - + Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the provider. - + :param name: str Name of the provider. - + :returns: :class:`ProviderInfo` @@ -129,11 +129,11 @@ w.providers.delete(name=created.name) List providers. - + Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There is no guarantee of a specific ordering of the elements in the array. - + :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. @@ -147,17 +147,17 @@ from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ProviderInfo` .. py:method:: list_provider_share_assets(provider_name: str, share_name: str [, function_max_results: Optional[int], notebook_max_results: Optional[int], table_max_results: Optional[int], volume_max_results: Optional[int]]) -> ListProviderShareAssetsResponse List assets by provider share. - + Get arrays of assets associated with a specified provider's share. The caller is the recipient of the share. - + :param provider_name: str The name of the provider who owns the share. :param share_name: str @@ -170,7 +170,7 @@ Maximum number of tables to return. :param volume_max_results: int (optional) Maximum number of volumes to return. - + :returns: :class:`ListProviderShareAssetsResponse` @@ -202,11 +202,11 @@ w.providers.delete(name=created.name) List shares by Provider. - + Gets an array of a specified provider's shares within the metastore where: - + * the caller is a metastore admin, or * the caller is the owner. - + :param name: str Name of the provider in which to list shares. :param max_results: int (optional) @@ -219,7 +219,7 @@ response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ProviderShare` @@ -251,11 +251,11 @@ w.providers.delete(name=created.name) Update a provider. - + Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. If the update changes the provider name, the caller must be both a metastore admin and the owner of the provider. - + :param name: str Name of the provider. :param comment: str (optional) @@ -267,6 +267,6 @@ :param recipient_profile_str: str (optional) This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided. 
- + :returns: :class:`ProviderInfo` \ No newline at end of file diff --git a/docs/workspace/sharing/recipient_activation.rst b/docs/workspace/sharing/recipient_activation.rst index 2c214d9c0..bc8ac2715 100644 --- a/docs/workspace/sharing/recipient_activation.rst +++ b/docs/workspace/sharing/recipient_activation.rst @@ -8,30 +8,30 @@ the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data provider to download the credential file that includes the access token. The recipient will then use the credential file to establish a secure connection with the provider to receive the shared data. - + Note that you can download the credential file only once. Recipients should treat the downloaded credential as a secret and must not share it outside of their organization. .. py:method:: get_activation_url_info(activation_url: str) Get a share activation URL. - + Gets an activation URL for a share. - + :param activation_url: str The one-time activation URL. It also accepts an activation token. - - + + .. py:method:: retrieve_token(activation_url: str) -> RetrieveTokenResponse Get an access token. - + Retrieves an access token with an activation URL. This is a public API without any authentication. - + :param activation_url: str The one-time activation URL. It also accepts an activation token. - + :returns: :class:`RetrieveTokenResponse` \ No newline at end of file diff --git a/docs/workspace/sharing/recipient_federation_policies.rst b/docs/workspace/sharing/recipient_federation_policies.rst index b8fe0b856..bd63cb0a8 100644 --- a/docs/workspace/sharing/recipient_federation_policies.rst +++ b/docs/workspace/sharing/recipient_federation_policies.rst @@ -19,7 +19,7 @@ Multi-Factor Authentication (MFA), and enhances security by minimizing the risk of credential leakage through the use of short-lived, expiring tokens. It is designed for strong identity governance, secure cross-platform data sharing, and reduced operational overhead for credential management. - + For more information, see https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security and https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed @@ -27,88 +27,88 @@ .. py:method:: create(recipient_name: str, policy: FederationPolicy) -> FederationPolicy Create recipient federation policy. - + Create a federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must be the owner of the recipient. When sharing data from Databricks to non-Databricks clients, you can define a federation policy to authenticate non-Databricks recipients. The federation policy validates OIDC claims in federated tokens and is defined at the recipient level. This enables secretless sharing clients to authenticate using OIDC tokens. - + Supported scenarios for federation policies: 1. **User-to-Machine (U2M) flow** (e.g., PowerBI): A user accesses a resource using their own identity. 2. **Machine-to-Machine (M2M) flow** (e.g., OAuth App): An OAuth App accesses a resource using its own identity, typically for tasks like running nightly jobs.
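A hedged sketch of the M2M case follows (see the guides referenced after the sketch for full configuration). The `OidcFederationPolicy` field names are partly inferred, e.g. from the `update_mask` example `'comment,oidc_policy.audiences'` used later on this page, and every value is a placeholder:

.. code-block::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sharing

    w = WorkspaceClient()

    # Trust tokens minted by an external IdP for one OAuth client identity.
    w.recipient_federation_policies.create(
        recipient_name="my-oidc-recipient",  # placeholder recipient name
        policy=sharing.FederationPolicy(
            comment="M2M access for a nightly sync job",
            oidc_policy=sharing.OidcFederationPolicy(
                issuer="https://idp.example.com",  # placeholder issuer
                audiences=["databricks-sharing"],  # placeholder audience
                subject_claim="sub",  # claim assumed to identify the client
                subject="nightly-job-client-id",  # placeholder subject
            ),
        ),
    )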
- + For an overview, refer to: - Blog post: Overview of feature: https://www.databricks.com/blog/announcing-oidc-token-federation-enhanced-delta-sharing-security - + For detailed configuration guides based on your use case: - Creating a Federation Policy as a provider: https://docs.databricks.com/en/delta-sharing/create-recipient-oidc-fed - Configuration and usage for Machine-to-Machine (M2M) applications (e.g., Python Delta Sharing Client): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-m2m - Configuration and usage for User-to-Machine (U2M) applications (e.g., PowerBI): https://docs.databricks.com/aws/en/delta-sharing/sharing-over-oidc-u2m - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being created. :param policy: :class:`FederationPolicy` - + :returns: :class:`FederationPolicy` .. py:method:: delete(recipient_name: str, name: str) Delete recipient federation policy. - + Deletes an existing federation policy for an OIDC_FEDERATION recipient. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being deleted. :param name: str Name of the policy. This is the name of the policy to be deleted. - - + + .. py:method:: get_federation_policy(recipient_name: str, name: str) -> FederationPolicy Get recipient federation policy. - + Reads an existing federation policy for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being retrieved. :param name: str Name of the policy. This is the name of the policy to be retrieved. - + :returns: :class:`FederationPolicy` .. py:method:: list(recipient_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy] List recipient federation policies. - + Lists federation policies for an OIDC_FEDERATION recipient for sharing data from Databricks to non-Databricks recipients. The caller must have read access to the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policies are being listed. :param max_results: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`FederationPolicy` .. py:method:: update(recipient_name: str, name: str, policy: FederationPolicy [, update_mask: Optional[str]]) -> FederationPolicy Update recipient federation policy. - + Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of the recipient. - + :param recipient_name: str Name of the recipient. This is the name of the recipient for which the policy is being updated. :param name: str @@ -120,6 +120,6 @@ should be updated (full replacement). If unspecified, all fields that are set in the policy provided in the update request will overwrite the corresponding fields in the existing policy. Example value: 'comment,oidc_policy.audiences'. 
- + :returns: :class:`FederationPolicy` \ No newline at end of file diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst index 2afcc4baf..e640bf038 100644 --- a/docs/workspace/sharing/recipients.rst +++ b/docs/workspace/sharing/recipients.rst @@ -7,12 +7,12 @@ A recipient is an object you create using :method:recipients/create to represent an organization to which you want to grant access to shares. How sharing works differs depending on whether your recipient has access to a Databricks workspace that is enabled for Unity Catalog: - + - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier is the key identifier that enables the secure connection. This sharing mode is called **Databricks-to-Databricks sharing**. - + - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you create a recipient object, Databricks generates an activation link you can send to the recipient. The recipient follows the activation link to download the credential file, and then uses the credential file @@ -37,10 +37,10 @@ w.recipients.delete(name=created.name) Create a share recipient. - + Creates a new recipient with the delta sharing authentication type in the metastore. The caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. - + :param name: str Name of Recipient. :param authentication_type: :class:`AuthenticationType` @@ -64,20 +64,20 @@ :param sharing_code: str (optional) The one-time sharing code provided by the data recipient. This field is only present when the __authentication_type__ is **DATABRICKS**. - + :returns: :class:`RecipientInfo` .. py:method:: delete(name: str) Delete a share recipient. - + Deletes the specified recipient from the metastore. The caller must be the owner of the recipient. - + :param name: str Name of the recipient. - - + + .. py:method:: get(name: str) -> RecipientInfo @@ -101,14 +101,14 @@ w.recipients.delete(name=created.name) Get a share recipient. - + Gets a share recipient from the metastore if: - + * the caller is the owner of the share recipient, or: * is a metastore admin - + :param name: str Name of the recipient. - + :returns: :class:`RecipientInfo` @@ -127,12 +12,12 @@ all = w.recipients.list(sharing.ListRecipientsRequest()) List share recipients. - + Gets an array of all share recipients within the current metastore where: - + * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific ordering of the elements in the array. - + :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. @@ -146,7 +146,7 @@ from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`RecipientInfo` @@ -171,17 +171,17 @@ w.recipients.delete(name=created.name) Rotate a token. - + Refreshes the specified recipient's delta sharing authentication token with the provided token info. The caller must be the owner of the recipient. - + :param name: str The name of the Recipient. :param existing_token_expire_in_seconds: int The expiration time of the bearer token in ISO 8601 format.
This will only set the expiration_time of the existing token to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire the existing token immediately; a negative number will return an error. - + :returns: :class:`RecipientInfo` @@ -206,10 +206,10 @@ w.recipients.delete(name=created.name) Get recipient share permissions. - + Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the owner of the Recipient. - + :param name: str The name of the Recipient. :param max_results: int (optional) @@ -222,7 +222,7 @@ unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetRecipientSharePermissionsResponse` @@ -247,11 +247,11 @@ w.recipients.delete(name=created.name) Update a share recipient. - + Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. If the recipient name is updated, the user must be both a metastore admin and the owner of the recipient. - + :param name: str Name of the recipient. :param comment: str (optional) @@ -268,6 +268,6 @@ Recipient properties as map of string key-value pairs. When provided in update request, the specified properties will override the existing properties. To add and remove properties, one would need to perform a read-modify-write. - + :returns: :class:`RecipientInfo` \ No newline at end of file diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst index 87d001e10..05dea6902 100644 --- a/docs/workspace/sharing/shares.rst +++ b/docs/workspace/sharing/shares.rst @@ -28,30 +28,30 @@ w.shares.delete(name=created_share.name) Create a share. - + Creates a new share for data objects. Data objects can be added after creation with **update**. The caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore. - + :param name: str Name of the share. :param comment: str (optional) User-provided free-form text description. :param storage_root: str (optional) Storage root URL for the share. - + :returns: :class:`ShareInfo` .. py:method:: delete(name: str) Delete a share. - + Deletes a data object share from the metastore. The caller must be an owner of the share. - + :param name: str The name of the share. - - + + .. py:method:: get(name: str [, include_shared_data: Optional[bool]]) -> ShareInfo @@ -75,15 +75,15 @@ w.shares.delete(name=created_share.name) Get a share. - + Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param include_shared_data: bool (optional) Query for data to include in the share. - + :returns: :class:`ShareInfo` @@ -102,10 +102,10 @@ all = w.shares.list(sharing.ListSharesRequest()) List shares. - + Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. - + :param max_results: int (optional) Maximum number of shares to return. - when set to 0, the page length is set to a server configured value (recommended); - when set to a value greater than 0, the page length is the minimum of this @@ -116,17 +116,17 @@ response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: Iterator over :class:`ShareInfo` ..
py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetSharePermissionsResponse Get permissions. - + Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the owner of the share. - + :param name: str The name of the share. :param max_results: int (optional) @@ -139,7 +139,7 @@ unset from the response. :param page_token: str (optional) Opaque pagination token to go to next page based on previous query. - + :returns: :class:`GetSharePermissionsResponse` @@ -196,23 +196,23 @@ w.shares.delete(name=created_share.name) Update a share. - + Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. - + When the caller is a metastore admin, only the __owner__ field can be updated. - + In the case that the share name is changed, **updateShare** requires that the caller is both the share owner and a metastore admin. - + If there are notebook files in the share, the __storage_root__ field cannot be updated. - + For each table that is added through this method, the share owner must also have **SELECT** privilege on the table. This privilege must be maintained indefinitely for recipients to be able to access the table. Typically, you should use a group as the share owner. - + Table removals through **update** do not require additional privileges. - + :param name: str The name of the share. :param comment: str (optional) @@ -225,26 +225,26 @@ Storage root URL for the share. :param updates: List[:class:`SharedDataObjectUpdate`] (optional) Array of shared data object updates. - + :returns: :class:`ShareInfo` .. py:method:: update_permissions(name: str [, changes: Optional[List[PermissionsChange]], omit_permissions_list: Optional[bool]]) -> UpdateSharePermissionsResponse Update permissions. - + Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. - + For new recipient grants, the user must also be the recipient owner or metastore admin. Recipient revocations do not require additional privileges. - + :param name: str The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) Array of permissions change objects. :param omit_permissions_list: bool (optional) Optional. Whether to return the latest permissions list of the share in the response. - + :returns: :class:`UpdateSharePermissionsResponse` \ No newline at end of file diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst index bd0a7f766..be09efec5 100644 --- a/docs/workspace/sql/alerts.rst +++ b/docs/workspace/sql/alerts.rst @@ -51,28 +51,28 @@ w.alerts.delete(id=alert.id) Create an alert. - + Creates an alert. - + :param alert: :class:`CreateAlertRequestAlert` (optional) :param auto_resolve_display_name: bool (optional) If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. - + :returns: :class:`Alert` .. py:method:: delete(id: str) Delete an alert. - + Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. - + :param id: str - - + + .. py:method:: get(id: str) -> Alert @@ -119,11 +119,11 @@ w.alerts.delete(id=alert.id) Get an alert. - + Gets an alert.
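To make the `update_permissions` call above concrete, here is a small sketch; the share and recipient names are hypothetical, and the module that houses `PermissionsChange` (shown here as `databricks.sdk.service.sharing`) has moved between SDK versions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sharing

    w = WorkspaceClient()

    # Grant SELECT on the share to a recipient; a revocation would use
    # `remove` instead of `add`.
    resp = w.shares.update_permissions(
        name="my_share",
        changes=[sharing.PermissionsChange(principal="my_recipient", add=["SELECT"])],
    )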
- + :param id: str - + :returns: :class:`Alert` @@ -142,13 +142,13 @@ all = w.alerts.list(sql.ListAlertsRequest()) List alerts. - + Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListAlertsResponseAlert` @@ -200,9 +200,9 @@ w.alerts.delete(id=alert.id) Update an alert. - + Updates an alert. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -210,7 +210,7 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. @@ -218,6 +218,6 @@ :param auto_resolve_display_name: bool (optional) If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the alert's display name conflicts with an existing alert's display name. - + :returns: :class:`Alert` \ No newline at end of file diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst index 6dfd96128..5b048d2bf 100644 --- a/docs/workspace/sql/alerts_legacy.rst +++ b/docs/workspace/sql/alerts_legacy.rst @@ -8,24 +8,24 @@ periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert Create an alert. - + Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification destinations if the condition was met. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param name: str Name of the alert. :param options: :class:`AlertOptions` @@ -37,68 +37,68 @@ :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - + :returns: :class:`LegacyAlert` .. py:method:: delete(alert_id: str) Delete an alert. - + Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to the trash. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str - - + + .. py:method:: get(alert_id: str) -> LegacyAlert Get an alert. - + Gets an alert. 
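As a hedged illustration of the field-mask semantics that :method:alerts/update describes above, the sketch below renames an alert and leaves every other field untouched; the `UpdateAlertRequestAlert` payload class is an assumption from the `sql` dataclasses:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # Only display_name is replaced; `update_mask="*"` would instead request
    # a full replacement of the alert.
    alert = w.alerts.update(
        id="<alert-id>",
        update_mask="display_name",
        alert=sql.UpdateAlertRequestAlert(display_name="cpu-usage-high"),
    )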
- + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str - + :returns: :class:`LegacyAlert` .. py:method:: list() -> Iterator[LegacyAlert] Get alerts. - + Gets a list of alerts. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`LegacyAlert` .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]]) Update an alert. - + Updates an alert. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param alert_id: str :param name: str Name of the alert. @@ -109,6 +109,6 @@ :param rearm: int (optional) Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - - + + \ No newline at end of file diff --git a/docs/workspace/sql/alerts_v2.rst b/docs/workspace/sql/alerts_v2.rst index 68c761cdc..0c61c7d7c 100644 --- a/docs/workspace/sql/alerts_v2.rst +++ b/docs/workspace/sql/alerts_v2.rst @@ -9,56 +9,56 @@ .. py:method:: create_alert(alert: AlertV2) -> AlertV2 Create an alert. - + Create Alert - + :param alert: :class:`AlertV2` - + :returns: :class:`AlertV2` .. py:method:: get_alert(id: str) -> AlertV2 Get an alert. - + Gets an alert. - + :param id: str - + :returns: :class:`AlertV2` .. py:method:: list_alerts( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AlertV2] List alerts. - + Gets a list of alerts accessible to the user, ordered by creation time. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`AlertV2` .. py:method:: trash_alert(id: str) Delete an alert. - + Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently deleted after 30 days. - + :param id: str - - + + .. py:method:: update_alert(id: str, alert: AlertV2, update_mask: str) -> AlertV2 Update an alert. - + Update alert - + :param id: str UUID identifying the alert. :param alert: :class:`AlertV2` @@ -68,10 +68,10 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. - + :returns: :class:`AlertV2` \ No newline at end of file diff --git a/docs/workspace/sql/dashboard_widgets.rst b/docs/workspace/sql/dashboard_widgets.rst index d4bbcde1d..63e100640 100644 --- a/docs/workspace/sql/dashboard_widgets.rst +++ b/docs/workspace/sql/dashboard_widgets.rst @@ -10,7 +10,7 @@ .. py:method:: create(dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget Add widget to a dashboard. - + :param dashboard_id: str Dashboard ID returned by :method:dashboards/create. 
:param options: :class:`WidgetOptions` @@ -21,24 +21,24 @@ contains a visualization in the `visualization` field. :param visualization_id: str (optional) Query Visualization ID returned by :method:queryvisualizations/create. - + :returns: :class:`Widget` .. py:method:: delete(id: str) Remove widget. - + :param id: str Widget ID returned by :method:dashboardwidgets/create - - + + .. py:method:: update(id: str, dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget Update existing widget. - + :param id: str Widget ID returned by :method:dashboardwidgets/create :param dashboard_id: str @@ -51,6 +51,6 @@ contains a visualization in the `visualization` field. :param visualization_id: str (optional) Query Visualization ID returned by :method:queryvisualizations/create. - + :returns: :class:`Widget` \ No newline at end of file diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst index fec726459..3ed0c4b77 100644 --- a/docs/workspace/sql/dashboards.rst +++ b/docs/workspace/sql/dashboards.rst @@ -29,7 +29,7 @@ w.dashboards.delete(dashboard_id=created.id) Create a dashboard object. - + :param name: str The title of this dashboard that appears in list views and at the top of the dashboard page. :param dashboard_filters_enabled: bool (optional) @@ -42,7 +42,7 @@ Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`Dashboard` @@ -67,13 +67,13 @@ w.dashboards.delete(dashboard_id=created.id) Remove a dashboard. - + Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared. - + :param dashboard_id: str - - + + .. py:method:: get(dashboard_id: str) -> Dashboard @@ -97,11 +97,11 @@ w.dashboards.delete(dashboard_id=created.id) Retrieve a definition. - + Returns a JSON representation of a dashboard object, including its visualization and query objects. - + :param dashboard_id: str - + :returns: :class:`Dashboard` @@ -120,12 +120,12 @@ all = w.dashboards.list(sql.ListDashboardsRequest()) Get dashboard objects. - + Fetch a paginated list of dashboard objects. - + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + :param order: :class:`ListOrder` (optional) Name of dashboard attribute to order by. :param page: int (optional) @@ -134,7 +134,7 @@ Number of dashboards to return per page. :param q: str (optional) Full text search term. - + :returns: Iterator over :class:`Dashboard` @@ -159,23 +159,23 @@ w.dashboards.delete(dashboard_id=created.id) Restore a dashboard. - + A restored dashboard appears in list views and searches and can be shared. - + :param dashboard_id: str - - + + .. py:method:: update(dashboard_id: str [, name: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard Change a dashboard definition. - + Modify this dashboard definition. This operation only affects attributes of the dashboard object. It does not add, modify, or remove widgets. - + **Note**: You cannot undo this operation. - + :param dashboard_id: str :param name: str (optional) The title of this dashboard that appears in list views and at the top of the dashboard page. @@ -183,6 +183,6 @@ Sets the **Run as** role for the object.
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`Dashboard` \ No newline at end of file diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst index 8f7321fa0..472bdfb0e 100644 --- a/docs/workspace/sql/data_sources.rst +++ b/docs/workspace/sql/data_sources.rst @@ -7,13 +7,13 @@ This API is provided to assist you in making new query objects. When creating a query object, you may optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't already know the `data_source_id` for your desired SQL warehouse, this API will help you find it. - + This API does not support searches. It returns the full list of SQL warehouses in your workspace. We advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: list() -> Iterator[DataSource] @@ -30,15 +30,15 @@ srcs = w.data_sources.list() Get a list of SQL warehouses. - + Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :returns: Iterator over :class:`DataSource` \ No newline at end of file diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst index 7f9e5d19c..a5bd010f1 100644 --- a/docs/workspace/sql/dbsql_permissions.rst +++ b/docs/workspace/sql/dbsql_permissions.rst @@ -7,76 +7,76 @@ The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this exposes only one endpoint, which gets the Access Control List for a given object. You cannot modify any permissions using this API. - + There are three levels of permission: - + - `CAN_VIEW`: Allows read-only access - + - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`) - + - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse Get object ACL. - + Gets a JSON representation of the access control list (ACL) for a specified object. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:workspace/getpermissions instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. :param object_id: str Object ID. An ACL is returned for the object with this UUID. - + :returns: :class:`GetResponse` .. py:method:: set(object_type: ObjectTypePlural, object_id: str [, access_control_list: Optional[List[AccessControl]]]) -> SetResponse Set object ACL. - + Sets the access control list (ACL) for a specified object. 
This operation will completely rewrite the ACL. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:workspace/setpermissions instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`ObjectTypePlural` The type of object permission to set. :param object_id: str Object ID. The ACL for the object with this UUID is overwritten by this request's POST content. :param access_control_list: List[:class:`AccessControl`] (optional) - + :returns: :class:`SetResponse` .. py:method:: transfer_ownership(object_type: OwnableObjectType, object_id: TransferOwnershipObjectId [, new_owner: Optional[str]]) -> Success Transfer object ownership. - + Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. - + **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use :method:queries/update and :method:alerts/update respectively instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. :param object_id: :class:`TransferOwnershipObjectId` The ID of the object on which to change ownership. :param new_owner: str (optional) Email address for the new owner, who must exist in the workspace. - + :returns: :class:`Success` \ No newline at end of file diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index 66c9f0be1..f8553bead 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -37,28 +37,28 @@ w.queries.delete(id=query.id) Create a query. - + Creates a query. - + :param auto_resolve_display_name: bool (optional) If true, automatically resolve query display name conflicts. Otherwise, fail the request if the query's display name conflicts with an existing query's display name. :param query: :class:`CreateQueryRequestQuery` (optional) - + :returns: :class:`Query` .. py:method:: delete(id: str) Delete a query. - + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is permanently deleted after 30 days. - + :param id: str - - + + .. py:method:: get(id: str) -> Query @@ -92,37 +92,37 @@ w.queries.delete(id=query.id) Get a query. - + Gets a query. - + :param id: str - + :returns: :class:`Query` .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery] List queries. - + Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`ListQueryObjectsResponseQuery` .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization] List visualizations on a query. - + Gets a list of visualizations on a query. - + :param id: str :param page_size: int (optional) :param page_token: str (optional) - + :returns: Iterator over :class:`Visualization` @@ -165,9 +165,9 @@ w.queries.delete(id=query.id) Update a query. - + Updates a query.
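The two iterators documented above compose naturally; a short sketch (the `display_name` attribute is assumed from the response dataclass) that walks every accessible query and its visualizations:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Both iterators follow page_token continuations transparently.
    for q in w.queries.list(page_size=50):
        for v in w.queries.list_visualizations(id=q.id):
            print(q.display_name, v.id)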
- + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -175,7 +175,7 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. @@ -183,6 +183,6 @@ If true, automatically resolve query display name conflicts. Otherwise, fail the request if the query's display name conflicts with an existing query's display name. :param query: :class:`UpdateQueryRequestQuery` (optional) - + :returns: :class:`Query` \ No newline at end of file diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst index a7ab56836..c35ed9b69 100644 --- a/docs/workspace/sql/queries_legacy.rst +++ b/docs/workspace/sql/queries_legacy.rst @@ -7,34 +7,34 @@ These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery Create a new query definition. - + Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. - + The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the `data_source_id` from an existing query. - + **Note**: You cannot add a visualization until you create the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] - + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list :param description: str (optional) General description that conveys additional information about this query such as usage notes. @@ -52,71 +52,71 @@ Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`LegacyQuery` .. py:method:: delete(query_id: str) Delete a query. - + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete instead.
[Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - - + + .. py:method:: get(query_id: str) -> LegacyQuery Get a query definition. - + Retrieve a query object definition along with contextual permissions information about the currently authenticated user. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - + :returns: :class:`LegacyQuery` .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery] Get a list of queries. - + Gets a list of queries. Optionally, this list can be filtered by a search term. - + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order descending instead. - + - `name`: The name of the query. - + - `created_at`: The timestamp the query was created. - + - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank value is treated as the highest value for sorting. - + - `executed_at`: The timestamp when the query was last run. - + - `created_by`: The user name of the user that created the query. :param page: int (optional) Page number to retrieve. @@ -124,45 +124,45 @@ Number of queries to return per page. :param q: str (optional) Full text search term - + :returns: Iterator over :class:`LegacyQuery` .. py:method:: restore(query_id: str) Restore a query. - + Restore a query that has been moved to the trash. A restored query appears in list views and searches. You can use restored queries for alerts. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str - - + + .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery Change a query definition. - + Modify this query definition. - + **Note**: You cannot undo this operation. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID. [Learn more] - + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list :param description: str (optional) General description that conveys additional information about this query such as usage notes. @@ -178,6 +178,6 @@ Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - + :returns: :class:`LegacyQuery` \ No newline at end of file diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst index f2bccdd67..5acfb5127 100644 --- a/docs/workspace/sql/query_history.rst +++ b/docs/workspace/sql/query_history.rst @@ -26,13 +26,13 @@ ) List Queries. - + List the history of queries through SQL warehouses and serverless compute. - + You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are returned first (up to max_results in request). The pagination token returned in response can be used to list subsequent query statuses. - + :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. :param include_metrics: bool (optional) @@ -44,6 +44,6 @@ A token that can be used to get the next page of results. The token can contain characters that need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by %2B. This field is optional. - + :returns: :class:`ListQueriesResponse` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst index ac3d6c565..f0865ae0a 100644 --- a/docs/workspace/sql/query_visualizations.rst +++ b/docs/workspace/sql/query_visualizations.rst @@ -10,31 +10,31 @@ .. py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization Add a visualization to a query. - + Adds a visualization to a query. - + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) - + :returns: :class:`Visualization` .. py:method:: delete(id: str) Remove a visualization. - + Removes a visualization. - + :param id: str - - + + .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization Update a visualization. - + Updates a visualization. - + :param id: str :param update_mask: str The field mask must be a single string, with multiple fields separated by commas (no spaces). The @@ -42,11 +42,11 @@ `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only the entire collection field can be specified. Field names must exactly match the resource field names. - + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API changes in the future. :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) - + :returns: :class:`Visualization` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst index f56f78a5f..d91b97c8c 100644 --- a/docs/workspace/sql/query_visualizations_legacy.rst +++ b/docs/workspace/sql/query_visualizations_legacy.rst @@ -6,23 +6,23 @@ This is an evolving API that facilitates the addition and removal of visualizations from existing queries within the Databricks Workspace. Data structures may change over time. - + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html ..
py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization Add visualization to a query. - + Creates a visualization in the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/create instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param query_id: str The identifier returned by :method:queries/create :param type: str @@ -34,38 +34,38 @@ A short description of this visualization. This is not displayed in the UI. :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. - + :returns: :class:`LegacyVisualization` .. py:method:: delete(id: str) Remove visualization. - + Removes a visualization from the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/delete instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param id: str Widget ID returned by :method:queryvisualizations/create - - + + .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization Edit existing visualization. - + Updates a visualization in the query. - + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queryvisualizations/update instead. [Learn more] - + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html - + :param id: str The UUID for this visualization. :param created_at: str (optional) @@ -80,6 +80,6 @@ :param type: str (optional) The type of visualization: chart, table, pivot table, and so on. :param updated_at: str (optional) - + :returns: :class:`LegacyVisualization` \ No newline at end of file diff --git a/docs/workspace/sql/redash_config.rst b/docs/workspace/sql/redash_config.rst index 9b4382dd5..cee23c1bd 100644 --- a/docs/workspace/sql/redash_config.rst +++ b/docs/workspace/sql/redash_config.rst @@ -9,6 +9,6 @@ .. py:method:: get_config() -> ClientConfig Read workspace configuration for Redash-v2. - + :returns: :class:`ClientConfig` \ No newline at end of file diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst index 44f64b512..5dabcc0d2 100644 --- a/docs/workspace/sql/statement_execution.rst +++ b/docs/workspace/sql/statement_execution.rst @@ -6,13 +6,13 @@ The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result. - + **Getting started** - + We suggest beginning with the [Databricks SQL Statement Execution API tutorial]. - + **Overview of statement execution and result fetching** - + Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format. If no other parameters are specified, the server will wait for up to 10s before returning a response. If @@ -20,7 +20,7 @@ array and metadata. Otherwise, if no result is available after the 10s timeout expires, the response will provide the statement ID that can be used to poll for results by using a :method:statementexecution/getStatement request.
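A minimal sketch of the execute-then-poll flow just described, assuming a reachable warehouse ID; the `StatementState` enum members are assumptions from the `sql` dataclasses:

.. code-block:: python

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # Wait up to 10s synchronously, then fall back to asynchronous polling.
    resp = w.statement_execution.execute_statement(
        statement="SELECT 1",
        warehouse_id="<warehouse-id>",
        wait_timeout="10s",
        on_wait_timeout=sql.ExecuteStatementRequestOnWaitTimeout.CONTINUE,
    )
    while resp.status.state in (sql.StatementState.PENDING, sql.StatementState.RUNNING):
        time.sleep(2)
        resp = w.statement_execution.get_statement(resp.statement_id)
    print(resp.status.state)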
- + You can specify whether the call should behave synchronously, asynchronously or start synchronously with a fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout` settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to @@ -28,7 +28,7 @@ statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to asynchronous mode, or it can be set to `CANCEL`, which cancels the statement. - + In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30 seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns @@ -40,38 +40,38 @@ seconds; if the statement execution finishes within this time, the result data is returned directly in the response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can be used to fetch status and results in the same way as in the asynchronous mode. - + Depending on the size, the result can be split into multiple chunks. If the statement execution is successful, the statement response contains a manifest and the first chunk of the result. The manifest contains schema information and provides metadata for each chunk in the result. Result chunks can be retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a `next_chunk_index` and `next_chunk_internal_link` that point to the next chunk. - + A statement can be canceled with :method:statementexecution/cancelExecution. - + **Fetching result data: format and disposition** - + To specify the format of the result data, use the `format` field, which can be set to one of the following options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`. - + There are two ways to receive statement results, controlled by the `disposition` setting, which can be either `INLINE` or `EXTERNAL_LINKS`: - + - `INLINE`: In this mode, the result data is directly included in the response. It's best suited for smaller results. This mode can only be used with the `JSON_ARRAY` format. - + - `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`. - + By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`. - + **Limits and limitations** - + Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly match the byte count of the actual payload. - + - Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. - Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum @@ -84,33 +84,34 @@ once every 15 minutes. - The results are only available for one hour after success; polling does not extend this. 
- The SQL Execution API must be used for the entire lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it. - + [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html + .. py:method:: cancel_execution(statement_id: str) Cancel statement execution. - + Requests that an executing statement be canceled. Callers must poll for status to see the terminal state. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - - + + .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse Execute a SQL statement. - + :param statement: str The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. :param warehouse_id: str Warehouse upon which to execute a statement. See also [What are SQL warehouses?] - + [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html :param byte_limit: int (optional) Applies the given byte limit to the statement's result size. Byte counts are based on internal data @@ -120,37 +121,37 @@ explicitly set. :param catalog: str (optional) Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL. - + [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. - + Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS` disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition. - + When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values, where each value is either the *string representation* of a value, or `null`. For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would look like this: - + ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ``` - + When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result contains compact JSON with no indentation or extra whitespace. - + When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format]. - + When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a CSV according to the [RFC 4180] standard. All the column values will have *string representation* similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first chunk in the result would contain a header row with column names.
For example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this: - + ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ``` - + [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180 :param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional) @@ -165,27 +166,27 @@ of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a string. - + If the type is given, parameters will be checked for type correctness according to the given type. A value is correct if the provided string can be converted to the requested type using the `cast` function. The exact semantics are described in the section [`cast` function] of the SQL language reference. - + For example, the following statement contains two parameters, `my_name` and `my_date`: - + SELECT * FROM my_table WHERE name = :my_name AND date = :my_date - + The parameters can be passed in the request body as follows: - + { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": "2020-01-01", "type": "DATE" } ] } - + Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL Statement Execution API. - + Also see the section [Parameter markers] of the SQL language reference. - + [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html :param row_limit: int (optional) @@ -194,59 +195,59 @@ the limit or not. :param schema: str (optional) Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. - + [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html :param wait_timeout: str (optional) The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set to 0 or to a value between 5 and 50. - + When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID which can be used for polling with :method:statementexecution/getStatement. - + When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait for the statement execution to finish. If the execution finishes within this time, the call returns immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached. - + :returns: :class:`StatementResponse` .. py:method:: get_statement(statement_id: str) -> StatementResponse Get status, manifest, and result first chunk. - + This request can be used to poll for the statement's status. When the `status.state` field is `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the state set. 
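The named-parameter convention documented above for `execute_statement` maps directly onto the SDK; a sketch using the `StatementParameterListItem` shape from the signature (table and warehouse are hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # `my_date` is cast server-side to DATE; `my_name` is treated as a string
    # because its `type` field is omitted.
    resp = w.statement_execution.execute_statement(
        statement="SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
        warehouse_id="<warehouse-id>",
        parameters=[
            sql.StatementParameterListItem(name="my_name", value="the name"),
            sql.StatementParameterListItem(name="my_date", value="2020-01-01", type="DATE"),
        ],
    )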
After at least 12 hours in terminal state, the statement is removed from the warehouse and further calls will receive an HTTP 404 response. - + **NOTE** This call currently might take up to 5 seconds to get the latest status and result. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - + :returns: :class:`StatementResponse` .. py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData Get result chunk by index. - + After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index. Whereas the first chunk with `chunk_index=0` is typically fetched with :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request can be used to fetch subsequent chunks. The response structure is identical to the nested `result` element described in the :method:statementexecution/getStatement request, and similarly includes the `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set. - + :param statement_id: str The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. :param chunk_index: int - + :returns: :class:`ResultData` \ No newline at end of file diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst index 0ea8b3fc0..7695dbc8b 100644 --- a/docs/workspace/sql/warehouses.rst +++ b/docs/workspace/sql/warehouses.rst @@ -35,16 +35,16 @@ w.warehouses.delete(id=created.id) Create a warehouse. - + Creates a new SQL warehouse. - + :param auto_stop_mins: int (optional) The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped. - + Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for non-serverless warehouses - 0 indicates no autostop. - + Defaults to 120 mins :param channel: :class:`Channel` (optional) Channel Details @@ -52,14 +52,14 @@ Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, please tune max_num_clusters. - + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large :param creator_name: str (optional) warehouse creator name :param enable_photon: bool (optional) Configures whether the warehouse should use Photon optimized clusters. - + Defaults to false. :param enable_serverless_compute: bool (optional) Configures whether the warehouse should use serverless compute @@ -67,33 +67,33 @@ Deprecated. Instance profile used to pass IAM role to the cluster :param max_num_clusters: int (optional) Maximum number of clusters that the autoscaler will create to handle concurrent queries. - + Supported values: - Must be >= min_num_clusters - Must be <= 30. - + Defaults to min_clusters if unset. :param min_num_clusters: int (optional) Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce the cold start time for new queries. This is similar to reserved vs. revocable cores in a resource manager. 
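Sequential chunk fetching, as described above for :method:statementexecution/getStatementResultChunkN, sketched under the assumption that the statement already reached `SUCCEEDED` with the default `JSON_ARRAY`/`INLINE` settings (so rows arrive in `data_array`):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    statement_id = "<statement-id>"  # from a prior execute_statement call
    chunk = w.statement_execution.get_statement(statement_id).result
    while chunk is not None:
        for row in chunk.data_array or []:
            print(row)
        if chunk.next_chunk_index is None:
            break
        chunk = w.statement_execution.get_statement_result_chunk_n(
            statement_id, chunk.next_chunk_index
        )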
- + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - + Defaults to 1 :param name: str (optional) Logical name for the cluster. - + Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) Configures whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. - + Supported values: - Number of tags < 45. :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional) Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set it to `PRO` and also set the field `enable_serverless_compute` to `true`. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. @@ -105,13 +105,13 @@ .. py:method:: delete(id: str) Delete a warehouse. - + Deletes a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - - + + .. py:method:: edit(id: str [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[EditWarehouseRequestWarehouseType]]) -> Wait[GetWarehouseResponse] @@ -150,17 +150,17 @@ w.warehouses.delete(id=created.id) Update a warehouse. - + Updates the configuration for a SQL warehouse. - + :param id: str Required. Id of the warehouse to configure. :param auto_stop_mins: int (optional) The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped. - + Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. - + Defaults to 120 mins :param channel: :class:`Channel` (optional) Channel Details @@ -168,14 +168,14 @@ Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you to run larger queries on it. If you want to increase the number of concurrent queries, please tune max_num_clusters. - + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - 4X-Large :param creator_name: str (optional) warehouse creator name :param enable_photon: bool (optional) Configures whether the warehouse should use Photon optimized clusters. - + Defaults to false. :param enable_serverless_compute: bool (optional) Configures whether the warehouse should use serverless compute. @@ -183,33 +183,33 @@ Deprecated. Instance profile used to pass IAM role to the cluster :param max_num_clusters: int (optional) Maximum number of clusters that the autoscaler will create to handle concurrent queries. - + Supported values: - Must be >= min_num_clusters - Must be <= 30. - + Defaults to min_clusters if unset. :param min_num_clusters: int (optional) Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this will ensure that a larger number of clusters are always running and therefore may reduce the cold start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
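A short sketch of the create-and-wait pattern implied by the `Wait[GetWarehouseResponse]` return type above; the warehouse name and sizing are illustrative only:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # .result() blocks on the long-running operation waiter until the
    # warehouse reports RUNNING.
    created = w.warehouses.create(
        name="sdk-example-warehouse",
        cluster_size="2X-Small",
        max_num_clusters=1,
        auto_stop_mins=10,
    ).result()
    print(created.id)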
- + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - + Defaults to 1 :param name: str (optional) Logical name for the cluster. - + Supported values: - Must be unique within an org. - Must be less than 100 characters. :param spot_instance_policy: :class:`SpotInstancePolicy` (optional) Configures whether the warehouse should use spot instances. :param tags: :class:`EndpointTags` (optional) A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes) associated with this SQL warehouse. - + Supported values: - Number of tags < 45. :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional) Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set it to `PRO` and also set the field `enable_serverless_compute` to `true`. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. @@ -248,46 +248,46 @@ w.warehouses.delete(id=created.id) Get warehouse info. - + Gets the information for a single SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: :class:`GetWarehouseResponse` .. py:method:: get_permission_levels(warehouse_id: str) -> GetWarehousePermissionLevelsResponse Get SQL warehouse permission levels. - + Gets the permission levels that a user can have on an object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. - + :returns: :class:`GetWarehousePermissionLevelsResponse` .. py:method:: get_permissions(warehouse_id: str) -> WarehousePermissions Get SQL warehouse permissions. - + Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. - + :returns: :class:`WarehousePermissions` .. py:method:: get_workspace_warehouse_config() -> GetWorkspaceWarehouseConfigResponse Get the workspace configuration. - + Gets the workspace level configuration that is shared by all SQL warehouses in a workspace. - + :returns: :class:`GetWorkspaceWarehouseConfigResponse` @@ -306,36 +306,36 @@ all = w.warehouses.list(sql.ListWarehousesRequest()) List warehouses. - + Lists all SQL warehouses that a user has manager permissions on. - + :param run_as_user_id: int (optional) Service Principal which will be used to fetch the list of warehouses. If not specified, the user from the session header is used. - + :returns: Iterator over :class:`EndpointInfo` .. py:method:: set_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions Set SQL warehouse permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` ..
py:method:: set_workspace_warehouse_config( [, channel: Optional[Channel], config_param: Optional[RepeatedEndpointConfPairs], data_access_config: Optional[List[EndpointConfPair]], enabled_warehouse_types: Optional[List[WarehouseTypePair]], global_param: Optional[RepeatedEndpointConfPairs], google_service_account: Optional[str], instance_profile_arn: Optional[str], security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy], sql_configuration_parameters: Optional[RepeatedEndpointConfPairs]]) Set the workspace configuration. - + Sets the workspace level configuration that is shared by all SQL warehouses in a workspace. - + :param channel: :class:`Channel` (optional) Optional: Channel selection details :param config_param: :class:`RepeatedEndpointConfPairs` (optional) @@ -358,19 +358,19 @@ Security policy for warehouses :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional) SQL configuration parameters - - + + .. py:method:: start(id: str) -> Wait[GetWarehouseResponse] Start a warehouse. - + Starts a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_running for more details. @@ -382,12 +382,12 @@ .. py:method:: stop(id: str) -> Wait[GetWarehouseResponse] Stop a warehouse. - + Stops a SQL warehouse. - + :param id: str Required. Id of the SQL warehouse. - + :returns: Long-running operation waiter for :class:`GetWarehouseResponse`. See :method:wait_get_warehouse_stopped for more details. @@ -399,14 +399,14 @@ .. py:method:: update_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions Update SQL warehouse permissions. - + Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object. - + :param warehouse_id: str The SQL warehouse for which to get or manage permissions. :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional) - + :returns: :class:`WarehousePermissions` diff --git a/docs/workspace/vectorsearch/vector_search_endpoints.rst b/docs/workspace/vectorsearch/vector_search_endpoints.rst index 07f6783d6..50c335064 100644 --- a/docs/workspace/vectorsearch/vector_search_endpoints.rst +++ b/docs/workspace/vectorsearch/vector_search_endpoints.rst @@ -9,16 +9,16 @@ .. py:method:: create_endpoint(name: str, endpoint_type: EndpointType [, budget_policy_id: Optional[str]]) -> Wait[EndpointInfo] Create an endpoint. - + Create a new endpoint. - + :param name: str Name of the vector search endpoint :param endpoint_type: :class:`EndpointType` Type of endpoint :param budget_policy_id: str (optional) The budget policy id to be applied - + :returns: Long-running operation waiter for :class:`EndpointInfo`. See :method:wait_get_endpoint_vector_search_endpoint_online for more details. @@ -30,62 +30,62 @@ .. py:method:: delete_endpoint(endpoint_name: str) Delete an endpoint. - + Delete a vector search endpoint. - + :param endpoint_name: str Name of the vector search endpoint - - + + .. py:method:: get_endpoint(endpoint_name: str) -> EndpointInfo Get an endpoint. - + Get details for a single vector search endpoint. - + :param endpoint_name: str Name of the endpoint - + :returns: :class:`EndpointInfo` .. py:method:: list_endpoints( [, page_token: Optional[str]]) -> Iterator[EndpointInfo] List all endpoints. - + List all vector search endpoints in the workspace. 
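As a concrete illustration of the endpoint lifecycle documented in this section, the sketch below creates an endpoint, waits for it to come online, lists all endpoints, and deletes it again. The endpoint name is illustrative, and `STANDARD` is an assumption about the available :class:`EndpointType` members:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.vectorsearch import EndpointType

w = WorkspaceClient()

# create_endpoint returns a long-running operation waiter; .result()
# blocks until the endpoint is online.
endpoint = w.vector_search_endpoints.create_endpoint(
    name="sdk-docs-endpoint",             # illustrative name
    endpoint_type=EndpointType.STANDARD,  # assumed enum member
).result()

for ep in w.vector_search_endpoints.list_endpoints():
    print(ep.name)

w.vector_search_endpoints.delete_endpoint(endpoint_name=endpoint.name)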
- + :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`EndpointInfo` .. py:method:: update_endpoint_budget_policy(endpoint_name: str, budget_policy_id: str) -> PatchEndpointBudgetPolicyResponse Update the budget policy of an endpoint. - + Update the budget policy of an endpoint - + :param endpoint_name: str Name of the vector search endpoint :param budget_policy_id: str The budget policy id to be applied - + :returns: :class:`PatchEndpointBudgetPolicyResponse` .. py:method:: update_endpoint_custom_tags(endpoint_name: str, custom_tags: List[CustomTag]) -> UpdateEndpointCustomTagsResponse Update the custom tags of an endpoint. - + :param endpoint_name: str Name of the vector search endpoint :param custom_tags: List[:class:`CustomTag`] The new custom tags for the vector search endpoint - + :returns: :class:`UpdateEndpointCustomTagsResponse` diff --git a/docs/workspace/vectorsearch/vector_search_indexes.rst b/docs/workspace/vectorsearch/vector_search_indexes.rst index b7d945dcd..90762b275 100644 --- a/docs/workspace/vectorsearch/vector_search_indexes.rst +++ b/docs/workspace/vectorsearch/vector_search_indexes.rst @@ -6,7 +6,7 @@ **Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries. - + There are 2 types of Vector Search indexes: - **Delta Sync Index**: An index that automatically syncs with a source Delta Table, automatically and incrementally updating the index as the underlying data in the Delta Table changes. - **Direct Vector Access Index**: An index that supports direct read and write of @@ -15,9 +15,9 @@ .. py:method:: create_index(name: str, endpoint_name: str, primary_key: str, index_type: VectorIndexType [, delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest], direct_access_index_spec: Optional[DirectAccessVectorIndexSpec]]) -> VectorIndex Create an index. - + Create a new index. - + :param name: str Name of the index :param endpoint_name: str @@ -33,68 +33,68 @@ Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`. :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional) Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`. - + :returns: :class:`VectorIndex` .. py:method:: delete_data_vector_index(index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse Delete data from index. - + Handles the deletion of data from a specified vector index. - + :param index_name: str Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. :param primary_keys: List[str] List of primary keys for the data to be deleted. - + :returns: :class:`DeleteDataVectorIndexResponse` .. py:method:: delete_index(index_name: str) Delete an index. - + Delete an index. - + :param index_name: str Name of the index - - + + .. py:method:: get_index(index_name: str) -> VectorIndex Get an index. - + Get an index. - + :param index_name: str Name of the index - + :returns: :class:`VectorIndex` .. py:method:: list_indexes(endpoint_name: str [, page_token: Optional[str]]) -> Iterator[MiniVectorIndex] List indexes. - + List all indexes in the given endpoint. - + :param endpoint_name: str Name of the endpoint :param page_token: str (optional) Token for pagination - + :returns: Iterator over :class:`MiniVectorIndex` .. 
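To make the two index types concrete, here is a hedged sketch of creating a Delta Sync index and issuing a simple text query with `query_index` (documented below). The table, endpoint, and column names are placeholders, and the spec class and enum names (`DeltaSyncVectorIndexSpecRequest`, `EmbeddingSourceColumn`, `PipelineType`, `VectorIndexType`) are assumed to match databricks.sdk.service.vectorsearch:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.vectorsearch import (
    DeltaSyncVectorIndexSpecRequest,
    EmbeddingSourceColumn,
    PipelineType,
    VectorIndexType,
)

w = WorkspaceClient()

# A Delta Sync index automatically stays in sync with a source Delta table.
index = w.vector_search_indexes.create_index(
    name="main.default.docs_index",        # placeholder index name
    endpoint_name="sdk-docs-endpoint",     # placeholder endpoint
    primary_key="id",
    index_type=VectorIndexType.DELTA_SYNC,
    delta_sync_index_spec=DeltaSyncVectorIndexSpecRequest(
        source_table="main.default.docs",  # placeholder source table
        pipeline_type=PipelineType.TRIGGERED,
        embedding_source_columns=[
            EmbeddingSourceColumn(
                name="text",
                embedding_model_endpoint_name="databricks-bge-large-en",  # assumed model endpoint
            )
        ],
    ),
)

# Simple text query against the index; see query_index below.
results = w.vector_search_indexes.query_index(
    index_name=index.name,
    columns=["id", "text"],
    query_text="example query",
    num_results=5,
)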
py:method:: query_index(index_name: str, columns: List[str] [, columns_to_rerank: Optional[List[str]], filters_json: Optional[str], num_results: Optional[int], query_text: Optional[str], query_type: Optional[str], query_vector: Optional[List[float]], score_threshold: Optional[float]]) -> QueryVectorIndexResponse Query an index. - + Query the specified vector index. - + :param index_name: str Name of the vector index to query. :param columns: List[str] @@ -103,9 +103,9 @@ Column names used to retrieve data to send to the reranker. :param filters_json: str (optional) JSON string representing query filters. - + Example filters: - + - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5. @@ -120,66 +120,66 @@ vectors. :param score_threshold: float (optional) Threshold for the approximate nearest neighbor search. Defaults to 0.0. - + :returns: :class:`QueryVectorIndexResponse` .. py:method:: query_next_page(index_name: str [, endpoint_name: Optional[str], page_token: Optional[str]]) -> QueryVectorIndexResponse Query next page. - + Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request to fetch next page of results. - + :param index_name: str Name of the vector index to query. :param endpoint_name: str (optional) Name of the endpoint. :param page_token: str (optional) Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API. - + :returns: :class:`QueryVectorIndexResponse` .. py:method:: scan_index(index_name: str [, last_primary_key: Optional[str], num_results: Optional[int]]) -> ScanVectorIndexResponse Scan an index. - + Scan the specified vector index and return the first `num_results` entries after the exclusive `primary_key`. - + :param index_name: str Name of the vector index to scan. :param last_primary_key: str (optional) Primary key of the last entry returned in the previous scan. :param num_results: int (optional) Number of results to return. Defaults to 10. - + :returns: :class:`ScanVectorIndexResponse` .. py:method:: sync_index(index_name: str) Synchronize an index. - + Triggers a synchronization process for a specified vector index. - + :param index_name: str Name of the vector index to synchronize. Must be a Delta Sync Index. - - + + .. py:method:: upsert_data_vector_index(index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse Upsert data into an index. - + Handles the upserting of data into a specified vector index. - + :param index_name: str Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index. :param inputs_json: str JSON string representing the data to be upserted. - + :returns: :class:`UpsertDataVectorIndexResponse` \ No newline at end of file diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst index 51989b370..d5efd62eb 100644 --- a/docs/workspace/workspace/git_credentials.rst +++ b/docs/workspace/workspace/git_credentials.rst @@ -5,9 +5,9 @@ .. py:class:: GitCredentialsAPI Registers personal access token for Databricks to do operations on behalf of the user. - + See [more info]. - + [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html .. 
py:method:: create(git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]]) -> CreateCredentialsResponse @@ -27,11 +27,11 @@ w.git_credentials.delete(credential_id=cr.credential_id) Create a credential entry. - + Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempt to create credentials when an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the DELETE endpoint to delete existing credentials. - + :param git_provider: str Git provider. This field is case-insensitive. The available Git providers are `gitHub`, `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, @@ -45,22 +45,22 @@ :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - + :returns: :class:`CreateCredentialsResponse` .. py:method:: delete(credential_id: int) Delete a credential. - + Deletes the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. - - + + .. py:method:: get(credential_id: int) -> GetCredentialsResponse @@ -82,12 +82,12 @@ w.git_credentials.delete(credential_id=cr.credential_id) Get a credential entry. - + Gets the Git credential with the specified credential ID. - + :param credential_id: int The ID for the corresponding credential to access. - + :returns: :class:`GetCredentialsResponse` @@ -105,9 +105,9 @@ list = w.git_credentials.list() Get Git credentials. - + Lists the calling user's Git credentials. One credential per user is supported. - + :returns: Iterator over :class:`CredentialInfo` @@ -137,9 +137,9 @@ w.git_credentials.delete(credential_id=cr.credential_id) Update a credential. - + Updates the specified Git credential. - + :param credential_id: int The ID for the corresponding credential to access. :param git_provider: str @@ -155,8 +155,8 @@ :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. - + [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - - + + \ No newline at end of file diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst index 2e7520906..7388ffe6b 100644 --- a/docs/workspace/workspace/repos.rst +++ b/docs/workspace/workspace/repos.rst @@ -6,11 +6,11 @@ The Repos API allows users to manage their Git repos. Users can use the API to access all repos that they have manage permissions on. - + Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a repository, committing and pushing, pulling, branch management, and visual comparison of diffs when committing. - + Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD. @@ -39,10 +39,10 @@ w.repos.delete(repo_id=ri.id) Create a repo. - + Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created programmatically must be linked to a remote Git repo, unlike repos created in the browser.
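Before the parameter reference that follows, a short end-to-end sketch: register a Git credential, then create and delete a linked repo. The URL, username, token, and path values are placeholders:

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# One Git credential per user; all values below are placeholders.
cr = w.git_credentials.create(
    git_provider="gitHub",
    git_username="octocat",
    personal_access_token="<personal-access-token>",
)

ri = w.repos.create(
    url="https://github.com/octocat/hello-world.git",  # placeholder repo URL
    provider="gitHub",
    path=f"/Repos/{w.current_user.me().user_name}/hello-world",
)

w.repos.delete(repo_id=ri.id)
w.git_credentials.delete(credential_id=cr.credential_id)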
- + :param url: str URL of the Git repository to be linked. :param provider: str @@ -55,20 +55,20 @@ :param sparse_checkout: :class:`SparseCheckout` (optional) If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable sparse checkout after the repo is created. - + :returns: :class:`CreateRepoResponse` .. py:method:: delete(repo_id: int) Delete a repo. - + Deletes the specified repo. - + :param repo_id: int The ID for the corresponding repo to delete. - - + + .. py:method:: get(repo_id: int) -> GetRepoResponse @@ -98,36 +98,36 @@ w.repos.delete(repo_id=ri.id) Get a repo. - + Returns the repo with the given repo ID. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. - + :returns: :class:`GetRepoResponse` .. py:method:: get_permission_levels(repo_id: str) -> GetRepoPermissionLevelsResponse Get repo permission levels. - + Gets the permission levels that a user can have on an object. - + :param repo_id: str The repo for which to get or manage permissions. - + :returns: :class:`GetRepoPermissionLevelsResponse` .. py:method:: get_permissions(repo_id: str) -> RepoPermissions Get repo permissions. - + Gets the permissions of a repo. Repos can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. - + :returns: :class:`RepoPermissions` @@ -146,10 +146,10 @@ all = w.repos.list(workspace.ListReposRequest()) Get repos. - + Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate through additional pages. - + :param next_page_token: str (optional) Token used to get the next page of results. If not specified, returns the first page of results as well as a next page token if there are more results. @@ -157,21 +157,21 @@ Filters repos that have paths starting with the given path prefix. If not provided or when provided an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will be served. - + :returns: Iterator over :class:`RepoInfo` .. py:method:: set_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions Set repo permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` @@ -202,10 +202,10 @@ w.repos.delete(repo_id=ri.id) Update a repo. - + Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same branch. - + :param repo_id: int ID of the Git folder (repo) object in the workspace. :param branch: str (optional) @@ -217,19 +217,19 @@ Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD. - - + + .. py:method:: update_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions Update repo permissions. - + Updates the permissions on a repo. Repos can inherit permissions from their root object. - + :param repo_id: str The repo for which to get or manage permissions. 
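For the permission methods in this section, a hedged sketch: grant a group CAN_MANAGE on a repo, replacing any existing direct grants. The repo ID and group name are placeholders, and the `RepoPermissionLevel` enum name is an assumption about databricks.sdk.service.workspace:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import workspace

w = WorkspaceClient()

# set_permissions replaces all direct grants on the repo.
w.repos.set_permissions(
    repo_id="123",                    # placeholder repo ID
    access_control_list=[
        workspace.RepoAccessControlRequest(
            group_name="data-engineers",  # placeholder group
            permission_level=workspace.RepoPermissionLevel.CAN_MANAGE,  # assumed enum
        )
    ],
)

Use update_permissions instead when existing direct grants should be preserved.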
:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional) - + :returns: :class:`RepoPermissions` \ No newline at end of file diff --git a/docs/workspace/workspace/secrets.rst b/docs/workspace/workspace/secrets.rst index 6071802f5..2dc261114 100644 --- a/docs/workspace/workspace/secrets.rst +++ b/docs/workspace/workspace/secrets.rst @@ -5,11 +5,11 @@ .. py:class:: SecretsAPI The Secrets API allows you to manage secrets, secret scopes, and access permissions. - + Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of directly entering your credentials into a notebook, use Databricks secrets to store your credentials and reference them in notebooks and jobs. - + Administrators, secret creators, and users granted permission can read Databricks secrets. While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets. @@ -38,10 +38,10 @@ w.secrets.delete_scope(scope=scope_name) Create a new secret scope. - + The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. - + :param scope: str Scope name requested by the user. Scope names are unique. :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional) @@ -50,98 +50,98 @@ The principal that is initially granted `MANAGE` permission to the created scope. :param scope_backend_type: :class:`ScopeBackendType` (optional) The backend type the scope will be created with. If not specified, will default to `DATABRICKS` - - + + .. py:method:: delete_acl(scope: str, principal: str) Delete an ACL. - + Deletes the given ACL on the given scope. - + Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to remove permissions from. :param principal: str The principal to remove an existing ACL from. - - + + .. py:method:: delete_scope(scope: str) Delete a secret scope. - + Deletes a secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str Name of the scope to delete. - - + + .. py:method:: delete_secret(scope: str, key: str) Delete a secret. - + Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the secret scope. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope that contains the secret to delete. :param key: str Name of the secret to delete. - - + + .. py:method:: get_acl(scope: str, principal: str) -> AclItem Get secret ACL details. - + Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to fetch ACL information from. :param principal: str The principal to fetch ACL information for. - + :returns: :class:`AclItem` .. 
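Tying the secrets calls together, a minimal sketch: create a scope (documented above), store a secret with put_secret and grant a group READ with put_acl (both documented below), list the ACLs, and clean up. The principal name is a placeholder, and `AclPermission.READ` corresponds to the READ level described in the ACL docs:

import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import workspace

w = WorkspaceClient()

scope_name = f"sdk-{time.time_ns()}"

w.secrets.create_scope(scope=scope_name)
w.secrets.put_secret(scope=scope_name, key="db-password", string_value="s3cr3t")

# Grant READ on the scope; see the permission ordering below.
w.secrets.put_acl(
    scope=scope_name,
    principal="data-readers",  # placeholder group
    permission=workspace.AclPermission.READ,
)

for acl in w.secrets.list_acls(scope=scope_name):
    print(acl.principal, acl.permission)

w.secrets.delete_scope(scope=scope_name)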
py:method:: get_secret(scope: str, key: str) -> GetSecretResponse Get a secret. - + Gets the bytes representation of a secret value for the specified scope and key. - + Users need the READ permission to make this call. - + Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the caller in DBUtils and the type the data is decoded into. - + Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. - + :param scope: str The name of the scope to fetch secret information from. :param key: str The key to fetch secret for. - + :returns: :class:`GetSecretResponse` @@ -171,15 +171,15 @@ w.secrets.delete_scope(scope=scope_name) Lists ACLs. - + List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to fetch ACL information from. - + :returns: Iterator over :class:`AclItem` @@ -197,11 +197,11 @@ scopes = w.secrets.list_scopes() List all scopes. - + Lists all secret scopes available in the workspace. - + Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :returns: Iterator over :class:`SecretScope` @@ -231,17 +231,17 @@ w.secrets.delete_scope(scope=scope_name) List secret keys. - + Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. - + The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to list secrets within. - + :returns: Iterator over :class:`SecretMetadata` @@ -279,40 +279,40 @@ w.secrets.delete_scope(scope=scope_name) Create/update an ACL. - + Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. - + In general, a user or group will use the most powerful permission available to them, and permissions are ordered as follows: - + * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what secrets are available. - + Note that in general, secret values can only be read from within a command on a cluster (for example, through a notebook). There is no API to read the actual secret value material outside of a cluster. However, the user's permission will be applied based on who is executing the command, and they must have at least READ permission. - + Users must have the `MANAGE` permission to invoke this API. - + The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope to apply permissions to. 
:param principal: str The principal to which the permission is applied. :param permission: :class:`AclPermission` The permission level applied to the principal. - - + + .. py:method:: put_secret(scope: str, key: str [, bytes_value: Optional[str], string_value: Optional[str]]) @@ -341,23 +341,23 @@ w.secrets.delete_scope(scope=scope_name) Add a secret. - + Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using the secret scope's encryption settings before storing it. - + You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. - + The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine the value returned when the secret value is requested. Exactly one must be specified. - + Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. - + :param scope: str The name of the scope that the secret will be associated with. :param key: str @@ -366,6 +366,6 @@ If specified, the value will be stored as bytes. :param string_value: str (optional) If specified, note that the value will be stored in UTF-8 (MB4) form. - - + + \ No newline at end of file diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index 0f7a6eacb..abfc30860 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -5,29 +5,29 @@ .. py:class:: WorkspaceExt The Workspace API allows you to list, import, export, and delete notebooks and folders. - + A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text. .. py:method:: delete(path: str [, recursive: Optional[bool]]) Delete a workspace object. - + Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. - + Object deletion cannot be undone and deleting a directory recursively is not atomic. - + :param path: str The absolute path of the notebook or directory. :param recursive: bool (optional) The flag that specifies whether to delete the object recursively. It is `false` by default. Please note that deleting a directory is not atomic: if the operation fails partway through, some objects under this directory may already have been deleted, and this cannot be undone. - - + + .. py:method:: download(path: str [, format: ExportFormat]) -> BinaryIO @@ -84,58 +84,58 @@ export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook) Export a workspace object. - + Exports an object or the contents of an entire directory. - + If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + If the exported data would exceed the size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`.
Currently, this API does not support exporting a library. - + :param path: str The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`, `SOURCE`, and `AUTO` formats. :param format: :class:`ExportFormat` (optional) This specifies the format of the exported file. By default, this is `SOURCE`. - + The value is case sensitive. - + - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format. Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory is exported depending on the object's type. Directory exports will include notebooks and workspace files. - + :returns: :class:`ExportResponse` .. py:method:: get_permission_levels(workspace_object_type: str, workspace_object_id: str) -> GetWorkspaceObjectPermissionLevelsResponse Get workspace object permission levels. - + Gets the permission levels that a user can have on an object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`GetWorkspaceObjectPermissionLevelsResponse` .. py:method:: get_permissions(workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions Get workspace object permissions. - + Gets the permissions of a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. - + :returns: :class:`WorkspaceObjectPermissions` @@ -157,13 +157,13 @@ obj = w.workspace.get_status(path=notebook_path) Get status. - + Gets the status of an object or a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. - + :param path: str The absolute path of the notebook or directory. - + :returns: :class:`ObjectInfo` @@ -193,26 +193,26 @@ ) Import a workspace object. - + Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If `path` already exists and `overwrite` is set to `false`, this call returns an error `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE` format with the `language` field unset. To import a single file as `SOURCE`, you must set the `language` field. - + :param path: str The absolute path of the object or directory. Importing a directory is only supported for the `DBC` and `SOURCE` formats. :param content: str (optional) The base64-encoded content. This has a limit of 10 MB. - + If the limit (10 MB) is exceeded, an exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown. This parameter might be absent, and instead a posted file is used. :param format: :class:`ImportFormat` (optional) This specifies the format of the file to be imported. - + The value is case sensitive. - + - `AUTO`: The item is imported depending on an analysis of the item's extension and the header content provided in the request. If the item is imported as a notebook, then the item's extension is automatically removed.
- `SOURCE`: The notebook or directory is imported as source code. - `HTML`: @@ -224,8 +224,8 @@ :param overwrite: bool (optional) The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC` format, `overwrite` is not supported since it may contain a directory. - - + + .. py:method:: list(path: str [, notebooks_modified_after: int, recursive: bool = False]) -> ObjectInfo @@ -255,51 +255,51 @@ .. py:method:: mkdirs(path: str) Create a directory. - + Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error `RESOURCE_ALREADY_EXISTS`. - + Note that if this operation fails it may have succeeded in creating some of the necessary parent directories. - + :param path: str The absolute path of the directory. If the parent directories do not exist, it will also create them. If the directory already exists, this command will do nothing and succeed. - - + + .. py:method:: set_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions Set workspace object permissions. - + Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct permissions if none are specified. Objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions` .. py:method:: update_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions Update workspace object permissions. - + Updates the permissions on a workspace object. Workspace objects can inherit permissions from their parent objects or root object. - + :param workspace_object_type: str The workspace object type for which to get or manage permissions. :param workspace_object_id: str The workspace object for which to get or manage permissions. :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional) - + :returns: :class:`WorkspaceObjectPermissions`
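Finally, a round-trip sketch combining the import, export, and delete calls documented above. The notebook path is illustrative; content must be base64-encoded, and the `language` field must be set when importing a single file as `SOURCE`:

import base64
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import workspace

w = WorkspaceClient()

me = w.current_user.me().user_name
notebook = f"/Users/{me}/sdk-{time.time_ns()}"

# Import a small Python notebook as SOURCE, then export it back.
w.workspace.import_(
    path=notebook,
    format=workspace.ImportFormat.SOURCE,
    language=workspace.Language.PYTHON,
    content=base64.b64encode(b"print('hello')").decode(),
    overwrite=True,
)

export_response = w.workspace.export(path=notebook, format=workspace.ExportFormat.SOURCE)
print(base64.b64decode(export_response.content))

w.workspace.delete(path=notebook)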